├── .env
├── changelog.md
├── changelog.json
├── updates.json
├── .github
├── CODEOWNERS
├── ISSUE_TEMPLATE
│ ├── config.yml
│ ├── bug_report.yml
│ └── feature_request.yml
├── workflows
│ ├── github-cleanup.yml
│ ├── pr-checks.yml
│ ├── dockerhub.yml
│ ├── dockerhub-base.yml
│ ├── sonar.yml
│ ├── telegram-uploader-custom.yml
│ ├── print-secrets.yml
│ ├── auto-release.yml
│ ├── newapp-check.yml
│ ├── build-artifact.yml
│ └── build-apk.yml
├── issue-branch.yml
└── dependabot.yml
├── apks
├── README.md
├── revanced.keystore
├── populate_client_id.sh
└── options.json
├── src
├── __init__.py
├── downloader
│ ├── __init__.py
│ ├── factory.py
│ ├── apksos.py
│ ├── apkeep.py
│ ├── uptodown.py
│ ├── apkmonk.py
│ ├── github.py
│ ├── download.py
│ ├── apkmirror.py
│ ├── sources.py
│ └── apkpure.py
├── manager
│ ├── __init__.py
│ ├── release_manager.py
│ └── github.py
├── config.py
├── exceptions.py
├── patches_gen.py
├── utils.py
├── patches.py
├── parser.py
└── app.py
├── scripts
├── __init__.py
├── prefered_apps.py
├── status_check.py
└── add_apkmirror_app.py
├── .DS_Store
├── sonar-project.properties
├── .hadolint.yaml
├── entrypoint
├── .dockerignore
├── .gitignore
├── .shellcheckrc
├── docker-compose.yml
├── requirements.txt
├── docker-compose-local.yml
├── .markdownlint.yaml
├── TODOs.md
├── Dockerfile
├── pyproject.toml
├── .pre-commit-config.yaml
├── Dockerfile-base
├── .env.example
├── check_resource_updates.py
├── .env.my
└── main.py
/.env:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/changelog.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/changelog.json:
--------------------------------------------------------------------------------
1 | {}
2 |
--------------------------------------------------------------------------------
/updates.json:
--------------------------------------------------------------------------------
1 | {}
2 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @nikhilbadyal
2 |
--------------------------------------------------------------------------------
/apks/README.md:
--------------------------------------------------------------------------------
 1 | # Directory to dump downloaded APKs
2 |
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 | """Main Source Code."""
2 |
--------------------------------------------------------------------------------
/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | """Common utilities."""
2 |
--------------------------------------------------------------------------------
/src/downloader/__init__.py:
--------------------------------------------------------------------------------
1 | """Downloader files."""
2 |
--------------------------------------------------------------------------------
/src/manager/__init__.py:
--------------------------------------------------------------------------------
1 | """Release patch resource manager."""
2 |
--------------------------------------------------------------------------------
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nikhilbadyal/docker-py-revanced/HEAD/.DS_Store
--------------------------------------------------------------------------------
/sonar-project.properties:
--------------------------------------------------------------------------------
1 | sonar.projectKey=docker-py-revanced
2 | sonar.python.version=3.14
3 |
--------------------------------------------------------------------------------
/.hadolint.yaml:
--------------------------------------------------------------------------------
1 | # Following source doesn't work in most setups
2 | ignored:
3 | - SC1090
4 | - SC1091
5 |
--------------------------------------------------------------------------------
/apks/revanced.keystore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nikhilbadyal/docker-py-revanced/HEAD/apks/revanced.keystore
--------------------------------------------------------------------------------
/entrypoint:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o pipefail
set -o nounset

# exec replaces this shell with the Python process, so container signals
# (e.g. SIGTERM from `docker stop`) are delivered to the app directly
# instead of being swallowed by an intermediate bash parent.
exec python main.py
9 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .git
3 | .idea
4 | Dockerfile
5 | LICENSE
6 | README.md
7 | docker-compose.yml
8 | venv
9 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.apk
2 | *.jar
3 | *.keystore
4 | *venv
5 | *.pyc
6 | **cache**
7 | changelog.md
8 | .idea
9 | *.json
10 | status.md
11 | *.zip
12 | apks/*
13 | *.rvp
14 | *.backup
15 |
--------------------------------------------------------------------------------
/.shellcheckrc:
--------------------------------------------------------------------------------
1 | enable=all
2 | source-path=SCRIPTDIR
3 | disable=SC2154
4 |
5 | # If you're having issues with shellcheck following source, disable the errors via:
6 | # disable=SC1090
7 | # disable=SC1091
8 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | revanced:
3 | image: nikhilbadyal/docker-py-revanced
4 | container_name: revanced-builder
5 | pull_policy: always
6 | env_file:
7 | - .env
8 | volumes:
9 | - .:/app:z
10 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | beautifulsoup4==4.13.4
2 | environs==14.1.1
3 | google-play-scraper==1.2.7
4 | lastversion==3.5.7
5 | loguru==0.7.3
6 | packaging==24.2
7 | pre-commit==4.2.0
8 | python-dotenv==1.1.0
9 | requests==2.32.4
10 | tqdm==4.67.1
11 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: true
2 | contact_links:
3 | - name: Community Support
4 | url: https://github.com/nikhilbadyal/docker-py-revanced/discussions/new/choose
5 | about: Please ask and answer questions here.
6 |
--------------------------------------------------------------------------------
/docker-compose-local.yml:
--------------------------------------------------------------------------------
1 | services:
2 | revanced:
3 | build:
4 | context: .
5 | dockerfile: Dockerfile
6 | image: revanced-builder
7 | container_name: revanced-builder
8 | env_file:
9 | - .env
10 | volumes:
11 | - .:/app:z
12 |
--------------------------------------------------------------------------------
/.markdownlint.yaml:
--------------------------------------------------------------------------------
1 | # Autoformatter friendly markdownlint config (all formatting rules disabled)
2 | default: true
3 | blank_lines: false
4 | bullet: false
5 | html: false
6 | indentation: false
7 | line_length: false
8 | spaces: false
9 | url: false
10 | whitespace: false
11 |
--------------------------------------------------------------------------------
/.github/workflows/github-cleanup.yml:
--------------------------------------------------------------------------------
1 | name: GitHub Cleanup
2 |
3 | on:
4 | workflow_call:
5 | secrets:
6 | GH_TOKEN:
7 | required: true
8 | workflow_dispatch:
9 | schedule:
10 | - cron: '0 */2 * * *'
11 |
12 | jobs:
13 | cleanup:
14 | name: GitHub Cleanup
15 | uses: nikhilbadyal/ghactions/.github/workflows/github-cleanup.yml@main
16 | secrets:
17 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
18 |
--------------------------------------------------------------------------------
/TODOs.md:
--------------------------------------------------------------------------------
1 | ## Things to work on
2 |
3 | | | |
4 | |:------------------------------------------------------|---------------:|
 5 | | Parallelize app object creation.                       |            [ ] |
6 | | Parallelize app patching. | [ ] |
7 | | Ability to provide local patching resources | [X] |
8 | | Ability to provide changelog repo in update_changelog | [ ] |
9 |
--------------------------------------------------------------------------------
/.github/issue-branch.yml:
--------------------------------------------------------------------------------
1 | mode: chatops
2 | silent: true
3 | branchName: '${issue.number}-${issue.title,}'
4 | gitSafeReplacementChar: '-'
5 | autoLinkIssue: true
6 | autoCloseIssue: true
7 | openPR: true
8 | branches:
9 | - label:
10 | - bug🐞
11 | prefix: bug/
12 | - label:
13 | - feature-request👓
14 | prefix: feature/
15 | - label:
16 | - new-app💁
17 | prefix: new-app/
18 | - label:
19 | - documentation📄
20 | prefix: documentation/
21 | experimental:
22 | branchNameArgument: true
23 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nikhilbadyal/docker-py-revanced-base
2 |
3 | # Copy and install Python dependencies
4 | COPY requirements.txt .
5 | RUN python -m pip install --no-cache-dir --upgrade pip && \
6 | pip install --no-cache-dir -r requirements.txt
7 |
8 |
9 | # Copy entrypoint script
10 | COPY ./entrypoint /entrypoint
11 | RUN sed -i 's/\r$//g' /entrypoint && chmod +x /entrypoint
12 |
13 | # Copy application code
14 | COPY . ${APP_HOME}
15 |
16 | # Set the default command to run the entrypoint script
17 | CMD ["bash","/entrypoint"]
18 |
--------------------------------------------------------------------------------
/.github/workflows/pr-checks.yml:
--------------------------------------------------------------------------------
1 | name: PR Checks
2 | env:
3 | DOCKER_BUILDKIT: 1
4 | COMPOSE_DOCKER_CLI_BUILD: 1
5 | on:
6 | pull_request:
7 | paths-ignore:
8 | - '*.md'
9 | concurrency:
10 | group: ${{ github.workflow }}-${{ github.ref }}
11 | cancel-in-progress: true
12 |
13 | jobs:
14 | build-apk-pr-check:
15 | uses: ./.github/workflows/build-artifact.yml
16 | with:
17 | CI_TEST: true
18 | secrets:
19 | ENVS: |
20 | CI_TEST=True
21 | ES_JAVA_OPTS: '-Xms4g -Xmx4g'
22 | PERSONAL_ACCESS_TOKEN=${{ secrets.GITHUB_TOKEN }}
23 |
--------------------------------------------------------------------------------
/.github/workflows/dockerhub.yml:
--------------------------------------------------------------------------------
1 | name: Push to DockerHub
2 | on:
3 | workflow_dispatch:
4 | push:
5 | branches:
6 | - 'main'
7 | paths:
8 | - '**.py'
9 | - 'README.md'
10 |
11 | concurrency:
12 | group: ${{ github.workflow }}-${{ github.ref }}
13 | cancel-in-progress: true
14 |
15 | jobs:
16 | dockerhub-push:
17 | uses: nikhilbadyal/ghactions/.github/workflows/dockerhub-push.yml@main
18 | if: github.repository == 'nikhilbadyal/docker-py-revanced'
19 | secrets:
20 | DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
21 | DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
22 |
--------------------------------------------------------------------------------
/.github/workflows/dockerhub-base.yml:
--------------------------------------------------------------------------------
1 | name: Push Base Image to DockerHub
2 | on:
3 | workflow_dispatch:
4 | push:
5 | branches:
6 | - 'main'
7 | paths:
8 | - 'Dockerfile-base'
9 |
10 | concurrency:
11 | group: ${{ github.workflow }}-${{ github.ref }}
12 | cancel-in-progress: true
13 |
14 | jobs:
15 | dockerhub-push:
16 | uses: nikhilbadyal/ghactions/.github/workflows/dockerhub-push.yml@main
17 | if: github.repository == 'nikhilbadyal/docker-py-revanced'
18 | with:
19 | IMAGE_TAG: ${{ github.repository }}-base:latest
20 | FILE: Dockerfile-base
21 | secrets:
22 | DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
23 | DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
24 |
--------------------------------------------------------------------------------
/scripts/prefered_apps.py:
--------------------------------------------------------------------------------
"""Update preferred apps."""

import os

import dotenv
from loguru import logger

from src.utils import default_build


def update_patch_apps() -> None:
    """Overwrite ``PATCH_APPS`` in the ``.env`` file with ``PREFERRED_PATCH_APPS``.

    Raises
    ------
    FileNotFoundError
        If no ``.env`` file could be located to persist the change.
    KeyError
        If ``PREFERRED_PATCH_APPS`` is not defined in the environment.
    """
    dotenv_file = dotenv.find_dotenv()
    # find_dotenv() returns "" when no .env exists; fail early with a clear
    # message instead of letting dotenv.set_key fail later on an empty path.
    if not dotenv_file:
        msg = "No .env file found to update."
        raise FileNotFoundError(msg)
    dotenv.load_dotenv(dotenv_file)

    patch_apps = os.environ.get("PATCH_APPS", default_build)
    logger.info(f"PATCH_APPS is currently {patch_apps}")

    # Guard the lookup so a missing variable produces an actionable error
    # rather than a bare KeyError traceback.
    preferred = os.environ.get("PREFERRED_PATCH_APPS")
    if preferred is None:
        msg = "PREFERRED_PATCH_APPS is not set; nothing to apply."
        raise KeyError(msg)

    os.environ["PATCH_APPS"] = preferred
    logger.info(f"PATCH_APPS is now {preferred}")

    # Persist the new value back into the .env file.
    dotenv.set_key(dotenv_file, "PATCH_APPS", preferred)


if __name__ == "__main__":
    update_patch_apps()
26 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |
4 | - package-ecosystem: "github-actions"
5 | directory: "/"
6 | schedule:
7 | interval: "daily"
8 | labels:
9 | - dependencies🫂
10 | assignees:
11 | - "nikhilbadyal"
12 | reviewers:
13 | - "nikhilbadyal"
14 |
15 | - package-ecosystem: "docker"
16 | directory: "/"
17 | schedule:
18 | interval: "daily"
19 | labels:
20 | - dependencies🫂
21 | assignees:
22 | - "nikhilbadyal"
23 | reviewers:
24 | - "nikhilbadyal"
25 |
26 | - package-ecosystem: "pip"
27 | directory: "/"
28 | schedule:
29 | interval: "daily"
30 | labels:
31 | - dependencies🫂
32 | assignees:
33 | - "nikhilbadyal"
34 | reviewers:
35 | - "nikhilbadyal"
36 |
--------------------------------------------------------------------------------
/apks/populate_client_id.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Insert or update the "Spoof client" patch option in apks/options.json
# so that it carries the desired client id.

set -o errexit
set -o pipefail
set -o nounset

client_id="test"
path="apks/options.json"
json_data=$(cat "${path}")

# Check if an object with the patchName "Spoof client" already exists.
existing_object_index=$(echo "${json_data}" | jq 'map(.patchName) | index("Spoof client")')
echo "${existing_object_index}"
if [[ ${existing_object_index} != "null" ]]; then
    echo "Exist"
    # --arg passes the id as a properly escaped JSON string; splicing the
    # shell variable directly into the filter would break on quotes and
    # allow filter injection.
    updated_json=$(echo "${json_data}" | jq --arg cid "${client_id}" ".[${existing_object_index}].options[0].value = \$cid")
else
    echo "Does not exist"
    updated_json=$(echo "${json_data}" | jq --arg cid "${client_id}" \
        '. += [{"patchName": "Spoof client", "options": [{"key": "client-id", "value": $cid}]}]')
fi
echo "${updated_json}" > "${path}"
28 |
--------------------------------------------------------------------------------
/.github/workflows/sonar.yml:
--------------------------------------------------------------------------------
1 | name: Sonar Scan
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | workflow_dispatch:
8 |
9 |
10 | jobs:
11 | build:
12 | name: Build and analyze
13 | runs-on: ubuntu-latest
14 |
15 | steps:
16 | - uses: actions/checkout@v4
17 | with:
18 | fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
19 | - uses: SonarSource/sonarqube-scan-action@v6
20 | env:
21 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
22 | SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }}
23 | # If you wish to fail your job when the Quality Gate is red, uncomment the
24 | # following lines. This would typically be used to fail a deployment.
25 | # - uses: SonarSource/sonarqube-quality-gate-action@v1
26 | # timeout-minutes: 5
27 | # env:
28 | # SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
29 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.ruff]
2 | line-length = 120
3 | target-version = "py313"
4 | fix = true
5 | show-fixes = true
6 | [tool.ruff.lint]
7 | select = [ "ALL"]
8 | ignore = [
9 | "D401",
10 | "ANN401",
11 | "S603",
12 | "S607",
13 | "ARG002", #unused-method-argument
14 | "PTH122", #os-path-splitext
15 | "TRY301", #raise-within-try
16 | "PERF203", #try-except-in-loop
17 | "UP004", #useless-object-inheritance
18 | "PLR0911", #too many returns
19 | "S310" # Audit URL open for permitted schemes.
20 | ]
21 | [tool.ruff.lint.pydocstyle]
22 | convention = "numpy"
23 |
24 | [tool.docformatter]
25 | recursive = true
26 | wrap-summaries = 120
27 | wrap-descriptions = 120
28 |
29 | [tool.black]
30 | line-length = 120
31 |
32 | [pycodestyle]
33 | max-line-length = 120
34 | exclude = ["venv"]
35 |
36 |
37 | [tool.mypy]
38 | ignore_missing_imports = true
39 | check_untyped_defs = true
40 | warn_unused_ignores = true
41 | warn_redundant_casts = true
42 | warn_unused_configs = true
43 |
44 | [tool.isort]
45 | line_length = 120
46 | skip = ["venv"]
47 | profile = "black"
48 |
--------------------------------------------------------------------------------
/src/manager/release_manager.py:
--------------------------------------------------------------------------------
1 | """Base release manager."""
2 |
3 | from typing import Self
4 |
5 | from loguru import logger
6 | from packaging.version import InvalidVersion, Version
7 |
8 | from src.app import APP
9 |
10 |
class ReleaseManager(object):
    """Base Release manager."""

    def get_last_version(self: Self, app: APP, resource_name: str) -> str | list[str]:
        """Get last patched version."""
        raise NotImplementedError

    def should_trigger_build(self: Self, old_version: str, old_source: str, new_version: str, new_source: str) -> bool:
        """Function to check if we should trigger a build."""
        # A changed resource source always forces a rebuild, regardless of version.
        if old_source != new_source:
            logger.info(f"Trigger build because old source {old_source}, is different from new source {new_source}")
            return True
        logger.info(f"New version {new_version}, Old version {old_version}")
        try:
            is_newer: bool = Version(new_version) > Version(old_version)
        except InvalidVersion:
            # Non PEP 440 version strings cannot be compared; skip the build.
            logger.error("unable to parse version.")
            return False
        return is_newer
29 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.yml:
--------------------------------------------------------------------------------
1 | name: Bug Report
2 | description: Encountered some issue while building?
3 | labels:
4 | - "bug🐞"
5 | assignees:
6 | - nikhilbadyal
7 |
8 | body:
9 | - type: checkboxes
10 | attributes:
11 | label: Request guidelines
12 | description: Please read the request guidelines before proceeding.
13 | options:
14 | - label: I've read the [issue guidelines](https://github.com/nikhilbadyal/docker-py-revanced#only-builder-support)
15 | required: true
16 |
17 | - type: input
18 | attributes:
19 | label: Name
20 | description: |
21 | Name of the app which failed to build.
22 | validations:
23 | required: true
24 |
25 | - type: textarea
26 | attributes:
27 | label: Error log or link to the logs if using Github Action
28 | description: |
29 | Please provide the error log or Github Action URL if building on GitHub
30 | validations:
31 | required: true
32 |
33 | - type: textarea
34 | attributes:
35 | label: ENVS
36 | description: |
37 | Please provide the ENVS used for building. If not using any write NA.
38 | render: Dotenv
39 | validations:
40 | required: true
41 |
42 | - type: textarea
43 | attributes:
44 | label: Additional info
45 | description: |
46 | Please provide any additional information that seems useful.
47 |
--------------------------------------------------------------------------------
/.github/workflows/telegram-uploader-custom.yml:
--------------------------------------------------------------------------------
1 | name: Telegram Custom Upload
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | API_ID:
7 | description: 'Telegram API ID'
8 | required: true
9 | type: number
10 | API_HASH:
11 | description: 'Telegram API HASH'
12 | required: true
13 | type: string
14 | BOT_TOKEN:
15 | description: 'Bot Token(Sender)'
16 | required: true
17 | type: string
18 | CHAT_ID:
19 | description: 'Chat ID(Receiver)'
20 | required: true
21 | type: number
22 | STICKER_ID:
23 | description: 'Sticker to Send'
24 | required: false
25 | default: 'CAACAgUAAxkBAAEY3d5jRDs4sojYzahDxU7wvl7_e1yRhwACuAYAApqD2VV9UCzjLNawRCoE'
26 | type: string
27 | CHANGELOG_GITHUB_REPOSITORY:
28 | description: 'Repo to Fetch Changelog'
29 | required: false
30 | type: string
31 |
32 | jobs:
33 | tg-upload:
34 | uses: nikhilbadyal/ghactions/.github/workflows/telegram-uploader.yml@main
35 | secrets:
36 | TELEGRAM_API_ID: ${{ inputs.API_ID }}
37 | TELEGRAM_API_HASH: ${{ inputs.API_HASH }}
38 | TELEGRAM_BOT_TOKEN: ${{ inputs.BOT_TOKEN }}
39 | TELEGRAM_CHAT_ID: ${{ inputs.CHAT_ID }}
40 | STICKER_ID: ${{ inputs.STICKER_ID }}
41 | CHANGELOG_GITHUB_REPOSITORY: ${{ inputs.CHANGELOG_GITHUB_REPOSITORY }}
42 |
--------------------------------------------------------------------------------
/.github/workflows/print-secrets.yml:
--------------------------------------------------------------------------------
1 | name: Print Github Secrets
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | ALL:
7 | description: "Whether to export all envs. Please be very careful. Make sure to delete the file after downloading."
8 | required: false
9 | type: boolean
10 | default: false
11 |
12 | jobs:
13 | create-envfile:
14 | runs-on: ubuntu-latest
15 |
16 | steps:
17 | - name: Checkout code
18 | uses: actions/checkout@v4
19 |
20 | - name: Save All Environment Variables to envs_all.env (If Enabled)
21 | if: inputs.ALL
22 | run: |
23 | python -c "import os; file = open('envs_all.env', 'w'); \
24 | file.write(os.getenv('ALL', '')); file.close()"
25 | shell: bash
26 | env:
27 | ALL: ${{ toJSON(secrets) }}
28 |
29 | - name: Save ENVS Secret to envs.env
30 | run: |
31 | python -c "import os; file = open('envs.env', 'w'); \
32 | file.write(os.getenv('ENVS', '')); file.close()"
33 | shell: bash
34 | env:
35 | ENVS: ${{ secrets.ENVS }}
36 |
37 | - name: Verify Created envs Files
38 | run: |
39 | echo "Checking if envs.env exist..."
40 | ls -lha envs.env*
41 |
42 | - name: Upload Environment Files as Artifact
43 | uses: actions/upload-artifact@v4.6.2
44 | with:
45 | name: github_secrets
46 | path: envs*
47 | if-no-files-found: error
48 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.yml:
--------------------------------------------------------------------------------
1 | name: Feature Request
2 | description: Have an awesome idea to improve the builder?
3 | labels:
4 | - "feature-request👓"
5 | assignees:
6 | - nikhilbadyal
7 |
8 | body:
9 | - type: markdown
10 | attributes:
11 | value: |
12 | **Thanks :heart: for taking the time to fill out this feature request report!**
13 | We kindly ask that you search to see if an issue [already exists](https://github.com/nikhilbadyal/docker-py-revanced/issues?q=label%3Afeature-request%F0%9F%91%93+) for your feature.
14 |
15 | - type: checkboxes
16 | attributes:
17 | label: Support guidelines
18 | description: Please read the request guidelines before proceeding.
19 | options:
20 | - label: I've read the [support guidelines](https://github.com/nikhilbadyal/docker-py-revanced#only-builder-support)
21 | required: true
22 |
23 | - type: textarea
24 | attributes:
25 | label: Description
26 | description: |
27 | A clear and concise description of the feature you're interested in.
28 | validations:
29 | required: true
30 |
31 | - type: textarea
32 | attributes:
33 | label: Motivation
34 | description: |
35 | Please explain why you believe this feature is necessary and how it will be beneficial for others.
36 | validations:
37 | required: true
38 |
39 | - type: textarea
40 | attributes:
41 | label: Additional Context
42 | description: |
43 | Please provide any additional information that seems useful.
44 | validations:
45 | required: false
46 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | exclude: "^docs/|/migrations/"
2 | default_stages: [ pre-commit ]
3 |
4 | repos:
5 | - repo: https://github.com/pre-commit/pre-commit-hooks
6 | rev: v6.0.0
7 | hooks:
8 | - id: check-added-large-files
9 | - id: check-ast
10 | - id: check-builtin-literals
11 | - id: check-case-conflict
12 | - id: check-docstring-first
13 | - id: check-executables-have-shebangs
14 | - id: check-json
15 | - id: check-merge-conflict
16 | - id: check-symlinks
17 | - id: check-yaml
18 | - id: debug-statements
19 | - id: detect-private-key
20 | - id: end-of-file-fixer
21 | - id: mixed-line-ending
22 | - id: requirements-txt-fixer
23 | - id: trailing-whitespace
24 |
25 | - repo: https://github.com/astral-sh/ruff-pre-commit
26 | rev: 'v0.14.10'
27 | hooks:
28 | - id: ruff
29 | args:
30 | - "--config=pyproject.toml"
31 | - "--unsafe-fixes"
32 |
33 | - repo: https://github.com/psf/black-pre-commit-mirror
34 | rev: 25.12.0
35 | hooks:
36 | - id: black
37 | args:
38 | - "--config=pyproject.toml"
39 |
40 | - repo: https://github.com/pycqa/isort
41 | rev: 7.0.0
42 | hooks:
43 | - id: isort
44 | args:
45 | - "--settings-path=pyproject.toml"
46 |
47 | - repo: https://github.com/pre-commit/mirrors-mypy
48 | rev: v1.19.1
49 | hooks:
50 | - id: mypy
51 | args:
52 | - '--strict'
53 | - "--config=pyproject.toml"
54 | additional_dependencies: [ types-requests,types-beautifulsoup4,types-pytz ]
55 |
56 | ci:
57 | autofix_commit_msg: |
58 | 🚨 Lint Fixes
59 | autofix_prs: true
60 | autoupdate_commit_msg: |
61 | ✨Update versions of tools in pre-commit configs to latest version
62 |
--------------------------------------------------------------------------------
/src/downloader/factory.py:
--------------------------------------------------------------------------------
1 | """Downloader Factory."""
2 |
3 | from src.config import RevancedConfig
4 | from src.downloader.apkeep import Apkeep
5 | from src.downloader.apkmirror import ApkMirror
6 | from src.downloader.apkmonk import ApkMonk
7 | from src.downloader.apkpure import ApkPure
8 | from src.downloader.apksos import ApkSos
9 | from src.downloader.download import Downloader
10 | from src.downloader.github import Github
11 | from src.downloader.sources import (
12 | APK_MIRROR_BASE_URL,
13 | APK_MONK_BASE_URL,
14 | APK_PURE_BASE_URL,
15 | APKEEP,
16 | APKS_SOS_BASE_URL,
17 | GITHUB_BASE_URL,
18 | UPTODOWN_SUFFIX,
19 | )
20 | from src.downloader.uptodown import UptoDown
21 | from src.exceptions import DownloadError
22 |
23 |
class DownloaderFactory(object):
    """Downloader Factory."""

    @staticmethod
    def create_downloader(config: RevancedConfig, apk_source: str) -> Downloader:
        """Returns appropriate downloader.

        Args:
        ----
            config : Config
            apk_source : Source URL for APK
        """
        # Ordered (matched, downloader) pairs. Order mirrors the original
        # precedence: the uptodown suffix test deliberately runs before the
        # apkmirror/apkmonk/apkeep prefix tests.
        dispatch = (
            (apk_source.startswith(GITHUB_BASE_URL), Github),
            (apk_source.startswith(APK_PURE_BASE_URL), ApkPure),
            (apk_source.startswith(APKS_SOS_BASE_URL), ApkSos),
            (apk_source.endswith(UPTODOWN_SUFFIX), UptoDown),
            (apk_source.startswith(APK_MIRROR_BASE_URL), ApkMirror),
            (apk_source.startswith(APK_MONK_BASE_URL), ApkMonk),
            (apk_source.startswith(APKEEP), Apkeep),
        )
        for matched, downloader_cls in dispatch:
            if matched:
                return downloader_cls(config)
        msg = "No download factory found."
        raise DownloadError(msg, url=apk_source)
52 |
--------------------------------------------------------------------------------
/src/config.py:
--------------------------------------------------------------------------------
1 | """Revanced Configurations."""
2 |
3 | from pathlib import Path
4 | from typing import Self
5 |
6 | from environs import Env
7 |
8 | from src.utils import default_build, default_cli, default_patches, resource_folder
9 |
10 |
class RevancedConfig(object):
    """Revanced Configurations."""

    def __init__(self: Self, env: Env) -> None:
        """Populate every builder setting from environment variables."""
        self.env = env

        # Workspace used for downloaded resources and patched APKs.
        self.temp_folder_name = resource_folder
        self.temp_folder = Path(self.temp_folder_name)

        # Execution-mode toggles.
        self.ci_test = env.bool("CI_TEST", False)
        self.dry_run = env.bool("DRY_RUN", False)
        self.disable_caching = env.bool("DISABLE_CACHING", False)

        # Starts empty; presumably filled in by other components — confirm at call sites.
        self.rip_libs_apps: list[str] = []
        self.existing_downloaded_apks = env.list("EXISTING_DOWNLOADED_APKS", [])
        self.personal_access_token = env.str("PERSONAL_ACCESS_TOKEN", None)

        # Global defaults that individual apps may override.
        self.global_cli_dl = env.str("GLOBAL_CLI_DL", default_cli)
        self.global_patches_dl = env.str("GLOBAL_PATCHES_DL", default_patches)
        self.global_keystore_name = env.str("GLOBAL_KEYSTORE_FILE_NAME", "revanced.keystore")
        self.global_options_file = env.str("GLOBAL_OPTIONS_FILE", "options.json")
        self.global_archs_to_build = env.list("GLOBAL_ARCHS_TO_BUILD", [])
        self.global_old_key = env.bool("GLOBAL_OLD_KEY", True)
        self.global_space_formatted = env.bool("GLOBAL_SPACE_FORMATTED_PATCHES", True)

        # The APKEditor jar is always fetched in addition to user-requested extras.
        self.extra_download_files: list[str] = env.list("EXTRA_FILES", [])
        self.apk_editor = "apkeditor-output.jar"
        self.extra_download_files.append("https://github.com/REAndroid/APKEditor@apkeditor.jar")

        # Apps to patch, sorted for a deterministic processing order.
        self.apps = sorted(env.list("PATCH_APPS", default_build))

        # Concurrency limits.
        self.max_resource_workers = env.int("MAX_RESOURCE_WORKERS", 3)
        self.max_parallel_apps = env.int("MAX_PARALLEL_APPS", 4)
37 |
--------------------------------------------------------------------------------
/src/downloader/apksos.py:
--------------------------------------------------------------------------------
1 | """APK SOS Downloader Class."""
2 |
3 | from typing import Any, Self
4 |
5 | import requests
6 | from bs4 import BeautifulSoup
7 |
8 | from src.app import APP
9 | from src.downloader.download import Downloader
10 | from src.exceptions import APKSosAPKDownloadError
11 | from src.utils import bs4_parser, handle_request_response, request_header, request_timeout
12 |
13 |
class ApkSos(Downloader):
    """Files downloader."""

    def extract_download_link(self: Self, page: str, app: str) -> tuple[str, str]:
        """Function to extract the download link from apkmirror html page.

        :param page: Url of the page
        :param app: Name of the app
        """
        response = requests.get(page, headers=request_header, allow_redirects=True, timeout=request_timeout)
        handle_request_response(response, page)
        parsed = BeautifulSoup(response.text, bs4_parser)
        button_area = parsed.find(class_="col-sm-12 col-md-8 text-center")
        for anchor in button_area.find_all("a"):  # type: ignore[union-attr]
            href = anchor.get("href")
            title = anchor.get("title")
            # Skip anchors that lack either attribute.
            if not href or not title:
                continue
            # Titles ending in "Bundle" are split-APK archives; save as .zip.
            extension = "zip" if title.endswith("Bundle") else "apk"
            file_name = f"{app}.{extension}"
            self._download(href, file_name)
            return file_name, href
        msg = f"Unable to download {app}"
        raise APKSosAPKDownloadError(msg, url=page)

    def latest_version(self: Self, app: APP, **kwargs: Any) -> tuple[str, str]:
        """Function to download whatever the latest version of app from apkmirror.

        :param app: Name of the application
        :return: Version of downloaded apk
        """
        return self.extract_download_link(app.download_source, app.app_name)
45 |
--------------------------------------------------------------------------------
/src/manager/github.py:
--------------------------------------------------------------------------------
1 | """Github Manager."""
2 |
3 | import json
4 | import urllib.request
5 | from pathlib import Path
6 | from typing import Self
7 |
8 | from environs import Env
9 |
10 | from src.app import APP
11 | from src.manager.release_manager import ReleaseManager
12 | from src.utils import app_dump_key, branch_name, updates_file, updates_file_url
13 |
14 |
class GitHubManager(ReleaseManager):
    """Release manager with GitHub."""

    def __init__(self: Self, env: Env) -> None:
        """Initialize the manager from environment configuration.

        :param env: Environment providing GITHUB_REPOSITORY and the DRY_RUN flag.
        """
        self.update_file_url = updates_file_url.format(
            github_repository=env.str("GITHUB_REPOSITORY"),
            branch_name=branch_name,
            updates_file=updates_file,
        )
        self.is_dry_run = env.bool("DRY_RUN", False)

    def _load_updates(self: Self) -> dict:
        """Load the updates JSON - from the local file in dry-run mode, otherwise from GitHub."""
        if self.is_dry_run:
            with Path(updates_file).open() as file:
                return json.load(file)
        with urllib.request.urlopen(self.update_file_url) as response:
            return json.load(response)

    def get_last_version(self: Self, app: APP, resource_name: str) -> str | list[str]:
        """Get last patched version for a top-level resource of the app.

        :param app: The app whose record is looked up.
        :param resource_name: Key of the resource inside the app's record.
        :return: Stored version(s); "0" when nothing was recorded.
        """
        data = self._load_updates()
        if app.app_name in data and (resource := data[app.app_name].get(resource_name)):
            if isinstance(resource, list):
                return resource
            return str(resource)
        return "0"

    def get_last_version_source(self: Self, app: APP, resource_name: str) -> str | list[str]:
        """Get last patched version source from the app-dump section of the record.

        :param app: The app whose record is looked up.
        :param resource_name: Key of the resource inside the app-dump section.
        :return: Stored source(s); "0" when nothing was recorded.
        """
        data = self._load_updates()
        # .get(app_dump_key, {}) keeps the "0" default instead of raising KeyError
        # when an older record has no app-dump section.
        if app.app_name in data and (resource := data[app.app_name].get(app_dump_key, {}).get(resource_name)):
            if isinstance(resource, list):
                return resource
            return str(resource)
        return "0"
53 |
--------------------------------------------------------------------------------
/.github/workflows/auto-release.yml:
--------------------------------------------------------------------------------
1 | name: Auto Build & Release
2 | env:
3 | DOCKER_BUILDKIT: 1
4 | COMPOSE_DOCKER_CLI_BUILD: 1
5 | HAVE_TELEGRAM_API_ID: ${{ secrets.TELEGRAM_API_ID != '' }}
6 | on:
7 | workflow_dispatch:
8 | inputs:
9 | TELEGRAM_NO_ROOT_UPLOAD:
10 | description: "Upload Non Rooted APKs to Telegram"
11 | required: false
12 | type: boolean
13 | default: true
14 | TELEGRAM_ROOT_UPLOAD:
15 | description: "Upload Magisk Module from nikhilbadyal/revanced-magisk-module to Telegram"
16 | required: false
17 | type: boolean
18 | default: false
19 | schedule:
20 | - cron: '0 */4 * * *'
21 | jobs:
22 | release-check:
23 | permissions: write-all
24 | runs-on: ubuntu-latest
25 | concurrency:
26 | group: ${{ github.head_ref || github.run_id }}
27 | cancel-in-progress: true
28 | steps:
29 | - name: Checkout
30 | uses: actions/checkout@v4
31 |
32 | - name: Update Env for custom build
33 | run: |
34 | echo "${{ secrets.ENVS }}" >> .env
35 |
36 | - name: Setup python
37 | uses: actions/setup-python@main
38 | with:
39 | python-version: '3.x'
40 |
41 | - name: Install Requirements
42 | run: |
43 | python -m pip install --upgrade pip
44 | pip install -r requirements.txt
45 |
46 | - name: Should build?
47 | id: should_build
48 | shell: bash
49 | env:
50 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
51 | run: |
52 | should_build=$(python check_resource_updates.py)
53 | echo "SHOULD_BUILD=$should_build" >> $GITHUB_OUTPUT
54 | outputs:
55 | SHOULD_BUILD: ${{ steps.should_build.outputs.SHOULD_BUILD }}
56 |
57 | build-release:
58 | permissions: write-all
59 | needs: release-check
60 | uses: ./.github/workflows/build-apk.yml
61 | if: ${{ needs.release-check.outputs.SHOULD_BUILD }}
62 | secrets: inherit
63 | concurrency:
64 | group: Auto-Release-${{ github.head_ref || github.run_id }}
65 | cancel-in-progress: true
66 | with:
67 | TELEGRAM_NO_ROOT_UPLOAD: true
68 | PREFERRED_PATCH_APPS: ${{ needs.release-check.outputs.SHOULD_BUILD }}
69 |
--------------------------------------------------------------------------------
/Dockerfile-base:
--------------------------------------------------------------------------------
# Use a specific version of the base Python image
ARG PYTHON_VERSION=3.13.1-slim-bookworm

FROM python:${PYTHON_VERSION} AS python


# Set ARGs and ENVs
ARG APP_HOME=/app

# key=value form: the legacy space-separated ENV form is deprecated by Docker.
ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
WORKDIR ${APP_HOME}
ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8'
ARG ZULU_REPO_VER=1.0.0-3


# Update package lists and install required packages
RUN apt-get -qq update && \
    apt-get -qq -y --no-install-recommends install gnupg software-properties-common locales curl tzdata git && \
    echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \
    locale-gen en_US.UTF-8 && \
    apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 0xB1998361219BD9C9 && \
    curl -sLO https://cdn.azul.com/zulu/bin/zulu-repo_${ZULU_REPO_VER}_all.deb && dpkg -i zulu-repo_${ZULU_REPO_VER}_all.deb && \
    apt-get -qq update && \
    apt-get -qq -y upgrade && \
    mkdir -p /usr/share/man/man1 && \
    printf 'Package: zulu17-*\nPin: version 17.0.4-*\nPin-Priority: 1001\n' > /etc/apt/preferences && \
    apt-get -qq -y --no-install-recommends install zulu17-jdk=17.0.4-* && \
    apt-get -qq -y purge gnupg software-properties-common curl && \
    apt-get -y autoremove && \
    rm -rf /var/lib/apt/lists/* zulu-repo_${ZULU_REPO_VER}_all.deb

# Set Java home environment variable
ENV JAVA_HOME=/usr/lib/jvm/zulu17-ca-amd64

# Ensure curl and jq are available for the next step
RUN apt-get update && \
    apt-get install -y curl jq libssl3 && \
    rm -rf /var/lib/apt/lists/*
ENV PATH="/usr/local/bin:${PATH}"
# Now use them safely: resolve the latest apkeep release asset for this CPU architecture.
RUN set -eux; \
    ARCH="$(uname -m)"; \
    case "$ARCH" in \
        x86_64) ARCH_NAME="x86_64-unknown-linux-gnu" ;; \
        aarch64) ARCH_NAME="aarch64-unknown-linux-gnu" ;; \
        *) echo "Unsupported arch: $ARCH" && exit 1 ;; \
    esac; \
    URL=$(curl -s https://api.github.com/repos/EFForg/apkeep/releases/latest \
        | jq -r ".assets[] | select(.name == \"apkeep-${ARCH_NAME}\") | .browser_download_url"); \
    curl -L "$URL" -o /usr/local/bin/apkeep; \
    chmod +x /usr/local/bin/apkeep; \
    echo "Installed apkeep from $URL"; \
    /usr/local/bin/apkeep --version

CMD ["bash"]
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | #Global:
2 | EXTRA_FILES=https://github.com/inotia00/VancedMicroG/releases/latest@VancedMicroG.apk,https://github.com/inotia00/mMicroG/releases@mmicrog.apk
3 | PATCH_APPS=youtube,youtube_revancify_red,youtube_revancify_blue,youtube_mmt,youtube_music,reddit
4 | GLOBAL_CLI_DL=https://github.com/revanced/revanced-cli
5 | GLOBAL_PATCHES_DL=https://github.com/revanced/revanced-patches
6 |
7 | #Example
8 | EXISTING_DOWNLOADED_APKS=twitter
9 | PERSONAL_ACCESS_TOKEN=ghp_asample_token
10 |
11 | #YouTube:
12 | YOUTUBE_CLI_DL=https://github.com/inotia00/revanced-cli
13 | YOUTUBE_PATCHES_DL=https://github.com/YT-Advanced/ReX-patches
14 | YOUTUBE_EXCLUDE_PATCH=custom-branding-icon-revancify-blue,custom-branding-icon-revancify-red,custom-branding-icon-mmt,custom-branding-youtube-name,enable-debug-logging
15 |
16 | #Example
17 | YOUTUBE_KEYSTORE_FILE_NAME=youtube.keystore
18 | YOUTUBE_ARCHS_TO_BUILD=arm64-v8a,armeabi-v7a
19 |
20 | #YouTube Revancify Red:
21 | YOUTUBE_REVANCIFY_RED_PACKAGE_NAME=com.google.android.youtube
22 | YOUTUBE_REVANCIFY_RED_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube/
23 | YOUTUBE_REVANCIFY_RED_CLI_DL=https://github.com/inotia00/revanced-cli
24 | YOUTUBE_REVANCIFY_RED_PATCHES_DL=https://github.com/YT-Advanced/ReX-patches
25 | YOUTUBE_REVANCIFY_RED_EXCLUDE_PATCH=custom-branding-icon-revancify-blue,custom-branding-icon-mmt,custom-branding-youtube-name,enable-debug-logging
26 |
27 | #YouTube Revancify Blue:
28 | YOUTUBE_REVANCIFY_BLUE_PACKAGE_NAME=com.google.android.youtube
29 | YOUTUBE_REVANCIFY_BLUE_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube/
30 | YOUTUBE_REVANCIFY_BLUE_CLI_DL=https://github.com/inotia00/revanced-cli
31 | YOUTUBE_REVANCIFY_BLUE_PATCHES_DL=https://github.com/YT-Advanced/ReX-patches
32 | YOUTUBE_REVANCIFY_BLUE_EXCLUDE_PATCH=custom-branding-icon-revancify-red,custom-branding-icon-mmt,custom-branding-youtube-name,enable-debug-logging
33 |
34 | #YouTube MMT:
35 | YOUTUBE_MMT_PACKAGE_NAME=com.google.android.youtube
36 | YOUTUBE_MMT_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube/
37 | YOUTUBE_MMT_CLI_DL=https://github.com/inotia00/revanced-cli
38 | YOUTUBE_MMT_PATCHES_DL=https://github.com/YT-Advanced/ReX-patches
39 | YOUTUBE_MMT_EXCLUDE_PATCH=custom-branding-icon-revancify-blue,custom-branding-icon-revancify-red,custom-branding-youtube-name,enable-debug-logging
40 |
41 | #YouTube Music:
42 | YOUTUBE_MUSIC_CLI_DL=https://github.com/inotia00/revanced-cli
43 | YOUTUBE_MUSIC_PATCHES_DL=https://github.com/YT-Advanced/ReX-patches
44 | YOUTUBE_MUSIC_EXCLUDE_PATCH=custom-branding-icon-mmt,custom-branding-icon-revancify-blue,custom-branding-icon-revancify-red,custom-branding-music-name,enable-compact-dialog,enable-debug-logging
45 |
46 | #Example
47 | YOUTUBE_MUSIC_VERSION=6.15.52
48 |
49 | #Reddit
50 | REDDIT_CLI_DL=https://github.com/inotia00/revanced-cli
51 | REDDIT_PATCHES_DL=https://github.com/YT-Advanced/ReX-patches
52 |
53 | #Example
54 | #Twitter
55 | TWITTER_VERSION=latest
56 | TWITTER_CLI_DL=local://cli.jar
57 | TWITTER_PATCHES_DL=local://patches.jar
58 |
--------------------------------------------------------------------------------
/check_resource_updates.py:
--------------------------------------------------------------------------------
1 | """Check patching resource updates."""
2 |
3 | from threading import Lock
4 |
5 | from environs import Env
6 | from loguru import logger
7 |
8 | from main import get_app
9 | from src.config import RevancedConfig
10 | from src.manager.github import GitHubManager
11 | from src.utils import default_build, patches_dl_list_key, patches_versions_key
12 |
13 |
def check_if_build_is_required() -> bool:
    """Check whether any configured app needs to be re-patched.

    Compares the patch-bundle versions and sources recorded in the updates
    file against the latest available resources. When a rebuild is required,
    the affected app names are printed to stdout as a comma-separated list
    (consumed by the CI workflow).

    :return: True when at least one app must be re-patched.
    """
    env = Env()
    env.read_env()
    config = RevancedConfig(env)
    # Reuse one manager instead of constructing a new GitHubManager per lookup.
    github_manager = GitHubManager(env)
    needs_to_repatched = []
    resource_cache: dict[str, tuple[str, str]] = {}
    resource_lock = Lock()
    for app_name in env.list("PATCH_APPS", default_build):
        logger.info(f"Checking {app_name}")
        app_obj = get_app(config, app_name)
        old_patches_versions = github_manager.get_last_version(app_obj, patches_versions_key)
        old_patches_sources = github_manager.get_last_version_source(app_obj, patches_dl_list_key)

        # Backward compatibility: older update files stored a plain string
        # instead of a list of versions/sources.
        if isinstance(old_patches_versions, str):
            old_patches_versions = [old_patches_versions]
        if isinstance(old_patches_sources, str):
            old_patches_sources = [old_patches_sources]

        app_obj.download_patch_resources(config, resource_cache, resource_lock)

        new_patches_versions = app_obj.get_patch_bundles_versions()
        # A changed bundle count means the configuration itself changed; rebuild unconditionally.
        if len(old_patches_versions) != len(new_patches_versions) or len(old_patches_sources) != len(
            app_obj.patches_dl_list,
        ):
            caused_by = {
                "app_name": app_name,
                "patches": {
                    "old_versions": old_patches_versions,
                    "old_bundles": old_patches_sources,
                    "new_versions": new_patches_versions,
                    "new_bundles": app_obj.patches_dl_list,
                },
            }
            logger.info(
                f"New build can be triggered due to change in number of patch bundles or sources, info: {caused_by}",
            )
            needs_to_repatched.append(app_name)
            continue

        # strict=True surfaces any residual length mismatch between the recorded lists.
        for old_version, old_source, new_version, new_source in zip(
            old_patches_versions,
            old_patches_sources,
            new_patches_versions,
            app_obj.patches_dl_list,
            strict=True,
        ):
            if github_manager.should_trigger_build(
                old_version,
                old_source,
                new_version,
                new_source,
            ):
                caused_by = {
                    "app_name": app_name,
                    "patches": {
                        "old": old_version,
                        "new": new_version,
                    },
                }
                logger.info(f"New build can be triggered caused by {caused_by}")
                needs_to_repatched.append(app_name)
                break
    logger.info(f"Apps that need to be repatched: {needs_to_repatched}")
    if needs_to_repatched:
        print(",".join(needs_to_repatched))  # noqa: T201
        return True
    return False
83 |
84 |
85 | check_if_build_is_required()
86 |
--------------------------------------------------------------------------------
/src/downloader/apkeep.py:
--------------------------------------------------------------------------------
1 | """Apkeep Downloader Class."""
2 |
3 | import zipfile
4 | from subprocess import PIPE, Popen
5 | from time import perf_counter
6 | from typing import Any, Self
7 |
8 | from loguru import logger
9 |
10 | from src.app import APP
11 | from src.downloader.download import Downloader
12 | from src.exceptions import DownloadError
13 |
14 |
class Apkeep(Downloader):
    """Apkeep-based Downloader."""

    def _run_apkeep(self: Self, package_name: str, version: str = "") -> str:
        """Run the apkeep CLI to fetch an APK from Google Play.

        :param package_name: Android package name to download.
        :param version: Specific version to fetch; empty or "latest" means the newest.
        :return: Name of the downloaded APK (or zipped split-APK bundle).
        :raises DownloadError: If credentials are missing, apkeep fails, or no output is produced.
        """
        email = self.config.env.str("APKEEP_EMAIL")
        token = self.config.env.str("APKEEP_TOKEN")

        if not email or not token:
            msg = "APKEEP_EMAIL and APKEEP_TOKEN must be set in environment."
            raise DownloadError(msg)

        file_name = f"{package_name}.apk"
        file_path = self.config.temp_folder / file_name
        folder_path = self.config.temp_folder / package_name
        zip_path = self.config.temp_folder / f"{package_name}.zip"

        # If already downloaded, return it
        if file_path.exists():
            logger.debug(f"{file_name} already downloaded.")
            return file_name
        if zip_path.exists():
            logger.debug(f"{zip_path.name} already zipped and exists.")
            return zip_path.name

        # Build apkeep command
        cmd = [
            "apkeep",
            "-a",
            f"{package_name}@{version}" if version and version != "latest" else package_name,
            "-d",
            "google-play",
            "-e",
            email,
            "-t",
            token,
            "-o",
            "split_apk=true",
            self.config.temp_folder_name,
        ]
        logger.debug(f"Running command: {cmd}")

        start = perf_counter()
        # Context manager guarantees the process and its stdout pipe are cleaned up
        # even if logging or the return-code check raises.
        with Popen(cmd, stdout=PIPE) as process:
            output = process.stdout
            if not output:
                msg = "Failed to send request for patching."
                raise DownloadError(msg)
            for line in output:
                # loguru's logger.debug() accepts only the message; the old
                # print()-style kwargs (flush=True, end="") were treated as
                # str.format arguments and could break on lines containing braces.
                logger.debug(line.decode().rstrip())
            process.wait()
            if process.returncode != 0:
                msg = f"Command failed with exit code {process.returncode} for app {package_name}"
                raise DownloadError(msg)
        logger.info(f"Downloading completed for app {package_name} in {perf_counter() - start:.2f} seconds.")

        if file_path.exists():
            return file_name
        if folder_path.exists() and folder_path.is_dir():
            # Zip the folder of split APKs into a single archive.
            with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
                for file in folder_path.rglob("*"):
                    arcname = file.relative_to(self.config.temp_folder)
                    zipf.write(file, arcname)
            logger.debug(f"Zipped {folder_path} to {zip_path}")
            return zip_path.name
        msg = "APK file or folder not found after apkeep execution."
        raise DownloadError(msg)

    def latest_version(self: Self, app: APP, **kwargs: Any) -> tuple[str, str]:
        """Download the latest version from Google Play via Apkeep.

        :param app: The application to download.
        :return: Tuple of the downloaded file name and a pseudo source URL.
        """
        file_name = self._run_apkeep(app.package_name)
        logger.info(f"Got file name as {file_name}")
        return file_name, f"apkeep://google-play/{app.package_name}"
89 |
--------------------------------------------------------------------------------
/src/downloader/uptodown.py:
--------------------------------------------------------------------------------
1 | """Upto Down Downloader."""
2 |
3 | from typing import Any, Self
4 |
5 | import requests
6 | from bs4 import BeautifulSoup, Tag
7 | from loguru import logger
8 |
9 | from src.app import APP
10 | from src.downloader.download import Downloader
11 | from src.exceptions import UptoDownAPKDownloadError
12 | from src.utils import bs4_parser, handle_request_response, request_header, request_timeout
13 |
14 |
class UptoDown(Downloader):
    """Files downloader."""

    def extract_download_link(self: Self, page: str, app: str) -> tuple[str, str]:
        """Extract download link from uptodown url and download the APK.

        :param page: URL of the download page.
        :param app: Name of the app.
        :return: Tuple of the downloaded file name and the download URL.
        :raises UptoDownAPKDownloadError: If the download button cannot be located.
        """
        r = requests.get(page, headers=request_header, allow_redirects=True, timeout=request_timeout)
        handle_request_response(r, page)
        soup = BeautifulSoup(r.text, bs4_parser)
        detail_download_button = soup.find("button", id="detail-download-button")

        if not isinstance(detail_download_button, Tag):
            msg = f"Unable to download {app} from uptodown."
            raise UptoDownAPKDownloadError(msg, url=page)

        data_url = detail_download_button.get("data-url")
        download_url = f"https://dw.uptodown.com/dwn/{data_url}"
        file_name = f"{app}.apk"
        self._download(download_url, file_name)

        return file_name, download_url

    def specific_version(self: Self, app: APP, version: str) -> tuple[str, str]:
        """Function to download the specified version of app from uptodown.

        :param app: The application to download
        :param version: Version of the application to download
        :return: Tuple of the downloaded file name and the download URL
        :raises UptoDownAPKDownloadError: If the page or version cannot be found.
        """
        logger.debug("downloading specified version of app from uptodown.")
        url = f"{app.download_source}/versions"
        response = requests.get(url, headers=request_header, timeout=request_timeout)
        # Fail fast on HTTP errors instead of trying to parse an error page.
        handle_request_response(response, url)
        soup = BeautifulSoup(response.text, bs4_parser)
        detail_app_name = soup.find("h1", id="detail-app-name")

        if not isinstance(detail_app_name, Tag):
            msg = f"Unable to download {app.app_name} from uptodown."
            raise UptoDownAPKDownloadError(msg, url=url)

        app_code = detail_app_name.get("data-code")
        version_page = 1
        download_url = None
        version_found = False

        while not version_found:
            version_url = f"{app.download_source}/apps/{app_code}/versions/{version_page}"
            r = requests.get(version_url, headers=request_header, timeout=request_timeout)
            handle_request_response(r, version_url)
            payload = r.json()

            # Stop when the key is missing OR the page is empty; an empty "data"
            # list previously caused an infinite pagination loop.
            if not payload.get("data"):
                break

            for item in payload["data"]:
                if item["version"] == version:
                    download_url = f"{item['versionURL']}-x"
                    version_found = True
                    break

            version_page += 1

        if download_url is None:
            msg = f"Unable to download {app.app_name} from uptodown."
            raise UptoDownAPKDownloadError(msg, url=url)

        return self.extract_download_link(download_url, app.app_name)

    def latest_version(self: Self, app: APP, **kwargs: Any) -> tuple[str, str]:
        """Function to download the latest version of app from uptodown.

        :param app: The application to download
        :return: Tuple of the downloaded file name and the download URL
        """
        logger.debug("downloading latest version of app from uptodown.")
        page = f"{app.download_source}/download"
        return self.extract_download_link(page, app.app_name)
86 |
--------------------------------------------------------------------------------
/.github/workflows/newapp-check.yml:
--------------------------------------------------------------------------------
1 | name: Check for new Revanced apps
2 |
3 | on:
4 | schedule:
5 | - cron: "0 0 * * *"
6 | workflow_dispatch:
7 |
8 | concurrency:
9 | group: ${{ github.workflow }}-${{ github.ref }}
10 | cancel-in-progress: true
11 |
12 | permissions:
13 | contents: read
14 | issues: write
15 |
16 | jobs:
17 | status_check:
18 | if: github.repository == 'nikhilbadyal/docker-py-revanced'
19 | runs-on: ubuntu-latest
20 | steps:
21 | - name: Checkout repository
22 | uses: actions/checkout@main
23 |
24 | - name: Setup python
25 | uses: actions/setup-python@main
26 | with:
27 | python-version: '3.x'
28 | cache: 'pip'
29 |
30 | - name: Install Requirements
31 | run: |
32 | python -m pip install --upgrade pip
33 | pip install -r requirements.txt
34 |
35 | - name: Execute Status Check
36 | run: |
37 | EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
38 | output=$(python -m scripts.status_check)
39 | echo "changelog<<$EOF" >> $GITHUB_OUTPUT
40 | echo "$output" >> $GITHUB_OUTPUT
41 | echo "$EOF" >> $GITHUB_OUTPUT
42 | id: status
43 |
44 | - name: Update Check
45 | run: |
46 | echo "${{ steps.status.outputs.changelog }}"
47 |
48 | - name: Upload status file
49 | uses: actions/upload-artifact@main
50 | with:
51 | name: issue_body.md
52 | path: status.md
53 | if-no-files-found: error
54 |
55 | - name: Update or Create Revanced Status Issue
56 | uses: actions/github-script@v7.0.1
57 | with:
58 | script: |
59 | const issueTitle = 'Revanced apps Status';
60 | const statusContent = `${{ steps.status.outputs.changelog }}`;
61 |
62 | // Search for existing issue with the specific title
63 | const issues = await github.rest.issues.listForRepo({
64 | owner: context.repo.owner,
65 | repo: context.repo.repo,
66 | state: 'open',
67 | labels: '💁new-app'
68 | });
69 |
70 | const existingIssue = issues.data.find(issue =>
71 | issue.title === issueTitle
72 | );
73 |
74 | if (existingIssue) {
75 | // Update existing issue
76 | const updateBody = [
77 | statusContent,
78 | '',
79 | '---',
80 | `*Last updated: ${new Date().toISOString().split('T')[0]} by automated workflow*`
81 | ].join('\n');
82 |
83 | await github.rest.issues.update({
84 | owner: context.repo.owner,
85 | repo: context.repo.repo,
86 | issue_number: existingIssue.number,
87 | body: updateBody
88 | });
89 |
90 | console.log(`Updated existing issue #${existingIssue.number}: ${issueTitle}`);
91 | } else {
92 | // Create new issue if none exists
93 | const createBody = [
94 | statusContent,
95 | '',
96 | '---',
97 | `*Created: ${new Date().toISOString().split('T')[0]} by automated workflow*`,
98 | '',
99 | 'This issue will be automatically updated daily with the latest Revanced apps status.'
100 | ].join('\n');
101 |
102 | const newIssue = await github.rest.issues.create({
103 | owner: context.repo.owner,
104 | repo: context.repo.repo,
105 | title: issueTitle,
106 | body: createBody,
107 | assignees: [context.repo.owner],
108 | labels: ['💁new-app']
109 | });
110 |
111 | console.log(`Created new issue #${newIssue.data.number}: ${issueTitle}`);
112 | }
113 |
--------------------------------------------------------------------------------
/src/downloader/apkmonk.py:
--------------------------------------------------------------------------------
1 | """APK Monk Downloader Class."""
2 |
3 | import re
4 | from typing import Any, Self
5 |
6 | import requests
7 | from bs4 import BeautifulSoup
8 |
9 | from scripts.status_check import combo_headers
10 | from src.app import APP
11 | from src.downloader.download import Downloader
12 | from src.downloader.sources import APK_MONK_BASE_URL
13 | from src.exceptions import APKMonkAPKDownloadError
14 | from src.utils import bs4_parser, handle_request_response, request_header, request_timeout
15 |
16 |
class ApkMonk(Downloader):
    """Files downloader."""

    def extract_download_link(self: Self, page: str, app: str) -> tuple[str, str]:
        """Function to extract the download link from apkmonk html page.

        :param page: Url of the page
        :param app: Name of the app
        :return: Tuple of the downloaded file name and the final download URL
        :raises APKMonkAPKDownloadError: If the pkg/key download parameters cannot be scraped.
        """
        file_name = f"{app}.apk"
        r = requests.get(page, headers=request_header, allow_redirects=True, timeout=request_timeout)
        handle_request_response(r, page)
        soup = BeautifulSoup(r.text, bs4_parser)
        download_scripts = soup.find_all("script", type="text/javascript")
        key_value_pattern = r"pkg=([^&]+)&key=([^']+)"
        url = None
        for script in download_scripts:
            if match := re.search(key_value_pattern, script.text):
                pkg_value = match.group(1)
                key_value = match.group(2)
                url = f"{APK_MONK_BASE_URL}/down_file?pkg={pkg_value}&key={key_value}"
                break
        if not url:
            msg = "Unable to get key-value link"
            raise APKMonkAPKDownloadError(
                msg,
                url=page,
            )
        # Use a per-request copy: mutating the shared module-level request_header
        # would leak this User-Agent into every other downloader in the process.
        headers = {**request_header, "User-Agent": combo_headers["User-Agent"]}
        r = requests.get(url, headers=headers, allow_redirects=True, timeout=request_timeout)
        handle_request_response(r, url)
        final_download_url = r.json()["url"]
        self._download(final_download_url, file_name)
        return file_name, final_download_url

    def specific_version(self: Self, app: APP, version: str, main_page: str = "") -> tuple[str, str]:
        """Function to download the specified version of app from apkmonk.

        :param app: The application to download
        :param version: Version of the application to download
        :param main_page: Optional main page URL override (kept for interface compatibility)
        :return: Tuple of the downloaded file name and the download URL
        :raises APKMonkAPKDownloadError: If the requested version is not listed.
        """
        r = requests.get(app.download_source, headers=request_header, allow_redirects=True, timeout=request_timeout)
        handle_request_response(r, app.download_source)
        soup = BeautifulSoup(r.text, bs4_parser)
        version_table = soup.find_all(class_="striped")
        for version_row in version_table:
            version_links = version_row.find_all("a")
            for link in version_links:
                # Compare against the requested version parameter; it was previously
                # ignored in favour of app.app_version.
                if link.text == version:
                    download_link = link["href"]
                    return self.extract_download_link(APK_MONK_BASE_URL + download_link, app.app_name)
        msg = "Unable to scrap version link"
        raise APKMonkAPKDownloadError(
            msg,
            url=app.download_source,
        )

    def latest_version(self: Self, app: APP, **kwargs: Any) -> tuple[str, str]:
        """Function to download whatever the latest version of app from apkmonk.

        :param app: The application to download
        :return: Tuple of the downloaded file name and the download URL
        """
        r = requests.get(app.download_source, headers=request_header, allow_redirects=True, timeout=request_timeout)
        handle_request_response(r, app.download_source)
        soup = BeautifulSoup(r.text, bs4_parser)
        latest_download_url = soup.find(id="download_button")["href"]  # type: ignore[index]
        return self.extract_download_link(latest_download_url, app.app_name)  # type: ignore[arg-type]
88 |
--------------------------------------------------------------------------------
/apks/options.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "patchName": "Spoof client",
4 | "options": [
5 | {
6 | "key": "client-id",
7 | "value": null
8 | }
9 | ]
10 | },
11 | {
12 | "patchName": "Custom branding name Reddit",
13 | "options": [
14 | {
15 | "key": "AppName",
16 | "value": "Reddit"
17 | }
18 | ]
19 | },
20 | {
21 | "patchName": "Overlay buttons",
22 | "options": [
23 | {
24 | "key": "OutlineIcon",
25 | "value": true
26 | },
27 | {
28 | "key": "BottomMargin",
29 | "value": "0.0dip"
30 | }
31 | ]
32 | },
33 | {
34 | "patchName": "Shorts overlay buttons",
35 | "options": [
36 | {
37 | "key": "IconType",
38 | "value": "TikTok"
39 | }
40 | ]
41 | },
42 | {
43 | "patchName": "SponsorBlock",
44 | "options": [
45 | {
46 | "key": "OutlineIcon",
47 | "value": true
48 | }
49 | ]
50 | },
51 | {
52 | "patchName": "Visual preferences icons",
53 | "options": [
54 | {
55 | "key": "MainSettings",
56 | "value": true
57 | },
58 | {
59 | "key": "ExtendedSettings",
60 | "value": true
61 | },
62 | {
63 | "key": "ExtendedIcon",
64 | "value": "Extension"
65 | }
66 | ]
67 | },
68 | {
69 | "patchName": "Settings",
70 | "options": [
71 | {
72 | "key": "InsertPosition",
73 | "value": "About"
74 | }
75 | ]
76 | },
77 | {
78 | "patchName": "Custom double tap length",
79 | "options": [
80 | {
81 | "key": "DoubleTapLengthArrays",
82 | "value": "3, 5, 10, 15, 20, 30, 60, 120, 180"
83 | }
84 | ]
85 | },
86 | {
87 | "patchName": "Tuck away preferences",
88 | "options": [
89 | {
90 | "key": "SettingElements",
91 | "value": "Data saving, Video quality preferences, Manage all history, Privacy, Try experimental new features, Purchases and memberships, Billing & payments, Connected apps, Live chat, Captions, About"
92 | }
93 | ]
94 | },
95 | {
96 | "patchName": "Reddit settings",
97 | "options": [
98 | {
99 | "key": "RedditSettings",
100 | "value": true
101 | }
102 | ]
103 | },
104 | {
105 | "patchName" : "Custom branding",
106 | "options" : [ {
107 | "key" : "appName",
108 | "value" : "ReVanced"
109 | } ]
110 | },
111 | {
112 | "patchName": "Custom branding wordmark header",
113 | "options": [
114 | {
115 | "key": "header",
116 | "value": "yt_premium_wordmark_header"
117 | }
118 | ]
119 | },
120 | {
121 | "patchName": "Premium heading",
122 | "options": [
123 | {
124 | "key": "UsePremiumHeading",
125 | "value": true
126 | }
127 | ]
128 | },
129 | {
130 | "patchName": "Custom Video Speed",
131 | "options": [
132 | {
133 | "key": "CustomSpeedArrays",
134 | "value": "0.25, 0.5, 0.75, 0.9, 1.0, 1.12, 1.25, 1.5, 1.75, 2.0, 2.5, 3.0, 4.0, 5.0"
135 | }
136 | ]
137 | },
138 | {
139 | "patchName": "Custom package name",
140 | "options": [
141 | {
142 | "key": "PackageNameYouTube",
143 | "value": "com.google.android.youtube.dockerpy"
144 | },
145 | {
146 | "key": "PackageNameYouTubeMusic",
147 | "value": "com.google.android.apps.youtube.music.dockerpy"
148 | }
149 | ]
150 | },
151 | {
152 | "patchName": "Theme",
153 | "options": [
154 | {
155 | "key": "DarkThemeBackgroundColor",
156 | "value": "@android:color/black"
157 | },
158 | {
159 | "key": "LightThemeBackgroundColor",
160 | "value": "@android:color/white"
161 | }
162 | ]
163 | },
164 | {
165 | "patchName": "GmsCore support",
166 | "options": [
167 | {
168 | "key": "packageNameYouTube",
169 | "value": "com.google.android.youtube.dockerpy"
170 | },
171 | {
172 | "key": "packageNameYouTubeMusic",
173 | "value": "com.google.android.apps.youtube.music.dockerpy"
174 | }
175 | ]
176 | }
177 | ]
178 |
--------------------------------------------------------------------------------
/.env.my:
--------------------------------------------------------------------------------
1 | # Global
2 | EXTRA_FILES=https://github.com/ReVanced/GmsCore/releases/latest@Revanced-Microg.apk
3 | PATCH_APPS=YOUTUBE_INOTIA00,YOUTUBE_ANDEA,YOUTUBE_MUSIC_INOTIA00,YOUTUBE_MUSIC_ANDEA,REDDIT_INOTIA00,REDDIT_ANDEA,YOUTUBE_REVANCED,YOUTUBE_MUSIC_REVANCED,SPOTIFY
4 | GLOBAL_CLI_DL=https://github.com/inotia00/revanced-cli/releases/latest
5 | GLOBAL_PATCHES_DL=https://github.com/revanced/revanced-patches/releases/latest
6 |
7 | # YouTube (Using inotia00 Patches)
8 | YOUTUBE_INOTIA00_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube/
9 | YOUTUBE_INOTIA00_PATCHES_DL=https://github.com/inotia00/revanced-patches/releases/latest-prerelease
10 | YOUTUBE_INOTIA00_EXCLUDE_PATCH=custom-branding-icon-youtube,custom-branding-name-youtube,enable-debug-logging,hide-fullscreen-button,custom-branding-icon-for-youtube,custom-branding-name-for-youtube
11 | YOUTUBE_INOTIA00_PACKAGE_NAME=com.google.android.youtube
12 |
13 | # YouTube (Using anddea Patches)
14 | YOUTUBE_ANDEA_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube/
15 | YOUTUBE_ANDEA_PATCHES_DL=https://github.com/anddea/revanced-patches/releases/latest-prerelease
16 | YOUTUBE_ANDEA_EXCLUDE_PATCH=custom-branding-icon-youtube,custom-branding-name-youtube,enable-debug-logging,hide-fullscreen-button,custom-branding-icon-for-youtube,custom-branding-name-for-youtube
17 | YOUTUBE_ANDEA_PACKAGE_NAME=com.google.android.youtube
18 |
19 |
20 | # YouTube (Using ReVanced Patches)
21 | YOUTUBE_REVANCED_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube/
22 | YOUTUBE_REVANCED_PATCHES_DL=https://github.com/ReVanced/revanced-patches/releases/latest
23 | YOUTUBE_REVANCED_EXCLUDE_PATCH=custom-branding-icon-youtube,custom-branding-name-youtube,enable-debug-logging,hide-fullscreen-button,custom-branding-icon-for-youtube,custom-branding-name-for-youtube
24 | YOUTUBE_REVANCED_PACKAGE_NAME=com.google.android.youtube
25 |
26 | # YouTube Music (Using inotia00 Patches)
27 | YOUTUBE_MUSIC_INOTIA00_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube-music/
28 | YOUTUBE_MUSIC_INOTIA00_PATCHES_DL=https://github.com/inotia00/revanced-patches/releases/latest-prerelease
29 | YOUTUBE_MUSIC_INOTIA00_EXCLUDE_PATCH=custom-branding-icon-youtube-music,custom-branding-name-youtube-music,enable-compact-dialog,enable-debug-logging,enable-old-player-layout,custom-branding-icon-for-youtube-music,custom-branding-name-for-youtube-music,custom-header-for-youtube-music
30 | YOUTUBE_MUSIC_INOTIA00_PACKAGE_NAME=com.google.android.apps.youtube.music
31 |
32 | # YouTube Music (Using anddea Patches)
33 | YOUTUBE_MUSIC_ANDEA_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube-music/
34 | YOUTUBE_MUSIC_ANDEA_PATCHES_DL=https://github.com/anddea/revanced-patches/releases/latest-prerelease
35 | YOUTUBE_MUSIC_ANDEA_EXCLUDE_PATCH=custom-branding-icon-youtube-music,custom-branding-name-youtube-music,enable-compact-dialog,enable-debug-logging,enable-old-player-layout,custom-branding-icon-for-youtube-music,custom-branding-name-for-youtube-music,custom-header-for-youtube-music
36 | YOUTUBE_MUSIC_ANDEA_PACKAGE_NAME=com.google.android.apps.youtube.music
37 |
38 |
39 | # YouTube Music (Using ReVanced Patches)
40 | YOUTUBE_MUSIC_REVANCED_DL_SOURCE=https://www.apkmirror.com/apk/google-inc/youtube-music/
41 | YOUTUBE_MUSIC_REVANCED_PATCHES_DL=https://github.com/ReVanced/revanced-patches/releases/latest
42 | YOUTUBE_MUSIC_REVANCED_EXCLUDE_PATCH=custom-branding-icon-youtube-music,custom-branding-name-youtube-music,enable-compact-dialog,enable-debug-logging,enable-old-player-layout,custom-branding-icon-for-youtube-music,custom-branding-name-for-youtube-music,custom-header-for-youtube-music
43 | YOUTUBE_MUSIC_REVANCED_PACKAGE_NAME=com.google.android.apps.youtube.music
44 |
45 | # Reddit (Using inotia00 Patches)
46 | REDDIT_INOTIA00_DL_SOURCE=https://www.apkmirror.com/apk/reddit-inc/reddit/
47 | REDDIT_INOTIA00_PATCHES_DL=https://github.com/inotia00/revanced-patches/releases/latest-prerelease
48 | REDDIT_INOTIA00_EXCLUDE_PATCH=change-package-name,custom-branding-name-for-reddit
49 | REDDIT_INOTIA00_PACKAGE_NAME=com.reddit.frontpage
50 |
51 | # Reddit (Using anddea Patches)
52 | REDDIT_ANDEA_DL_SOURCE=https://www.apkmirror.com/apk/reddit-inc/reddit/
53 | REDDIT_ANDEA_PATCHES_DL=https://github.com/anddea/revanced-patches/releases/latest-prerelease
54 | REDDIT_ANDEA_EXCLUDE_PATCH=change-package-name,custom-branding-name-for-reddit
55 | REDDIT_ANDEA_PACKAGE_NAME=com.reddit.frontpage
56 |
57 | # Spotify
58 | SPOTIFY_DL_SOURCE=apkeep
59 | SPOTIFY_PATCHES_DL=https://github.com/anddea/revanced-patches/releases/latest-prerelease
60 | SPOTIFY_PACKAGE_NAME=com.spotify.music
61 |
62 | # GitHub Repository
63 | GITHUB_REPOSITORY=nikhilbadyal/docker-py-revanced
64 |
--------------------------------------------------------------------------------
/src/exceptions.py:
--------------------------------------------------------------------------------
1 | """Possible Exceptions."""
2 |
3 | from typing import Any, Self
4 |
5 |
6 | class BuilderError(Exception):
7 | """Base class for all the project errors."""
8 |
9 | message = "Default Error message."
10 |
11 | def __init__(self: Self, *args: Any, **kwargs: Any) -> None:
12 | if args:
13 | self.message = args[0]
14 | super().__init__(self.message)
15 |
16 | def __str__(self: Self) -> str:
17 | """Return error message."""
18 | return self.message
19 |
20 |
class ScrapingError(BuilderError):
    """Exception raised when a url cannot be scraped."""

    def __init__(self: Self, *args: Any, **kwargs: Any) -> None:
        """Initialize the ScrapingError exception.

        Args:
        ----
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.
            url (str, optional): The URL that failed to be scraped. Defaults to None.
        """
        super().__init__(*args)
        self.url = kwargs.get("url")

    def __str__(self: Self) -> str:
        """Exception message including the failing URL."""
        base_message = super().__str__()
        return f"Message - {base_message} Url - {self.url}"
40 |
41 |
class APKMirrorIconScrapError(ScrapingError):
    """Raised when the app icon cannot be scraped from APKMirror."""


class APKComboIconScrapError(ScrapingError):
    """Raised when the app icon cannot be scraped from APKCombo."""


class APKPureIconScrapError(ScrapingError):
    """Raised when the app icon cannot be scraped from APKPure."""


class APKMonkIconScrapError(ScrapingError):
    """Raised when the app icon cannot be scraped from APKMonk."""
56 |
57 |
class DownloadError(BuilderError):
    """Generic Download failure."""

    def __init__(self: Self, *args: Any, **kwargs: Any) -> None:
        """Initialize the DownloadError exception.

        Args:
        ----
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.
            url (str, optional): The URL of the failed download. Defaults to None.
        """
        super().__init__(*args)
        self.url = kwargs.get("url")

    def __str__(self: Self) -> str:
        """Exception message including the failing URL."""
        base_message = super().__str__()
        return f"Message - {base_message} Url - {self.url}"
77 |
78 |
class APKDownloadError(DownloadError):
    """Raised when an APK cannot be downloaded."""


class APKMirrorAPKDownloadError(APKDownloadError):
    """Raised when downloading an APK from APKMirror fails."""


class APKMonkAPKDownloadError(APKDownloadError):
    """Raised when downloading an APK from APKMonk fails."""


class APKMirrorAPKNotFoundError(APKDownloadError):
    """Raised when the requested APK does not exist on APKMirror."""


class UptoDownAPKDownloadError(APKDownloadError):
    """Raised when downloading an APK from Uptodown fails."""


class APKPureAPKDownloadError(APKDownloadError):
    """Raised when downloading an APK from APKPure fails."""


class APKSosAPKDownloadError(APKDownloadError):
    """Raised when downloading an APK from APKsos fails."""
105 |
106 |
class PatchingFailedError(BuilderError):
    """Raised when patching an application fails."""


class AppNotFoundError(BuilderError):
    """Raised when the requested app is not a valid ReVanced app."""
113 |
114 |
class PatchesJsonLoadError(BuilderError):
    """Failed to load patches json."""

    def __init__(self: Self, *args: Any, **kwargs: Any) -> None:
        """Initialize the PatchesJsonLoadError exception.

        Args:
        ----
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.
            file_name (str, optional): The name of the json file. Defaults to None.
        """
        super().__init__(*args)
        self.file_name = kwargs.get("file_name")

    def __str__(self: Self) -> str:
        """Exception message including the failing file name."""
        base_message = super().__str__()
        return f"Message - {base_message} File - {self.file_name}"
134 |
--------------------------------------------------------------------------------
/.github/workflows/build-artifact.yml:
--------------------------------------------------------------------------------
1 | name: Build & Upload
2 | env:
3 | DOCKER_BUILDKIT: 1
4 | COMPOSE_DOCKER_CLI_BUILD: 1
5 | on:
6 | workflow_dispatch:
7 | workflow_call:
8 | secrets:
9 | ENVS:
10 | required: false
11 | DOCKER_PY_REVANCED_SECRETS:
12 | required: false
13 | REDDIT_CLIENT_ID:
14 | required: false
15 | inputs:
16 | CI_TEST:
17 | required: false
18 | type: boolean
19 | default: false
20 | COMMIT_CHANGELOG:
21 | type: boolean
22 | required: false
23 | default: true
24 | DEBUG_ENABLED:
25 | type: boolean
26 | description: 'Run the build with tmate debugging enabled.'
27 | required: false
28 | default: false
29 | PREFERRED_PATCH_APPS:
30 | description: "Apps to be patched. Overrides any env set"
31 | required: false
32 | type: string
33 |
34 | jobs:
35 | build-apk:
36 | permissions: write-all
37 | name: APK Build
38 | runs-on: ubuntu-latest
39 | timeout-minutes: 60
40 |
41 | steps:
42 | - name: Check out Git repository
43 | uses: actions/checkout@main
44 |
45 | - name: Update Env for custom build
46 | run: |
47 | echo "${{ secrets.ENVS }}" >> .env
48 | echo "GITHUB_REPOSITORY=${{ github.repository }}" >> .env
49 |
50 | - name: Update Env from secrets for custom build
51 | run: |
52 | echo "${{ secrets.DOCKER_PY_REVANCED_SECRETS }}" >> .env
53 | echo "Added envs"
54 |
55 | - name: Setup python
56 | uses: actions/setup-python@main
57 | with:
58 | python-version: '3.x'
59 |
60 | - name: Install Requirements
61 | if: ${{ inputs.PREFERRED_PATCH_APPS }}
62 | env:
63 | PREFERRED_PATCH_APPS: ${{ inputs.PREFERRED_PATCH_APPS }}
64 | run: |
65 | python -m pip install --upgrade pip
66 | pip install -r requirements.txt
67 |
68 | - name: Override Patch apps
69 | if: ${{ inputs.PREFERRED_PATCH_APPS }}
70 | env:
71 | PREFERRED_PATCH_APPS: ${{ inputs.PREFERRED_PATCH_APPS }}
72 | run: |
73 | python -m scripts.prefered_apps
74 |
75 | - name: Inject Reddit Client ID
76 | env:
77 | REDDIT_CLIENT_ID: ${{ secrets.REDDIT_CLIENT_ID }}
78 | if: env.REDDIT_CLIENT_ID != null
79 | run: |
80 | client_id="${REDDIT_CLIENT_ID}"
81 | path="apks/options.json"
82 | json_data=$(cat "${path}")
83 |
84 | new_object='{
85 | "patchName": "Spoof client",
86 | "options": [
87 | {
88 | "key": "client-id",
89 | "value": "'${client_id}'"
90 | }
91 | ]
92 | }'
93 | # Check if an object with the patchName "Spoof client" already exists
94 | existing_object_index=$(echo "${json_data}" | jq 'map(.patchName) | index("Spoof client")')
95 | echo "${existing_object_index}"
96 | if [[ ${existing_object_index} != "null" ]]; then
97 | echo "Patch entry already exists. Overriding client ID in it."
98 | updated_json=$(echo "${json_data}" | jq ".[${existing_object_index}].options[0].value = \"${client_id}\"")
99 | else
100 |             echo "Patch entry doesn't exist. Adding new entry."
101 | updated_json=$(echo "${json_data}" | jq ". += [${new_object}]")
102 | fi
103 | echo "${updated_json}" > "${path}"
104 |
105 |
106 | - name: Setup tmate session
107 | uses: mxschmitt/action-tmate@master
108 | if: ${{ github.event_name == 'workflow_dispatch' && inputs.DEBUG_ENABLED }}
109 | with:
110 | detached: true
111 |
112 | - name: Build Revanced APKs
113 | if: ${{ true && !inputs.DEBUG_ENABLED }}
114 | run: |
115 | if [[ "${{ inputs.CI_TEST }}" =~ ^(true|True|1)$ ]]; then
116 | echo "In CI Testing. Using local compose file."
117 | docker compose -f docker-compose-local.yml up --build
118 | else
119 | echo "Using Prod compose file."
120 | docker compose up --build
121 | fi
122 | - name: Upload Build APKS
123 | uses: actions/upload-artifact@main
124 | if: ${{ true && !inputs.DEBUG_ENABLED }}
125 | with:
126 | name: Built-APKs
127 | path: |
128 | changelog.md
129 | changelog.json
130 | updates.json
131 | apks/*-output.apk
132 |           apks/VancedMicroG.apk
133 | if-no-files-found: error
134 |
135 | - name: Commit Update file
136 | if: ${{ inputs.COMMIT_CHANGELOG && !inputs.CI_TEST}}
137 | uses: stefanzweifel/git-auto-commit-action@master
138 | with:
139 | branch: changelogs
140 | skip_checkout: true
141 | file_pattern: 'changelog.md changelog.json updates.json'
142 | commit_message: 🚀New Build
143 | push_options: '--force'
144 |
--------------------------------------------------------------------------------
/src/patches_gen.py:
--------------------------------------------------------------------------------
1 | """Generate patches using cli."""
2 |
3 | import json
4 | import re
5 | import subprocess
6 | from pathlib import Path
7 | from typing import Any
8 |
9 |
10 | def extract_name_from_section(section: str) -> str | None:
11 | """Extract the name from a section."""
12 | name_match = re.search(r"Name: (.*?)\n", section)
13 | return name_match.group(1).strip() if name_match else None
14 |
15 |
def extract_description_from_section(section: str) -> str:
    """Return the patch description in *section*; empty string when missing."""
    match = re.search(r"Description: (.*?)\n", section)
    if match:
        return match.group(1).strip()
    return ""
20 |
21 |
def extract_enabled_state_from_section(section: str) -> bool:
    """Return whether the section is marked ``Enabled: true`` (case-insensitive)."""
    match = re.search(r"Enabled: (true|false)", section, re.IGNORECASE)
    if match is None:
        return False
    return match.group(1).lower() == "true"
26 |
27 |
def extract_package_info(package_section: str) -> dict[str, Any]:
    """Return ``{"name": ..., "versions": ...}`` parsed from one package section."""
    # The first line of the section is the package name itself.
    name = package_section.split("\n")[0].strip()
    match = re.search(r"Compatible versions:\s*((?:\d+\.\d+\.\d+\s*)+)", package_section)
    versions: list[str] = match.group(1).split() if match else []
    # Normalise an empty version list to None.
    return {"name": name, "versions": versions or None}
34 |
35 |
def extract_compatible_packages_from_section(section: str) -> list[dict[str, Any]]:
    """Return package dicts parsed from the ``Compatible packages:`` block."""
    if "Compatible packages:" not in section:
        return []
    # Everything after the marker is a sequence of "Package name: ..." chunks.
    tail = section.split("Compatible packages:")[1]
    chunks = re.split(r"\s*Package name: ", tail)
    # The first chunk precedes the first "Package name:" marker, so skip it.
    return [extract_package_info(chunk) for chunk in chunks[1:]]
43 |
44 |
def parse_option_match(match: tuple[str, ...]) -> dict[str, Any]:
    """Convert one regex option match tuple into an option dictionary."""
    title, description, required, key, default, possible, type_name = match[:7]
    return {
        "title": title.strip(),
        "description": description.strip(),
        "required": required.lower() == "true",
        "key": key.strip(),
        "default": default.strip(),
        "possible_values": [value.strip() for value in possible.split() if value.strip()] if possible else [],
        "type": type_name.strip(),
    }
56 |
57 |
def extract_options_from_section(section: str) -> list[dict[str, Any]]:
    """Return option dictionaries parsed from the ``Options:`` block, if any."""
    if "Options:" not in section:
        return []
    options_blob = section.split("Options:")[1]
    option_pattern = r"Title: (.*?)\n\s*Description: (.*?)\n\s*Required: (true|false)\n\s*Key: (.*?)\n\s*Default: (.*?)\n(?:\s*Possible values:\s*(.*?))?\s*Type: (.*?)\n"  # noqa: E501
    found = re.findall(option_pattern, options_blob, re.DOTALL)
    return [parse_option_match(item) for item in found]
70 |
71 |
def parse_single_section(section: str) -> dict[str, Any]:
    """Assemble one patch dictionary from a single ``Name:`` section."""
    compatible = extract_compatible_packages_from_section(section)
    return {
        "name": extract_name_from_section(section),
        "description": extract_description_from_section(section),
        # An empty compatible-package list is normalised to None.
        "compatiblePackages": compatible if compatible else None,
        "use": extract_enabled_state_from_section(section),
        "options": extract_options_from_section(section),
    }
87 |
88 |
def run_command_and_capture_output(patches_command: list[str]) -> str:
    """Execute *patches_command* and return its stdout (raises on non-zero exit)."""
    completed = subprocess.run(patches_command, capture_output=True, text=True, check=True)
    return completed.stdout
93 |
94 |
def parse_text_to_json(text: str) -> list[dict[Any, Any]]:
    """Split CLI output into ``Name:`` sections and parse each one."""
    # Lookahead split keeps the "Name:" marker at the start of each section.
    return [parse_single_section(part) for part in re.split(r"(?=Name:)", text)]
99 |
100 |
def convert_command_output_to_json(
    jar_file_name: str,
    patches_file: str,
    output_file: str = "patches.json",
) -> list[dict[Any, Any]]:
    """
    Runs the ReVanced CLI command, processes the output, and saves it as a sorted JSON file.

    Args:
        jar_file_name (str): Name or path of the JAR file to run.
        patches_file (str): The patches file name or path to pass to the command.
        output_file (str): Path of the JSON file to write. Defaults to ``patches.json``.

    Returns:
        The parsed, filtered, and name-sorted list of patch dictionaries.
    """
    command = ["java", "-jar", jar_file_name, "list-patches", "-ipuvo", patches_file]
    output = run_command_and_capture_output(command)

    parsed_data = parse_text_to_json(output)

    # Filter out invalid entries where "name" is None (e.g. preamble text
    # before the first "Name:" section).
    parsed_data = [entry for entry in parsed_data if entry["name"] is not None]

    # Sort the data by the "name" field for deterministic output.
    parsed_data.sort(key=lambda x: x["name"])

    # Explicit encoding keeps the output stable across platforms.
    with Path(output_file).open("w", encoding="utf-8") as file:
        json.dump(parsed_data, file, indent=2)

    return parsed_data
127 |
--------------------------------------------------------------------------------
/src/downloader/github.py:
--------------------------------------------------------------------------------
1 | """Github Downloader."""
2 |
3 | import re
4 | from typing import Self
5 | from urllib.parse import urlparse
6 |
7 | import requests
8 | from lastversion import latest
9 | from loguru import logger
10 |
11 | from src.app import APP
12 | from src.config import RevancedConfig
13 | from src.downloader.download import Downloader
14 | from src.exceptions import DownloadError
15 | from src.utils import handle_request_response, request_timeout, update_changelog
16 |
17 |
class Github(Downloader):
    """Downloader for assets attached to GitHub releases."""

    MIN_PATH_SEGMENTS = 2  # Minimum path segments for valid GitHub URL

    def latest_version(self: Self, app: APP, **kwargs: dict[str, str]) -> tuple[str, str]:
        """Function to download files from GitHub repositories.

        :param app: App to download
        :param kwargs: Must provide ``owner`` and ``name`` of the repository.
        :return: Tuple of (file name, download URL).
        """
        logger.debug(f"Trying to download {app.app_name} from github")
        if self.config.dry_run:
            logger.debug(f"Skipping download of {app.app_name}. File already exists or dry running.")
            return app.app_name, f"local://{app.app_name}"
        owner = str(kwargs["owner"])
        repo_name = str(kwargs["name"])
        repo_url = f"https://api.github.com/repos/{owner}/{repo_name}/releases/latest"
        headers = {
            "Content-Type": "application/vnd.github.v3+json",
        }
        # Authenticated requests get a much higher GitHub API rate limit.
        if self.config.personal_access_token:
            logger.debug("Using personal access token")
            headers["Authorization"] = f"Bearer {self.config.personal_access_token}"
        response = requests.get(repo_url, headers=headers, timeout=request_timeout)
        handle_request_response(response, repo_url)
        if repo_name == "revanced-patches":
            # NOTE(review): hard-coded asset index — assumes the second release
            # asset is the desired patches file; verify against the release layout.
            download_url = response.json()["assets"][1]["browser_download_url"]
        else:
            download_url = response.json()["assets"][0]["browser_download_url"]
        update_changelog(f"{owner}/{repo_name}", response.json())
        self._download(download_url, file_name=app.app_name)
        return app.app_name, download_url

    @staticmethod
    def _extract_repo_owner_and_tag(url: str) -> tuple[str, str, str]:
        """Extract repo owner, repo name and release tag from a GitHub URL.

        :param url: URL such as ``https://github.com/<owner>/<repo>/releases/...``.
        :return: Tuple of (owner, repo name, release tag), the tag being
            ``"latest"`` or ``"tags/<tag>"``.
        :raises DownloadError: If the URL has fewer than two path segments.
        """
        parsed_url = urlparse(url)
        path_segments = parsed_url.path.strip("/").split("/")
        if len(path_segments) < Github.MIN_PATH_SEGMENTS:
            msg = f"Invalid GitHub URL format: {url}"
            raise DownloadError(msg)
        github_repo_owner = path_segments[0]
        github_repo_name = path_segments[1]
        tag_position = 3
        # "latest-prerelease" is a sentinel (not a real GitHub tag): resolve it
        # to the newest release, pre-releases included, via `lastversion`.
        if len(path_segments) > tag_position and path_segments[3] == "latest-prerelease":
            logger.info(f"Including pre-releases/beta for {github_repo_name} selection.")
            latest_tag = str(latest(f"{github_repo_owner}/{github_repo_name}", output_format="tag", pre_ok=True))
            release_tag = f"tags/{latest_tag}"
        else:
            # Use the segment following "tag" when present, else fall back to "latest".
            release_tag = next(
                (f"tags/{path_segments[i + 1]}" for i, segment in enumerate(path_segments) if segment == "tag"),
                "latest",
            )
        return github_repo_owner, github_repo_name, release_tag

    @staticmethod
    def _get_release_assets(
        github_repo_owner: str,
        github_repo_name: str,
        release_tag: str,
        asset_filter: str,
        config: RevancedConfig,
    ) -> tuple[str, str]:
        """Get assets from given tag.

        :param asset_filter: Regex applied to each asset's download URL.
        :return: Tuple of (release tag name, matched URL text); empty strings
            when no asset matches the filter.
        :raises DownloadError: If *asset_filter* is not a valid regex.
        """
        api_url = f"https://api.github.com/repos/{github_repo_owner}/{github_repo_name}/releases/{release_tag}"
        headers = {
            "Content-Type": "application/vnd.github.v3+json",
        }
        if config.personal_access_token:
            headers["Authorization"] = f"Bearer {config.personal_access_token}"
        response = requests.get(api_url, headers=headers, timeout=request_timeout)
        handle_request_response(response, api_url)
        update_changelog(f"{github_repo_owner}/{github_repo_name}", response.json())
        assets = response.json()["assets"]
        try:
            filter_pattern = re.compile(asset_filter)
        except re.error as e:
            msg = f"Invalid regex {asset_filter} pattern provided."
            raise DownloadError(msg) from e
        for asset in assets:
            assets_url = asset["browser_download_url"]
            assets_name = asset["name"]
            # NOTE(review): returns match.group(), i.e. only the matched part of
            # the URL — presumably callers pass patterns matching the full URL; confirm.
            if match := filter_pattern.search(assets_url):
                logger.debug(f"Found {assets_name} to be downloaded from {assets_url}")
                return response.json()["tag_name"], match.group()
        return "", ""

    @staticmethod
    def patch_resource(repo_url: str, assets_filter: str, config: RevancedConfig) -> tuple[str, str]:
        """Fetch patch resource (tag, asset URL) from a repo release URL."""
        repo_owner, repo_name, latest_tag = Github._extract_repo_owner_and_tag(repo_url)
        return Github._get_release_assets(repo_owner, repo_name, latest_tag, assets_filter, config)
110 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | """Entry point."""
2 |
3 | import sys
4 | from concurrent.futures import ThreadPoolExecutor, as_completed
5 | from threading import Lock
6 | from typing import Any
7 |
8 | from environs import Env
9 | from loguru import logger
10 |
11 | from src.app import APP
12 | from src.config import RevancedConfig
13 | from src.downloader.download import Downloader
14 | from src.exceptions import AppNotFoundError, BuilderError, PatchesJsonLoadError, PatchingFailedError
15 | from src.parser import Parser
16 | from src.patches import Patches
17 | from src.utils import check_java, delete_old_changelog, load_older_updates, save_patch_info, write_changelog_to_file
18 |
19 |
def get_app(config: RevancedConfig, app_name: str) -> APP:
    """Build an ``APP`` object, preferring a package name set via the environment."""
    override = config.env.str(f"{app_name}_PACKAGE_NAME".upper(), None)
    # Fall back to the package name known to the patcher when no override is set.
    package_name = override if override else Patches.get_package_name(app_name)
    return APP(app_name=app_name, package_name=package_name, config=config)
25 |
26 |
def process_single_app(
    app_name: str,
    config: RevancedConfig,
    caches: tuple[
        dict[tuple[str, str], tuple[str, str]],
        dict[str, tuple[str, str]],
        Lock,
        Lock,
    ],
) -> dict[str, Any]:
    """Process a single app and return its update info.

    Downloads patch resources and the APK (both through shared, lock-protected
    caches), applies the include/exclude patch configuration, then patches the
    app. Every known build failure is logged and swallowed so one app cannot
    abort the whole run.

    :param app_name: Name of the app to build.
    :param config: Global build configuration.
    :param caches: Tuple of (download cache, resource cache, download lock, resource lock).
    :return: Update info for the app, or an empty dict on failure.
    """
    download_cache, resource_cache, download_lock, resource_lock = caches
    logger.info(f"Trying to build {app_name}")
    try:
        app = get_app(config, app_name)

        # Use shared resource cache with thread safety
        app.download_patch_resources(config, resource_cache, resource_lock)

        patcher = Patches(config, app)
        parser = Parser(patcher, config)
        app_all_patches = patcher.get_app_configs(app)

        # Use shared APK cache with thread safety
        app.download_apk_for_patching(config, download_cache, download_lock)

        parser.include_exclude_patch(app, app_all_patches, patcher.patches_dict)
        logger.info(app)
        # Snapshot update info before patch_app runs the (possibly failing) build.
        app_update_info = save_patch_info(app, {})
        parser.patch_app(app)
    except AppNotFoundError as e:
        logger.info(e)
        return {}
    except PatchesJsonLoadError:
        logger.exception("Patches.json not found")
        return {}
    except PatchingFailedError as e:
        logger.exception(e)
        return {}
    except BuilderError as e:
        # Catch-all for any remaining project error so other apps keep building.
        logger.exception(f"Failed to build {app_name} because of {e}")
        return {}
    else:
        logger.info(f"Successfully completed {app_name}")
        return app_update_info
72 |
73 |
def main() -> None:
    """Entry point.

    Reads configuration from the environment, then builds every configured app
    — sequentially for a single app or CI test runs, otherwise in a thread
    pool — and finally writes the accumulated changelog.
    """
    env = Env()
    env.read_env()
    config = RevancedConfig(env)
    updates_info = {}  # NOTE(review): immediately overwritten by load_older_updates below
    Downloader.extra_downloads(config)
    if not config.dry_run:
        check_java()
    delete_old_changelog()
    updates_info = load_older_updates(env)

    logger.info(f"Will Patch only {len(config.apps)} apps-:\n{config.apps}")

    # Shared caches for reuse across all apps (empty if caching disabled)
    download_cache: dict[tuple[str, str], tuple[str, str]] = {}
    resource_cache: dict[str, tuple[str, str]] = {}

    # Thread-safe locks for cache access
    download_lock = Lock()
    resource_lock = Lock()

    # Clear caches if caching is disabled
    if config.disable_caching:
        download_cache.clear()
        resource_cache.clear()

    # Determine optimal number of workers (don't exceed number of apps or CPU cores)
    max_workers = min(len(config.apps), config.max_parallel_apps)

    if len(config.apps) == 1 or config.ci_test:
        # For single app or CI testing, use sequential processing
        caches = (download_cache, resource_cache, download_lock, resource_lock)
        for app_name in config.apps:
            app_updates = process_single_app(app_name, config, caches)
            updates_info.update(app_updates)
    else:
        # For multiple apps, use parallel processing
        logger.info(f"Processing {len(config.apps)} apps in parallel with {max_workers} workers")

        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            # Submit all app processing tasks
            caches = (download_cache, resource_cache, download_lock, resource_lock)
            future_to_app = {
                executor.submit(process_single_app, app_name, config, caches): app_name for app_name in config.apps
            }

            # Collect results as they complete
            total_apps = len(config.apps)

            for completed_count, future in enumerate(as_completed(future_to_app), 1):
                app_name = future_to_app[future]
                try:
                    app_updates = future.result()
                    updates_info.update(app_updates)
                    logger.info(f"Progress: {completed_count}/{total_apps} apps completed ({app_name})")
                except BuilderError as e:
                    # process_single_app already swallows BuilderError; this guards
                    # against errors raised outside its try block.
                    logger.exception(f"Error processing {app_name}: {e}")
                    logger.info(f"Progress: {completed_count}/{total_apps} apps completed ({app_name} - FAILED)")

    write_changelog_to_file(updates_info)
135 |
136 |
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Exit non-zero so CI marks the interrupted run as failed.
        logger.error("Script halted because of keyboard interrupt.")
        sys.exit(1)
143 |
--------------------------------------------------------------------------------
/src/downloader/download.py:
--------------------------------------------------------------------------------
1 | """Downloader Class."""
2 |
3 | import os
4 | import subprocess
5 | from pathlib import Path
6 | from queue import PriorityQueue
7 | from time import perf_counter
8 | from typing import Any, Self
9 |
10 | from loguru import logger
11 | from tqdm import tqdm
12 |
13 | from src.app import APP
14 | from src.config import RevancedConfig
15 | from src.exceptions import DownloadError
16 | from src.utils import handle_request_response, implement_method, session
17 |
18 |
class Downloader(object):
    """Files downloader.

    Base class owning the shared streaming-download logic; source-specific
    subclasses implement how download links and versions are discovered.
    """

    def __init__(self: Self, config: RevancedConfig) -> None:
        """Initialise shared downloader state.

        :param config: Global build configuration (temp folder, token, flags).
        """
        self._CHUNK_SIZE = 10485760  # stream in 10 MiB chunks
        # (elapsed-seconds, file-name) pairs; fastest downloads sort first.
        self._QUEUE: PriorityQueue[tuple[float, str]] = PriorityQueue()
        self._QUEUE_LENGTH = 0
        self.config = config
        # Populated by subclasses when relevant; None until then.
        self.global_archs_priority: Any = None
        self.app_version: Any = None

    def _download(self: Self, url: str, file_name: str) -> None:
        """Stream *url* into the temp folder as *file_name*, with a progress bar."""
        if not url:
            msg = "No url provided to download"
            raise DownloadError(msg)
        # Skip work when dry-running or when the target file already exists.
        if self.config.dry_run or self.config.temp_folder.joinpath(file_name).exists():
            logger.debug(f"Skipping download of {file_name} from {url}. File already exists or dry running.")
            return
        logger.info(f"Trying to download {file_name} from {url}")
        self._QUEUE_LENGTH += 1
        start = perf_counter()
        headers = {}
        # Authenticate GitHub downloads to avoid API rate limiting.
        if self.config.personal_access_token and "github" in url:
            logger.debug("Using personal access token")
            headers["Authorization"] = f"token {self.config.personal_access_token}"
        # NOTE(review): no explicit timeout — presumably `session` configures one; confirm.
        response = session.get(
            url,
            stream=True,
            headers=headers,
        )
        handle_request_response(response, url)
        total = int(response.headers.get("content-length", 0))  # 0 when header missing
        bar = tqdm(
            desc=file_name,
            total=total,
            unit="iB",
            unit_scale=True,
            unit_divisor=1024,
            colour="green",
        )
        with self.config.temp_folder.joinpath(file_name).open("wb") as dl_file, bar:
            for chunk in response.iter_content(self._CHUNK_SIZE):
                size = dl_file.write(chunk)
                bar.update(size)
        # Record how long the download took for later prioritisation.
        self._QUEUE.put((perf_counter() - start, file_name))
        logger.debug(f"Downloaded {file_name}")

    def extract_download_link(self: Self, page: str, app: str) -> tuple[str, str]:
        """Extract download link from web page.

        Subclasses must override; the base implementation always raises.
        """
        raise NotImplementedError(implement_method)

    def specific_version(self: Self, app: "APP", version: str) -> tuple[str, str]:
        """Function to download the specified version of app.

        Subclasses must override; the base implementation always raises.

        :param app: Name of the application
        :param version: Version of the application to download
        :return: Version of downloaded apk
        """
        raise NotImplementedError(implement_method)

    def latest_version(self: Self, app: APP, **kwargs: Any) -> tuple[str, str]:
        """Function to download the latest version of app.

        Subclasses must override; the base implementation always raises.

        :param app: Name of the application
        :return: Version of downloaded apk
        """
        raise NotImplementedError(implement_method)

    def convert_to_apk(self: Self, file_name: str) -> str:
        """Convert apks to apk.

        Merges a split-APK archive into one installable APK by invoking the
        configured APK editor JAR; plain ``.apk`` inputs are returned untouched.
        """
        if file_name.endswith(".apk"):
            return file_name
        output_apk_file = self.replace_file_extension(file_name, ".apk")
        output_path = f"{self.config.temp_folder}/{output_apk_file}"
        # Remove any stale output so the editor does not refuse to overwrite.
        Path(output_path).unlink(missing_ok=True)
        subprocess.run(
            [
                "java",
                "-jar",
                f"{self.config.temp_folder}/{self.config.apk_editor}",
                "m",
                "-i",
                f"{self.config.temp_folder}/{file_name}",
                "-o",
                output_path,
            ],
            capture_output=True,
            check=True,
        )
        logger.info("Converted zip to apk.")
        return output_apk_file

    @staticmethod
    def replace_file_extension(filename: str, new_extension: str) -> str:
        """Replace the extension of a file."""
        base_name, _ = os.path.splitext(filename)
        return base_name + new_extension

    def download(self: Self, version: str, app: APP, **kwargs: Any) -> tuple[str, str]:
        """Public function to download apk to patch.

        :param version: version to download
        :param app: App to download
        :return: Tuple of (apk file name, download source URL).
        """
        if self.config.dry_run:
            return "", ""
        # Reuse APKs the user already placed in the apks folder.
        if app in self.config.existing_downloaded_apks:
            logger.debug(f"Will not download {app.app_name} -v{version} from the internet.")
            return app.app_name, f"local://{app.app_name}"
        if version and version != "latest":
            file_name, app_dl = self.specific_version(app, version)
        else:
            file_name, app_dl = self.latest_version(app, **kwargs)
        return self.convert_to_apk(file_name), app_dl

    def direct_download(self: Self, dl: str, file_name: str) -> None:
        """Download from DL."""
        self._download(dl, file_name)

    @staticmethod
    def extra_downloads(config: RevancedConfig) -> None:
        """The function `extra_downloads` downloads extra files specified.

        Parameters
        ----------
        config : RevancedConfig
            The `config` parameter is an instance of the `RevancedConfig` class. It is used to provide
            configuration settings for the download process.
        """
        try:
            for extra in config.extra_download_files:
                # Each entry must look like "<url>@<file name>".
                url, file_name = extra.split("@")
                file_name_without_extension, file_extension = os.path.splitext(file_name)
                new_file_name = f"{file_name_without_extension}-output{file_extension}"
                APP.download(
                    url,
                    config,
                    assets_filter=f".*{file_extension}",
                    file_name=new_file_name,
                )
        except (ValueError, IndexError):
            # Best-effort: a malformed entry is reported, not fatal.
            logger.info("Unable to download extra file. Provide input in url@name.apk format.")
161 |
--------------------------------------------------------------------------------
/src/downloader/apkmirror.py:
--------------------------------------------------------------------------------
1 | """Downloader Class."""
2 |
3 | from typing import Any, Self
4 |
5 | import requests
6 | from bs4 import BeautifulSoup, Tag
7 | from loguru import logger
8 |
9 | from src.app import APP
10 | from src.downloader.download import Downloader
11 | from src.downloader.sources import APK_MIRROR_BASE_URL
12 | from src.exceptions import APKMirrorAPKDownloadError, ScrapingError
13 | from src.utils import bs4_parser, contains_any_word, handle_request_response, request_header, request_timeout, slugify
14 |
15 |
class ApkMirror(Downloader):
    """Downloader for apps hosted on apkmirror.com.

    Navigates APKMirror's HTML chain (version listing -> variant table ->
    download page -> force-download link) with BeautifulSoup to locate and
    fetch a direct APK or BUNDLE file.
    """

    def _extract_force_download_link(self: Self, link: str, app: str) -> tuple[str, str]:
        """Extract the final force-download link from a download page and fetch the file.

        :param link: URL of the APKMirror download page.
        :param app: Name of the app; used to build the local file name.
        :return: Tuple of (local file name, direct download URL).
        :raises APKMirrorAPKDownloadError: If no ``download.php?id=`` link is found.
        """
        link_page_source = self._extract_source(link)
        notes_divs = self._extracted_search_source_div(link_page_source, "tab-pane")
        # The badge marks this variant as a plain "APK" or a split "BUNDLE" (apkm).
        apk_type = self._extracted_search_source_div(link_page_source, "apkm-badge").get_text()
        # Bundles are saved as .zip so Downloader.convert_to_apk can merge them later.
        extension = "zip" if apk_type == "BUNDLE" else "apk"
        possible_links = notes_divs.find_all("a")
        for possible_link in possible_links:
            if possible_link.get("href") and "download.php?id=" in possible_link.get("href"):
                file_name = f"{app}.{extension}"
                self._download(APK_MIRROR_BASE_URL + possible_link["href"], file_name)
                return file_name, APK_MIRROR_BASE_URL + possible_link["href"]
        msg = f"Unable to extract force download for {app}"
        raise APKMirrorAPKDownloadError(msg, url=link)

    def extract_download_link(self: Self, page: str, app: str) -> tuple[str, str]:
        """Function to extract the download link from apkmirror html page.

        :param page: Url of the page
        :param app: Name of the app
        :raises APKMirrorAPKDownloadError: If no ``download/?key=`` link exists on the page.
        """
        logger.debug(f"Extracting download link from\n{page}")
        download_button = self._extracted_search_div(page, "center")
        download_links = download_button.find_all("a")
        # First anchor whose href carries the download key; None if absent.
        if final_download_link := next(
            (
                download_link["href"]
                for download_link in download_links
                if download_link.get("href") and "download/?key=" in download_link.get("href")
            ),
            None,
        ):
            return self._extract_force_download_link(APK_MIRROR_BASE_URL + final_download_link, app)
        msg = f"Unable to extract link from {app} version list"
        raise APKMirrorAPKDownloadError(msg, url=page)

    def get_download_page(self: Self, main_page: str) -> str:
        """Function to get the download page in apk_mirror.

        :param main_page: Main Download Page in APK mirror(Index)
        :return: URL of the variant download page (APK preferred over BUNDLE).
        :raises APKMirrorAPKDownloadError: If no acceptable variant row is found.
        """
        list_widget = self._extracted_search_div(main_page, "tab-pane noPadding")
        table_rows = list_widget.find_all(class_="table-row headerFont")
        links: dict[str, str] = {}
        # Accepted architectures for plain APK variants; BUNDLE rows skip this filter.
        apk_archs = ["arm64-v8a", "universal", "noarch"]
        for row in table_rows:
            if row.find(class_="accent_color"):
                apk_type = row.find(class_="apkm-badge").get_text()
                sub_url = row.find(class_="accent_color")["href"]
                text = row.text.strip()
                if apk_type == "APK" and (not contains_any_word(text, apk_archs)):
                    continue
                # Last matching row per type wins.
                links[apk_type] = f"{APK_MIRROR_BASE_URL}{sub_url}"
        # Prefer a plain APK; fall back to a BUNDLE.
        if preferred_link := links.get("APK", links.get("BUNDLE")):
            return preferred_link
        msg = "Unable to extract download page"
        raise APKMirrorAPKDownloadError(msg, url=main_page)

    @staticmethod
    def _extract_source(url: str) -> str:
        """Fetch *url* and return its raw HTML (single place for the HTTP call)."""
        response = requests.get(url, headers=request_header, timeout=request_timeout)
        handle_request_response(response, url)
        return response.text

    @staticmethod
    def _extracted_search_source_div(source: str, search_class: str) -> Tag:
        """Return the first element with *search_class* from an HTML string.

        NOTE(review): ``soup.find`` can return None when the class is absent;
        callers assume a Tag (hence the ignore) - confirm pages always match.
        """
        soup = BeautifulSoup(source, bs4_parser)
        return soup.find(class_=search_class)  # type: ignore[return-value]

    def _extracted_search_div(self: Self, url: str, search_class: str) -> Tag:
        """Fetch *url* and return the first element with *search_class*."""
        return self._extracted_search_source_div(self._extract_source(url), search_class)

    def specific_version(self: Self, app: APP, version: str, main_page: str = "") -> tuple[str, str]:
        """Function to download the specified version of app from apkmirror.

        :param app: Name of the application
        :param version: Version of the application to download
        :param main_page: Pre-resolved variant listing URL; when empty it is
            derived from ``app.download_source`` and *version*.
        :return: Version of downloaded apk
        """
        if not main_page:
            # APKMirror slugs use dashes instead of dots in version numbers.
            version = version.replace(".", "-")
            apk_main_page = app.download_source
            version_page = apk_main_page + apk_main_page.split("/")[-2]
            main_page = f"{version_page}-{version}-release/"
        download_page = self.get_download_page(main_page)
        if app.app_version == "latest":
            # Best-effort version guess from the app-spec table; on failure
            # app_version simply stays "latest".
            try:
                logger.info(f"Trying to guess {app.app_name} version.")
                appsec_val = self._extracted_search_div(download_page, "appspec-value")
                # NOTE(review): text= is the legacy bs4 keyword (string= in modern bs4).
                appsec_version = str(appsec_val.find(text=lambda text: "Version" in text))
                app.app_version = slugify(appsec_version.split(":")[-1].strip())
                logger.info(f"Guessed {app.app_version} for {app.app_name}")
            except ScrapingError:
                pass
        return self.extract_download_link(download_page, app.app_name)

    def latest_version(self: Self, app: APP, **kwargs: Any) -> tuple[str, str]:
        """Function to download whatever the latest version of app from apkmirror.

        :param app: Name of the application
        :return: Version of downloaded apk
        """
        app_main_page = app.download_source
        versions_div = self._extracted_search_div(app_main_page, "listWidget p-relative")
        app_rows = versions_div.find_all(class_="appRow")
        # Skip pre-release rows (beta/alpha in the row title).
        version_urls = [
            app_row.find(class_="downloadLink")["href"]
            for app_row in app_rows
            if "beta" not in app_row.find(class_="appRowTitle").get_text().lower()
            and "alpha" not in app_row.find(class_="appRowTitle").get_text().lower()
        ]
        # NOTE(review): max() compares URL strings lexicographically - assumes
        # that ordering tracks recency for APKMirror release paths; TODO confirm.
        return self.specific_version(app, "latest", APK_MIRROR_BASE_URL + max(version_urls))
136 |
--------------------------------------------------------------------------------
/src/downloader/sources.py:
--------------------------------------------------------------------------------
"""APK source URL templates and the per-app download-page mapping."""

# --- APKMirror endpoints -----------------------------------------------------
APK_MIRROR_BASE_URL = "https://www.apkmirror.com"
APK_MIRROR_BASE_APK_URL = f"{APK_MIRROR_BASE_URL}/apk"
# Search URL; caller fills the query via str.format.
APK_MIRROR_PACKAGE_URL = f"{APK_MIRROR_BASE_URL}/?s=" + "{}"
# JSON API used by apkmirror_status_check to test whether a package exists.
APK_MIRROR_APK_CHECK = f"{APK_MIRROR_BASE_URL}/wp-json/apkm/v1/app_exists/"
# --- Uptodown ----------------------------------------------------------------
UPTODOWN_SUFFIX = "en.uptodown.com/android"
# Caller fills the app slug (subdomain) via str.format.
UPTODOWN_BASE_URL = "https://{}." + UPTODOWN_SUFFIX
# --- APKPure -----------------------------------------------------------------
APK_PURE_BASE_URL = "https://apkpure.net"
APK_PURE_URL = APK_PURE_BASE_URL + "/-/{}"
APK_PURE_ICON_URL = APK_PURE_BASE_URL + "/search?q={}"
# --- APKsos ------------------------------------------------------------------
APKS_SOS_BASE_URL = "https://apksos.com/download-app"
APK_SOS_URL = APKS_SOS_BASE_URL + "/{}"
# --- Other sources -----------------------------------------------------------
GITHUB_BASE_URL = "https://github.com"
PLAY_STORE_BASE_URL = "https://play.google.com"
PLAY_STORE_APK_URL = f"{PLAY_STORE_BASE_URL}/store/apps/details?id=" + "{}"
APK_COMBO_BASE_URL = "https://apkcombo.com"
APK_COMBO_GENERIC_URL = APK_COMBO_BASE_URL + "/genericApp/{}"
# Placeholder icon returned when every icon scraper fails.
not_found_icon = "https://www.svgrepo.com/download/441689/page-not-found.svg"
revanced_api = "https://api.revanced.app/v4/patches/list"
APK_MONK_BASE_URL = "https://www.apkmonk.com"
APKEEP = "apkeep"
APK_MONK_APK_URL = APK_MONK_BASE_URL + "/app/{}/"
APK_MONK_ICON_URL = "https://cdn.apkmonk.com/logos/{}"
# Map of app key -> default download page. Values are either concrete
# APKMirror/Uptodown pages or a URL template (APK_PURE_URL/APK_SOS_URL)
# that downstream code fills with the package name.
apk_sources = {
    "backdrops": f"{APK_MIRROR_BASE_APK_URL}/backdrops/backdrops-wallpapers/",
    "bacon": f"{APK_MIRROR_BASE_APK_URL}/onelouder-apps/baconreader-for-reddit/",
    "boost": f"{APK_MIRROR_BASE_APK_URL}/ruben-mayayo/boost-for-reddit/",
    "candyvpn": f"{APK_MIRROR_BASE_APK_URL}/liondev-io/candylink-vpn/",
    "duolingo": f"{APK_MIRROR_BASE_APK_URL}/duolingo/duolingo-duolingo/",
    "grecorder": f"{APK_MIRROR_BASE_APK_URL}/google-inc/google-recorder/",
    "icon_pack_studio": f"{APK_MIRROR_BASE_APK_URL}/smart-launcher-team/icon-pack-studio/",
    "infinity": f"{APK_MIRROR_BASE_APK_URL}/docile-alligator/infinity-for-reddit/",
    "inshorts": f"{APK_MIRROR_BASE_APK_URL}/inshorts-formerly-news-in-shorts/inshorts-news-in-60-words-2/",
    "instagram": f"{APK_MIRROR_BASE_APK_URL}/instagram/instagram-instagram/",
    "irplus": f"{APK_MIRROR_BASE_APK_URL}/binarymode/irplus-infrared-remote/",
    "lightroom": f"{APK_MIRROR_BASE_APK_URL}/adobe/lightroom/",
    "meme-generator-free": f"{APK_MIRROR_BASE_APK_URL}/zombodroid/meme-generator-free/",
    "messenger": f"{APK_MIRROR_BASE_APK_URL}/facebook-2/messenger/",
    "netguard": f"{APK_MIRROR_BASE_APK_URL}/marcel-bokhorst/netguard-no-root-firewall/",
    "nova_launcher": f"{APK_MIRROR_BASE_APK_URL}/teslacoil-software/nova-launcher/",
    "nyx-music-player": f"{APK_MIRROR_BASE_APK_URL}/awedea/nyx-music-player/",
    "pixiv": f"{APK_MIRROR_BASE_APK_URL}/pixiv-inc/pixiv/",
    "reddit": f"{APK_MIRROR_BASE_APK_URL}/redditinc/reddit/",
    "relay": f"{APK_MIRROR_BASE_APK_URL}/dbrady/relay-for-reddit-2/",
    "rif": f"{APK_MIRROR_BASE_APK_URL}/talklittle/reddit-is-fun/",
    "slide": f"{APK_MIRROR_BASE_APK_URL}/haptic-apps/slide-for-reddit/",
    "solidexplorer": f"{APK_MIRROR_BASE_APK_URL}/neatbytes/solid-explorer-beta/",
    "sonyheadphone": f"{APK_MIRROR_BASE_APK_URL}/sony-corporation/sony-headphones-connect/",
    "sync": f"{APK_MIRROR_BASE_APK_URL}/red-apps-ltd/sync-for-reddit/",
    "tasker": f"{APK_MIRROR_BASE_APK_URL}/joaomgcd/tasker-crafty-apps-eu/",
    "ticktick": f"{APK_MIRROR_BASE_APK_URL}/appest-inc/ticktick-to-do-list-with-reminder-day-planner/",
    # tiktok and musically intentionally share one listing.
    "tiktok": f"{APK_MIRROR_BASE_APK_URL}/tiktok-pte-ltd/tik-tok-including-musical-ly/",
    "musically": f"{APK_MIRROR_BASE_APK_URL}/tiktok-pte-ltd/tik-tok-including-musical-ly/",
    "trakt": f"{APK_MIRROR_BASE_APK_URL}/trakt/trakt/",
    "twitch": f"{APK_MIRROR_BASE_APK_URL}/twitch-interactive-inc/twitch/",
    "twitter": f"{APK_MIRROR_BASE_APK_URL}/x-corp/twitter/",
    "vsco": f"{APK_MIRROR_BASE_APK_URL}/vsco/vsco-cam/",
    "warnwetter": f"{APK_MIRROR_BASE_APK_URL}/deutscher-wetterdienst/warnwetter/",
    "windy": f"{APK_MIRROR_BASE_APK_URL}/windy-weather-world-inc/windy-wind-weather-forecast/",
    "youtube": f"{APK_MIRROR_BASE_APK_URL}/google-inc/youtube/",
    "youtube_music": f"{APK_MIRROR_BASE_APK_URL}/google-inc/youtube-music/",
    "yuka": f"{APK_MIRROR_BASE_APK_URL}/yuka-apps/yuka-food-cosmetic-scan/",
    "strava": f"{APK_MIRROR_BASE_APK_URL}/strava-inc/strava-running-and-cycling-gps/",
    "vanced": f"{APK_MIRROR_BASE_APK_URL}/team-vanced/youtube-vanced/",
    "tumblr": f"{APK_MIRROR_BASE_APK_URL}/tumblr-inc/tumblr/",
    "fitnesspal": f"{APK_MIRROR_BASE_APK_URL}/myfitnesspal-inc/calorie-counter-myfitnesspal/",
    "facebook": f"{APK_MIRROR_BASE_APK_URL}/facebook-2/facebook/",
    "lemmy-sync": f"{APK_MIRROR_BASE_APK_URL}/sync-apps-ltd/sync-for-lemmy/",
    "xiaomi-wearable": f"{APK_MIRROR_BASE_APK_URL}/beijing-xiaomi-mobile-software-co-ltd/mi-wear-小米穿戴/",
    "my-expenses": UPTODOWN_BASE_URL.format("my-expenses"),
    "spotify": UPTODOWN_BASE_URL.format("spotify"),
    "joey": UPTODOWN_BASE_URL.format("joey-for-reddit"),
    "scbeasy": UPTODOWN_BASE_URL.format("scb-easy"),
    "expensemanager": UPTODOWN_BASE_URL.format("bishinews-expense-manager"),
    "androidtwelvewidgets": APK_PURE_URL,
    "reddit-news": APK_PURE_URL,
    "hex-editor": APK_PURE_URL,
    "photomath": APK_PURE_URL,
    "spotify-lite": APK_PURE_URL,
    "digitales": APK_PURE_URL,
    "finanz-online": APK_SOS_URL,
    "photos": f"{APK_MIRROR_BASE_APK_URL}/google-inc/photos/",
    "amazon": f"{APK_MIRROR_BASE_APK_URL}/amazon-mobile-llc/amazon-shopping/",
    "bandcamp": f"{APK_MIRROR_BASE_APK_URL}/bandcamp-inc/bandcamp/",
    "magazines": f"{APK_MIRROR_BASE_APK_URL}/google-inc/google-news/",
    "winrar": f"{APK_MIRROR_BASE_APK_URL}/rarlab-published-by-win-rar-gmbh/rar/",
    "soundcloud": f"{APK_MIRROR_BASE_APK_URL}/soundcloud/soundcloud-soundcloud/",
    "stocard": f"{APK_MIRROR_BASE_APK_URL}/stocard-gmbh/stocard-rewards-cards-wallet/",
    "willhaben": f"{APK_MIRROR_BASE_APK_URL}/willhaben/willhaben/",
    "proton-mail": f"{APK_MIRROR_BASE_APK_URL}/proton-technologies-ag/protonmail-encrypted-email/",
    "prime-video": f"{APK_MIRROR_BASE_APK_URL}/amazon-mobile-llc/amazon-prime-video/",
    "cricbuzz": f"{APK_MIRROR_BASE_APK_URL}/cricbuzz-com/cricbuzz-live-cricket-scores-news/",
    "crunchyroll": f"{APK_MIRROR_BASE_APK_URL}/crunchyroll-llc-2/crunchyroll/",
    "threads": f"{APK_MIRROR_BASE_APK_URL}/instagram/threads-an-instagram-app/",
    "orf-on": f"{APK_MIRROR_BASE_APK_URL}/orf-osterreichischer-rundfunk/orf-tvthek-video-on-demand-android-tv/",
    "pandora": f"{APK_MIRROR_BASE_APK_URL}/pandora/pandora-music-podcasts/",
    "cieid": f"{APK_MIRROR_BASE_APK_URL}/istituto-poligrafico-e-zecca-dello-stato-s-p-a/cieid/",
    "infinity-for-reddit-patreon": f"{APK_MIRROR_BASE_APK_URL}/docile-alligator/infinity-for-reddit-patreon-github-version/",  # noqa: E501
    "infinity-for-reddit-plus": f"{APK_MIRROR_BASE_APK_URL}/docile-alligator/infinity-for-reddit-plus/",
}
102 |
--------------------------------------------------------------------------------
/.github/workflows/build-apk.yml:
--------------------------------------------------------------------------------
1 | name: Build & Release
2 | env:
3 | DOCKER_BUILDKIT: 1
4 | COMPOSE_DOCKER_CLI_BUILD: 1
5 | HAVE_TELEGRAM_API_ID: ${{ secrets.TELEGRAM_API_ID != '' }}
6 | on:
7 | workflow_call:
8 | inputs:
9 | GITHUB_UPLOAD:
10 | description: "Upload to GitHub"
11 | required: false
12 | type: boolean
13 | default: true
14 | TELEGRAM_NO_ROOT_UPLOAD:
15 | description: "Upload Non Rooted APKs to Telegram"
16 | required: false
17 | type: boolean
18 | default: false
19 | TELEGRAM_ROOT_UPLOAD:
20 | description: "Upload Magisk Module from nikhilbadyal/revanced-magisk-module to Telegram"
21 | required: false
22 | type: boolean
23 | default: false
24 | APPRISE_NOTIFY:
25 | description: "Use Apprise to Notify"
26 | required: false
27 | type: boolean
28 | default: false
29 | CLEANUP:
30 | description: "Clear GitHub(Useful if Telegram upload is enabled)"
31 | required: false
32 | type: boolean
33 | default: false
34 | COMMIT_CHANGELOG:
35 | description: "Update Changelog"
36 | type: boolean
37 | required: false
38 | default: true
39 | DEBUG_ENABLED:
40 | type: boolean
41 | description: 'Run the build with tmate debugging enabled.'
42 | required: false
43 | default: false
44 | PREFERRED_PATCH_APPS:
45 | description: "Apps to be patched. Overrides any env set"
46 | required: false
47 | type: string
48 | workflow_dispatch:
49 | inputs:
50 | GITHUB_UPLOAD:
51 | description: "Upload to GitHub"
52 | required: false
53 | type: boolean
54 | default: true
55 | TELEGRAM_NO_ROOT_UPLOAD:
56 | description: "Upload Non Rooted APKs to Telegram"
57 | required: false
58 | type: boolean
59 | default: false
60 | TELEGRAM_ROOT_UPLOAD:
61 | description: "Upload Magisk Module from nikhilbadyal/revanced-magisk-module to Telegram"
62 | required: false
63 | type: boolean
64 | default: false
65 | APPRISE_NOTIFY:
66 | description: "Use Apprise to Notify"
67 | required: false
68 | type: boolean
69 | default: false
70 | CLEANUP:
71 | description: "Clear GitHub(Useful if Telegram upload is enabled)"
72 | required: false
73 | type: boolean
74 | default: false
75 | COMMIT_CHANGELOG:
76 | description: "Update Changelog"
77 | type: boolean
78 | required: false
79 | default: true
80 | DEBUG_ENABLED:
81 | type: boolean
82 | description: 'Run the build with tmate debugging enabled.'
83 | required: false
84 | default: false
85 | PREFERRED_PATCH_APPS:
86 | description: "Apps to be patched. Overrides any env set"
87 | required: false
88 | type: string
89 |
90 | concurrency:
91 | group: ${{ github.head_ref || github.run_id }}
92 | cancel-in-progress: true
93 | jobs:
94 | build-apk:
95 | uses: ./.github/workflows/build-artifact.yml
96 | with:
97 | COMMIT_CHANGELOG: ${{ inputs.COMMIT_CHANGELOG }}
98 | DEBUG_ENABLED: ${{ inputs.DEBUG_ENABLED }}
99 | PREFERRED_PATCH_APPS: ${{ inputs.PREFERRED_PATCH_APPS }}
100 | secrets:
101 | ENVS: ${{ secrets.ENVS }}
102 | DOCKER_PY_REVANCED_SECRETS: ${{ secrets.DOCKER_PY_REVANCED_SECRETS }}
103 | REDDIT_CLIENT_ID: ${{ secrets.REDDIT_CLIENT_ID }}
104 |
105 | upload-to-github:
106 | name: GitHub Upload
107 | runs-on: ubuntu-latest
108 | timeout-minutes: 30
109 | needs: build-apk
110 | if: inputs.GITHUB_UPLOAD
111 |
112 | steps:
113 | - name: Download Already Built APKs
114 | uses: actions/download-artifact@main
115 | with:
116 | name: Built-APKs
117 | - name: Get Date
118 | id: get-date
119 | run: |
120 | echo "date=$(TZ='Asia/Kolkata' date +"%Y.%m.%d-%H.%M.%S")" >> $GITHUB_OUTPUT
121 |
122 | - name: Delete Older Releases
123 | uses: nikhilbadyal/ghaction-rm-releases@v0.0.5
124 | with:
125 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
126 | RELEASE_PATTERN: "Build*"
127 |
128 | - name: Upload Build Artifact
129 | uses: ncipollo/release-action@main
130 | with:
131 | artifacts: "apks/*-output.apk,updates.json,changelog.json,changelog.md"
132 | token: ${{ secrets.GITHUB_TOKEN }}
133 | tag: Build-${{ steps.get-date.outputs.date }}
134 | artifactErrorsFailBuild: true
135 |
136 | - name: Sleep for 10 seconds
137 | run: |
138 | sleep 10
139 |
140 | upload-to-telegram:
141 | needs: [ upload-to-github ]
142 | uses: nikhilbadyal/ghactions/.github/workflows/telegram-uploader.yml@main
143 | if: inputs.TELEGRAM_NO_ROOT_UPLOAD
144 | secrets:
145 | TELEGRAM_API_ID: ${{ secrets.TELEGRAM_API_ID }}
146 | TELEGRAM_API_HASH: ${{ secrets.TELEGRAM_API_HASH }}
147 | TELEGRAM_BOT_TOKEN: ${{ secrets.TELEGRAM_BOT_TOKEN }}
148 | TELEGRAM_CHAT_ID: ${{ secrets.TELEGRAM_CHAT_ID }}
149 | TELEGRAM_STICKER_ID: ${{ secrets.TELEGRAM_STICKER_ID }}
150 | MESSAGE: ${{ secrets.MESSAGE_NON_ROOT }}
151 |
152 | upload-to-telegram-root:
153 | needs: [ upload-to-telegram ]
154 | uses: nikhilbadyal/ghactions/.github/workflows/telegram-uploader.yml@main
155 | if: inputs.TELEGRAM_ROOT_UPLOAD
156 | secrets:
157 | TELEGRAM_API_ID: ${{ secrets.TELEGRAM_API_ID }}
158 | TELEGRAM_API_HASH: ${{ secrets.TELEGRAM_API_HASH }}
159 | TELEGRAM_BOT_TOKEN: ${{ secrets.TELEGRAM_BOT_TOKEN }}
160 | TELEGRAM_CHAT_ID: ${{ secrets.TELEGRAM_CHAT_ID }}
161 | DOWNLOAD_GITHUB_REPOSITORY: ${{ secrets.ROOT_DOWNLOAD_GITHUB_REPOSITORY }}
162 | MESSAGE: ${{ secrets.MESSAGE_ROOT }}
163 |
164 | apprise-notifier:
165 | needs: build-apk
166 | name: Apprise Notifier
167 | runs-on: ubuntu-latest
168 | timeout-minutes: 30
169 | if: inputs.APPRISE_NOTIFY
170 |
171 | steps:
172 | - name: Download Already Built APKs
173 | uses: actions/download-artifact@main
174 | with:
175 | name: Built-APKs
176 |
177 | - name: Find all built apks
178 | id: ff
179 | run: |
180 | apk_list=""
181 | for filename in $(find . -name "*-output.apk" -type f); do
182 | apk_list="$apk_list,$filename"
183 | done
184 | apk_list=${apk_list:1}
185 | echo "apks=$apk_list" >> "$GITHUB_OUTPUT"
186 | is_present=$([ -n "${{ secrets.APPRISE_URL }}" ] && echo true || echo false );
187 | echo "has_apprise_url=$is_present" >> "$GITHUB_OUTPUT"
188 | - name: Print files
189 | run: echo "${{ steps.ff.outputs.apks }} ${{ steps.ff.outputs.has_apprise_url }}"
190 |
191 | - name: Notify via Apprise
192 | uses: nikhilbadyal/ghaction-apprise@main
193 | with:
194 | APPRISE_URL: ${{ secrets.APPRISE_URL }}
195 | APPRISE_NOTIFICATION_BODY: ${{ secrets.APPRISE_NOTIFICATION_BODY }}
196 | APPRISE_NOTIFICATION_TITLE: ${{ secrets.APPRISE_NOTIFICATION_TITLE }}
197 | APPRISE_ATTACHMENTS: ${{ steps.ff.outputs.apks }}
198 |
199 | cleanup:
200 | name: GitHub Cleanup
201 | if: inputs.CLEANUP
202 | needs: [ upload-to-telegram ]
203 | uses: ./.github/workflows/github-cleanup.yml
204 | secrets:
205 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
206 |
--------------------------------------------------------------------------------
/src/downloader/apkpure.py:
--------------------------------------------------------------------------------
1 | """APK Pure Downloader Class."""
2 |
3 | from functools import cmp_to_key
4 | from typing import Any, Self
5 | from urllib.parse import parse_qs, urlparse
6 |
7 | import requests
8 | from bs4 import BeautifulSoup
9 | from loguru import logger
10 |
11 | from src.app import APP
12 | from src.downloader.download import Downloader
13 | from src.exceptions import APKPureAPKDownloadError
14 | from src.utils import bs4_parser, handle_request_response, request_header, request_timeout, slugify
15 |
16 |
class ApkPure(Downloader):
    """Downloader for apps hosted on apkpure.net.

    Scrapes APKPure's version/download pages and picks the best matching
    APK (preferred) or XAPK bundle, prioritising architectures.
    Sets ``self.global_archs_priority`` and ``self.app_version`` as side
    effects of the public entry points.
    """

    # Fallback architecture preference, most-preferred first.
    default_archs_priority: tuple[str, ...] = ("arm64-v8a", "armeabi-v7a", "x86_64", "x86")

    @staticmethod
    def _select_preferred_dl(app: str, apk_dls: list[str], xapk_dls: list[str]) -> tuple[str | None, str | None]:
        """Pick the first plain APK link if any, else the first XAPK (saved as .zip).

        Returns (None, None) when both lists are empty.
        """
        file_name = None
        app_dl = None
        if apk_dls:
            file_name = f"{app}.apk"
            app_dl = apk_dls[0]
        elif xapk_dls:
            file_name = f"{app}.zip"
            app_dl = xapk_dls[0]
        return file_name, app_dl

    def _sort_by_priority(self: Self, arch_list: list[str] | tuple[str]) -> list[str]:
        """Specifically used to sort the arch list based on order of elements of default archs priority list."""
        return [darch for darch in self.default_archs_priority if darch in arch_list]

    def _get_apk_type(self: Self, dl: str) -> list[str] | None:
        """Return the ``nc`` query-param values of a download link, or None if absent.

        NOTE(review): the comparators below treat these values as architecture
        names - confirm that is what APKPure encodes in ``nc``.
        """
        query_params = parse_qs(urlparse(dl).query)
        return query_params.get("nc")

    def _compare_apk_types(self: Self, apk_type1: list[str], apk_type2: list[str]) -> int:
        """Compare two apk types for prioritization (negative means first wins)."""
        l1, l2 = len(apk_type1), len(apk_type2)
        if l1 != l2:
            # Longer list indicates support for multiple archs, higher priority
            return -1 if l1 > l2 else 1

        # Same length, compare by priority order
        priority = self.global_archs_priority or self.default_archs_priority
        for arch in priority:
            has_arch1 = arch in apk_type1
            has_arch2 = arch in apk_type2
            if has_arch1 != has_arch2:
                return -1 if has_arch1 else 1
        return 0

    def _compare_dls(self: Self, dl1: str, dl2: str) -> int:
        """Compare two dls of same type (apk or xapk) to prioritise the archs on lower indices.

        Links without an ``nc`` param sort after links that have one.
        """
        apk_type1 = self._get_apk_type(dl1)
        apk_type2 = self._get_apk_type(dl2)

        if apk_type1 and apk_type2:
            return self._compare_apk_types(apk_type1, apk_type2)
        if not apk_type1 and apk_type2:
            return 1
        if apk_type1 and not apk_type2:
            return -1
        return 0

    def extract_download_link(self: Self, page: str, app: str) -> tuple[str, str]:
        """Function to extract the download link from apkpure download page.

        :param page: Url of the page
        :param app: Name of the app
        :return: Tuple of filename and app direct download link
        :raises APKPureAPKDownloadError: If the page lists no usable links.
        """
        logger.debug(f"Extracting download link from\n{page}")
        r = requests.get(page, headers=request_header, timeout=request_timeout)
        handle_request_response(r, page)
        soup = BeautifulSoup(r.text, bs4_parser)
        apks = soup.select("#version-list a.download-btn")
        _apk_dls: list[str] = []
        _xapk_dls: list[str] = []
        # Split links into plain APKs and XAPK bundles (path contains /b/XAPK/).
        for apk in apks:
            if _apk_dl := apk.get("href"):
                if "/b/XAPK/" in _apk_dl:
                    _xapk_dls.append(_apk_dl)  # type: ignore # noqa: PGH003
                else:
                    _apk_dls.append(_apk_dl)  # type: ignore # noqa: PGH003
        # Best-arch candidates bubble to index 0.
        _apk_dls.sort(key=cmp_to_key(self._compare_dls))
        _xapk_dls.sort(key=cmp_to_key(self._compare_dls))
        file_name, app_dl = self._select_preferred_dl(app, _apk_dls, _xapk_dls)
        if not file_name or not app_dl:
            msg = f"Unable to extract link from {app} version list"
            raise APKPureAPKDownloadError(msg, url=page)
        # Best-effort version guess from the page's SDK-info span.
        if app_version := soup.select_one("span.info-sdk > span"):
            self.app_version = slugify(app_version.get_text(strip=True))
            logger.info(f"Will be downloading {app}'s version {self.app_version}...")
        else:
            self.app_version = "latest"
            logger.info(f"Unable to guess latest version of {app}")
        return file_name, app_dl

    def specific_version(self: Self, app: APP, version: str) -> tuple[str, str]:
        """
        Downloads the specified version of an app from APKPure.

        Parameters
        ----------
        app : APP
            The application object containing metadata.
        version : str
            The specific version of the application to download.

        Returns
        -------
        tuple[str, str]
            A tuple containing:
            - The filename of the downloaded APK.
            - The direct download link of the APK.

        Raises
        ------
        APKPureAPKDownloadError
            If the specified version is not found.
        """
        # Restrict arch priority to the archs this build actually targets.
        self.global_archs_priority = tuple(self._sort_by_priority(app.archs_to_build))
        version_page = f"{app.download_source}/versions"

        response = requests.get(version_page, headers=request_header, timeout=request_timeout)
        handle_request_response(response, version_page)

        soup = BeautifulSoup(response.text, bs4_parser)

        # Walk the version list until the requested version's row is found.
        for box in soup.select("ul.ver-wrap > *"):
            download_link = box.select_one("a.ver_download_link")
            if not download_link:
                continue

            found_version = download_link.get("data-dt-version")
            if found_version == version:
                download_page = download_link.get("href")
                file_name, download_source = self.extract_download_link(
                    str(download_page),
                    app.app_name,
                )

                # extract_download_link populated self.app_version above.
                app.app_version = self.app_version
                logger.info(f"Guessed {app.app_version} for {app.app_name}")

                self._download(download_source, file_name)
                return file_name, download_source
        msg = f"Unable to find specific version '{version}' for {app} from version list"
        raise APKPureAPKDownloadError(msg, url=version_page)

    def latest_version(self: Self, app: APP, **kwargs: Any) -> tuple[str, str]:
        """Function to download whatever the latest version of app from apkpure.

        :param app: Name of the application
        :return: Tuple of filename and app direct download link
        """
        # Restrict arch priority to the archs this build actually targets.
        self.global_archs_priority = tuple(self._sort_by_priority(app.archs_to_build))
        download_page = app.download_source + "/download"
        file_name, download_source = self.extract_download_link(download_page, app.app_name)
        app.app_version = self.app_version
        if self.app_version != "latest":
            logger.info(f"Guessed {app.app_version} for {app.app_name}")
        self._download(download_source, file_name)
        return file_name, download_source
172 |
--------------------------------------------------------------------------------
/scripts/status_check.py:
--------------------------------------------------------------------------------
1 | """Status check."""
2 |
3 | import re
4 | from pathlib import Path
5 |
6 | import requests
7 | from bs4 import BeautifulSoup, Tag
8 | from google_play_scraper import app as gplay_app
9 | from google_play_scraper.exceptions import GooglePlayScraperException
10 |
11 | from src.downloader.sources import (
12 | APK_COMBO_GENERIC_URL,
13 | APK_MIRROR_BASE_URL,
14 | APK_MIRROR_PACKAGE_URL,
15 | APK_MONK_APK_URL,
16 | APK_MONK_ICON_URL,
17 | APK_PURE_ICON_URL,
18 | PLAY_STORE_APK_URL,
19 | not_found_icon,
20 | revanced_api,
21 | )
22 | from src.exceptions import (
23 | APKComboIconScrapError,
24 | APKMirrorIconScrapError,
25 | APKMonkIconScrapError,
26 | APKPureIconScrapError,
27 | BuilderError,
28 | )
29 | from src.patches import Patches
30 | from src.utils import apkmirror_status_check, bs4_parser, handle_request_response, request_header, request_timeout
31 |
# Number of columns in the generated status markdown table.
no_of_col = 9
# Browser-like UA used by the apkcombo/apkmonk/apkpure scrapers below —
# presumably those sites reject the default request headers; TODO confirm.
combo_headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:109.0) Gecko/20100101 Firefox/116.0"}
34 |
35 |
def apkcombo_scrapper(package_name: str) -> str:
    """Scrape the app icon URL for *package_name* from APKCombo.

    :param package_name: Android package id (e.g. ``com.example.app``).
    :return: Icon URL with the trailing size parameters (``=...``) stripped.
    :raises APKComboIconScrapError: If the page layout changed or the icon is missing.
    """
    apkcombo_url = APK_COMBO_GENERIC_URL.format(package_name)
    try:
        r = requests.get(apkcombo_url, headers=combo_headers, allow_redirects=True, timeout=request_timeout)
        handle_request_response(r, apkcombo_url)
        soup = BeautifulSoup(r.text, bs4_parser)
        avatar = soup.find(class_="avatar")
        if not isinstance(avatar, Tag):
            raise APKComboIconScrapError(url=apkcombo_url)
        icon_element = avatar.find("img")
        if not isinstance(icon_element, Tag):
            raise APKComboIconScrapError(url=apkcombo_url)
        url = icon_element.get("data-src")
        if not isinstance(url, str):
            # <img> without data-src (lazy-load markup changed): raise the scrape
            # error instead of letting re.sub crash with a TypeError on None.
            raise APKComboIconScrapError(url=apkcombo_url)
        return re.sub(r"=.*$", "", url)
    except BuilderError as e:
        raise APKComboIconScrapError(url=apkcombo_url) from e
53 |
54 |
def bigger_image(possible_links: list[str]) -> str:
    """Pick the URL whose ``WxH`` suffix (e.g. ``..._500x500.png``) covers the largest area.

    Ties keep the earliest link; an empty input yields an empty string.
    """
    best_url = ""
    best_area = 0

    for candidate in possible_links:
        # The dimensions sit between the last underscore and the extension.
        dims = candidate.split("_")[-1].split(".")[0].split("x")
        area = int(dims[0]) * int(dims[1])
        if area > best_area:
            best_area = area
            best_url = candidate

    return best_url
72 |
73 |
def apkmonk_scrapper(package_name: str) -> str:
    """Scrape the biggest app icon URL for *package_name* from APKMonk.

    :param package_name: Android package id.
    :return: The largest icon URL found in the page's ``<head>``.
    :raises APKMonkIconScrapError: If no icon link is present in the head.
    """
    apkmonk_url = APK_MONK_APK_URL.format(package_name)
    icon_logo = APK_MONK_ICON_URL.format(package_name)
    r = requests.get(apkmonk_url, headers=combo_headers, allow_redirects=True, timeout=request_timeout)
    handle_request_response(r, apkmonk_url)
    # Icon links live in <head>; search it directly instead of re-serialising
    # and re-parsing it through a second BeautifulSoup pass.
    if head := BeautifulSoup(r.text, bs4_parser).head:
        possible_link = [
            href_value
            for element in head.find_all(href=True)
            if (href_value := element.get("href")).startswith(icon_logo)
        ]
        if possible_link:
            return bigger_image(possible_link)
    raise APKMonkIconScrapError(url=apkmonk_url)
91 |
92 |
def apkmirror_scrapper(package_name: str) -> str:
    """Return the APKMirror icon URL for *package_name*.

    :raises APKMirrorIconScrapError: If APKMirror does not host the package.
    """
    response = apkmirror_status_check(package_name)
    search_url = APK_MIRROR_PACKAGE_URL.format(package_name)
    # The existence API response gates the (slower) search-page scrape.
    if not response["data"][0]["exists"]:
        raise APKMirrorIconScrapError(url=search_url)
    return _extracted_from_apkmirror_scrapper(search_url)
100 |
101 |
def _extracted_from_apkmirror_scrapper(search_url: str) -> str:
    """Scrape the icon from an APKMirror search page and request a 500x500, q=100 copy.

    :raises APKMirrorIconScrapError: If the search page carries no icon element.
    """
    r = requests.get(search_url, headers=request_header, timeout=request_timeout)
    handle_request_response(r, search_url)
    soup = BeautifulSoup(r.text, bs4_parser)
    icon_element = soup.select_one("div.bubble-wrap > img")
    if not icon_element:
        raise APKMirrorIconScrapError(url=search_url)
    sub_url = str(icon_element["src"])
    # Rewrite the thumbnail query (w=xx&h=xx&q=xx) to fetch a larger, full-quality icon.
    upscaled = re.sub(r"(w=\d+&h=\d+&q=\d+)", "w=500&h=500&q=100", sub_url)
    return APK_MIRROR_BASE_URL + upscaled
118 |
119 |
def gplay_icon_scrapper(package_name: str) -> str:
    """Fetch the icon URL for *package_name* from Google Play.

    Raises
    ------
    GooglePlayScraperException
        When the underlying scraper library fails.
    """
    # noinspection PyBroadException
    try:
        details = gplay_app(
            package_name,
        )
        return str(details["icon"])
    except BuilderError as e:
        raise GooglePlayScraperException from e
131 |
132 |
def apkpure_scrapper(package_name: str) -> str:
    """Fetch the icon URL for *package_name* from APKPure.

    Raises
    ------
    APKPureIconScrapError
        When the page cannot be fetched or contains no icon element.
    """
    apkpure_url = APK_PURE_ICON_URL.format(package_name)
    try:
        response = requests.get(apkpure_url, headers=combo_headers, allow_redirects=True, timeout=request_timeout)
        handle_request_response(response, apkpure_url)
        page = BeautifulSoup(response.text, bs4_parser)
        # Scan every brand-info block and return the first icon we find.
        for brand_block in page.find_all(class_="brand-info-top"):
            icon = brand_block.find(class_="icon")
            if icon:
                return str(icon.get("src"))
        raise APKPureIconScrapError(url=apkpure_url)
    except BuilderError as e:
        raise APKPureIconScrapError(url=apkpure_url) from e
147 |
148 |
def icon_scrapper(package_name: str) -> str:
    """Try each icon scraper in turn; fall back to the placeholder icon.

    Scrapers are attempted in a fixed order and any failure (expected scrape
    error or otherwise) simply moves on to the next source.
    """
    attempts = (
        ("gplay_icon_scrapper", GooglePlayScraperException),
        ("apkmirror_scrapper", APKMirrorIconScrapError),
        ("apkmonk_scrapper", APKMonkIconScrapError),
        ("apkpure_scrapper", APKPureIconScrapError),
        ("apkcombo_scrapper", APKComboIconScrapError),
    )

    for scraper_name, expected_error in attempts:
        # noinspection PyBroadException
        try:
            return str(globals()[scraper_name](package_name))
        except expected_error:
            continue
        except Exception:  # noqa: BLE001,S110
            continue

    return not_found_icon
169 |
170 |
def generate_markdown_table(data: list[list[str]]) -> str:
    """Render *data* as the fixed nine-column status markdown table.

    Raises
    ------
    ValueError
        When any row does not contain exactly ``no_of_col`` entries.
    """
    if not data:
        return "No data to generate for the table."

    header = (
        "| Package Name | App Icon | PlayStore| APKMirror |APKMonk |ApkPure | ApkCombo |Available patches |Supported?|\n"  # noqa: E501
        "|--------------|----------|----------|-----------|--------|--------|----------|------------------|----------|\n"
    )
    rendered_rows = []
    for row in data:
        if len(row) != no_of_col:
            msg = f"Each row must contain {no_of_col} columns of data."
            raise ValueError(msg)
        rendered_rows.append(
            f"| {row[0]} | {row[1]} | {row[2]} | {row[3]} |{row[4]} |{row[5]} | {row[6]} | {row[7]} | {row[8]} |\n",
        )

    return header + "".join(rendered_rows)
188 |
189 |
def main() -> None:
    """Entrypoint: report patchable apps that this repo does not support yet.

    Fetches the patch list from the ReVanced API, diffs the compatible
    packages against the locally supported apps, and writes a markdown
    report to ``status.md`` (also echoed to stdout).
    """
    response = requests.get(revanced_api, timeout=request_timeout)
    handle_request_response(response, revanced_api)

    patches = response.json()

    # Collect every package that at least one patch declares compatibility with.
    possible_apps = set()
    for patch in patches:
        if patch.get("compatiblePackages", None):
            for compatible_package in patch["compatiblePackages"]:
                possible_apps.add(compatible_package)

    supported_app = set(Patches.support_app().keys())
    missing_support = sorted(possible_apps.difference(supported_app))
    output = "New app found which aren't supported.\n\n"
    data = [
        [
            app,
            # NOTE(review): this cell's f-string was truncated in the source;
            # reconstructed as an inline icon image — confirm against history.
            f'<img src="{icon_scrapper(app)}" height="50" width="50">',
            f"[PlayStore Link]({PLAY_STORE_APK_URL.format(app)})",
            f"[APKMirror Link]({APK_MIRROR_PACKAGE_URL.format(app)})",
            f"[APKMonk Link]({APK_MONK_APK_URL.format(app)})",
            f"[APKPure Link]({APK_PURE_ICON_URL.format(app)})",
            f"[APKCombo Link]({APK_COMBO_GENERIC_URL.format(app)})",
            f"[Patches](https://revanced.app/patches?pkg={app})",
            "- [ ] ",
        ]
        for app in missing_support
    ]
    table = generate_markdown_table(data)
    output += table
    with Path("status.md").open("w", encoding="utf_8") as status:
        status.write(output)
    print(output)  # noqa: T201
225 |
226 |
# Allow this module to be executed directly as a script.
if __name__ == "__main__":
    main()
229 |
--------------------------------------------------------------------------------
/src/utils.py:
--------------------------------------------------------------------------------
1 | """Utilities."""
2 |
3 | import inspect
4 | import json
5 | import re
6 | import subprocess
7 | import sys
8 | import time
9 | import urllib.error
10 | import urllib.request
11 | from datetime import datetime
12 | from pathlib import Path
13 | from typing import TYPE_CHECKING, Any
14 | from zoneinfo import ZoneInfo
15 |
16 | import requests
17 | from environs import Env
18 | from loguru import logger
19 | from requests import Response, Session
20 |
21 | if TYPE_CHECKING:
22 | from src.app import APP
23 |
24 | from src.downloader.sources import APK_MIRROR_APK_CHECK
25 | from src.exceptions import ScrapingError
26 |
# Apps built when the user does not configure an explicit list.
default_build = [
    "youtube",
    "youtube_music",
]
# Android ABIs recognised when selecting APK variants.
possible_archs = ["armeabi-v7a", "x86", "x86_64", "arm64-v8a"]
# Default headers for scraping/API requests.
# NOTE(review): "(HTML, like Gecko)" is normally "KHTML, like Gecko" in UA
# strings — confirm whether this spelling is intentional.
request_header = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
    "AppleWebKit/537.36 (HTML, like Gecko)"
    " Chrome/96.0.4664.93 Safari/537.36",
    # Basic credential used by the APKMirror app-check API (see
    # apkmirror_status_check below).
    "Authorization": "Basic YXBpLWFwa3VwZGF0ZXI6cm01cmNmcnVVakt5MDRzTXB5TVBKWFc4",
    "Content-Type": "application/json",
}
# Fallback release pages for the ReVanced CLI and patch bundle.
default_cli = "https://github.com/revanced/revanced-cli/releases/latest"
default_patches = "https://github.com/revanced/revanced-patches/releases/latest"
# Parser backend passed to every BeautifulSoup() call.
bs4_parser = "html.parser"
# Output artefacts for changelog/update tracking.
changelog_file = "changelog.md"
changelog_json_file = "changelog.json"
# Timeout (seconds) applied to HTTP requests throughout this module.
request_timeout = 60
# Shared requests session reusing the scraping User-Agent.
session = Session()
session.headers["User-Agent"] = request_header["User-Agent"]
updates_file = "updates.json"
# Raw-GitHub URL template used to fetch the previous updates.json.
updates_file_url = "https://raw.githubusercontent.com/{github_repository}/{branch_name}/{updates_file}"
# In-memory accumulator of formatted changelog entries, keyed by resource name.
changelogs: dict[str, dict[str, str]] = {}
# Timezone used when stamping patch times.
time_zone = "Asia/Kolkata"
# Dictionary keys used inside updates.json entries.
app_version_key = "app_version"
patches_versions_key = "patches_versions"
cli_version_key = "cli_version"
implement_method = "Please implement the method"
status_code_200 = 200
# Folder holding downloaded APKs and patching resources.
resource_folder = "apks"
# Git branch where changelog artefacts are published.
branch_name = "changelogs"
app_dump_key = "app_dump"
patches_dl_list_key = "patches_dl_list"
60 |
61 |
def update_changelog(name: str, response: dict[str, str]) -> None:
    """Record the formatted changelog entry for *name* in the module cache.

    Parameters
    ----------
    name : str
        Name of the resource whose changelog is being recorded.
    response : dict[str, str]
        GitHub release payload used to build the changelog entry.
    """
    changelogs[name] = format_changelog(name, response)
76 |
77 |
def format_changelog(name: str, response: dict[str, str]) -> dict[str, str]:
    """Build a changelog entry dict from a GitHub release payload.

    Parameters
    ----------
    name : str
        Display name of the resource; rendered as a markdown link to the
        release page.
    response : dict[str, str]
        GitHub release payload; must provide the keys ``html_url``,
        ``tag_name``, ``body`` and ``published_at``.

    Returns
    -------
    dict[str, str]
        Entry with keys ResourceName, Version, Changelog and PublishedOn.
    """
    entry: dict[str, str] = {}
    entry["ResourceName"] = f"[{name}]({response['html_url']})"
    entry["Version"] = response["tag_name"]
    entry["Changelog"] = response["body"]
    entry["PublishedOn"] = response["published_at"]
    return entry
101 |
102 |
def write_changelog_to_file(updates_info: dict[str, Any]) -> None:
    """Write the accumulated changelogs and update info to disk.

    Renders the module-level ``changelogs`` cache as a markdown table in
    ``changelog.md``, dumps the same data as JSON to ``changelog.json``, and
    writes *updates_info* to ``updates.json``.
    """
    markdown_table = inspect.cleandoc(
        """
        | Resource Name | Version | Changelog | Published On | Build By|
        |---------------|---------|-----------|--------------|---------|
        """,
    )
    for app_data in changelogs.values():
        name_link = app_data["ResourceName"]
        version = app_data["Version"]
        changelog = app_data["Changelog"]
        published_at = app_data["PublishedOn"]
        built_by = get_parent_repo()

        # Clean up changelog for markdown: raw newlines would break table
        # rows, so render them as <br> tags and escape literal pipes.
        # NOTE(review): the replacement strings were lost in the source dump;
        # reconstructed as "<br>" — confirm against git history.
        changelog = changelog.replace("\r\n", "<br>")
        changelog = changelog.replace("\n", "<br>")
        changelog = changelog.replace("|", "\\|")

        # Add row to the Markdown table string
        markdown_table += f"\n| {name_link} | {version} | {changelog} | {published_at} | {built_by} |"
    with Path(changelog_file).open("w", encoding="utf_8") as file1:
        file1.write(markdown_table)
    Path(changelog_json_file).write_text(json.dumps(changelogs, indent=4) + "\n")
    Path(updates_file).write_text(json.dumps(updates_info, indent=4, default=str) + "\n")
129 |
130 |
def get_parent_repo() -> str:
    """Return a markdown link to the parent repository.

    Returns
    -------
    A markdown-formatted link to the docker-py-revanced project on GitHub.
    """
    label = "Docker-py-revanced"
    project_url = "https://github.com/nikhilbadyal/docker-py-revanced"
    return f"[{label}]({project_url})"
140 |
141 |
def handle_request_response(response: Response, url: str) -> None:
    """Validate an HTTP response, raising when it is not 200 OK.

    Parameters
    ----------
    response : Response
        The `requests` response object to validate.
    url: str
        The url on which request was made

    Raises
    ------
    ScrapingError
        If the response status code is anything other than 200.
    """
    response_code = response.status_code
    if response_code != status_code_200:
        # Fixed typo in the failure message ("downloaded" -> "download").
        msg = f"Unable to download assets. Reason - {response.text}"
        raise ScrapingError(msg, url=url)
158 |
159 |
160 | def slugify(string: str) -> str:
161 | """The `slugify` function converts a string to a slug format.
162 |
163 | Parameters
164 | ----------
165 | string : str
166 | The `string` parameter is a string that you want to convert to a slug format.
167 |
168 | Returns
169 | -------
170 | The function `slugify` returns a modified version of the input string in slug format.
171 | """
172 | # Convert to lowercase
173 | modified_string = string.lower()
174 |
175 | # Remove special characters
176 | modified_string = re.sub(r"[^\w\s-]", ".", modified_string)
177 |
178 | # Replace spaces with dashes
179 | modified_string = re.sub(r"\s+", ".", modified_string)
180 |
181 | # Remove consecutive dashes
182 | modified_string = re.sub(r"-+", ".", modified_string)
183 |
184 | # Remove leading and trailing dashes
185 | return modified_string.strip(".")
186 |
187 |
def _check_version(output: str) -> None:
    """Validate `java -version` output, requiring a JRE with major version >= 17.

    Parameters
    ----------
    output : str
        The combined stdout/stderr text produced by ``java -version``.

    Raises
    ------
    subprocess.CalledProcessError
        When the output does not look like a Java runtime or the major
        version is below 17.
    """
    if "Runtime Environment" not in output:
        raise subprocess.CalledProcessError(-1, "java -version")
    # Parse the major version from e.g. `openjdk version "21.0.1"` instead of
    # the previous substring test for "17"/"20", which rejected 18/19/21+ and
    # could false-positive on unrelated digits in the banner.
    match = re.search(r'version "(\d+)', output)
    minimum_major = 17
    if not match or int(match.group(1)) < minimum_major:
        raise subprocess.CalledProcessError(-1, "java -version")
194 |
195 |
def check_java() -> None:
    """Verify that a suitable Java runtime is installed.

    Logs success, or logs an error and exits the process with status -1 when
    the runtime is missing or too old.
    """
    try:
        banner = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT).decode("utf-8")
        # Trim the first and last character before validating, matching the
        # original parsing behaviour.
        _check_version(banner[1:-1])
        logger.debug("Cool!! Java is available")
    except subprocess.CalledProcessError:
        logger.error("Java>= 17 must be installed")
        sys.exit(-1)
211 |
212 |
def delete_old_changelog() -> None:
    """Remove the changelog file left over from a previous run, if any."""
    stale_changelog = Path(changelog_file)
    # missing_ok avoids an error on the very first run.
    stale_changelog.unlink(missing_ok=True)
216 |
217 |
def apkmirror_status_check(package_name: str) -> Any:
    """Check whether an app exists on APKMirror.

    Parameters
    ----------
    package_name : str
        The `package_name` parameter is a string that represents the name of the app package to check on
        APKMirror.

    Returns
    -------
    the response from the APKMirror API as a JSON object.
    """
    body = {"pnames": [package_name]}
    # Use the shared request_timeout constant instead of a duplicated literal
    # so every HTTP timeout in this module stays in sync.
    response = requests.post(APK_MIRROR_APK_CHECK, json=body, headers=request_header, timeout=request_timeout)
    return response.json()
234 |
235 |
def contains_any_word(string: str, words: list[str]) -> bool:
    """Return True when *string* contains at least one entry of *words*."""
    for candidate in words:
        if candidate in string:
            return True
    return False
239 |
240 |
def datetime_to_ms_epoch(dt: datetime) -> int:
    """Return the number of milliseconds since the Unix epoch for *dt*.

    Parameters
    ----------
    dt : datetime
        The datetime to convert. Aware datetimes are converted using their
        own timezone; naive datetimes are interpreted in local time.

    Returns
    -------
    int
        Milliseconds since 1970-01-01T00:00:00Z, rounded to the nearest ms.
    """
    # datetime.timestamp() honours tzinfo on aware datetimes. The previous
    # time.mktime(dt.timetuple()) ignored tzinfo and re-interpreted the wall
    # clock in the machine's local zone, skewing values for aware datetimes
    # (such as those produced in save_patch_info) on non-matching hosts.
    return round(dt.timestamp() * 1000)
245 |
246 |
def load_older_updates(env: Env) -> dict[str, Any]:
    """Fetch the previously published updates.json from the changelogs branch.

    Parameters
    ----------
    env : Env
        Environment accessor used to look up ``GITHUB_REPOSITORY``.

    Returns
    -------
    The parsed JSON document, or an empty dict when the file cannot be
    retrieved (first run, missing branch, network failure, ...).
    """
    try:
        remote_url = updates_file_url.format(
            github_repository=env.str("GITHUB_REPOSITORY"),
            branch_name=branch_name,
            updates_file=updates_file,
        )
        with urllib.request.urlopen(remote_url) as remote:
            return json.load(remote)  # type: ignore[no-any-return]
    except Exception as e:  # noqa: BLE001
        logger.error(f"Failed to retrieve update file: {e}")
        return {}
260 |
261 |
def save_patch_info(app: "APP", updates_info: dict[str, Any]) -> dict[str, Any]:
    """Record the versions of the resources used to patch *app*.

    Stores the app version, patch-bundle versions, CLI version, the patch
    timestamp (as both ms-since-epoch and datetime) and a dump of the app
    configuration into *updates_info*, keyed by the app name.
    """
    # Take a single timestamp so the epoch and datetime fields agree exactly
    # (two datetime.now() calls could differ by a few microseconds).
    patched_at = datetime.now(ZoneInfo(time_zone))
    updates_info[app.app_name] = {
        app_version_key: app.app_version,
        patches_versions_key: app.get_patch_bundles_versions(),
        cli_version_key: app.resource["cli"]["version"],
        "ms_epoch_since_patched": datetime_to_ms_epoch(patched_at),
        "date_patched": patched_at,
        # Use the module-level key constant for consistency with readers.
        app_dump_key: app.for_dump(),
    }
    return updates_info
273 |
--------------------------------------------------------------------------------
/src/patches.py:
--------------------------------------------------------------------------------
1 | """Revanced Patches."""
2 |
3 | import contextlib
4 | from typing import Any, ClassVar, Self
5 |
6 | from loguru import logger
7 |
8 | from src.app import APP
9 | from src.config import RevancedConfig
10 | from src.exceptions import AppNotFoundError
11 | from src.patches_gen import convert_command_output_to_json
12 |
13 |
14 | class Patches(object):
15 | """Revanced Patches."""
16 |
17 | revanced_package_names: ClassVar[dict[str, str]] = {
18 | "com.reddit.frontpage": "reddit",
19 | "com.duolingo": "duolingo",
20 | "com.ss.android.ugc.trill": "tiktok",
21 | "com.zhiliaoapp.musically": "musically",
22 | "com.twitter.android": "twitter",
23 | "de.dwd.warnapp": "warnwetter",
24 | "com.spotify.music": "spotify",
25 | "com.awedea.nyx": "nyx-music-player",
26 | "ginlemon.iconpackstudio": "icon_pack_studio",
27 | "com.ticktick.task": "ticktick",
28 | "tv.twitch.android.app": "twitch",
29 | "com.myprog.hexedit": "hex-editor",
30 | "co.windyapp.android": "windy",
31 | "org.totschnig.myexpenses": "my-expenses",
32 | "com.backdrops.wallpapers": "backdrops",
33 | "com.ithebk.expensemanager": "expensemanager",
34 | "net.dinglisch.android.taskerm": "tasker",
35 | "net.binarymode.android.irplus": "irplus",
36 | "com.vsco.cam": "vsco",
37 | "com.zombodroid.MemeGenerator": "meme-generator-free",
38 | "com.teslacoilsw.launcher": "nova_launcher",
39 | "eu.faircode.netguard": "netguard",
40 | "com.instagram.android": "instagram",
41 | "com.nis.app": "inshorts",
42 | "pl.solidexplorer2": "solidexplorer",
43 | "com.adobe.lrmobile": "lightroom",
44 | "com.facebook.orca": "messenger",
45 | "com.google.android.apps.recorder": "grecorder",
46 | "tv.trakt.trakt": "trakt",
47 | "com.candylink.openvpn": "candyvpn",
48 | "com.sony.songpal.mdr": "sonyheadphone",
49 | "com.dci.dev.androidtwelvewidgets": "androidtwelvewidgets",
50 | "io.yuka.android": "yuka",
51 | "free.reddit.news": "relay",
52 | "com.rubenmayayo.reddit": "boost",
53 | "com.andrewshu.android.reddit": "rif",
54 | "com.laurencedawson.reddit_sync": "sync",
55 | "ml.docilealligator.infinityforreddit": "infinity",
56 | "me.ccrama.redditslide": "slide",
57 | "com.onelouder.baconreader": "bacon",
58 | "com.google.android.youtube": "youtube",
59 | "com.google.android.apps.youtube.music": "youtube_music",
60 | "com.mgoogle.android.gms": "microg",
61 | "jp.pxv.android": "pixiv",
62 | "com.strava": "strava",
63 | "com.microblink.photomath": "photomath",
64 | "o.o.joey": "joey",
65 | "com.vanced.android.youtube": "vanced",
66 | "com.spotify.lite": "spotify-lite",
67 | "at.gv.oe.app": "digitales",
68 | "com.scb.phone": "scbeasy",
69 | "reddit.news": "reddit-news",
70 | "at.gv.bmf.bmf2go": "finanz-online",
71 | "com.tumblr": "tumblr",
72 | "com.myfitnesspal.android": "fitnesspal",
73 | "com.facebook.katana": "facebook",
74 | "io.syncapps.lemmy_sync": "lemmy-sync",
75 | "com.xiaomi.wearable": "xiaomi-wearable",
76 | "com.google.android.apps.photos": "photos",
77 | "com.amazon.mShop.android.shopping": "amazon",
78 | "com.bandcamp.android": "bandcamp",
79 | "com.google.android.apps.magazines": "magazines",
80 | "com.rarlab.rar": "winrar",
81 | "com.soundcloud.android": "soundcloud",
82 | "de.stocard.stocard": "stocard",
83 | "at.willhaben": "willhaben",
84 | "ch.protonmail.android": "proton-mail",
85 | "com.amazon.avod.thirdpartyclient": "prime-video",
86 | "com.cricbuzz.android": "cricbuzz",
87 | "com.crunchyroll.crunchyroid": "crunchyroll",
88 | "com.instagram.barcelona": "threads",
89 | "com.nousguide.android.orftvthek": "orf-on",
90 | "com.pandora.android": "pandora",
91 | "it.ipzs.cieid": "cieid",
92 | "ml.docilealligator.infinityforreddit.patreon": "infinity-for-reddit-patreon",
93 | "ml.docilealligator.infinityforreddit.plus": "infinity-for-reddit-plus",
94 | }
95 |
96 | @staticmethod
97 | def get_package_name(app: str) -> str:
98 | """The function `get_package_name` takes an app name as input and returns the corresponding package name.
99 |
100 | Parameters
101 | ----------
102 | app : str
103 | The `app` parameter is a string that represents the name of an app.
104 |
105 | Returns
106 | -------
107 | a string, which is the package name corresponding to the given app name.
108 | """
109 | for package, app_name in Patches.revanced_package_names.items():
110 | if app_name.upper() == app.upper():
111 | return package
112 | msg = f"App {app} not supported officially yet. Please provide package name in env to proceed."
113 | raise AppNotFoundError(msg)
114 |
115 | @staticmethod
116 | def support_app() -> dict[str, str]:
117 | """The function returns a dictionary of supported app IDs.
118 |
119 | Returns
120 | -------
121 | a dictionary of supported apps.
122 | """
123 | return Patches.revanced_package_names
124 |
125 | def fetch_patches(self: Self, config: RevancedConfig, app: APP) -> None:
126 | """The function fetches patches from a JSON file.
127 |
128 | Parameters
129 | ----------
130 | config : RevancedConfig
131 | The `config` parameter is of type `RevancedConfig` and represents the configuration for the
132 | application.
133 | app : APP
134 | The `app` parameter is of type `APP`. It represents an instance of the `APP` class.
135 | """
136 | self.patches_dict[app.app_name] = []
137 |
138 | # Handle multiple patch bundles
139 | if hasattr(app, "patch_bundles") and app.patch_bundles:
140 | for bundle in app.patch_bundles:
141 | patches = convert_command_output_to_json(
142 | f"{config.temp_folder}/{app.resource["cli"]["file_name"]}",
143 | f"{config.temp_folder}/{bundle["file_name"]}",
144 | )
145 | self._process_patches(patches, app)
146 | elif "patches" in app.resource:
147 | # Fallback to single bundle for backward compatibility
148 | patches = convert_command_output_to_json(
149 | f"{config.temp_folder}/{app.resource["cli"]["file_name"]}",
150 | f"{config.temp_folder}/{app.resource["patches"]["file_name"]}",
151 | )
152 | self._process_patches(patches, app)
153 |
154 | app.no_of_patches = len(self.patches_dict[app.app_name])
155 |
156 | def _create_patch_dict(
157 | self: Self,
158 | patch: dict[Any, Any],
159 | app_name: str,
160 | version: str | list[str] | None,
161 | ) -> dict[str, str]:
162 | """Create a patch dictionary with the required fields.
163 |
164 | Parameters
165 | ----------
166 | patch : dict[Any, Any]
167 | The patch data
168 | app_name : str
169 | The app name or package name
170 | version : str | list[str] | None
171 | The version information
172 |
173 | Returns
174 | -------
175 | dict[str, str]
176 | Formatted patch dictionary
177 | """
178 | patch_dict = {x: patch[x] for x in ["name", "description"]}
179 | patch_dict["app"] = app_name
180 |
181 | if isinstance(version, list) and version:
182 | patch_dict["version"] = version[-1]
183 | elif version:
184 | patch_dict["version"] = version
185 | else:
186 | patch_dict["version"] = "all"
187 |
188 | return patch_dict
189 |
190 | def _is_duplicate_patch(self: Self, patch_name: str, app_name: str) -> bool:
191 | """Check if patch already exists to avoid duplicates.
192 |
193 | Parameters
194 | ----------
195 | patch_name : str
196 | The name of the patch to check
197 | app_name : str
198 | The app name to check in
199 |
200 | Returns
201 | -------
202 | bool
203 | True if patch already exists
204 | """
205 | return any(existing["name"] == patch_name for existing in self.patches_dict[app_name])
206 |
207 | def _process_universal_patch(self: Self, patch: dict[Any, Any]) -> None:
208 | """Process a universal patch (no compatible packages).
209 |
210 | Parameters
211 | ----------
212 | patch : dict[Any, Any]
213 | The patch data
214 | """
215 | patch_dict = self._create_patch_dict(patch, "universal", "all")
216 | self.patches_dict["universal_patch"].append(patch_dict)
217 |
218 | def _process_app_specific_patch(self: Self, patch: dict[Any, Any], app: APP) -> None:
219 | """Process patches that are specific to certain apps.
220 |
221 | Parameters
222 | ----------
223 | patch : dict[Any, Any]
224 | The patch data
225 | app : APP
226 | The app instance
227 | """
228 | for compatible_package in patch["compatiblePackages"]:
229 | package_name = compatible_package["name"]
230 | versions = compatible_package["versions"]
231 |
232 | if app.package_name == package_name:
233 | patch_dict = self._create_patch_dict(patch, package_name, versions)
234 |
235 | if not self._is_duplicate_patch(patch_dict["name"], app.app_name):
236 | self.patches_dict[app.app_name].append(patch_dict)
237 |
238 | def _process_patches(self: Self, patches: list[dict[Any, Any]], app: APP) -> None:
239 | """Process patches from a single bundle and add them to the patches dict.
240 |
241 | Parameters
242 | ----------
243 | patches : list[dict[Any, Any]]
244 | List of patches from a bundle
245 | app : APP
246 | The app instance
247 | """
248 | for patch in patches:
249 | if not patch["compatiblePackages"]:
250 | self._process_universal_patch(patch)
251 | else:
252 | self._process_app_specific_patch(patch, app)
253 |
254 | def __init__(self: Self, config: RevancedConfig, app: APP) -> None:
255 | self.patches_dict: dict[str, list[dict[str, str]]] = {"universal_patch": []}
256 | self.fetch_patches(config, app)
257 |
258 | def get(self: Self, app: str) -> tuple[list[dict[str, str]], str]:
259 | """The function `get` returns all patches and version for a given application.
260 |
261 | Parameters
262 | ----------
263 | app : str
264 | The `app` parameter is a string that represents the name of the application for which you want
265 | to retrieve patches.
266 |
267 | Returns
268 | -------
269 | a tuple containing two elements. The first element is a list of dictionaries representing
270 | patches for the given app. The second element is a string representing the version of the
271 | patches.
272 | """
273 | patches = self.patches_dict[app]
274 | version = "latest"
275 | with contextlib.suppress(StopIteration):
276 | version = next(i["version"] for i in patches if i["version"] != "all")
277 | return patches, version
278 |
279 | def _is_experimental_version(self: Self, app_version: str, recommended_version: str) -> bool:
280 | """Check if the app version is experimental (different from recommended).
281 |
282 | Parameters
283 | ----------
284 | app_version : str
285 | The requested app version
286 | recommended_version : str
287 | The recommended version from patches
288 |
289 | Returns
290 | -------
291 | bool
292 | True if the version is experimental
293 | """
294 | return app_version == "latest" or app_version > recommended_version or app_version < recommended_version
295 |
296 | def get_app_configs(self: Self, app: "APP") -> list[dict[str, str]]:
297 | """The function `get_app_configs` returns configurations for a given app.
298 |
299 | Parameters
300 | ----------
301 | app : "APP"
302 | The "app" parameter is the name of the application for which you want to get the
303 | configurations.
304 |
305 | Returns
306 | -------
307 | the total_patches, which is a list of dictionaries containing information about the patches for
308 | the given app. Each dictionary in the list contains the keys "Patches", "Version", and
309 | "Experimental".
310 | """
311 | total_patches, recommended_version = self.get(app=app.app_name)
312 | experiment = False
313 |
314 | if app.app_version:
315 | logger.debug(f"Picked {app} version {app.app_version:} from env.")
316 | experiment = self._is_experimental_version(app.app_version, recommended_version)
317 | recommended_version = app.app_version
318 |
319 | app.app_version = recommended_version
320 | app.experiment = experiment
321 | return total_patches
322 |
--------------------------------------------------------------------------------
/scripts/add_apkmirror_app.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | r"""CLI to register a new APKMirror app in the repo.
3 |
4 | This script automates the manual edits required when ReVanced adds support for a
5 | new app that's available on APKMirror. It updates:
6 | - src/downloader/sources.py: adds the APKMirror source mapping
7 | - src/patches.py: maps package name -> app key
8 | - README.md: appends the bullet link under the officially supported list
9 |
10 | Usage examples:
11 | python scripts/add_apkmirror_app.py \
12 | --package com.facebook.katana \
13 | --name facebook \
14 | --apkmirror-path facebook-2/facebook
15 |
16 | python scripts/add_apkmirror_app.py \
17 | --package com.facebook.katana \
18 | --name facebook \
19 | --apkmirror-url https://www.apkmirror.com/apk/facebook-2/facebook/
20 |
21 | Notes
22 | -----
23 | - APKMirror only. For other sources, extend the script accordingly.
24 | - Idempotent: if entries already exist, it will skip updating that file.
25 | """
26 |
27 | from __future__ import annotations
28 |
29 | import argparse
30 | import os
31 | import re
32 | from dataclasses import dataclass
33 | from pathlib import Path
34 |
35 | import requests
36 | from loguru import logger
37 |
# Repository root, resolved relative to this script (scripts/..).
REPO_ROOT = Path(__file__).resolve().parents[1]
# An APKMirror app path has exactly two segments: "<org>/<app>".
ORG_APP_PARTS = 2
# APKMirror API endpoint used to look up an app by package name.
APK_MIRROR_APP_EXISTS_URL = "https://www.apkmirror.com/wp-json/apkm/v1/app_exists/"
# Defaults are overridable via environment variables.
DEFAULT_USER_AGENT = os.getenv("APKMIRROR_USER_AGENT", "nikhil")
DEFAULT_BASIC_AUTH = os.getenv(
    "APKMIRROR_AUTH_BASIC",
    # base64("api-apkupdater:rm5rcfruUjKy04sMpyMPJXW8") as provided in the example
    "YXBpLWFwa3VwZGF0ZXI6cm01cmNmcnVVakt5MDRzTXB5TVBKWFc4",
)
# Timeout (seconds) for the APKMirror API request.
DEFAULT_HTTP_TIMEOUT_SECS = 20
48 |
49 |
def parse_args() -> argparse.Namespace:
    """Parse CLI arguments for registering an APKMirror app.

    Returns
    -------
    argparse.Namespace
        Parsed arguments: package, name, apkmirror_path/apkmirror_url
        (mutually exclusive), apkmirror_auth, user_agent, dry_run.
    """
    parser = argparse.ArgumentParser(description="Register a new APKMirror app")
    parser.add_argument("--package", required=True, help="Android package name, e.g., com.facebook.katana")
    parser.add_argument("--name", required=True, help="Short app key/name used in configs, e.g., facebook")

    # Two alternative ways to point at the same APKMirror app page.
    apkmirror = parser.add_mutually_exclusive_group(required=False)
    apkmirror.add_argument(
        "--apkmirror-path",
        # Restored the '<org>/<app>' placeholders that were lost from this
        # help text.
        help="APKMirror path '<org>/<app>' without leading /apk/, e.g., 'facebook-2/facebook'",
    )
    apkmirror.add_argument(
        "--apkmirror-url",
        help="Full APKMirror app URL, e.g., https://www.apkmirror.com/apk/facebook-2/facebook/",
    )

    parser.add_argument(
        "--apkmirror-auth",
        default=DEFAULT_BASIC_AUTH,
        help="Base64 for Basic Authorization header to APKMirror API (env: APKMIRROR_AUTH_BASIC)",
    )
    parser.add_argument(
        "--user-agent",
        default=DEFAULT_USER_AGENT,
        help="User-Agent value for APKMirror API (env: APKMIRROR_USER_AGENT)",
    )

    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Show planned changes without writing files",
    )

    return parser.parse_args()
84 |
85 |
def extract_apkmirror_path(url_or_path: str) -> tuple[str, str]:
    """Return (org, app) from a full URL or 'org/app' path.

    Accepted examples:
    - facebook-2/facebook
    - https://www.apkmirror.com/apk/facebook-2/facebook/
    - https://www.apkmirror.com/apk/facebook-2/facebook

    Raises
    ------
    ValueError
        If the URL or path cannot be parsed into an org/app pair.
    """
    raw = url_or_path.strip()
    if raw.startswith("http"):
        # Keep only the path after '/apk/'
        m = re.search(r"/apk/([^/?#]+)/([^/?#]+)/?", raw)
        if not m:
            # Restored the '<org>/<app>' placeholders previously lost from
            # this message.
            msg = "Unable to parse APKMirror URL. Expected .../apk/<org>/<app>/."
            raise ValueError(
                msg,
            )
        org, app = m.group(1), m.group(2)
    else:
        # org/app
        parts = raw.strip("/").split("/")
        if len(parts) != ORG_APP_PARTS:
            msg = "--apkmirror-path must be '<org>/<app>'"
            raise ValueError(msg)
        org, app = parts
    return org, app
112 |
113 |
def discover_apkmirror_path_via_api(package_name: str, auth_b64: str, user_agent: str) -> tuple[str, str]:
    """Resolve the APKMirror org/app path for *package_name* via the app_exists API.

    Tries the app link first, then falls back to the release link.
    Returns (org, app); raises RuntimeError on any API or parsing failure.
    """
    request_headers = {
        "Authorization": f"Basic {auth_b64}",
        "User-Agent": user_agent,
        "Content-Type": "application/json",
    }
    resp = requests.post(
        APK_MIRROR_APP_EXISTS_URL,
        headers=request_headers,
        json={"pnames": [package_name]},
        timeout=DEFAULT_HTTP_TIMEOUT_SECS,
    )
    if resp.status_code != 200:  # noqa: PLR2004
        msg = f"APKMirror app_exists API error: HTTP {resp.status_code}"
        raise RuntimeError(msg)

    items = resp.json().get("data") or []
    if not items:
        msg = f"No data returned from APKMirror for {package_name}"
        raise RuntimeError(msg)

    first = items[0]
    app_link = ((first.get("app") or {}).get("link")) or ((first.get("release") or {}).get("link"))
    if not app_link:
        msg = "APKMirror response missing app/release link"
        raise RuntimeError(msg)

    # The link looks like /apk/<org>/<app>/...; capture the two segments.
    m = re.search(r"/apk/([^/]+)/([^/]+)/", app_link)
    if not m:
        msg = "Unable to parse org/app from APKMirror response link"
        raise RuntimeError(msg)
    return m.group(1), m.group(2)
153 |
154 |
def read_text(path: Path) -> str:
    """Return the UTF-8 decoded contents of *path*."""
    with path.open(encoding="utf-8") as handle:
        return handle.read()
158 |
159 |
def write_text(path: Path, content: str) -> None:
    """Write *content* to *path*, encoded as UTF-8."""
    with path.open("w", encoding="utf-8") as handle:
        handle.write(content)
163 |
164 |
def _process_char_in_dict_parsing(
    ch: str,
    depth: int,
    in_str: str | None,
    *,
    esc: bool,
) -> tuple[int, str | None, bool]:
    """Advance the brace-matching state machine by one character.

    Returns the updated ``(depth, in_str, esc)`` triple: *depth* is the brace
    nesting level, *in_str* is the active quote character (or None outside a
    string), and *esc* marks a pending backslash escape inside a string.
    """
    if in_str:
        if esc:
            # The escaped character is consumed; clear the escape flag.
            return depth, in_str, False
        if ch == "\\":
            return depth, in_str, True
        # A matching quote ends the string; any other character is literal.
        return (depth, None, False) if ch == in_str else (depth, in_str, esc)
    if ch in ('"', "'"):
        return depth, ch, False
    if ch == "{":
        return depth + 1, in_str, False
    if ch == "}":
        return depth - 1, in_str, False
    return depth, in_str, esc
191 |
192 |
@dataclass
class DictInsertParams:
    """Parameters for dictionary key-value insertion."""

    content: str  # full source text being edited
    brace_start: int  # index of the dict's opening '{' in content
    brace_end: int  # index of the matching closing '}' in content
    body: str  # text between the braces (exclusive of both)
    indent: str  # indentation prefix for the inserted entry
    key: str  # dictionary key to insert (without quotes)
    value_code: str  # source code of the value expression, already quoted as needed
204 |
205 |
206 | def _find_dict_braces(content: str, open_match: re.Match[str]) -> tuple[int, int]:
207 | """Find the start and end indices of dictionary braces.
208 |
209 | Returns: (brace_start, brace_end)
210 | """
211 | # Find the '{' start index
212 | brace_start = content.find("{", open_match.start())
213 | if brace_start == -1:
214 | msg = "Malformed dictionary start: missing '{'"
215 | raise RuntimeError(msg)
216 |
217 | # Walk to matching '}' while handling strings and escape sequences
218 | i = brace_start
219 | depth = 0
220 | in_str: str | None = None
221 | esc = False
222 |
223 | while i < len(content):
224 | ch = content[i]
225 | depth, in_str, esc = _process_char_in_dict_parsing(ch, depth, in_str, esc=esc)
226 |
227 | if ch == "}" and depth == 0:
228 | return brace_start, i
229 |
230 | i += 1
231 |
232 | # If we reach here, we didn't find the matching closing brace
233 | msg = "Malformed dictionary: missing closing '}'"
234 | raise RuntimeError(msg)
235 |
236 |
237 | def _calculate_indentation(content: str, brace_start: int, body: str) -> str:
238 | """Calculate the proper indentation for a new dictionary entry."""
239 | # Look for first item indentation
240 | item_match = re.search(r"^(?P[ \t]+)\"[^\n]+\"\s*:\s*", body, re.MULTILINE)
241 | if item_match:
242 | return item_match.group("indent")
243 |
244 | # Fallback: compute from dictionary line indentation
245 | line_start = content.rfind("\n", 0, brace_start) + 1
246 | base_indent = content[line_start:brace_start].split("\n")[-1]
247 |
248 | # Count leading spaces/tabs of the line
249 | m_leading = re.match(r"^[ \t]*", base_indent)
250 | if not m_leading:
251 | msg = "Could not determine indentation for dictionary body"
252 | raise RuntimeError(msg)
253 | leading = m_leading.group(0)
254 | return leading + " " * 4
255 |
256 |
257 | def _key_exists_in_dict(body: str, key: str) -> bool:
258 | """Check if a key already exists in the dictionary body."""
259 | key_re = re.compile(rf"^[ \t]*\"{re.escape(key)}\"\s*:\s*", re.MULTILINE)
260 | return bool(key_re.search(body))
261 |
262 |
263 | def _insert_kv_entry(params: DictInsertParams) -> str:
264 | """Insert the key-value entry into the dictionary."""
265 | new_entry = f'\n{params.indent}"{params.key}": {params.value_code},'
266 | new_body = params.body + new_entry + "\n"
267 | return params.content[: params.brace_start + 1] + new_body + params.content[params.brace_end :]
268 |
269 |
def insert_kv_into_dict(
    content: str,
    dict_var_pattern: str,
    key: str,
    value_code: str,
) -> tuple[str, bool]:
    r"""Insert a key/value pair into a Python dict literal inside *content*.

    Parameters
    ----------
    dict_var_pattern:
        Regex matching the assignment line that opens the dict,
        e.g. r"revanced_package_names[\s\S]*?=\s*\{".
    key:
        Dictionary key to insert (without quotes).
    value_code:
        Full code for the value expression (already quoted/f-string as needed).

    Returns
    -------
    (new_content, changed) — *changed* is False when the key already exists.

    Raises RuntimeError when the dict cannot be located or parsed.
    """
    open_match = re.search(dict_var_pattern, content)
    if not open_match:
        msg = "Could not locate dictionary with given pattern"
        raise RuntimeError(msg)

    brace_start, brace_end = _find_dict_braces(content, open_match)
    body = content[brace_start + 1 : brace_end]

    # No-op when the key is already present.
    if _key_exists_in_dict(body, key):
        return content, False

    indent = _calculate_indentation(content, brace_start, body)
    params = DictInsertParams(content, brace_start, brace_end, body, indent, key, value_code)
    return _insert_kv_entry(params), True
308 |
309 |
def update_sources_py(app_key: str, org: str, app: str, *, dry_run: bool) -> bool:
    """Update `src/downloader/sources.py` with the APKMirror mapping.

    Returns True if a change was made.
    """
    path = REPO_ROOT / "src" / "downloader" / "sources.py"
    content = read_text(path)
    # Doubled braces emit a literal {APK_MIRROR_BASE_APK_URL} into the target
    # f-string while {org}/{app} are interpolated here. This replaces the
    # quote-inside-quote f-string form that required Python 3.12+ and was
    # hard to read; the produced string is identical.
    value_code = f'f"{{APK_MIRROR_BASE_APK_URL}}/{org}/{app}/"'
    pattern = r"apk_sources\s*=\s*\{"
    new_content, changed = insert_kv_into_dict(content, pattern, app_key, value_code)
    if changed and not dry_run:
        write_text(path, new_content)
    return changed
323 |
324 |
def update_patches_py(package_name: str, app_key: str, *, dry_run: bool) -> bool:
    """Update `src/patches.py` with the package -> app key mapping.

    Returns True if a change was made.
    """
    path = REPO_ROOT / "src" / "patches.py"
    # Match the dict assignment, accommodating type annotations.
    pattern = r"revanced_package_names[\s\S]*?=\s*\{"
    new_content, changed = insert_kv_into_dict(
        read_text(path),
        pattern,
        package_name,
        f'"{app_key}"',
    )
    if changed and not dry_run:
        write_text(path, new_content)
    return changed
339 |
340 |
def update_readme_md(app_key: str, org: str, app: str, *, dry_run: bool) -> bool:
    """Insert the README bullet link for the new app.

    Returns True if a change was made.
    """
    path = REPO_ROOT / "README.md"
    content = read_text(path)
    bullet = f" - [{app_key}](https://www.apkmirror.com/apk/{org}/{app}/)"

    # Check if already present (exact label match, beginning of bullet)
    exists_pattern = re.compile(r"^\s*-\s*\[" + re.escape(app_key) + r"\]\(", flags=re.MULTILINE)
    if exists_pattern.search(content):
        return False

    # Locate the supported list block: insert before the note that ends it.
    # NOTE(review): the original pattern line was corrupted (unterminated
    # string split across lines); reconstructed from the visible fragments —
    # confirm it matches the actual README note text.
    note = re.search(r"^\s*`\*\*` - You can also patch any other app", content, re.MULTILINE)
    if note:
        insert_pos = note.start()
        new_content = content[:insert_pos] + bullet + "\n" + content[insert_pos:]
    else:
        # Fallback: append at end
        new_content = content.rstrip("\n") + "\n" + bullet + "\n"

    if not dry_run:
        write_text(path, new_content)
    return True
368 |
369 |
def main() -> None:
    """Entry point: parse args, perform updates, and report results."""
    args = parse_args()

    # Prefer an explicitly supplied path/URL; otherwise query the API.
    explicit = args.apkmirror_path or args.apkmirror_url
    if explicit:
        org, app = extract_apkmirror_path(explicit)
    else:
        org, app = discover_apkmirror_path_via_api(args.package, args.apkmirror_auth, args.user_agent)

    # Run every updater unconditionally so each file gets its chance to change.
    results = [
        update_sources_py(args.name, org, app, dry_run=args.dry_run),
        update_patches_py(args.package, args.name, dry_run=args.dry_run),
        update_readme_md(args.name, org, app, dry_run=args.dry_run),
    ]

    if not any(results):
        logger.info("No changes needed; app may already be registered.")
391 |
392 |
# Script entry point: run only when executed directly, not on import.
if __name__ == "__main__":
    main()
395 |
--------------------------------------------------------------------------------
/src/parser.py:
--------------------------------------------------------------------------------
1 | """Revanced Parser."""
2 |
3 | import json
4 | from subprocess import PIPE, Popen
5 | from time import perf_counter
6 | from typing import Any, Self
7 |
8 | from loguru import logger
9 |
10 | from src.app import APP
11 | from src.config import RevancedConfig
12 | from src.exceptions import PatchingFailedError
13 | from src.patches import Patches
14 | from src.utils import possible_archs
15 |
16 |
17 | class Parser(object):
18 | """Revanced Parser."""
19 |
20 | CLI_JAR = "-jar"
21 | APK_ARG = "-a"
22 | NEW_APK_ARG = "patch"
23 | PATCHES_ARG = "-p"
24 | OUTPUT_ARG = "-o"
25 | KEYSTORE_ARG = "--keystore"
26 | OPTIONS_ARG = "-O"
27 | ENABLE_ARG = "-e"
28 | DISABLE_ARG = "-d"
29 | EXCLUSIVE_ARG = "--exclusive"
30 |
31 | def __init__(self: Self, patcher: Patches, config: RevancedConfig) -> None:
32 | self._PATCHES: list[str] = []
33 | self._EXCLUDED: list[str] = []
34 | self.patcher = patcher
35 | self.config = config
36 |
37 | def format_option(self: Self, opt: dict[str, Any]) -> str:
38 | """
39 | The function `include` adds a given patch to the front of a list of patches.
40 |
41 | Parameters
42 | ----------
43 | opt : dict[str, Any]
44 | The `opt` parameter is a dictionary that represents the key-value pair of options
45 | of the patch to be included.
46 | """
47 | pair: str = opt["key"]
48 | if value := opt.get("value"):
49 | if isinstance(value, bool):
50 | pair += f'="{str(value).lower()}"'
51 | elif isinstance(value, (int, float)):
52 | pair += f"={value}" # Numbers should not be quoted
53 | elif isinstance(value, list):
54 | formatted_list = ",".join(map(str, value))
55 | pair += f'="[ {formatted_list} ]"' # Preserve list format
56 | else:
57 | pair += f'="{value}"'
58 | return pair
59 |
60 | def include(self: Self, name: str, options_list: list[dict[str, Any]]) -> None:
61 | """
62 | The function `include` adds a given patch to the front of a list of patches.
63 |
64 | Parameters
65 | ----------
66 | name : str
67 | The `name` parameter is a string that represents the name of the patch to be included.
68 | options_list : list[dict[str, Any]]
69 | Then `options_list` parameter is a list of dictionary that represents the options for all patches.
70 | """
71 | options_dict: dict[str, Any] = self.fetch_patch_options(name, options_list)
72 | options = options_dict.get("options", [])
73 | if options:
74 | for opt in options:
75 | pair = self.format_option(opt)
76 | self._PATCHES[:0] = [self.OPTIONS_ARG, pair]
77 | self._PATCHES[:0] = [self.ENABLE_ARG, name]
78 |
79 | def exclude(self: Self, name: str) -> None:
80 | """The `exclude` function adds a given patch to the list of excluded patches.
81 |
82 | Parameters
83 | ----------
84 | name : str
85 | The `name` parameter is a string that represents the name of the patch to be excluded.
86 | """
87 | self._PATCHES.extend([self.DISABLE_ARG, name])
88 | self._EXCLUDED.append(name)
89 |
90 | def get_excluded_patches(self: Self) -> list[str]:
91 | """The function `get_excluded_patches` is a getter method that returns a list of excluded patches.
92 |
93 | Returns
94 | -------
95 | The method is returning a list of excluded patches.
96 | """
97 | return self._EXCLUDED
98 |
99 | def get_all_patches(self: Self) -> list[str]:
100 | """The function "get_all_patches" is a getter method that returns the list of all patches.
101 |
102 | Returns
103 | -------
104 | The method is returning a list of all patches.
105 | """
106 | return self._PATCHES
107 |
108 | def invert_patch(self: Self, name: str) -> bool:
109 | """The function `invert_patch` takes a name as input, it toggles the status of the patch.
110 |
111 | Parameters
112 | ----------
113 | name : str
114 | The `name` parameter is a string that represents the name of a patch.
115 |
116 | Returns
117 | -------
118 | a boolean value. It returns True if the patch name is found in the list of patches and
119 | successfully inverted, and False if the patch name is not found in the list.
120 | """
121 | try:
122 | name = name.lower().replace(" ", "-")
123 | indices = [i for i in range(len(self._PATCHES)) if self._PATCHES[i] == name]
124 | for patch_index in indices:
125 | if self._PATCHES[patch_index - 1] == self.ENABLE_ARG:
126 | self._PATCHES[patch_index - 1] = self.DISABLE_ARG
127 | else:
128 | self._PATCHES[patch_index - 1] = self.ENABLE_ARG
129 | except ValueError:
130 | return False
131 | else:
132 | return True
133 |
134 | def enable_exclusive_mode(self: Self) -> None:
135 | """Enable exclusive mode - only explicitly enabled patches will run, all others disabled by default."""
136 | logger.info("Enabling exclusive mode for fast testing - only keeping one patch enabled.")
137 | # Clear all patches and keep only the first one enabled
138 | if self._PATCHES:
139 | # Find the first enable argument and its patch name
140 | for idx in range(0, len(self._PATCHES), 2):
141 | if idx < len(self._PATCHES) and self._PATCHES[idx] == self.ENABLE_ARG and idx + 1 < len(self._PATCHES):
142 | first_patch = self._PATCHES[idx + 1]
143 | # Clear all patches and set only the first one
144 | self._PATCHES = [self.ENABLE_ARG, first_patch]
145 | break
146 |
147 | def fetch_patch_options(self: Self, name: str, options_list: list[dict[str, Any]]) -> dict[str, Any]:
148 | """The function `fetch_patch_options` finds patch options for the patch.
149 |
150 | Parameters
151 | ----------
152 | name : str
153 | Then `name` parameter is a string that represents the name of the patch.
154 | options_list : list[dict[str, Any]]
155 | Then `options_list` parameter is a list of dictionary that represents the options for all patches.
156 | """
157 | return next(
158 | filter(lambda obj: obj.get("patchName") == name, options_list),
159 | {},
160 | )
161 |
162 | def _load_patch_options(self: Self, app: APP) -> list[dict[str, Any]]:
163 | """Load patch options from file.
164 |
165 | Parameters
166 | ----------
167 | app : APP
168 | The app instance
169 |
170 | Returns
171 | -------
172 | list[dict[str, Any]]
173 | List of patch options
174 | """
175 | options_list: list[dict[str, Any]] = [{}]
176 | try:
177 | with self.config.temp_folder.joinpath(app.options_file).open() as file:
178 | options_list = json.load(file)
179 | except FileNotFoundError as e:
180 | logger.warning(str(e))
181 | logger.debug("Setting options to empty list.")
182 | return options_list
183 |
184 | def _normalize_patch_name(self: Self, patch_name: str, *, space_formatted: bool) -> str:
185 | """Normalize patch name based on formatting preference.
186 |
187 | Parameters
188 | ----------
189 | patch_name : str
190 | The original patch name
191 | space_formatted : bool
192 | Whether to use space formatting
193 |
194 | Returns
195 | -------
196 | str
197 | Normalized patch name
198 | """
199 | return patch_name.lower().replace(" ", "-") if space_formatted else patch_name
200 |
201 | def _should_include_regular_patch(self: Self, patch_name: str, normalized_name: str, app: APP) -> bool:
202 | """Determine if a regular patch should be included.
203 |
204 | Parameters
205 | ----------
206 | patch_name : str
207 | The original patch name
208 | normalized_name : str
209 | The normalized patch name
210 | app : APP
211 | The app instance
212 |
213 | Returns
214 | -------
215 | bool
216 | True if patch should be included
217 | """
218 | exclude_list = app.exclude_request
219 | check_name = normalized_name if app.space_formatted else patch_name
220 | return check_name not in exclude_list
221 |
222 | def _should_include_universal_patch(self: Self, patch_name: str, normalized_name: str, app: APP) -> bool:
223 | """Determine if a universal patch should be included.
224 |
225 | Parameters
226 | ----------
227 | patch_name : str
228 | The original patch name
229 | normalized_name : str
230 | The normalized patch name
231 | app : APP
232 | The app instance
233 |
234 | Returns
235 | -------
236 | bool
237 | True if patch should be included
238 | """
239 | include_list = app.include_request
240 | check_name = normalized_name if app.space_formatted else patch_name
241 | return check_name in include_list
242 |
243 | def _process_regular_patches(
244 | self: Self,
245 | patches: list[dict[str, str]],
246 | app: APP,
247 | options_list: list[dict[str, Any]],
248 | ) -> None:
249 | """Process regular patches for include/exclude.
250 |
251 | Parameters
252 | ----------
253 | patches : list[dict[str, str]]
254 | List of regular patches
255 | app : APP
256 | The app instance
257 | options_list : list[dict[str, Any]]
258 | List of patch options
259 | """
260 | for patch in patches:
261 | patch_name = patch["name"]
262 | normalized_name = self._normalize_patch_name(patch_name, space_formatted=app.space_formatted)
263 |
264 | if self._should_include_regular_patch(patch_name, normalized_name, app):
265 | self.include(patch_name, options_list)
266 | else:
267 | self.exclude(patch_name)
268 |
269 | def _process_universal_patches(
270 | self: Self,
271 | universal_patches: list[dict[str, str]],
272 | app: APP,
273 | options_list: list[dict[str, Any]],
274 | ) -> None:
275 | """Process universal patches for include.
276 |
277 | Parameters
278 | ----------
279 | universal_patches : list[dict[str, str]]
280 | List of universal patches
281 | app : APP
282 | The app instance
283 | options_list : list[dict[str, Any]]
284 | List of patch options
285 | """
286 | for patch in universal_patches:
287 | patch_name = patch["name"]
288 | normalized_name = self._normalize_patch_name(patch_name, space_formatted=app.space_formatted)
289 |
290 | if self._should_include_universal_patch(patch_name, normalized_name, app):
291 | self.include(patch_name, options_list)
292 |
293 | def include_exclude_patch(
294 | self: Self,
295 | app: APP,
296 | patches: list[dict[str, str]],
297 | patches_dict: dict[str, list[dict[str, str]]],
298 | ) -> None:
299 | """The function `include_exclude_patch` includes and excludes patches for a given app."""
300 | options_list = self._load_patch_options(app)
301 |
302 | self._process_regular_patches(patches, app, options_list)
303 | self._process_universal_patches(patches_dict["universal_patch"], app, options_list)
304 |
305 | def _build_base_args(self: Self, app: APP) -> list[str]:
306 | """Build base arguments for ReVanced CLI."""
307 | return [
308 | self.CLI_JAR,
309 | app.resource["cli"]["file_name"],
310 | self.NEW_APK_ARG,
311 | app.download_file_name,
312 | ]
313 |
314 | def _add_patch_bundles(self: Self, args: list[str], app: APP) -> None:
315 | """Add patch bundle arguments to the command."""
316 | if hasattr(app, "patch_bundles") and app.patch_bundles:
317 | # Use multiple -p arguments for multiple bundles
318 | for bundle in app.patch_bundles:
319 | args.extend([self.PATCHES_ARG, bundle["file_name"]])
320 | else:
321 | # Fallback to single bundle for backward compatibility
322 | args.extend([self.PATCHES_ARG, app.resource["patches"]["file_name"]])
323 |
324 | def _add_output_and_keystore_args(self: Self, args: list[str], app: APP) -> None:
325 | """Add output file and keystore arguments."""
326 | args.extend(
327 | [
328 | self.OUTPUT_ARG,
329 | app.get_output_file_name(),
330 | self.KEYSTORE_ARG,
331 | app.keystore_name,
332 | "--force",
333 | ],
334 | )
335 |
336 | def _add_keystore_flags(self: Self, args: list[str], app: APP) -> None:
337 | """Add keystore-specific flags if needed."""
338 | if app.old_key:
339 | # https://github.com/ReVanced/revanced-cli/issues/272#issuecomment-1740587534
340 | old_key_flags = [
341 | "--keystore-entry-alias=alias",
342 | "--keystore-entry-password=ReVanced",
343 | "--keystore-password=ReVanced",
344 | ]
345 | args.extend(old_key_flags)
346 |
347 | def _add_architecture_args(self: Self, args: list[str], app: APP) -> None:
348 | """Add architecture-specific arguments."""
349 | if app.app_name in self.config.rip_libs_apps:
350 | excluded = set(possible_archs) - set(app.archs_to_build)
351 | for arch in excluded:
352 | args.extend(("--rip-lib", arch))
353 |
354 | # noinspection IncorrectFormatting
355 | def patch_app(
356 | self: Self,
357 | app: APP,
358 | ) -> None:
359 | """The function `patch_app` is used to patch an app using the Revanced CLI tool.
360 |
361 | Parameters
362 | ----------
363 | app : APP
364 | The `app` parameter is an instance of the `APP` class. It represents an application that needs
365 | to be patched.
366 | """
367 | args = self._build_base_args(app)
368 | self._add_patch_bundles(args, app)
369 | self._add_output_and_keystore_args(args, app)
370 |
371 | # Convert paths to absolute paths
372 | args[1::2] = [str(self.config.temp_folder.joinpath(arg)) for arg in args[1::2]]
373 |
374 | self._add_keystore_flags(args, app)
375 |
376 | if self.config.ci_test:
377 | self.enable_exclusive_mode()
378 | if self._PATCHES:
379 | args.extend(self._PATCHES)
380 |
381 | self._add_architecture_args(args, app)
382 | args.extend(("--purge",))
383 |
384 | start = perf_counter()
385 | logger.debug(f"Sending request to revanced cli for building with args java {args}")
386 | process = Popen(["java", *args], stdout=PIPE)
387 | output = process.stdout
388 | if not output:
389 | msg = "Failed to send request for patching."
390 | raise PatchingFailedError(msg)
391 | for line in output:
392 | logger.debug(line.decode(), flush=True, end="")
393 | process.wait()
394 | logger.info(f"Patching completed for app {app} in {perf_counter() - start:.2f} seconds.")
395 |
--------------------------------------------------------------------------------
/src/app.py:
--------------------------------------------------------------------------------
1 | """Class to represent apk to be patched."""
2 |
3 | import concurrent
4 | import hashlib
5 | import pathlib
6 | from concurrent.futures import ThreadPoolExecutor
7 | from datetime import datetime
8 | from threading import Lock
9 | from typing import Any, Self
10 | from zoneinfo import ZoneInfo
11 |
12 | from loguru import logger
13 |
14 | from src.config import RevancedConfig
15 | from src.downloader.sources import APKEEP, apk_sources
16 | from src.exceptions import BuilderError, DownloadError, PatchingFailedError
17 | from src.utils import slugify, time_zone
18 |
19 |
20 | class APP(object):
21 | """Patched APK."""
22 |
    def __init__(self: Self, app_name: str, package_name: str, config: RevancedConfig) -> None:
        """Initialize APP.

        Args:
        ----
            app_name (str): Name of the app.
            package_name (str): Android package name of the app to patch.
            config (RevancedConfig): Configuration object.
        """
        self.app_name = app_name
        # Optional version pin from env (e.g. YOUTUBE_VERSION); None means latest.
        self.app_version = config.env.str(f"{app_name}_VERSION".upper(), None)
        self.experiment = False
        self.cli_dl = config.env.str(f"{app_name}_CLI_DL".upper(), config.global_cli_dl)

        # Support multiple patch bundles via comma-separated URLs
        patches_dl_raw = config.env.str(f"{app_name}_PATCHES_DL".upper(), config.global_patches_dl)
        self.patches_dl_list = [url.strip() for url in patches_dl_raw.split(",") if url.strip()]
        # Keep backward compatibility
        self.patches_dl = patches_dl_raw

        self.exclude_request: list[str] = config.env.list(f"{app_name}_EXCLUDE_PATCH".upper(), [])
        self.include_request: list[str] = config.env.list(f"{app_name}_INCLUDE_PATCH".upper(), [])
        # Resolved resources (cli jar, patches) keyed by name: {file_name, version}.
        self.resource: dict[str, dict[str, str]] = {}
        self.patch_bundles: list[dict[str, str]] = []  # Store multiple patch bundles
        self.no_of_patches: int = 0
        self.keystore_name = config.env.str(f"{app_name}_KEYSTORE_FILE_NAME".upper(), config.global_keystore_name)
        self.archs_to_build = config.env.list(f"{app_name}_ARCHS_TO_BUILD".upper(), config.global_archs_to_build)
        self.options_file = config.env.str(f"{app_name}_OPTIONS_FILE".upper(), config.global_options_file)
        self.download_file_name = ""
        # Direct download URL; when set it bypasses source scraping entirely.
        self.download_dl = config.env.str(f"{app_name}_DL".upper(), "")
        self.download_source = config.env.str(f"{app_name}_DL_SOURCE".upper(), "")
        self.package_name = package_name
        self.old_key = config.env.bool(f"{app_name}_OLD_KEY".upper(), config.global_old_key)
        self.patches: list[dict[Any, Any]] = []
        # Whether patch names use spaces ("Some Patch") or kebab-case ("some-patch").
        self.space_formatted = config.env.bool(
            f"{app_name}_SPACE_FORMATTED_PATCHES".upper(),
            config.global_space_formatted,
        )
60 |
    def download_apk_for_patching(
        self: Self,
        config: RevancedConfig,
        download_cache: dict[tuple[str, str], tuple[str, str]],
        download_lock: Lock,
    ) -> None:
        """Download apk to be patched, skipping if already downloaded (matching source and version).

        Parameters
        ----------
        config : RevancedConfig
            Global configuration used to construct the downloader.
        download_cache : dict[tuple[str, str], tuple[str, str]]
            Shared cache mapping cache keys (see `get_download_cache_key`) to
            (file_name, download_url); mutated on cache misses.
        download_lock : Lock
            Serializes the cache-check-and-download section across threads.

        Raises
        ------
        DownloadError
            If no download source is configured or known for this app.
        """
        from src.downloader.download import Downloader  # noqa: PLC0415
        from src.downloader.factory import DownloaderFactory  # noqa: PLC0415

        if self.download_dl:
            logger.info("Downloading apk to be patched using provided dl")
            self.download_file_name = f"{self.app_name}.apk"
            Downloader(config).direct_download(self.download_dl, self.download_file_name)
        else:
            logger.info("Downloading apk to be patched by scrapping")
            try:
                if not self.download_source:
                    self.download_source = apk_sources[self.app_name.lower()].format(self.package_name)
            except KeyError as key:
                msg = f"App {self.app_name} not supported officially yet. Please provide download source in env."
                raise DownloadError(msg) from key

            # Get unique cache key for this app
            cache_key = self.get_download_cache_key()

            # Optimistic cache check (outside lock for better performance)
            if cache_key in download_cache:
                logger.info(f"Skipping download. Reusing APK from cache for {self.app_name} ({self.app_version})")
                self.download_file_name, self.download_dl = download_cache[cache_key]
                return

            # Thread-safe cache check and download
            with download_lock:
                # Double-check after acquiring lock to handle race conditions
                if cache_key in download_cache:
                    logger.info(f"Skipping download. Reusing APK from cache for {self.app_name} ({self.app_version})")
                    self.download_file_name, self.download_dl = download_cache[cache_key]
                    return

                logger.info(f"Cache miss for {self.app_name} ({self.app_version}). Proceeding with download.")
                downloader = DownloaderFactory.create_downloader(config=config, apk_source=self.download_source)
                self.download_file_name, self.download_dl = downloader.download(self.app_version, self)

                # Save to cache using the unique cache key
                download_cache[cache_key] = (self.download_file_name, self.download_dl)
                logger.info(f"Added {self.app_name} ({self.app_version}) to download cache.")
108 |
109 | def get_download_cache_key(self: Self) -> tuple[str, str]:
110 | """Generate a unique cache key for APK downloads.
111 |
112 | For apkeep sources, includes package name to prevent cache collisions
113 | when multiple apps use the same version (e.g., "latest").
114 |
115 | Returns
116 | -------
117 | tuple[str, str]: Cache key as (source, identifier) where identifier
118 | includes package name for apkeep sources.
119 | """
120 | version = self.app_version or "latest"
121 |
122 | if self.download_source == APKEEP:
123 | # Use package@version format for apkeep to ensure uniqueness
124 | return (self.download_source, f"{self.package_name}@{version}")
125 |
126 | # For URL-based sources, source+version is already unique
127 | return (self.download_source, version)
128 |
129 | def get_output_file_name(self: Self) -> str:
130 | """The function returns a string representing the output file name.
131 |
132 | Returns
133 | -------
134 | a string that represents the output file name for an APK file.
135 | """
136 | current_date = datetime.now(ZoneInfo(time_zone))
137 | formatted_date = current_date.strftime("%Y%b%d.%I%M%p").upper()
138 | return (
139 | f"Re{self.app_name}-Version{slugify(self.app_version)}"
140 | f"-PatchVersion{slugify(self.patch_bundles[0]["version"])}-{formatted_date}-output.apk"
141 | )
142 |
143 | def get_patch_bundles_versions(self: Self) -> list[str]:
144 | """Get versions of all patch bundles."""
145 | return [bundle["version"] for bundle in self.patch_bundles]
146 |
147 | def __str__(self: "APP") -> str:
148 | """Returns the str representation of the app."""
149 | attrs = vars(self)
150 | return ", ".join([f"{key}: {value}" for key, value in attrs.items()])
151 |
152 | def for_dump(self: Self) -> dict[str, Any]:
153 | """Convert the instance of this class to json."""
154 | return self.__dict__
155 |
    @staticmethod
    def download(url: str, config: RevancedConfig, assets_filter: str, file_name: str = "") -> tuple[str, str]:
        """The `download` function downloads a file from a given URL & filters the assets based on a given filter.

        Parameters
        ----------
        url : str
            The `url` parameter is a string that represents the URL of the resource you want to download.
            It can be a URL from GitHub or a local file URL.
        config : RevancedConfig
            The `config` parameter is an instance of the `RevancedConfig` class. It is used to provide
            configuration settings for the download process.
        assets_filter : str
            The `assets_filter` parameter is a string that is used to filter the assets to be downloaded
            from a GitHub repository. It is used when the `url` parameter starts with "https://github". The
            `assets_filter` string is matched against the names of the assets in the repository, and only
            matching assets are considered for download.
        file_name : str
            The `file_name` parameter is a string that represents the name of the file that will be
            downloaded. If no value is provided for `file_name`, the function will generate a filename based
            on the URL of the file being downloaded.

        Returns
        -------
        tuple of strings, which is the tag,file name of the downloaded file.
        """
        from src.downloader.download import Downloader  # noqa: PLC0415

        url = url.strip()
        # Non-GitHub URLs have no release tag; "latest" is the placeholder.
        tag = "latest"
        if url.startswith("https://github"):
            from src.downloader.github import Github  # noqa: PLC0415

            tag, url = Github.patch_resource(url, assets_filter, config)
            # Normalize "tags/<name>" refs down to just the tag name.
            if tag.startswith("tags/"):
                tag = tag.split("/")[-1]
        elif url.startswith("local://"):
            # Local resources are assumed to already exist; no download needed.
            return tag, url.split("/")[-1]
        if not file_name:
            extension = pathlib.Path(url).suffix
            file_name = APP.generate_filename(url) + extension
        Downloader(config).direct_download(url, file_name)
        return tag, file_name
198 |
199 | def _setup_download_tasks(self: Self) -> list[tuple[str, str, None, str]]:
200 | """Setup download tasks for CLI and patch bundles."""
201 | download_tasks = [
202 | ("cli", self.cli_dl, None, ".*jar"),
203 | ]
204 |
205 | # Download multiple patch bundles
206 | for i, patches_url in enumerate(self.patches_dl_list):
207 | bundle_name = f"patches_{i}" if len(self.patches_dl_list) > 1 else "patches"
208 | download_tasks.append((bundle_name, patches_url, None, ".*rvp"))
209 |
210 | return download_tasks
211 |
212 | def _handle_cached_resource(self: Self, resource_name: str, tag: str, file_name: str) -> None:
213 | """Handle cached resource and update appropriate data structures."""
214 | if resource_name.startswith("patches"):
215 | self.patch_bundles.append(
216 | {
217 | "name": resource_name,
218 | "file_name": file_name,
219 | "version": tag,
220 | },
221 | )
222 | # Keep backward compatibility for single bundle
223 | if resource_name == "patches" or len(self.patches_dl_list) == 1:
224 | self.resource["patches"] = {
225 | "file_name": file_name,
226 | "version": tag,
227 | }
228 | else:
229 | self.resource[resource_name] = {
230 | "file_name": file_name,
231 | "version": tag,
232 | }
233 |
234 | def _handle_downloaded_resource(
235 | self: Self,
236 | resource_name: str,
237 | tag: str,
238 | file_name: str,
239 | download_tasks: list[tuple[str, str, RevancedConfig, str]],
240 | resource_cache: dict[str, tuple[str, str]],
241 | ) -> None:
242 | """Handle newly downloaded resource and update cache."""
243 | self._handle_cached_resource(resource_name, tag, file_name)
244 |
245 | # Update cache for the corresponding URL
246 | for task_name, task_url, _, _ in download_tasks:
247 | if task_name == resource_name:
248 | resource_cache[task_url.strip()] = (tag, file_name)
249 | break
250 |
251 | def _prepare_download_tasks(
252 | self: Self,
253 | config: RevancedConfig,
254 | ) -> list[tuple[str, str, RevancedConfig, str]]:
255 | """Prepare download tasks with configuration."""
256 | base_tasks = self._setup_download_tasks()
257 | return [(name, url, config, filter_pattern) for name, url, _, filter_pattern in base_tasks]
258 |
259 | def _filter_cached_resources(
260 | self: Self,
261 | download_tasks: list[tuple[str, str, RevancedConfig, str]],
262 | resource_cache: dict[str, tuple[str, str]],
263 | resource_lock: Lock,
264 | ) -> list[tuple[str, str, RevancedConfig, str]]:
265 | """Filter out cached resources and handle cached ones."""
266 | resources_to_download: list[tuple[str, str, RevancedConfig, str]] = []
267 |
268 | with resource_lock:
269 | for resource_name, raw_url, cfg, assets_filter in download_tasks:
270 | url = raw_url.strip()
271 | if url in resource_cache:
272 | logger.info(f"Skipping {resource_name} download, using cached resource: {url}")
273 | tag, file_name = resource_cache[url]
274 | self._handle_cached_resource(resource_name, tag, file_name)
275 | else:
276 | resources_to_download.append((resource_name, url, cfg, assets_filter))
277 |
278 | return resources_to_download
279 |
280 | def _download_and_cache_resources(
281 | self: Self,
282 | resources_to_download: list[tuple[str, str, RevancedConfig, str]],
283 | download_tasks: list[tuple[str, str, RevancedConfig, str]],
284 | config: RevancedConfig,
285 | resource_cache: dict[str, tuple[str, str]],
286 | resource_lock: Lock,
287 | ) -> None:
288 | """Download resources in parallel and update cache thread-safely."""
289 | with ThreadPoolExecutor(config.max_resource_workers) as executor:
290 | futures: dict[str, concurrent.futures.Future[tuple[str, str]]] = {}
291 |
292 | for resource_name, url, cfg, assets_filter in resources_to_download:
293 | futures[resource_name] = executor.submit(self.download, url, cfg, assets_filter)
294 |
295 | concurrent.futures.wait(futures.values())
296 | self._update_resource_cache(futures, resources_to_download, download_tasks, resource_cache, resource_lock)
297 |
298 | def _update_resource_cache(
299 | self: Self,
300 | futures: dict[str, concurrent.futures.Future[tuple[str, str]]],
301 | resources_to_download: list[tuple[str, str, RevancedConfig, str]],
302 | download_tasks: list[tuple[str, str, RevancedConfig, str]],
303 | resource_cache: dict[str, tuple[str, str]],
304 | resource_lock: Lock,
305 | ) -> None:
306 | """Update resource cache with downloaded resources."""
307 | with resource_lock:
308 | for resource_name, future in futures.items():
309 | try:
310 | tag, file_name = future.result()
311 | corresponding_url = next(url for name, url, _, _ in resources_to_download if name == resource_name)
312 | if corresponding_url not in resource_cache:
313 | self._handle_downloaded_resource(
314 | resource_name,
315 | tag,
316 | file_name,
317 | download_tasks,
318 | resource_cache,
319 | )
320 | logger.info(f"Added {resource_name} to resource cache: {corresponding_url}")
321 | else:
322 | logger.info(
323 | f"Resource {resource_name} was already cached by another thread: {corresponding_url}",
324 | )
325 | cached_tag, cached_file_name = resource_cache[corresponding_url]
326 | self._handle_cached_resource(resource_name, cached_tag, cached_file_name)
327 | except BuilderError as e:
328 | msg = f"Failed to download {resource_name} resource."
329 | raise PatchingFailedError(msg) from e
330 |
331 | def download_patch_resources(
332 | self: Self,
333 | config: RevancedConfig,
334 | resource_cache: dict[str, tuple[str, str]],
335 | resource_lock: Lock,
336 | ) -> None:
337 | """Download various resources required for patching.
338 |
339 | Parameters
340 | ----------
341 | config : RevancedConfig
342 | Configuration settings for the resource download tasks.
343 | resource_cache: dict[str, tuple[str, str]]
344 | Cache of previously downloaded resources.
345 | resource_lock: Lock
346 | Thread lock for safe access to resource_cache.
347 | """
348 | logger.info("Downloading resources for patching.")
349 |
350 | download_tasks = self._prepare_download_tasks(config)
351 | resources_to_download = self._filter_cached_resources(download_tasks, resource_cache, resource_lock)
352 |
353 | if resources_to_download:
354 | self._download_and_cache_resources(
355 | resources_to_download,
356 | download_tasks,
357 | config,
358 | resource_cache,
359 | resource_lock,
360 | )
361 |
362 | @staticmethod
363 | def generate_filename(url: str) -> str:
364 | """The function `generate_filename` takes URL as input and returns a hashed version of the URL as the filename.
365 |
366 | Parameters
367 | ----------
368 | url : str
369 | The `url` parameter is a string that represents a URL.
370 |
371 | Returns
372 | -------
373 | the encoded URL as a string.
374 | """
375 | encoded_url: str = hashlib.sha256(url.encode()).hexdigest()
376 | return encoded_url
377 |
--------------------------------------------------------------------------------