├── .coveragerc
├── .flake8
├── .gitattributes
├── .github
│   ├── dependabot.yml
│   └── workflows
│       └── run-tests.yml
├── .gitignore
├── .mypy.ini
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── canonicalize-manifest
├── data
│   ├── org.flathub.flatpak-external-data-checker.metainfo.xml
│   └── source-state.schema.json
├── dependencies.apt.txt
├── flatpak-external-data-checker
├── requirements.txt
├── run-in-container.sh
├── src
│   ├── __init__.py
│   ├── checkers
│   │   ├── __init__.py
│   │   ├── anityachecker.py
│   │   ├── chromiumchecker.py
│   │   ├── debianrepochecker.py
│   │   ├── electronchecker.py
│   │   ├── gitchecker.py
│   │   ├── gnomechecker.py
│   │   ├── htmlchecker.py
│   │   ├── jetbrainschecker.py
│   │   ├── jsonchecker.py
│   │   ├── pypichecker.py
│   │   ├── rpmrepochecker.py
│   │   ├── rustchecker.py
│   │   ├── snapcraftchecker.py
│   │   └── urlchecker.py
│   ├── lib
│   │   ├── __init__.py
│   │   ├── appdata.py
│   │   ├── checksums.py
│   │   ├── errors.py
│   │   ├── externaldata.py
│   │   └── utils.py
│   ├── main.py
│   └── manifest.py
└── tests
    ├── __init__.py
    ├── com.google.Chrome.yaml
    ├── com.jetbrains.PhpStorm.json
    ├── com.nordpass.NordPass.yaml
    ├── com.unity.UnityHub.yaml
    ├── com.valvesoftware.Steam.yml
    ├── com.virustotal.Uploader.yml
    ├── com.visualstudio.code.yaml
    ├── fedc.test.ElectronChecker.yml
    ├── firefox-sources.json
    ├── io.github.stedolan.jq.yml
    ├── net.invisible_island.xterm.appdata.xml
    ├── net.invisible_island.xterm.yml
    ├── org.chromium.Chromium.yaml
    ├── org.debian.tracker.pkg.apt.yml
    ├── org.externaldatachecker.Manifest.json
    ├── org.flatpak.Flatpak.yml
    ├── org.freedesktop.Sdk.Extension.rust-nightly.yml
    ├── org.gnome.baobab.json
    ├── org.x.xeyes.yml
    ├── phony-external-source-single-item.json
    ├── phony-external-source.json
    ├── phony-shared-module.json
    ├── phony-too-large-generated-sources.json
    ├── test_anityachecker.py
    ├── test_appdata.py
    ├── test_checker.py
    ├── test_chromiumchecker.py
    ├── test_debianrepochecker.py
    ├── test_electronchecker.py
    ├── test_gitchecker.py
    ├── test_gnomechecker.py
    ├── test_htmlchecker.py
    ├── test_jetbrainschecker.py
    ├── test_jsonchecker.py
    ├── test_loader.py
    ├── test_main.py
    ├── test_newlinepreservation.py
    ├── test_pypichecker.py
    ├── test_rpmrepochecker.py
    ├── test_rustchecker.py
    ├── test_snapcraftchecker.py
    ├── test_urlchecker.py
    └── test_util.py

/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = src
3 | 
4 | [report]
5 | exclude_lines =
6 |     pragma: no cover
7 |     raise NotImplementedError
8 | 
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | # The default is 79 characters. A semi-official Python code formatter, Black
3 | # <https://github.com/psf/black>, defaults to 88, based on some
4 | # empirical research on reducing annoying line wrapping on real source code.
5 | max-line-length = 88
6 | 
7 | # Black disagrees with some rules.
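# For example, Black formats long slice expressions with symmetric spaces
# around ":" (as in `ham[lower + offset : upper + offset]`), which the rule
# below would otherwise flag.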
8 | # E203 - Whitespace before ':' 9 | extend-ignore = E203 10 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Ref: https://git-scm.com/docs/gitattributes 2 | * text=auto eol=lf -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | -------------------------------------------------------------------------------- /.github/workflows/run-tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - master 8 | tags: 9 | - "**" 10 | 11 | env: 12 | DOCKER_REGISTRY: ghcr.io 13 | DOCKER_IMAGE: ${{ github.repository }} 14 | 15 | jobs: 16 | check-formatting: 17 | name: Check formatting 18 | runs-on: ubuntu-latest 19 | container: debian:bookworm 20 | permissions: 21 | contents: read 22 | 23 | steps: 24 | - uses: actions/checkout@v5 25 | 26 | - name: Install dependencies 27 | run: | 28 | apt-get update 29 | apt-get install -y \ 30 | $(xargs < dependencies.apt.txt) \ 31 | python3-setuptools 32 | python3 -m pip install --break-system-packages \ 33 | black 34 | 35 | - name: Run black 36 | run: | 37 | python3 -m black --check --diff . 38 | 39 | - name: Run flake8 40 | run: | 41 | python3 -m flake8 42 | 43 | check-typing: 44 | name: Static type check 45 | runs-on: ubuntu-latest 46 | container: debian:bookworm 47 | permissions: 48 | contents: read 49 | steps: 50 | - uses: actions/checkout@v5 51 | 52 | - name: Prime pip cache 53 | uses: actions/cache@v4 54 | with: 55 | path: /.pip 56 | key: | 57 | pip-${{ runner.os }}-${{ hashFiles('requirements.txt') }} 58 | restore-keys: | 59 | pip-${{ runner.os }}- 60 | 61 | - name: Install dependencies 62 | run: | 63 | apt-get update 64 | apt-get install -y \ 65 | $(xargs < dependencies.apt.txt) \ 66 | python3-setuptools 67 | python3 -m pip install --break-system-packages \ 68 | --cache-dir=/.pip \ 69 | mypy \ 70 | lxml-stubs 71 | 72 | - name: Prime mypy cache 73 | uses: actions/cache@v4 74 | with: 75 | path: .mypy_cache 76 | key: | 77 | mypy-${{ runner.os }}-${{ github.run_id }} 78 | restore-keys: | 79 | mypy-${{ runner.os }}- 80 | 81 | - name: Run mypy check 82 | run: | 83 | env PIP_BREAK_SYSTEM_PACKAGES=1 \ 84 | PIP_CACHE_DIR=/.pip python3 -m mypy \ 85 | --install-types \ 86 | --non-interactive \ 87 | src 88 | 89 | build-and-test-container: 90 | name: Build and test container 91 | runs-on: ubuntu-latest 92 | permissions: 93 | packages: write 94 | steps: 95 | - uses: actions/checkout@v5 96 | 97 | - name: Set up Docker Buildx 98 | uses: docker/setup-buildx-action@v3 99 | 100 | - name: Build container 101 | id: docker_build 102 | uses: docker/build-push-action@v6 103 | with: 104 | context: . 
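          # push: false together with load: true (below) keeps the freshly
          # built image in the local Docker daemon, so the next step can
          # `docker run` it for testing before anything is pushed.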
105 | file: ./Dockerfile 106 | tags: ${{ env.DOCKER_IMAGE }}:build 107 | push: false 108 | load: true 109 | cache-from: type=registry,ref=${{ env.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:latest 110 | cache-to: type=inline 111 | 112 | - name: Run tests in container 113 | run: | 114 | docker run -v $PWD:$PWD -w $PWD --entrypoint bash ${{ env.DOCKER_IMAGE }}:build -c \ 115 | "apt-get install python3-coverage && python3 -m coverage run -m unittest discover --verbose --buffer" 116 | 117 | - name: Submit code coverage to Coveralls.io 118 | env: 119 | COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} 120 | run: | 121 | sudo apt-get install -y \ 122 | python3-setuptools 123 | sudo python3 -m pip install \ 124 | coveralls 125 | python3 -m coveralls 126 | continue-on-error: true 127 | 128 | - name: Login to registry 129 | if: github.event_name != 'pull_request' 130 | uses: docker/login-action@v3 131 | with: 132 | registry: ${{ env.DOCKER_REGISTRY }} 133 | username: ${{ github.actor }} 134 | password: ${{ secrets.GITHUB_TOKEN }} 135 | 136 | - name: Push image to registry 137 | if: github.event_name != 'pull_request' 138 | uses: docker/build-push-action@v6 139 | with: 140 | context: . 141 | file: ./Dockerfile 142 | tags: ${{ env.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:latest 143 | push: true 144 | 145 | - name: Login to ghcr.io using Flathub credentials 146 | if: github.event_name != 'pull_request' 147 | uses: docker/login-action@v3 148 | with: 149 | registry: ghcr.io 150 | username: ${{ secrets.FLATHUB_ORG_USER }} 151 | password: ${{ secrets.FLATHUB_ORG_TOKEN }} 152 | 153 | - name: Push image to the old location 154 | if: github.event_name != 'pull_request' 155 | uses: docker/build-push-action@v6 156 | with: 157 | context: . 158 | file: ./Dockerfile 159 | tags: ghcr.io/flathub/flatpak-external-data-checker:latest 160 | push: true 161 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | /.coverage 3 | /coverage.xml 4 | /htmlcov 5 | /.mypy_cache 6 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | 3 | [mypy-apt.*] 4 | ignore_missing_imports = True 5 | 6 | [mypy-apt_pkg.*] 7 | ignore_missing_imports = True 8 | 9 | [mypy-lxml.*] 10 | ignore_missing_imports = True 11 | 12 | [mypy-gi.*] 13 | ignore_missing_imports = True 14 | 15 | [mypy-elftools.*] 16 | ignore_missing_imports = True 17 | 18 | [mypy-editorconfig.*] 19 | ignore_missing_imports = True 20 | 21 | [mypy-magic.*] 22 | ignore_missing_imports = True 23 | 24 | [mypy-semver.*] 25 | ignore_missing_imports = True 26 | 27 | [mypy-github] 28 | ignore_missing_imports = True 29 | 30 | [mypy-ruamel.yaml.*] 31 | ignore_errors = True 32 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Code style 2 | 3 | All Python code should be formatted by [Black](https://github.com/psf/black). 4 | We don't make the rules! Install Black, then run: 5 | 6 | ``` 7 | black . 8 | ``` 9 | 10 | # Running tests 11 | 12 | There is a moderately-comprehensive test suite. Currently, it requires an 13 | internet connection and takes a few minutes to run. 
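
CI runs the suite under `coverage` (see `.github/workflows/run-tests.yml`);
the same invocation works locally, e.g.:

```bash
# Measure coverage while running the tests, then print a summary
python3 -m coverage run -m unittest discover --verbose --buffer
python3 -m coverage report
```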
14 | 15 | ```bash 16 | # Run all the tests (some of which need an internet connection): 17 | python3 -m unittest discover 18 | 19 | # Run one suite of tests 20 | python3 -m unittest tests.test_appdata 21 | 22 | # More information 23 | python3 -m unittest --help 24 | ``` 25 | 26 | # Dependencies 27 | 28 | See the `Dockerfile` for the Debian and PyPI dependencies. unappimage is 29 | optional, as is bubblewrap: we recommend one or the other. 30 | 31 | ## Using a `podman` container 32 | 33 | The easiest way to get all the dependencies this tool needs is to build & run 34 | the container image specified in the `Dockerfile`. There's a wrapper script, 35 | whose only dependency is the `podman` command: 36 | 37 | ```bash 38 | # Run all the tests (some of which need an internet connection): 39 | ./run-in-container.sh python3 -m unittest discover 40 | 41 | # Run one suite of tests 42 | ./run-in-container.sh python3 -m unittest tests.test_appdata 43 | 44 | # More information 45 | ./run-in-container.sh python3 -m unittest --help 46 | ``` 47 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bookworm 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | ADD dependencies.apt.txt ./ 6 | 7 | RUN sed -i "s/Types: deb/Types: deb deb-src/" /etc/apt/sources.list.d/debian.sources && \ 8 | apt-get update && \ 9 | xargs apt-get install --no-install-recommends -y < dependencies.apt.txt && \ 10 | apt-get --no-install-recommends -y build-dep python3-apt && \ 11 | apt-get clean && \ 12 | rmdir /var/cache/apt/archives/partial 13 | 14 | # All requirements should be satisfied by dependencies.apt.txt. Feed 15 | # requirements.txt through pip to check it is in synch, without installing 16 | # anything. 17 | # 18 | # The 'sed' invocation is required because pip doesn't know that the tarball 19 | # listed in that file is the Debian package. 20 | ADD requirements.txt ./ 21 | RUN sed -i 's/python-apt @ .*/python-apt/' requirements.txt && \ 22 | pip install --dry-run --report report.json --break-system-packages -r requirements.txt && \ 23 | cat report.json && \ 24 | jq -e '.install == []' report.json >/dev/null && \ 25 | rm report.json && \ 26 | rm -rf $HOME/.cache/pip 27 | 28 | COPY src /app/src 29 | COPY flatpak-external-data-checker /app/ 30 | COPY canonicalize-manifest /app/ 31 | 32 | RUN python3 -m compileall /app/src 33 | 34 | ENTRYPOINT [ "/app/flatpak-external-data-checker" ] 35 | -------------------------------------------------------------------------------- /canonicalize-manifest: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # canonicalize-manifest: Reformat a manifest to match the output of the full checker. 4 | # 5 | # Copyright © 2019 Endless Mobile, Inc. 6 | # 7 | # This program is free software; you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation; either version 2 of the License, or 10 | # (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 
16 | #
17 | # You should have received a copy of the GNU General Public License along
18 | # with this program; if not, write to the Free Software Foundation, Inc.,
19 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | 
21 | import argparse
22 | import sys
23 | from pathlib import Path
24 | 
25 | exe = Path(sys.argv[0]).resolve()
26 | sys.path.insert(0, str(exe.parent))
27 | 
28 | from src.lib.utils import read_manifest, dump_manifest  # noqa: E402
29 | 
30 | if __name__ == "__main__":
31 |     parser = argparse.ArgumentParser(
32 |         description="Reformat a manifest to match the output of the full checker."
33 |     )
34 |     parser.add_argument(
35 |         "manifest_path", help="Path to JSON or YAML manifest to reformat"
36 |     )
37 |     args = parser.parse_args()
38 | 
39 |     dump_manifest(read_manifest(args.manifest_path), args.manifest_path)
40 | 
--------------------------------------------------------------------------------
/data/org.flathub.flatpak-external-data-checker.metainfo.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <component type="console-application">
3 |   <id>org.flathub.flatpak-external-data-checker</id>
4 |   <name>Flatpak External Data Checker</name>
5 |   <summary>A tool for checking if the external data used in Flatpak manifests is still up to date</summary>
6 |   <url type="homepage">https://github.com/flathub-infra/flatpak-external-data-checker</url>
7 |   <url type="bugtracker">https://github.com/flathub-infra/flatpak-external-data-checker/issues</url>
8 |   <description>
9 |     <p>This is a tool for checking for outdated or broken links of external data in Flatpak manifests.</p>
10 |   </description>
11 |   <metadata_license>CC0-1.0</metadata_license>
12 |   <project_license>GPL-2.0</project_license>
13 | </component>
15 | -------------------------------------------------------------------------------- /data/source-state.schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$defs": { 3 | "source-state": { 4 | "file": { 5 | "type": "object", 6 | "properties": { 7 | "url": { "type": "string" }, 8 | "checksum": { 9 | "type": "object", 10 | "properties": { 11 | "md5": { "type": "string" }, 12 | "sha1": { "type": "string" }, 13 | "sha256": { "type": "string" }, 14 | "sha512": { "type": "string" } 15 | } 16 | }, 17 | "version": { "type": "string" }, 18 | "timestamp": { "type": "string" } 19 | }, 20 | "required": [ "url", "checksum" ] 21 | }, 22 | "git": { 23 | "type": "object", 24 | "properties": { 25 | "url": { "type": "string" }, 26 | "commit": { "type": "string" }, 27 | "tag": { "type": "string" }, 28 | "branch": { "type": "string" }, 29 | "version": { "type": "string" }, 30 | "timestamp": { "type": "string" } 31 | }, 32 | "required": [ "url" ] 33 | } 34 | } 35 | }, 36 | "type": "object", 37 | "anyOf": [ 38 | { "$ref": "#/$defs/source-state/file" }, 39 | { "$ref": "#/$defs/source-state/git" } 40 | ] 41 | } 42 | -------------------------------------------------------------------------------- /dependencies.apt.txt: -------------------------------------------------------------------------------- 1 | bubblewrap 2 | gir1.2-glib-2.0 3 | gir1.2-json-1.0 4 | git 5 | jq 6 | python3-aiodns 7 | python3-aiohttp 8 | python3-apt 9 | python3-brotli 10 | python3-cairo 11 | python3-chardet 12 | python3-editorconfig 13 | python3-flake8 14 | python3-gi 15 | python3-github 16 | python3-jsonschema 17 | python3-lxml 18 | python3-magic 19 | python3-packaging 20 | python3-pip 21 | python3-pyelftools 22 | python3-requests 23 | python3-ruamel.yaml 24 | python3-semver 25 | python3-setuptools 26 | python3-toml 27 | squashfs-tools 28 | ssh-client 29 | -------------------------------------------------------------------------------- /flatpak-external-data-checker: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # flatpak-extra-data-checker: A tool for checking the status of 4 | # the extra data in a Flatpak manifest. 5 | # 6 | # Copyright © 2019 Endless Mobile, Inc. 7 | # 8 | # This program is free software; you can redistribute it and/or modify 9 | # it under the terms of the GNU General Public License as published by 10 | # the Free Software Foundation; either version 2 of the License, or 11 | # (at your option) any later version. 12 | # 13 | # This program is distributed in the hope that it will be useful, 14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 16 | # GNU General Public License for more details. 17 | # 18 | # You should have received a copy of the GNU General Public License along 19 | # with this program; if not, write to the Free Software Foundation, Inc., 20 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
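
# Note: the block below prepends the script's own directory to sys.path so
# that the bundled `src` package is importable when the script is run straight
# from a git checkout, without installing anything.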
21 | 22 | import sys 23 | from pathlib import Path 24 | 25 | exe = Path(sys.argv[0]).resolve() 26 | sys.path.insert(0, str(exe.parent)) 27 | 28 | from src.main import main # noqa: E402 29 | if __name__ == '__main__': 30 | main() 31 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | PyGObject 2 | PyGithub 3 | aiohttp 4 | editorconfig 5 | jsonschema 6 | lxml 7 | packaging 8 | pyelftools 9 | python-apt @ http://deb.debian.org/debian/pool/main/p/python-apt/python-apt_2.6.0.tar.xz 10 | python-magic 11 | requests 12 | ruamel.yaml 13 | semver 14 | toml 15 | -------------------------------------------------------------------------------- /run-in-container.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | GIT_USER_NAME=$(git config user.name) 5 | GIT_USER_EMAIL=$(git config user.email) 6 | 7 | CWD=$(pwd) 8 | 9 | podman run --rm --privileged \ 10 | -v $HOME:$HOME:rslave \ 11 | -v $CWD:$CWD:rslave \ 12 | -w $CWD \ 13 | --entrypoint= \ 14 | -e GIT_AUTHOR_NAME="$GIT_USER_NAME" \ 15 | -e GIT_COMMITTER_NAME="$GIT_USER_NAME" \ 16 | -e GIT_AUTHOR_EMAIL="$GIT_USER_EMAIL" \ 17 | -e GIT_COMMITTER_EMAIL="$GIT_USER_EMAIL" \ 18 | -it ghcr.io/flathub-infra/flatpak-external-data-checker \ 19 | $* 20 | -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flathub-infra/flatpak-external-data-checker/f7f982bd184a42c33d233d63214770d7479baf4f/src/__init__.py -------------------------------------------------------------------------------- /src/checkers/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | from distutils.version import LooseVersion 5 | from string import Template 6 | import datetime 7 | import json 8 | import re 9 | import zlib 10 | import sys 11 | import typing as t 12 | 13 | # pylint: disable=wrong-import-position 14 | if sys.version_info >= (3, 10): 15 | from typing import TypeAlias 16 | else: 17 | from typing_extensions import TypeAlias 18 | # pylint: enable=wrong-import-position 19 | 20 | import importlib 21 | import pkgutil 22 | 23 | import aiohttp 24 | from yarl import URL 25 | import jsonschema 26 | import ruamel.yaml 27 | import lxml.etree as ElementTree 28 | 29 | from ..lib import ( 30 | utils, 31 | NETWORK_ERRORS, 32 | WRONG_CONTENT_TYPES_FILE, 33 | WRONG_CONTENT_TYPES_ARCHIVE, 34 | FILE_URL_SCHEMES, 35 | ) 36 | from ..lib.externaldata import ( 37 | ExternalBase, 38 | ExternalData, 39 | ExternalState, 40 | ExternalFile, 41 | ) 42 | from ..lib.errors import ( 43 | CheckerMetadataError, 44 | CheckerQueryError, 45 | CheckerFetchError, 46 | ) 47 | from ..lib.checksums import ( 48 | MultiHash, 49 | MultiDigest, 50 | ) 51 | 52 | JSONType = t.Union[str, int, float, bool, None, t.Dict[str, t.Any], t.List[t.Any]] 53 | XMLElement: TypeAlias = ElementTree._Element # pylint: disable=protected-access 54 | 55 | yaml = ruamel.yaml.YAML(typ="safe") 56 | log = logging.getLogger(__name__) 57 | 58 | ALL_CHECKERS: t.List[t.Type[Checker]] = [] 59 | 60 | 61 | class Checker: 62 | """ 63 | Base class for implementing checkers 64 | 65 | :cvar PRIORITY: Alter the checker priority (lower is used first) 66 | """ 67 | 68 | PRIORITY: t.ClassVar[int] = 0 69 | 
    CHECKER_DATA_TYPE: t.Optional[str] = None
70 |     CHECKER_DATA_SCHEMA: t.Dict[str, t.Any]
71 |     SUPPORTED_DATA_CLASSES: t.List[t.Type[ExternalBase]] = [ExternalData]
72 |     session: aiohttp.ClientSession
73 | 
74 |     @classmethod
75 |     def __init_subclass__(cls, *args, register: bool = True, **kwargs):
76 |         super().__init_subclass__(*args, **kwargs)
77 |         if register and cls not in ALL_CHECKERS:
78 |             ALL_CHECKERS.append(cls)
79 |             ALL_CHECKERS.sort(key=lambda c: c.PRIORITY)
80 | 
81 |     def __init__(self, session: aiohttp.ClientSession):
82 |         self.session = session
83 | 
84 |     # pylint: disable=unused-argument
85 |     @classmethod
86 |     def get_json_schema(cls, data_class: t.Type[ExternalBase]) -> t.Dict[str, t.Any]:
87 |         if not hasattr(cls, "CHECKER_DATA_SCHEMA"):
88 |             raise NotImplementedError(
89 |                 "If schema is not declared, this method must be overridden"
90 |             )
91 | 
92 |         return cls.CHECKER_DATA_SCHEMA
93 | 
94 |     @classmethod
95 |     def should_check(cls, external_data: ExternalBase) -> bool:
96 |         supported = any(
97 |             isinstance(external_data, c) for c in cls.SUPPORTED_DATA_CLASSES
98 |         )
99 |         applicable = (
100 |             cls.CHECKER_DATA_TYPE is not None
101 |             and external_data.checker_data.get("type") == cls.CHECKER_DATA_TYPE
102 |         )
103 |         return applicable and supported
104 | 
105 |     async def validate_checker_data(self, external_data: ExternalBase):
106 |         assert any(isinstance(external_data, c) for c in self.SUPPORTED_DATA_CLASSES)
107 |         schema = self.get_json_schema(type(external_data))
108 |         if not schema:
109 |             return
110 |         try:
111 |             jsonschema.validate(external_data.checker_data, schema)
112 |         except jsonschema.ValidationError as err:
113 |             raise CheckerMetadataError("Invalid metadata schema") from err
114 | 
115 |     async def check(self, external_data: ExternalBase):
116 |         raise NotImplementedError
117 | 
118 |     # Various helpers for checkers; assumed to be safely usable only from subclasses
119 | 
120 |     async def _get_json(
121 |         self,
122 |         url: t.Union[str, URL],
123 |         headers: t.Optional[t.Dict[str, str]] = None,
124 |     ) -> JSONType:
125 |         url = URL(url)
126 |         log.debug("Loading JSON from %s", url)
127 |         if headers is None:
128 |             headers = {}
129 |         try:
130 |             async with self.session.get(url, headers=headers) as response:
131 |                 if re.match(r".+\.ya?ml$", response.url.name):
132 |                     try:
133 |                         return yaml.load(await response.read())
134 |                     except ruamel.yaml.error.YAMLError as err:
135 |                         raise CheckerQueryError("Failed to parse YAML") from err
136 |                 try:
137 |                     return await response.json(content_type=None)
138 |                 except (UnicodeDecodeError, json.JSONDecodeError) as err:
139 |                     raise CheckerQueryError("Failed to parse JSON") from err
140 |         except NETWORK_ERRORS as err:
141 |             raise CheckerQueryError from err
142 | 
143 |     async def _get_xml(self, url: URL) -> XMLElement:
144 |         parser = ElementTree.XMLPullParser(load_dtd=False, resolve_entities=False)
145 |         log.debug("Loading XML from %s", url)
146 |         async with self.session.get(url) as resp:
147 |             is_gzip = url.name.endswith(".gz")
148 |             decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
149 |             async for chunk, _ in resp.content.iter_chunks():
150 |                 parser.feed(decompressor.decompress(chunk) if is_gzip else chunk)
151 |         return parser.close()
152 | 
153 |     @staticmethod
154 |     def _version_parts(version: str) -> t.Dict[str, str]:
155 |         """
156 |         Parse version string and return a dict of named components.
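
        E.g. for "1.2.3" this returns {"version": "1.2.3", "version0": "1",
        "major": "1", "version1": "2", "minor": "2", "version2": "3",
        "patch": "3"}, so checker URL/tag templates can refer to $version,
        $major, $minor, $patch or $versionN.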
157 | """ 158 | version_list = LooseVersion(version).version 159 | tmpl_vars: t.Dict[str, t.Union[str, int]] 160 | tmpl_vars = {"version": version} 161 | for i, version_part in enumerate(version_list): 162 | tmpl_vars[f"version{i}"] = version_part 163 | if i == 0: 164 | tmpl_vars["major"] = version_part 165 | elif i == 1: 166 | tmpl_vars["minor"] = version_part 167 | elif i == 2: 168 | tmpl_vars["patch"] = version_part 169 | return {k: str(v) for k, v in tmpl_vars.items()} 170 | 171 | @classmethod 172 | def _substitute_template( 173 | cls, 174 | template_string: str, 175 | variables: t.Dict[str, t.Any], 176 | ) -> str: 177 | tmpl = Template(template_string) 178 | try: 179 | return tmpl.substitute(**variables) 180 | except (KeyError, ValueError) as err: 181 | raise CheckerMetadataError("Error substituting template") from err 182 | 183 | @classmethod 184 | def _get_pattern( 185 | cls, 186 | checker_data: t.Dict, 187 | pattern_name: str, 188 | expected_groups: int = 1, 189 | ) -> t.Optional[re.Pattern]: 190 | try: 191 | pattern_str = checker_data[pattern_name] 192 | except KeyError: 193 | return None 194 | 195 | try: 196 | pattern = re.compile(pattern_str) 197 | except re.error as err: 198 | raise CheckerMetadataError(f"Invalid regex '{pattern_str}'") from err 199 | if pattern.groups != expected_groups: 200 | raise CheckerMetadataError( 201 | f"Pattern '{pattern.pattern}' contains {pattern.groups} group(s) " 202 | f"instead of {expected_groups}" 203 | ) 204 | return pattern 205 | 206 | async def _complete_digests( 207 | self, url: t.Union[str, URL], digests: MultiDigest 208 | ) -> MultiDigest: 209 | """ 210 | Re-download given `url`, verify it against given `digests`, 211 | and return a `MultiDigest` with all digest types set. 212 | """ 213 | multihash = MultiHash() 214 | try: 215 | async with self.session.get(url) as resp: 216 | async for chunk, _ in resp.content.iter_chunks(): 217 | multihash.update(chunk) 218 | except NETWORK_ERRORS as err: 219 | raise CheckerFetchError from err 220 | new_digests = multihash.hexdigest() 221 | if new_digests != digests: 222 | raise CheckerFetchError( 223 | f"Checksum mismatch for {url}: " 224 | f"expected {digests}, got {new_digests}" 225 | ) 226 | return new_digests 227 | 228 | async def _set_new_version(self, source: ExternalBase, new_version: ExternalState): 229 | """ 230 | Set the `new_version` for `source`, ensuring common digest types are set. 
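
        If the proposed new version has no digest type in common with the
        current version (e.g. the manifest carries sha256 but the checker
        only obtained md5), the file is re-downloaded via
        `_complete_digests()` below so the checksums become comparable.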
231 | """ 232 | if ( 233 | isinstance(source, ExternalData) and isinstance(new_version, ExternalFile) 234 | ) and not ( 235 | source.current_version.checksum.digests & new_version.checksum.digests 236 | ): 237 | log.warning( 238 | "Source %s: %s didn't get a %s digest; available digests were %s", 239 | source, 240 | self.__class__.__name__, 241 | source.current_version.checksum.digests, 242 | new_version.checksum.digests, 243 | ) 244 | checksum = await self._complete_digests( 245 | new_version.url, new_version.checksum 246 | ) 247 | new_version = new_version._replace(checksum=checksum) 248 | 249 | source.set_new_version(new_version) 250 | 251 | async def _update_version( 252 | self, 253 | external_data: ExternalData, 254 | latest_version: str, 255 | latest_url: str, 256 | follow_redirects: bool = False, 257 | timestamp: t.Optional[datetime.datetime] = None, 258 | ): 259 | assert latest_version is not None 260 | assert latest_url is not None 261 | 262 | if ( 263 | latest_url == external_data.current_version.url 264 | and external_data.type != external_data.Type.EXTRA_DATA 265 | ): 266 | external_data.state |= external_data.State.LATEST 267 | return 268 | 269 | if external_data.type == ExternalData.Type.ARCHIVE: 270 | wrong_content_types = WRONG_CONTENT_TYPES_ARCHIVE 271 | else: 272 | wrong_content_types = WRONG_CONTENT_TYPES_FILE 273 | 274 | latest_url_scheme = URL(latest_url).scheme 275 | if latest_url_scheme not in FILE_URL_SCHEMES: 276 | raise CheckerMetadataError(f"Invalid URL scheme {latest_url_scheme}") 277 | 278 | try: 279 | new_version = await utils.get_extra_data_info_from_url( 280 | url=latest_url, 281 | follow_redirects=follow_redirects, 282 | session=self.session, 283 | content_type_deny=wrong_content_types, 284 | ) 285 | except NETWORK_ERRORS as err: 286 | raise CheckerFetchError from err 287 | 288 | new_version = new_version._replace( 289 | version=latest_version # pylint: disable=no-member 290 | ) 291 | if timestamp is not None: 292 | new_version = new_version._replace(timestamp=timestamp) 293 | external_data.set_new_version(new_version) 294 | 295 | 296 | def load_checkers(): 297 | for plugin_info in pkgutil.iter_modules(__path__): 298 | try: 299 | importlib.import_module(f".{plugin_info.name}", package=__name__) 300 | except ImportError as err: 301 | log.error("Can't load %s: %s", plugin_info.name, err) 302 | continue 303 | 304 | 305 | load_checkers() 306 | -------------------------------------------------------------------------------- /src/checkers/anityachecker.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import typing as t 3 | 4 | from yarl import URL 5 | 6 | from ..lib import OPERATORS_SCHEMA, NETWORK_ERRORS 7 | from ..lib.externaldata import ( 8 | ExternalBase, 9 | ExternalData, 10 | ExternalGitRepo, 11 | ExternalGitRef, 12 | ) 13 | from ..lib.utils import filter_versions 14 | from ..lib.errors import CheckerQueryError 15 | from . 
import Checker 16 | 17 | log = logging.getLogger(__name__) 18 | 19 | 20 | class AnityaChecker(Checker): 21 | CHECKER_DATA_TYPE = "anitya" 22 | CHECKER_DATA_SCHEMA = { 23 | "type": "object", 24 | "properties": { 25 | "baseurl": {"type": "string"}, 26 | "project-id": {"type": "number"}, 27 | "stable-only": {"type": "boolean"}, 28 | "versions": OPERATORS_SCHEMA, 29 | "url-template": {"type": "string"}, 30 | "tag-template": {"type": "string"}, 31 | }, 32 | } 33 | SUPPORTED_DATA_CLASSES = [ExternalData, ExternalGitRepo] 34 | 35 | @classmethod 36 | def get_json_schema(cls, data_class: t.Type[ExternalBase]): 37 | schema = super().get_json_schema(data_class).copy() 38 | if issubclass(data_class, ExternalGitRepo): 39 | schema["required"] = ["project-id", "tag-template"] 40 | else: 41 | schema["required"] = ["project-id", "url-template"] 42 | return schema 43 | 44 | async def check(self, external_data: ExternalBase): 45 | assert self.should_check(external_data) 46 | 47 | instance_url = external_data.checker_data.get( 48 | "baseurl", "https://release-monitoring.org" 49 | ) 50 | versions_url = URL(instance_url) / "api/v2/versions/" 51 | stable_only = external_data.checker_data.get("stable-only", True) 52 | constraints = external_data.checker_data.get("versions", {}).items() 53 | 54 | query = {"project_id": external_data.checker_data["project-id"]} 55 | try: 56 | async with self.session.get(versions_url % query) as response: 57 | result = await response.json() 58 | except NETWORK_ERRORS as err: 59 | raise CheckerQueryError from err 60 | 61 | if stable_only or constraints: 62 | if stable_only: 63 | versions = result["stable_versions"] 64 | else: 65 | versions = result["versions"] 66 | if constraints: 67 | versions = filter_versions(versions, constraints, sort=False) 68 | try: 69 | latest_version = versions[0] 70 | except IndexError as err: 71 | raise CheckerQueryError("Can't find matching version") from err 72 | else: 73 | latest_version = result["latest_version"] 74 | 75 | if isinstance(external_data, ExternalGitRepo): 76 | return await self._check_git(external_data, latest_version) 77 | assert isinstance(external_data, ExternalData) 78 | return await self._check_data(external_data, latest_version) 79 | 80 | async def _check_data(self, external_data: ExternalData, latest_version): 81 | url_template = external_data.checker_data["url-template"] 82 | latest_url = self._substitute_template( 83 | url_template, self._version_parts(latest_version) 84 | ) 85 | 86 | await self._update_version( 87 | external_data, latest_version, latest_url, follow_redirects=False 88 | ) 89 | 90 | async def _check_git(self, external_data: ExternalGitRepo, latest_version): 91 | tag_template = external_data.checker_data["tag-template"] 92 | latest_tag = self._substitute_template( 93 | tag_template, self._version_parts(latest_version) 94 | ) 95 | 96 | new_version = await ExternalGitRef( 97 | url=external_data.current_version.url, 98 | commit=None, 99 | tag=latest_tag, 100 | branch=None, 101 | version=latest_version, 102 | timestamp=None, 103 | ).fetch_remote() 104 | 105 | external_data.set_new_version(new_version) 106 | -------------------------------------------------------------------------------- /src/checkers/chromiumchecker.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import logging 3 | import re 4 | import typing as t 5 | 6 | import aiohttp 7 | 8 | from ..lib import NETWORK_ERRORS 9 | from ..lib.externaldata import ( 10 | ExternalBase, 11 | ExternalData, 12 | 
ExternalGitRepo, 13 | ExternalGitRef, 14 | ) 15 | from ..lib.utils import get_extra_data_info_from_url 16 | from ..lib.errors import CheckerMetadataError, CheckerFetchError 17 | from . import Checker 18 | 19 | log = logging.getLogger(__name__) 20 | 21 | 22 | class Component: 23 | NAME: str 24 | DATA_CLASS: t.Type[ExternalBase] 25 | 26 | def __init__( 27 | self, 28 | session: aiohttp.ClientSession, 29 | external_data: ExternalBase, 30 | latest_version: str, 31 | ) -> None: 32 | self.session = session 33 | self.external_data = external_data 34 | self.latest_version = latest_version 35 | 36 | assert latest_version is not None 37 | 38 | async def check(self) -> None: 39 | raise NotImplementedError 40 | 41 | async def update_external_source_version(self, latest_url): 42 | assert latest_url is not None 43 | 44 | try: 45 | new_version = await get_extra_data_info_from_url(latest_url, self.session) 46 | except NETWORK_ERRORS as err: 47 | raise CheckerFetchError from err 48 | else: 49 | new_version = new_version._replace( # pylint: disable=no-member 50 | version=self.latest_version 51 | ) 52 | self.external_data.set_new_version(new_version) 53 | 54 | 55 | class ChromiumComponent(Component): 56 | NAME = "chromium" 57 | DATA_CLASS = ExternalData 58 | 59 | _URL_FORMAT = ( 60 | "https://commondatastorage.googleapis.com" 61 | "/chromium-browser-official/chromium-{version}.tar.xz" 62 | ) 63 | # https://groups.google.com/a/chromium.org/g/chromium-packagers/c/wjv9UKg2u4w/m/SwSvLazmCAAJ 64 | _GENTOO_URL_FORMAT = ( 65 | "https://chromium-tarballs.distfiles.gentoo.org/chromium-{version}-linux.tar.xz" 66 | ) 67 | 68 | async def check(self) -> None: 69 | assert isinstance(self.external_data, ExternalData) 70 | 71 | try: 72 | latest_url = self._URL_FORMAT.format(version=self.latest_version) 73 | await self.update_external_source_version(latest_url) 74 | except CheckerFetchError as err: 75 | if ( 76 | isinstance(err.__cause__, aiohttp.ClientResponseError) 77 | and err.__cause__.status == 404 78 | ): 79 | log.error( 80 | "Chromium tarball is missing (falling back to alternate URL): %s", 81 | err, 82 | ) 83 | latest_url = self._GENTOO_URL_FORMAT.format(version=self.latest_version) 84 | await self.update_external_source_version(latest_url) 85 | else: 86 | raise 87 | 88 | 89 | class LLVMComponent(Component): 90 | class Version(t.NamedTuple): 91 | revision: str 92 | sub_revision: str 93 | 94 | _UPDATE_PY_URL_FORMAT = ( 95 | "https://chromium.googlesource.com/chromium/src/+" 96 | "/{version}/tools/clang/scripts/update.py" 97 | ) 98 | 99 | _UPDATE_PY_PARAMS = {"format": "TEXT"} 100 | 101 | _CLANG_REVISION_RE = re.compile(r"CLANG_REVISION = '(.*)'") 102 | _CLANG_SUB_REVISION_RE = re.compile(r"CLANG_SUB_REVISION = (\d+)") 103 | 104 | async def get_llvm_version(self) -> "LLVMComponent.Version": 105 | url = self._UPDATE_PY_URL_FORMAT.format(version=self.latest_version) 106 | async with self.session.get(url, params=self._UPDATE_PY_PARAMS) as response: 107 | result = await response.text() 108 | 109 | update_py = base64.b64decode(result).decode("utf-8") 110 | 111 | revision_match = self._CLANG_REVISION_RE.search(update_py) 112 | assert revision_match is not None, url 113 | 114 | sub_revision_match = self._CLANG_SUB_REVISION_RE.search(update_py) 115 | assert sub_revision_match is not None, url 116 | 117 | return LLVMComponent.Version( 118 | revision_match.group(1), sub_revision_match.group(1) 119 | ) 120 | 121 | 122 | class LLVMGitComponent(LLVMComponent): 123 | NAME = "llvm-git" 124 | DATA_CLASS = ExternalGitRepo 125 | 126 | 
_LLVM_REPO_URL = "https://github.com/llvm/llvm-project" 127 | 128 | async def check(self) -> None: 129 | assert isinstance(self.external_data, ExternalGitRepo) 130 | 131 | llvm_version = await self.get_llvm_version() 132 | 133 | new_version = ExternalGitRef( 134 | url=self.external_data.current_version.url, 135 | commit=llvm_version.revision, 136 | tag=None, 137 | branch=None, 138 | version=self.latest_version, 139 | timestamp=None, 140 | ) 141 | self.external_data.set_new_version(new_version) 142 | 143 | 144 | class LLVMPrebuiltComponent(LLVMComponent): 145 | NAME = "llvm-prebuilt" 146 | DATA_CLASS = ExternalData 147 | 148 | _PREBUILT_URL_FORMAT = ( 149 | "https://commondatastorage.googleapis.com" 150 | "/chromium-browser-clang/Linux_x64/clang-{revision}-{sub_revision}.tar.xz" 151 | ) 152 | 153 | async def check(self) -> None: 154 | assert isinstance(self.external_data, ExternalData) 155 | 156 | llvm_version = await self.get_llvm_version() 157 | 158 | latest_url = self._PREBUILT_URL_FORMAT.format( 159 | revision=llvm_version.revision, sub_revision=llvm_version.sub_revision 160 | ) 161 | await self.update_external_source_version(latest_url) 162 | 163 | 164 | class ChromiumChecker(Checker): 165 | CHECKER_DATA_TYPE = "chromium" 166 | SUPPORTED_DATA_CLASSES = [ExternalData, ExternalGitRepo] 167 | 168 | _COMPONENTS = { 169 | c.NAME: c for c in (ChromiumComponent, LLVMGitComponent, LLVMPrebuiltComponent) 170 | } 171 | 172 | CHECKER_DATA_SCHEMA = { 173 | "type": "object", 174 | "properties": { 175 | "component": { 176 | "type": "string", 177 | "enum": list(_COMPONENTS), 178 | }, 179 | }, 180 | "required": ["component"], 181 | } 182 | 183 | _CHROMIUM_VERSIONS_URL = "https://chromiumdash.appspot.com/fetch_releases" 184 | _CHROMIUM_VERSIONS_PARAMS = {"platform": "Linux", "channel": "Stable", "num": "1"} 185 | 186 | async def _get_latest_chromium(self) -> str: 187 | async with self.session.get( 188 | self._CHROMIUM_VERSIONS_URL, params=self._CHROMIUM_VERSIONS_PARAMS 189 | ) as response: 190 | result = await response.json() 191 | 192 | assert len(result) == 1, result 193 | return result[0]["version"] 194 | 195 | async def check(self, external_data: ExternalBase): 196 | assert self.should_check(external_data) 197 | 198 | component_name = external_data.checker_data.get( 199 | "component", ChromiumComponent.NAME 200 | ) 201 | 202 | component_class = self._COMPONENTS[component_name] 203 | if not isinstance(external_data, component_class.DATA_CLASS): 204 | raise CheckerMetadataError( 205 | f"Invalid source type for component {component_name}" 206 | ) 207 | 208 | latest_version = await self._get_latest_chromium() 209 | component = component_class(self.session, external_data, latest_version) 210 | await component.check() 211 | -------------------------------------------------------------------------------- /src/checkers/debianrepochecker.py: -------------------------------------------------------------------------------- 1 | # Debian Repo Checker: A checker that uses some metadata info from the 2 | # manifest file in order to check whether there are newer versions of 3 | # Debian package based external data modules. 4 | # 5 | # The contents of the x-checker-data for the module should be .e.g: 6 | # "x-checker-data": { 7 | # "type": "debian-repo", 8 | # "package-name": "YOUR_PACKAGE_NAME", 9 | # "root": "ROOT_URL_TO_THE_DEBIAN_REPO", 10 | # "dist": "DEBIAN_DIST", 11 | # "component": "DEBIAN_COMPONENT" 12 | # } 13 | # 14 | # Copyright (C) 2018 Endless Mobile, Inc. 
15 | # 16 | # Authors: 17 | # Joaquim Rocha 18 | # 19 | # This program is free software; you can redistribute it and/or modify 20 | # it under the terms of the GNU General Public License as published by 21 | # the Free Software Foundation; either version 2 of the License, or 22 | # (at your option) any later version. 23 | # 24 | # This program is distributed in the hope that it will be useful, 25 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 26 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 27 | # GNU General Public License for more details. 28 | # 29 | # You should have received a copy of the GNU General Public License along 30 | # with this program; if not, write to the Free Software Foundation, Inc., 31 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 32 | 33 | import contextlib 34 | import logging 35 | import os 36 | import tempfile 37 | import urllib.parse 38 | import re 39 | import typing as t 40 | 41 | import apt 42 | import apt_pkg 43 | 44 | from ..lib.externaldata import ExternalFile, ExternalBase 45 | from ..lib.checksums import MultiDigest 46 | from ..lib.utils import get_timestamp_from_url 47 | from . import Checker 48 | 49 | apt_pkg.init() 50 | 51 | APT_NEEDED_DIRS = ( 52 | "etc/apt/apt.conf.d", 53 | "etc/apt/preferences.d", 54 | "etc/apt/trusted.gpg.d", 55 | "var/lib/apt/lists/partial", 56 | "var/cache/apt/archives/partial", 57 | "var/log/apt", 58 | "var/lib/dpkg", 59 | "var/lib/dpkg/updates", 60 | "var/lib/dpkg/info", 61 | ) 62 | DEB_HASH_MAP = { 63 | "SHA512": "sha512", 64 | "SHA256": "sha256", 65 | "SHA1": "sha1", 66 | "MD5Sum": "md5", 67 | } 68 | 69 | LOG = logging.getLogger(__name__) 70 | 71 | 72 | def read_deb_hashes(deb_hashes: apt_pkg.HashStringList) -> MultiDigest: 73 | digests: t.Dict[str, str] = {} 74 | deb_hash: apt_pkg.HashString 75 | for deb_hash in deb_hashes: # type: ignore 76 | hash_type = DEB_HASH_MAP.get(deb_hash.hashtype) 77 | if hash_type: 78 | digests[hash_type] = deb_hash.hashvalue 79 | return MultiDigest.from_source(digests) 80 | 81 | 82 | class LoggerAcquireProgress(apt.progress.text.AcquireProgress): 83 | def __init__(self, logger): 84 | class FileLike: 85 | def write(self, text): 86 | text = text.strip() 87 | if text: # ignore write("\r") 88 | logger.debug(text) 89 | 90 | def flush(self): 91 | pass 92 | 93 | # no fileno() to avoid SIGWINCH stuff 94 | 95 | super().__init__(FileLike()) 96 | 97 | def pulse(self, owner): 98 | """Disable percentage reporting within files.""" 99 | return apt.progress.base.AcquireProgress.pulse(self, owner) 100 | 101 | 102 | class DebianRepoChecker(Checker): 103 | CHECKER_DATA_TYPE = "debian-repo" 104 | CHECKER_DATA_SCHEMA = { 105 | "type": "object", 106 | "properties": { 107 | "package-name": {"type": "string"}, 108 | "root": {"type": "string"}, 109 | "dist": {"type": "string"}, 110 | "component": {"type": "string"}, 111 | "source": {"type": "boolean"}, 112 | }, 113 | "required": ["package-name", "root", "dist"], 114 | } 115 | 116 | async def check(self, external_data: ExternalBase): 117 | assert self.should_check(external_data) 118 | 119 | LOG.debug("Checking %s", external_data.filename) 120 | package_name = external_data.checker_data["package-name"] 121 | root = external_data.checker_data["root"] 122 | dist = external_data.checker_data["dist"] 123 | component = external_data.checker_data.get("component", "") 124 | src_pkg = external_data.checker_data.get("source", False) 125 | 126 | if not component and not dist.endswith("/"): 127 | LOG.warning( 128 | '%s is missing 
Debian repo "component"; for an ' 129 | 'exact URL, "dist" must end with /', 130 | package_name, 131 | ) 132 | return 133 | 134 | arch = self._translate_arch(external_data.arches[0]) 135 | cache: apt.Cache 136 | with self._load_repo(root, dist, component, arch, src_pkg) as cache: 137 | if src_pkg: 138 | src_record = apt_pkg.SourceRecords() 139 | source_version, source_files = None, None 140 | while src_record.lookup(package_name): 141 | source_version, source_files = src_record.version, src_record.files 142 | if not source_version: 143 | raise ValueError(f"No source package {package_name}") 144 | assert source_files is not None 145 | 146 | source_file = next(f for f in source_files if f.type == "tar") 147 | 148 | src_url = urllib.parse.urljoin(root.rstrip("/") + "/", source_file.path) 149 | 150 | new_version = ExternalFile( 151 | url=src_url, 152 | checksum=read_deb_hashes(source_file.hashes), 153 | size=source_file.size, 154 | # Strip epoch if present 155 | version=re.sub(r"^\d+:", "", source_version), 156 | timestamp=await get_timestamp_from_url(src_url, self.session), 157 | ) 158 | else: 159 | package = cache[package_name] 160 | candidate = package.candidate 161 | assert candidate is not None 162 | assert candidate.uri is not None 163 | 164 | new_version = ExternalFile( 165 | url=candidate.uri, 166 | # FIXME: apt.package.Version.{md5,sha1,sha256} can raise an 167 | # exception if given hash isn't set, while sha512 isn't accessible 168 | # at all. Raw hashes are handy, but accessible only through 169 | # protected property. 170 | checksum=read_deb_hashes(candidate._records.hashes), 171 | size=candidate.size, 172 | version=candidate.version, 173 | timestamp=await self._get_timestamp_for_candidate(candidate), 174 | ) 175 | 176 | external_data.set_new_version(new_version) 177 | 178 | def _translate_arch(self, arch: str) -> str: 179 | # Because architecture names in Debian differ from Flatpak's 180 | arches = {"x86_64": "amd64", "arm": "armel", "aarch64": "arm64"} 181 | return arches.get(arch, arch) 182 | 183 | async def _get_timestamp_for_candidate(self, candidate: apt.Version): 184 | # TODO: fetch package, parse changelog, get the date from there. python-apt can 185 | # fetch changelogs from Debian and Ubuntu's changelog server, but most packages 186 | # this checker will be used for are not from these repos. We'd have to open-code 187 | # it. 
188 | # https://salsa.debian.org/apt-team/python-apt/blob/master/apt/package.py#L1245-1417 189 | assert candidate.uri 190 | return await get_timestamp_from_url(candidate.uri, self.session) 191 | 192 | @contextlib.contextmanager 193 | def _load_repo( 194 | self, deb_root: str, dist: str, component: str, arch: str, source=False 195 | ) -> t.Generator[apt.Cache, None, None]: 196 | with tempfile.TemporaryDirectory() as root: 197 | LOG.debug("Setting up apt directory structure in %s", root) 198 | 199 | for path in APT_NEEDED_DIRS: 200 | os.makedirs(os.path.join(root, path), exist_ok=True) 201 | 202 | # Create sources.list 203 | sources_list = os.path.join(root, "etc/apt/sources.list") 204 | with open(sources_list, "w") as f: 205 | # FIXME: import GPG key, remove 'trusted=yes' which skips GPG 206 | # verification 207 | if source: 208 | f.write(f"deb-src [trusted=yes] {deb_root} {dist} {component}\n") 209 | else: 210 | f.write( 211 | f"deb [arch={arch} trusted=yes] {deb_root} {dist} {component}\n" 212 | ) 213 | 214 | # Create empty dpkg status 215 | dpkg_status = os.path.join(root, "var/lib/dpkg/status") 216 | with open(dpkg_status, "w") as f: 217 | pass 218 | 219 | # Setup generic configuration 220 | apt_pkg.init() 221 | apt_pkg.config.set("Dir", root) 222 | apt_pkg.config.set("Dir::State::status", dpkg_status) 223 | apt_pkg.config.set("Acquire::Languages", "none") 224 | progress = LoggerAcquireProgress(LOG) 225 | 226 | # Create a new cache with the appropriate architecture 227 | apt_pkg.config.set("APT::Architecture", arch) 228 | apt_pkg.config.set("APT::Architectures", arch) 229 | cache = apt.Cache() 230 | cache.update(progress) 231 | cache.open() 232 | 233 | yield cache 234 | 235 | cache.close() 236 | -------------------------------------------------------------------------------- /src/checkers/electronchecker.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import base64 3 | from datetime import datetime 4 | import typing as t 5 | 6 | from yarl import URL 7 | import ruamel.yaml 8 | 9 | from ..lib import NETWORK_ERRORS 10 | from ..lib.externaldata import ( 11 | ExternalBase, 12 | ExternalData, 13 | ExternalFile, 14 | ) 15 | from ..lib.errors import CheckerQueryError 16 | from ..lib.checksums import MultiDigest 17 | from . 
import Checker 18 | from .jsonchecker import parse_timestamp 19 | 20 | yaml = ruamel.yaml.YAML(typ="safe") 21 | log = logging.getLogger(__name__) 22 | 23 | 24 | class ElectronChecker(Checker): 25 | CHECKER_DATA_TYPE = "electron-updater" 26 | CHECKER_DATA_SCHEMA = { 27 | "type": "object", 28 | "properties": { 29 | "url": {"type": "string", "format": "uri"}, 30 | }, 31 | } 32 | 33 | @staticmethod 34 | def _read_digests(obj: t.Dict) -> MultiDigest: 35 | digests: t.Dict[str, str] = {} 36 | for _k in MultiDigest._fields: # pylint: disable=no-member 37 | if _k in obj: 38 | digests[_k] = base64.b64decode(obj[_k]).hex() 39 | return MultiDigest(**digests) 40 | 41 | async def check(self, external_data: ExternalBase): 42 | assert isinstance(external_data, ExternalData) 43 | 44 | if "url" in external_data.checker_data: 45 | metadata_url = URL(external_data.checker_data["url"]) 46 | else: 47 | metadata_url = URL(external_data.current_version.url).join( 48 | URL("latest-linux.yml") 49 | ) 50 | 51 | try: 52 | async with self.session.get(metadata_url) as resp: 53 | metadata = yaml.load(await resp.read()) 54 | except NETWORK_ERRORS as err: 55 | raise CheckerQueryError from err 56 | 57 | if "files" in metadata: 58 | # Modern metadata format 59 | m_file = metadata["files"][0] 60 | file_url = metadata_url.join(URL(m_file["url"])) 61 | file_size = int(m_file["size"]) 62 | checksum = self._read_digests(m_file) 63 | else: 64 | # Old electron-updater 1.x metadata format; no size property 65 | file_url = metadata_url.join(URL(metadata["path"])) 66 | file_size = None 67 | checksum = self._read_digests(metadata) 68 | 69 | timestamp: t.Optional[datetime] 70 | if isinstance(metadata["releaseDate"], datetime): 71 | timestamp = metadata["releaseDate"] 72 | else: 73 | timestamp = parse_timestamp(metadata["releaseDate"]) 74 | 75 | new_version = ExternalFile( 76 | url=str(file_url), 77 | checksum=checksum, 78 | size=file_size, 79 | version=metadata["version"], 80 | timestamp=timestamp, 81 | ) 82 | 83 | await self._set_new_version(external_data, new_version) 84 | -------------------------------------------------------------------------------- /src/checkers/gitchecker.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | import typing as t 4 | 5 | import semver 6 | 7 | from ..lib import OPERATORS_SCHEMA 8 | from ..lib.externaldata import ExternalBase, ExternalGitRepo, ExternalGitRef 9 | from ..lib.utils import git_ls_remote, filter_versioned_items, FallbackVersion 10 | from ..lib.errors import CheckerQueryError, CheckerFetchError 11 | from . 
import Checker 12 | 13 | log = logging.getLogger(__name__) 14 | 15 | REF_TAG_PREFIX = "refs/tags/" 16 | REF_TAG_LW_SUFFIX = "^{}" 17 | 18 | 19 | class TagWithVersion(t.NamedTuple): 20 | commit: str 21 | tag: str 22 | annotated: bool 23 | version: str 24 | 25 | @classmethod 26 | def parse_version(cls, version: str): 27 | return FallbackVersion(version) 28 | 29 | @property 30 | def parsed_version(self): 31 | return self.parse_version(self.version) 32 | 33 | def __lt__(self, other): 34 | if self.tag == other.tag: 35 | return self.annotated and not other.annotated 36 | return self.parsed_version < other.parsed_version 37 | 38 | def __le__(self, other): 39 | return self == other or self < other 40 | 41 | def __gt__(self, other): 42 | if self.tag == other.tag: 43 | return not self.annotated and other.annotated 44 | return self.parsed_version > other.parsed_version 45 | 46 | def __ge__(self, other): 47 | return self == other or self > other 48 | 49 | 50 | class TagWithSemver(TagWithVersion): 51 | @classmethod 52 | def parse_version(self, version: str): 53 | return semver.VersionInfo.parse(version) 54 | 55 | 56 | TAG_VERSION_SCHEMES = { 57 | "loose": TagWithVersion, 58 | "semantic": TagWithSemver, 59 | } 60 | 61 | 62 | class GitChecker(Checker): 63 | PRIORITY = 95 64 | CHECKER_DATA_TYPE = "git" 65 | CHECKER_DATA_SCHEMA = { 66 | "type": "object", 67 | "properties": { 68 | "tag-pattern": {"type": "string", "format": "regex"}, 69 | "versions": OPERATORS_SCHEMA, 70 | "version-scheme": { 71 | "type": "string", 72 | "enum": list(TAG_VERSION_SCHEMES), 73 | }, 74 | "sort-tags": {"type": "boolean"}, 75 | }, 76 | } 77 | SUPPORTED_DATA_CLASSES = [ExternalGitRepo] 78 | 79 | @classmethod 80 | def should_check(cls, external_data: ExternalBase): 81 | return isinstance(external_data, ExternalGitRepo) 82 | 83 | async def validate_checker_data(self, external_data: ExternalBase): 84 | if external_data.checker_data.get("type") == self.CHECKER_DATA_TYPE: 85 | return await super().validate_checker_data(external_data) 86 | return None 87 | 88 | async def check(self, external_data: ExternalBase): 89 | assert self.should_check(external_data) 90 | assert isinstance(external_data, ExternalGitRepo) 91 | if external_data.checker_data.get("type") == self.CHECKER_DATA_TYPE: 92 | return await self._check_has_new(external_data) 93 | return await self._check_still_valid(external_data) 94 | 95 | @classmethod 96 | async def _check_has_new(cls, external_data: ExternalGitRepo): 97 | tag_re = cls._get_pattern(external_data.checker_data, "tag-pattern", 1) 98 | if tag_re is None: 99 | tag_re = re.compile(r"^(?:[vV])?((?:\d+\.)+\d+)$") 100 | 101 | version_scheme = external_data.checker_data.get("version-scheme", "loose") 102 | tag_cls = TAG_VERSION_SCHEMES[version_scheme] 103 | sort_tags = external_data.checker_data.get("sort-tags", True) 104 | constraints = [ 105 | (o, tag_cls.parse_version(v)) 106 | for o, v in external_data.checker_data.get("versions", {}).items() 107 | ] 108 | 109 | matching_tags = [] 110 | refs = await git_ls_remote(external_data.current_version.url) 111 | for ref, commit in refs.items(): 112 | if not ref.startswith(REF_TAG_PREFIX): 113 | continue 114 | tag = ref[len(REF_TAG_PREFIX) :] 115 | if tag.endswith(REF_TAG_LW_SUFFIX): 116 | annotated = False 117 | tag = tag[: -len(REF_TAG_LW_SUFFIX)] 118 | else: 119 | annotated = True 120 | tag_match = tag_re.match(tag) 121 | if not tag_match: 122 | continue 123 | version = tag_match.group(1) 124 | matching_tags.append(tag_cls(commit, tag, annotated, version)) 125 | 126 | if 
constraints:
127 |             sorted_tags = filter_versioned_items(
128 |                 matching_tags,
129 |                 constraints=constraints,
130 |                 to_version=lambda t: t.parsed_version,
131 |                 sort=sort_tags,
132 |             )
133 |         elif sort_tags:
134 |             sorted_tags = sorted(matching_tags)
135 |         else:
136 |             sorted_tags = matching_tags
137 | 
138 |         try:
139 |             latest_tag = sorted_tags[-1]
140 |         except IndexError as err:
141 |             raise CheckerQueryError(
142 |                 f"{external_data.current_version.url} has no tags matching "
143 |                 f"'{tag_re.pattern}'"
144 |             ) from err
145 | 
146 |         new_version = ExternalGitRef(
147 |             url=external_data.current_version.url,
148 |             commit=latest_tag.commit,
149 |             tag=latest_tag.tag,
150 |             branch=None,
151 |             version=latest_tag.version,
152 |             timestamp=None,
153 |         )
154 |         external_data.set_new_version(new_version)
155 | 
156 |     @staticmethod
157 |     async def _check_still_valid(external_data: ExternalGitRepo):
158 |         if (
159 |             external_data.current_version.commit is not None
160 |             and external_data.current_version.tag is None
161 |             and external_data.current_version.branch is None
162 |         ):
163 |             log.info(
164 |                 "Skipping source %s, commit is specified, but neither tag nor branch",
165 |                 external_data.filename,
166 |             )
167 |             return
168 | 
169 |         if external_data.current_version.commit is None:
170 |             log.info(
171 |                 "Skipping source %s, not pinned to commit",
172 |                 external_data.filename,
173 |             )
174 |             return
175 | 
176 |         try:
177 |             remote_version = await external_data.current_version.fetch_remote()
178 |         except CheckerFetchError:
179 |             external_data.state |= external_data.State.BROKEN
180 |             raise
181 | 
182 |         external_data.set_new_version(remote_version, is_update=False)
183 | 
--------------------------------------------------------------------------------
/src/checkers/gnomechecker.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import re
3 | import typing as t
4 | 
5 | from enum import StrEnum
6 | from yarl import URL
7 | 
8 | from ..lib import OPERATORS_SCHEMA, NETWORK_ERRORS
9 | from ..lib.errors import CheckerQueryError
10 | from ..lib.externaldata import ExternalBase, ExternalFile
11 | from ..lib.checksums import MultiDigest
12 | from ..lib.utils import filter_versions
13 | from . import Checker
14 | 
15 | log = logging.getLogger(__name__)
16 | 
17 | GNOME_MIRROR = URL("https://download.gnome.org/")
18 | 
19 | 
20 | class VersionScheme(StrEnum):
21 |     DEFAULT = "default"
22 |     ODD_MINOR_IS_UNSTABLE = "odd-minor-is-unstable"
23 | 
24 | 
25 | def _parse_checksums(text: str) -> t.Dict[str, str]:
26 |     result = {}
27 |     for line in text.splitlines():
28 |         digest, filename = line.strip().split(maxsplit=1)
29 |         result[filename] = digest
30 |     return result
31 | 
32 | 
33 | # Checks whether any component of the version contains either alpha, beta or
34 | # rc.
35 | def _contains_keyword(version: str) -> bool:
36 |     return any(kw in version for kw in ["alpha", "beta", "rc"])
37 | 
38 | 
39 | def _is_stable(
40 |     version: str,
41 |     scheme: VersionScheme = VersionScheme.DEFAULT,
42 | ) -> bool:
43 |     ver_list = version.split(".")
44 |     if len(ver_list) < 2:
45 |         # Single number, e.g. "41"
46 |         return True
47 |     major, minor = ver_list[:2]
48 |     if scheme == VersionScheme.DEFAULT:
49 |         if any(_contains_keyword(x) for x in ver_list[1:]):
50 |             return False
51 |     elif scheme == VersionScheme.ODD_MINOR_IS_UNSTABLE:
52 |         if len(ver_list) >= 2:
53 |             return (int(minor) % 2) == 0
54 | 
55 |     # XXX If we didn't see any indication that the version is a prerelease,
56 |     # assume it's a normal (stable) release
57 |     return True
58 | 
59 | 
60 | class GNOMEChecker(Checker):
61 |     CHECKER_DATA_TYPE = "gnome"
62 |     CHECKER_DATA_SCHEMA = {
63 |         "type": "object",
64 |         "properties": {
65 |             "name": {"type": "string"},
66 |             "stable-only": {"type": "boolean"},
67 |             "version-scheme": {
68 |                 "type": "string",
69 |                 "enum": [
70 |                     VersionScheme.DEFAULT,
71 |                     VersionScheme.ODD_MINOR_IS_UNSTABLE,
72 |                 ],
73 |             },
74 |             "versions": OPERATORS_SCHEMA,
75 |         },
76 |         "required": ["name"],
77 |     }
78 | 
79 |     async def check(self, external_data: ExternalBase):
80 |         project_name = external_data.checker_data["name"]
81 |         stable_only = external_data.checker_data.get("stable-only", True)
82 |         constraints = external_data.checker_data.get("versions", {}).items()
83 |         version_scheme = external_data.checker_data.get(
84 |             "version-scheme", VersionScheme.DEFAULT
85 |         )
86 |         assert isinstance(project_name, str)
87 | 
88 |         proj_url = GNOME_MIRROR / "sources" / project_name
89 |         try:
90 |             async with self.session.get(proj_url / "cache.json") as cache_resp:
91 |                 # Some mirrors may send an invalid content-type; don't require
92 |                 # it to be application/json
93 |                 cache_json = await cache_resp.json(content_type=None)
94 |         except NETWORK_ERRORS as err:
95 |             raise CheckerQueryError from err
96 |         _, downloads, versions, _ = cache_json
97 | 
98 |         filtered_versions = versions[project_name]
99 |         if constraints:
100 |             filtered_versions = filter_versions(filtered_versions, constraints)
101 | 
102 |         def _is_stable_wrapper(version):
103 |             return _is_stable(version, version_scheme)
104 | 
105 |         try:
106 |             if stable_only:
107 |                 try:
108 |                     latest_version = list(
109 |                         filter(_is_stable_wrapper, filtered_versions)
110 |                     )[-1]
111 |                 except IndexError:
112 |                     latest_version = filtered_versions[-1]
113 |                     log.warning(
114 |                         "Couldn't find any stable version for %s, selecting latest %s",
115 |                         project_name,
116 |                         latest_version,
117 |                     )
118 |             else:
119 |                 latest_version = filtered_versions[-1]
120 |         except IndexError as e:
121 |             raise CheckerQueryError("No matching versions") from e
122 | 
123 |         proj_files = downloads[project_name][latest_version]
124 | 
125 |         tarball_type, tarball_path = next(
126 |             (prop, proj_files[prop])
127 |             for prop in ["tar.xz", "tar.bz2", "tar.gz"]
128 |             if prop in proj_files
129 |         )
130 | 
131 |         checksum_path = proj_files.get(
132 |             "sha256sum",
133 |             re.sub(f"\\.{tarball_type}$", ".sha256sum", tarball_path),
134 |         )
135 | 
136 |         async with self.session.get(proj_url / checksum_path) as cs_resp:
137 |             checksums = _parse_checksums(await cs_resp.text())
138 |             checksum = checksums[tarball_path.split("/")[-1]]
139 | 
140 |         new_version = ExternalFile(
141 |             url=str(proj_url / tarball_path),
142 |             checksum=MultiDigest(sha256=checksum),
143 |             size=None,
144 |             version=latest_version,
145 |             timestamp=None,
146 |         )
147 | 
148 |         external_data.set_new_version(new_version)
149 | 
--------------------------------------------------------------------------------
/src/checkers/htmlchecker.py:
--------------------------------------------------------------------------------
1 | # HTML Checker: A checker that extracts the latest version of an external source from an HTML page using regexes.
2 | # 3 | # Consult the README for information on how to use this checker. 4 | # 5 | # Copyright © 2019 Bastien Nocera 6 | # 7 | # This program is free software; you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation; either version 2 of the License, or 10 | # (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU General Public License along 18 | # with this program; if not, write to the Free Software Foundation, Inc., 19 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 20 | 21 | import logging 22 | import re 23 | import urllib.parse 24 | import io 25 | import codecs 26 | import typing as t 27 | 28 | import aiohttp 29 | from yarl import URL 30 | import semver 31 | 32 | from ..lib import NETWORK_ERRORS, OPERATORS_SCHEMA 33 | from ..lib.externaldata import ExternalBase, ExternalData 34 | from ..lib.errors import CheckerQueryError, CheckerFetchError 35 | from . import Checker 36 | from ..lib.utils import filter_versioned_items, FallbackVersion 37 | 38 | log = logging.getLogger(__name__) 39 | 40 | 41 | def _semantic_version(version: str) -> semver.VersionInfo: 42 | try: 43 | return semver.VersionInfo.parse(version) 44 | except ValueError as err: 45 | raise CheckerQueryError("Can't parse version") from err 46 | 47 | 48 | _VERSION_SCHEMES = { 49 | "loose": FallbackVersion, 50 | "semantic": _semantic_version, 51 | } 52 | 53 | 54 | class HTMLChecker(Checker): 55 | CHECKER_DATA_TYPE = "html" 56 | CHECKER_DATA_SCHEMA = { 57 | "type": "object", 58 | "properties": { 59 | "url": {"type": "string", "format": "uri"}, 60 | "pattern": {"type": "string", "format": "regex"}, 61 | "version-pattern": {"type": "string", "format": "regex"}, 62 | "url-template": {"type": "string", "format": "regex"}, 63 | "sort-matches": {"type": "boolean"}, 64 | "versions": OPERATORS_SCHEMA, 65 | "version-scheme": { 66 | "type": "string", 67 | "enum": list(_VERSION_SCHEMES), 68 | }, 69 | }, 70 | "allOf": [ 71 | {"required": ["url"]}, 72 | { 73 | "if": {"required": ["version-pattern"]}, 74 | "then": {"required": ["url-template"]}, 75 | "else": {"required": ["pattern"]}, 76 | }, 77 | ], 78 | } 79 | 80 | @staticmethod 81 | async def _get_encoding(response: aiohttp.ClientResponse) -> str: 82 | # Loosely based on aiohttp.ClientResponse.get_encoding, but 83 | # avoids expensive charset detection via chardet.detect() call; 84 | # if we didn't get a proper charset right away, 85 | # we're most likely facing an HTTP response that isn't textual 86 | ctype = response.headers.get(aiohttp.hdrs.CONTENT_TYPE, "").lower() 87 | mimetype = aiohttp.helpers.parse_mimetype(ctype) 88 | encoding = mimetype.parameters.get("charset") 89 | if encoding: 90 | try: 91 | codecs.lookup(encoding) 92 | except LookupError as err: 93 | raise CheckerFetchError( 94 | f'Unknown encoding "{encoding}" from {response.url}' 95 | ) from err 96 | else: 97 | encoding = "utf-8" 98 | return encoding 99 | 100 | async def _get_text(self, url: t.Union[URL, str]) -> str: 101 | try: 102 | async with self.session.get(url) as response: 103 | encoding = await self._get_encoding(response) 104 | # We use streaming decoding in order to get a decode error and abort the 105 | # check 
as early as possible, without preloading the whole raw contents 106 | # into memory 107 | decoder_cls = codecs.getincrementaldecoder(encoding) 108 | decoder = decoder_cls(errors="strict") 109 | with io.StringIO() as buf: 110 | async for chunk, _ in response.content.iter_chunks(): 111 | try: 112 | buf.write(decoder.decode(chunk)) 113 | except UnicodeDecodeError as err: 114 | raise CheckerQueryError from err 115 | return buf.getvalue() 116 | except NETWORK_ERRORS as err: 117 | raise CheckerQueryError from err 118 | 119 | async def check(self, external_data: ExternalBase): 120 | assert self.should_check(external_data) 121 | assert isinstance(external_data, ExternalData) 122 | 123 | url_tmpl = external_data.checker_data["url"] 124 | 125 | if external_data.parent: 126 | assert isinstance(external_data.parent, ExternalBase) 127 | parent_state = ( 128 | external_data.parent.new_version or external_data.parent.current_version 129 | ) 130 | parent_json = parent_state.json 131 | if parent_state.version: 132 | parent_json |= self._version_parts(parent_state.version) 133 | else: 134 | parent_json = {} 135 | 136 | url = self._substitute_template( 137 | url_tmpl, 138 | {f"parent_{k}": v for k, v in parent_json.items() if v is not None}, 139 | ) 140 | 141 | combo_pattern = self._get_pattern(external_data.checker_data, "pattern", 2) 142 | version_pattern = self._get_pattern( 143 | external_data.checker_data, "version-pattern", 1 144 | ) 145 | url_template = external_data.checker_data.get("url-template") 146 | sort_matches = external_data.checker_data.get("sort-matches", True) 147 | version_cls = _VERSION_SCHEMES[ 148 | external_data.checker_data.get("version-scheme", "loose") 149 | ] 150 | constraints = [ 151 | (o, version_cls(v)) 152 | for o, v in external_data.checker_data.get("versions", {}).items() 153 | ] 154 | assert combo_pattern or (version_pattern and url_template) 155 | 156 | html = await self._get_text(url) 157 | 158 | def _get_latest(pattern: re.Pattern, ver_group: int) -> re.Match: 159 | matches = filter_versioned_items( 160 | items=pattern.finditer(html), 161 | constraints=constraints, 162 | to_version=lambda m: version_cls(m.group(ver_group)), 163 | sort=sort_matches, 164 | ) 165 | if not matches: 166 | raise CheckerQueryError( 167 | f"Pattern '{pattern.pattern}' didn't match anything" 168 | ) 169 | 170 | try: 171 | # NOTE Returning last match when sort is requested and first match 172 | # otherwise doesn't seem sensible, but we need to retain backward 173 | # compatibility 174 | result = matches[-1 if sort_matches else 0] 175 | except IndexError as err: 176 | raise CheckerQueryError( 177 | f"Pattern '{pattern.pattern}' didn't match anything" 178 | ) from err 179 | 180 | log.debug("%s matched %s", pattern.pattern, result) 181 | return result 182 | 183 | if combo_pattern: 184 | latest_url, latest_version = _get_latest(combo_pattern, 2).group(1, 2) 185 | else: 186 | assert version_pattern and url_template 187 | latest_version = _get_latest(version_pattern, 1).group(1) 188 | latest_url = self._substitute_template( 189 | url_template, self._version_parts(latest_version) 190 | ) 191 | 192 | abs_url = urllib.parse.urljoin(base=url, url=latest_url) 193 | 194 | await self._update_version(external_data, latest_version, abs_url) 195 | -------------------------------------------------------------------------------- /src/checkers/jetbrainschecker.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import logging 3 | 4 | from ..lib.externaldata 
import ExternalBase, ExternalFile 5 | from ..lib.checksums import MultiDigest 6 | from . import Checker 7 | 8 | log = logging.getLogger(__name__) 9 | 10 | _JB_ARCH_MAP = { 11 | "x86_64": "linux", 12 | "aarch64": "linuxARM64", 13 | } 14 | 15 | 16 | class JetBrainsChecker(Checker): 17 | CHECKER_DATA_TYPE = "jetbrains" 18 | CHECKER_DATA_SCHEMA = { 19 | "type": "object", 20 | "properties": { 21 | "code": {"type": "string"}, 22 | # TODO: add enum here 23 | "release-type": {"type": "string"}, 24 | }, 25 | "required": ["code"], 26 | } 27 | 28 | async def check(self, external_data: ExternalBase): 29 | assert self.should_check(external_data) 30 | 31 | code = external_data.checker_data["code"] 32 | release_type = external_data.checker_data.get("release-type", "release") 33 | 34 | url = "https://data.services.jetbrains.com/products/releases" 35 | query = {"code": code, "latest": "true", "type": release_type} 36 | 37 | async with self.session.get(url, params=query) as response: 38 | result = await response.json() 39 | data = result[code][0] 40 | 41 | release = data["downloads"][_JB_ARCH_MAP[external_data.arches[0]]] 42 | 43 | async with self.session.get(release["checksumLink"]) as response: 44 | result = await response.text() 45 | checksum = result.split(" ")[0] 46 | 47 | new_version = ExternalFile( 48 | url=release["link"], 49 | checksum=MultiDigest(sha256=checksum), 50 | size=release["size"], 51 | version=data["version"], 52 | timestamp=datetime.datetime.strptime(data["date"], "%Y-%m-%d"), 53 | ) 54 | 55 | external_data.set_new_version(new_version) 56 | -------------------------------------------------------------------------------- /src/checkers/jsonchecker.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import json 3 | import re 4 | from datetime import datetime 5 | import typing as t 6 | import subprocess 7 | import os 8 | 9 | from yarl import URL 10 | 11 | from ..lib import utils 12 | from ..lib.externaldata import ( 13 | ExternalBase, 14 | ExternalData, 15 | ExternalGitRepo, 16 | ExternalGitRef, 17 | ) 18 | from ..lib.errors import CheckerQueryError 19 | from . 
import Checker, JSONType 20 | 21 | log = logging.getLogger(__name__) 22 | 23 | 24 | async def _jq(query: str, data: JSONType, variables: t.Dict[str, JSONType]) -> str: 25 | var_args = [] 26 | for var_name, var_value in variables.items(): 27 | var_args += ["--argjson", var_name, json.dumps(var_value)] 28 | 29 | jq_cmd = ["jq"] + var_args + ["-e", query] 30 | try: 31 | jq_stdout, _ = await utils.Command(jq_cmd).run(json.dumps(data).encode()) 32 | except subprocess.CalledProcessError as err: 33 | raise CheckerQueryError("Error running jq") from err 34 | 35 | try: 36 | result = json.loads(jq_stdout) 37 | except json.JSONDecodeError as err: 38 | raise CheckerQueryError("Error reading jq output") from err 39 | 40 | if isinstance(result, (str, int, float)): 41 | return str(result) 42 | 43 | raise CheckerQueryError(f"Invalid jq output type {type(result)}") 44 | 45 | 46 | def parse_timestamp(date_string: t.Optional[str]) -> t.Optional[datetime]: 47 | if date_string is None: 48 | return None 49 | try: 50 | return datetime.fromisoformat(re.sub(r"Z$", "+00:00", date_string)) 51 | except ValueError as err: 52 | raise CheckerQueryError("Failed to parse timestamp") from err 53 | 54 | 55 | class _Query(t.NamedTuple): 56 | name: str 57 | value_expr: str 58 | url_expr: t.Optional[str] 59 | 60 | 61 | class JSONChecker(Checker): 62 | CHECKER_DATA_TYPE = "json" 63 | CHECKER_DATA_SCHEMA = { 64 | "type": "object", 65 | "properties": { 66 | "url": {"type": "string", "format": "uri"}, 67 | "tag-query": {"type": "string"}, 68 | "tag-data-url": {"type": "string"}, 69 | "commit-query": {"type": "string"}, 70 | "commit-data-url": {"type": "string"}, 71 | "version-query": {"type": "string"}, 72 | "version-data-url": {"type": "string"}, 73 | "url-query": {"type": "string"}, 74 | "url-data-url": {"type": "string"}, 75 | "timestamp-query": {"type": "string"}, 76 | "timestamp-data-url": {"type": "string"}, 77 | }, 78 | } 79 | SUPPORTED_DATA_CLASSES = [ExternalData, ExternalGitRepo] 80 | 81 | @classmethod 82 | def get_json_schema(cls, data_class: t.Type[ExternalBase]): 83 | schema = super().get_json_schema(data_class).copy() 84 | if issubclass(data_class, ExternalGitRepo): 85 | schema["anyOf"] = schema.get("anyOf", []) + [ 86 | {"required": ["tag-query"]}, 87 | {"required": ["commit-query"]}, 88 | ] 89 | else: 90 | schema["required"] = schema.get("required", []) + [ 91 | "version-query", 92 | "url-query", 93 | ] 94 | return schema 95 | 96 | async def _get_json( 97 | self, 98 | url: t.Union[str, URL], 99 | headers: t.Optional[t.Dict[str, str]] = None, 100 | ) -> JSONType: 101 | url = URL(url) 102 | 103 | headers = headers.copy() if headers else {} 104 | if url.host == "api.github.com": 105 | github_token = os.environ.get("GITHUB_TOKEN") 106 | if github_token: 107 | headers["Authorization"] = f"token {github_token}" 108 | 109 | return await super()._get_json(url, headers) 110 | 111 | async def _query_sequence( 112 | self, 113 | queries: t.Iterable[_Query], 114 | json_vars: t.Dict[str, JSONType], 115 | init_json_data: JSONType = None, 116 | ) -> t.Dict[str, str]: 117 | results: t.Dict[str, str] = {} 118 | for query in queries: 119 | _vars = json_vars | results 120 | if query.url_expr: 121 | url = await _jq(query.url_expr, init_json_data, _vars) 122 | json_data = await self._get_json(url) 123 | else: 124 | json_data = init_json_data 125 | results[query.name] = await _jq(query.value_expr, json_data, _vars) 126 | return results 127 | 128 | @staticmethod 129 | def _read_q_seq( 130 | checker_data: t.Mapping, 131 | sequence: 
t.List[str], 132 | ) -> t.Iterable[_Query]: 133 | for query_name in sequence: 134 | q_prop = f"{query_name}-query" 135 | if q_prop not in checker_data: 136 | continue 137 | url_prop = f"{query_name}-data-url" 138 | yield _Query( 139 | name=query_name, 140 | value_expr=checker_data[q_prop], 141 | url_expr=checker_data.get(url_prop), 142 | ) 143 | 144 | async def check(self, external_data: ExternalBase): 145 | assert self.should_check(external_data) 146 | 147 | json_url = external_data.checker_data.get("url") 148 | json_data = await self._get_json(json_url) if json_url else None 149 | 150 | json_vars: t.Dict[str, JSONType] = {} 151 | 152 | if external_data.parent: 153 | assert isinstance(external_data.parent, ExternalBase) 154 | # XXX This seemingly redundant extra variable is needed to make Mypy happy 155 | parent_data: t.Dict[str, t.Optional[t.Dict[str, JSONType]]] 156 | parent_data = { 157 | "current": external_data.parent.current_version.json, 158 | "new": None, 159 | } 160 | if external_data.parent.new_version: 161 | parent_data["new"] = external_data.parent.new_version.json 162 | json_vars["parent"] = parent_data 163 | 164 | if isinstance(external_data, ExternalGitRepo): 165 | return await self._check_git(json_data, json_vars, external_data) 166 | else: 167 | assert isinstance(external_data, ExternalData) 168 | return await self._check_data(json_data, json_vars, external_data) 169 | 170 | async def _check_data( 171 | self, 172 | json_data: JSONType, 173 | json_vars: t.Dict[str, JSONType], 174 | external_data: ExternalData, 175 | ): 176 | checker_data = external_data.checker_data 177 | results = await self._query_sequence( 178 | self._read_q_seq( 179 | checker_data, ["tag", "commit", "version", "url", "timestamp"] 180 | ), 181 | json_vars, 182 | json_data, 183 | ) 184 | latest_version = results["version"] 185 | latest_url = results["url"] 186 | latest_timestamp = parse_timestamp(results.get("timestamp")) 187 | 188 | await self._update_version( 189 | external_data, 190 | latest_version, 191 | latest_url, 192 | follow_redirects=False, 193 | timestamp=latest_timestamp, 194 | ) 195 | 196 | async def _check_git( 197 | self, 198 | json_data: JSONType, 199 | json_vars: t.Dict[str, JSONType], 200 | external_data: ExternalGitRepo, 201 | ): 202 | checker_data = external_data.checker_data 203 | results = await self._query_sequence( 204 | self._read_q_seq(checker_data, ["tag", "commit", "version", "timestamp"]), 205 | json_vars, 206 | json_data, 207 | ) 208 | new_version = ExternalGitRef( 209 | url=external_data.current_version.url, 210 | commit=results.get("commit"), 211 | tag=results.get("tag"), 212 | branch=None, 213 | version=results.get("version"), 214 | timestamp=parse_timestamp(results.get("timestamp")), 215 | ) 216 | 217 | if new_version.commit is None: 218 | new_version = await new_version.fetch_remote() 219 | 220 | external_data.set_new_version(new_version) 221 | -------------------------------------------------------------------------------- /src/checkers/pypichecker.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from datetime import datetime 3 | import re 4 | import typing as t 5 | 6 | from packaging.version import Version as PackagingVersion 7 | 8 | from ..lib import OPERATORS_SCHEMA 9 | from ..lib.externaldata import ExternalFile, ExternalBase 10 | from ..lib.checksums import MultiDigest 11 | from ..lib.utils import filter_versioned_items 12 | from ..lib.errors import CheckerQueryError 13 | from . 
import Checker 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | PYPI_INDEX = "https://pypi.org/pypi" 18 | BDIST_RE = re.compile(r"^(\S+)-(\d[\d\.\w]*\d)-(\S+)-(\S+)-(\S+).whl$") 19 | 20 | 21 | # We want to keep the original version string on the parsed version object 22 | class Version(PackagingVersion): 23 | def __init__(self, version: str): 24 | super().__init__(version) 25 | self.orig_str = version 26 | 27 | 28 | def _filter_downloads( 29 | pypi_releases: t.Dict[str, t.List[t.Dict]], 30 | constraints: t.List[t.Tuple[str, Version]], 31 | packagetype: str, 32 | stable_only: bool = False, 33 | ) -> t.Generator[t.Tuple[Version, t.Dict, datetime], None, None]: 34 | releases = filter_versioned_items( 35 | ((Version(v), d) for v, d in pypi_releases.items()), 36 | constraints, 37 | to_version=lambda r: r[0], 38 | sort=True, 39 | ) 40 | for pypi_version, pypi_downloads in releases: 41 | if stable_only and pypi_version.pre: 42 | continue 43 | for download in pypi_downloads: 44 | if download["packagetype"] != packagetype: 45 | continue 46 | if download["python_version"] not in ["source", "py3", "py2.py3"]: 47 | continue 48 | if download["packagetype"] == "bdist_wheel": 49 | # Make sure we get only noarch wheels 50 | if not download["filename"].endswith("-any.whl"): 51 | continue 52 | date = datetime.fromisoformat(download["upload_time_iso_8601"].rstrip("Z")) 53 | yield (pypi_version, download, date) 54 | 55 | 56 | class PyPIChecker(Checker): 57 | CHECKER_DATA_TYPE = "pypi" 58 | CHECKER_DATA_SCHEMA = { 59 | "type": "object", 60 | "properties": { 61 | "name": {"type": "string"}, 62 | "packagetype": {"type": "string", "enum": ["sdist", "bdist_wheel"]}, 63 | "versions": OPERATORS_SCHEMA, 64 | "stable-only": {"type": "boolean"}, 65 | }, 66 | "required": ["name"], 67 | } 68 | 69 | async def check(self, external_data: ExternalBase): 70 | package_name = external_data.checker_data["name"] 71 | package_type = external_data.checker_data.get("packagetype", "sdist") 72 | constraints = [ 73 | (o, Version(v)) 74 | for o, v in external_data.checker_data.get("versions", {}).items() 75 | ] 76 | stable_only = external_data.checker_data.get("stable-only", True) 77 | 78 | async with self.session.get(f"{PYPI_INDEX}/{package_name}/json") as response: 79 | pypi_data = await response.json() 80 | 81 | if constraints: 82 | releases = pypi_data["releases"] 83 | else: 84 | releases = {pypi_data["info"]["version"]: pypi_data["urls"]} 85 | 86 | downloads = list( 87 | _filter_downloads(releases, constraints, package_type, stable_only) 88 | ) 89 | 90 | try: 91 | pypi_version, pypi_download, pypi_date = downloads[-1] 92 | except IndexError as err: 93 | raise CheckerQueryError( 94 | f"Couldn't find {package_type} for package {package_name}" 95 | ) from err 96 | 97 | checksum = MultiDigest.from_source(pypi_download["digests"]) 98 | 99 | new_version = ExternalFile( 100 | url=pypi_download["url"], 101 | checksum=checksum, 102 | size=pypi_download["size"], 103 | version=pypi_version.orig_str, 104 | timestamp=pypi_date, 105 | ) 106 | external_data.set_new_version(new_version) 107 | -------------------------------------------------------------------------------- /src/checkers/rpmrepochecker.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import logging 3 | 4 | from yarl import URL 5 | 6 | from ..lib.externaldata import ExternalBase, ExternalFile 7 | from ..lib.checksums import MultiDigest 8 | from ..lib.utils import FallbackVersion 9 | from . 
import Checker, XMLElement 10 | 11 | 12 | log = logging.getLogger(__name__) 13 | 14 | 15 | class RPMRepoChecker(Checker): 16 | CHECKER_DATA_TYPE = "rpm-repo" 17 | CHECKER_DATA_SCHEMA = { 18 | "type": "object", 19 | "properties": { 20 | "root": {"type": "string", "format": "uri"}, 21 | "package-name": {"type": "string"}, 22 | }, 23 | "required": ["root", "package-name"], 24 | } 25 | _XMLNS = { 26 | "": "http://linux.duke.edu/metadata/common", 27 | "repo": "http://linux.duke.edu/metadata/repo", 28 | "rpm": "http://linux.duke.edu/metadata/rpm", 29 | } 30 | 31 | @classmethod 32 | def _file_from_xml(cls, rpm: XMLElement, repo_root: URL): 33 | def child_prop(child: str, prop: str): 34 | child_el = rpm.find(child, cls._XMLNS) 35 | assert child_el is not None, child 36 | value = child_el.get(prop) 37 | assert value is not None, prop 38 | return value 39 | 40 | digests = {} 41 | for cs_elem in rpm.findall("checksum", cls._XMLNS): 42 | cs_elem_type = cs_elem.get("type") 43 | if cs_elem_type: 44 | digests[cs_elem_type] = cs_elem.text 45 | 46 | return ExternalFile( 47 | url=str(repo_root.join(URL(child_prop("location", "href")))), 48 | checksum=MultiDigest.from_source(digests), 49 | size=int(child_prop("size", "archive")), 50 | version=child_prop("version", "ver"), 51 | timestamp=datetime.utcfromtimestamp(int(child_prop("time", "file"))), 52 | ) 53 | 54 | async def check(self, external_data: ExternalBase): 55 | assert self.should_check(external_data) 56 | 57 | repo_root = URL(external_data.checker_data["root"].rstrip("/") + "/") 58 | package_name = external_data.checker_data["package-name"] 59 | package_arch = external_data.arches[0] 60 | 61 | repomd_xml_url = repo_root.join(URL("repodata/repomd.xml")) 62 | repomd_xml = await self._get_xml(repomd_xml_url) 63 | 64 | primary_location_el = repomd_xml.find( 65 | 'repo:data[@type="primary"]/repo:location', 66 | namespaces=self._XMLNS, 67 | ) 68 | assert primary_location_el is not None 69 | primary_location_href = primary_location_el.get("href") 70 | assert primary_location_href is not None 71 | 72 | primary_xml_url = repo_root.join(URL(primary_location_href)) 73 | primary_xml = await self._get_xml(primary_xml_url) 74 | 75 | log.debug("Looking up package %s arch %s", package_name, package_arch) 76 | external_files = [] 77 | for package_el in primary_xml.findall( 78 | f'package[name="{package_name}"][arch="{package_arch}"]', 79 | namespaces=self._XMLNS, 80 | ): 81 | external_files.append(self._file_from_xml(package_el, repo_root)) 82 | 83 | new_version = max(external_files, key=lambda e: FallbackVersion(e.version)) 84 | 85 | external_data.set_new_version(new_version) 86 | -------------------------------------------------------------------------------- /src/checkers/rustchecker.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import logging 3 | import re 4 | 5 | import toml 6 | 7 | from ..lib.externaldata import ExternalBase, ExternalFile 8 | from ..lib.checksums import MultiDigest 9 | from . 
import Checker 10 | 11 | log = logging.getLogger(__name__) 12 | 13 | 14 | VERSION_RE = re.compile(r"^(\S+)\s+\((\S+)\s+(\S+)\)") 15 | 16 | 17 | class RustChecker(Checker): 18 | CHECKER_DATA_TYPE = "rust" 19 | CHECKER_DATA_SCHEMA = { 20 | "type": "object", 21 | "properties": { 22 | "channel": {"type": "string", "enum": ["stable", "beta", "nightly"]}, 23 | "package": {"type": "string"}, 24 | "target": {"type": "string"}, 25 | }, 26 | "required": ["package", "target"], 27 | } 28 | 29 | async def check(self, external_data: ExternalBase): 30 | assert self.should_check(external_data) 31 | 32 | channel = external_data.checker_data.get("channel", "stable") 33 | package_name = external_data.checker_data["package"] 34 | target_name = external_data.checker_data["target"] 35 | 36 | url = f"https://static.rust-lang.org/dist/channel-rust-{channel}.toml" 37 | 38 | async with self.session.get(url) as response: 39 | data = toml.loads(await response.text()) 40 | 41 | package = data["pkg"][package_name] 42 | target = package["target"][target_name] 43 | 44 | release_date = datetime.datetime.fromisoformat(data["date"]) 45 | version_match = VERSION_RE.match(package["version"]) 46 | assert version_match 47 | version, _, _ = version_match.groups() 48 | if channel == "nightly": 49 | appstream_version = "{0}-{1:%Y%m%d}".format(version, release_date) 50 | else: 51 | appstream_version = version 52 | 53 | if target["available"]: 54 | new_version = ExternalFile( 55 | url=target["xz_url"], 56 | checksum=MultiDigest(sha256=target["xz_hash"]), 57 | size=None, 58 | version=appstream_version, 59 | timestamp=release_date, 60 | ) 61 | external_data.set_new_version(new_version) 62 | -------------------------------------------------------------------------------- /src/checkers/snapcraftchecker.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import logging 3 | import hashlib 4 | 5 | from ..lib.externaldata import ExternalBase, ExternalFile 6 | from ..lib.checksums import MultiHash 7 | from . 
import Checker 8 | 9 | log = logging.getLogger(__name__) 10 | 11 | 12 | class SnapcraftChecker(Checker): 13 | CHECKER_DATA_TYPE = "snapcraft" 14 | CHECKER_DATA_SCHEMA = { 15 | "type": "object", 16 | "properties": { 17 | "name": {"type": "string"}, 18 | "channel": {"type": "string"}, 19 | }, 20 | "required": ["name", "channel"], 21 | } 22 | 23 | _arches = {"x86_64": "amd64", "aarch64": "arm64", "arm": "armhf", "i386": "i386"} 24 | _BLOCK_SIZE = 65536 25 | 26 | async def _get_digests(self, url: str, sha3_384: str): 27 | assert self.session is not None 28 | multihash = MultiHash() 29 | sha3 = hashlib.sha3_384() 30 | 31 | async with self.session.get(url) as response: 32 | async for data in response.content.iter_chunked(self._BLOCK_SIZE): 33 | multihash.update(data) 34 | sha3.update(data) 35 | 36 | if sha3.hexdigest() == sha3_384: 37 | return multihash.hexdigest() 38 | 39 | async def check(self, external_data: ExternalBase): 40 | assert self.should_check(external_data) 41 | 42 | name = external_data.checker_data["name"] 43 | channel = external_data.checker_data["channel"] 44 | 45 | url = f"http://api.snapcraft.io/v2/snaps/info/{name}" 46 | header = {"Snap-Device-Series": "16"} 47 | 48 | async with self.session.get(url, headers=header) as response: 49 | js = await response.json() 50 | 51 | data = [ 52 | x 53 | for x in js["channel-map"] 54 | if x["channel"]["architecture"] == self._arches[external_data.arches[0]] 55 | and x["channel"]["name"] == channel 56 | ][0] 57 | 58 | if external_data.current_version.url != data["download"]["url"]: 59 | log.debug("Downloading file from %s; may take a while", data["download"]["url"]) 60 | multidigest = await self._get_digests( 61 | data["download"]["url"], data["download"]["sha3-384"] 62 | ) 63 | 64 | if multidigest: 65 | new_version = ExternalFile( 66 | url=data["download"]["url"], 67 | checksum=multidigest, 68 | size=data["download"]["size"], 69 | version=data["version"], 70 | timestamp=datetime.datetime.strptime( 71 | data["channel"]["released-at"], "%Y-%m-%dT%H:%M:%S.%f%z" 72 | ), 73 | ) 74 | 75 | external_data.set_new_version(new_version) 76 | -------------------------------------------------------------------------------- /src/checkers/urlchecker.py: -------------------------------------------------------------------------------- 1 | # URL Checker: verifies if an external data URL is still accessible. Does not need an 2 | # x-checker-data entry and works with all external data types that have a URL. However, 3 | # if you're dealing with a generic link that redirects to a versioned archive that 4 | # changes, e.g.: 5 | # 6 | # http://example.com/last-version -> http://example.com/prog_1.2.3.gz 7 | # 8 | # Then you can specify some metadata in the manifest file to tell the checker where 9 | # to look: 10 | # 11 | # "x-checker-data": { 12 | # "type": "rotating-url", 13 | # "url": "http://example.com/last-version" 14 | # } 15 | # 16 | # Copyright © 2018-2019 Endless Mobile, Inc. 17 | # 18 | # Authors: 19 | # Joaquim Rocha 20 | # Will Thompson 21 | # 22 | # This program is free software; you can redistribute it and/or modify 23 | # it under the terms of the GNU General Public License as published by 24 | # the Free Software Foundation; either version 2 of the License, or 25 | # (at your option) any later version. 26 | # 27 | # This program is distributed in the hope that it will be useful, 28 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 29 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the 30 | # GNU General Public License for more details. 31 | # 32 | # You should have received a copy of the GNU General Public License along 33 | # with this program; if not, write to the Free Software Foundation, Inc., 34 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 35 | import logging 36 | import re 37 | import tempfile 38 | 39 | from ..lib.externaldata import ExternalBase, ExternalData 40 | from ..lib import utils, NETWORK_ERRORS, HTTP_CLIENT_HEADERS 41 | from ..lib.errors import CheckerFetchError 42 | from . import Checker 43 | 44 | log = logging.getLogger(__name__) 45 | 46 | 47 | def extract_version(checker_data, url): 48 | """ 49 | If checker_data contains a "pattern", matches 'url' against it and returns the 50 | first capture group (which is assumed to be the version number). 51 | """ 52 | try: 53 | pattern = checker_data["pattern"] 54 | except KeyError: 55 | return None 56 | 57 | m = re.match(pattern, url) 58 | if m is None: 59 | return None 60 | 61 | return m.group(1) 62 | 63 | 64 | class URLChecker(Checker): 65 | PRIORITY = 99 66 | CHECKER_DATA_TYPE = "rotating-url" 67 | CHECKER_DATA_SCHEMA = { 68 | "type": "object", 69 | "properties": { 70 | "url": {"type": "string", "format": "uri"}, 71 | "pattern": {"type": "string", "format": "regex"}, 72 | "strip-query": {"type": "boolean"}, 73 | }, 74 | "required": ["url"], 75 | } 76 | 77 | @classmethod 78 | def should_check(cls, external_data: ExternalBase): 79 | return isinstance(external_data, ExternalData) and ( 80 | external_data.checker_data.get("type") == cls.CHECKER_DATA_TYPE 81 | or external_data.type == external_data.Type.EXTRA_DATA 82 | ) 83 | 84 | async def validate_checker_data(self, external_data: ExternalBase): 85 | if external_data.checker_data.get("type") == self.CHECKER_DATA_TYPE: 86 | return await super().validate_checker_data(external_data) 87 | return None 88 | 89 | async def check(self, external_data: ExternalBase): 90 | assert self.should_check(external_data) 91 | 92 | is_rotating = external_data.checker_data.get("type") == self.CHECKER_DATA_TYPE 93 | if is_rotating: 94 | url = external_data.checker_data["url"] 95 | else: 96 | url = external_data.current_version.url 97 | 98 | strip_query = external_data.checker_data.get("strip-query", False) 99 | 100 | version_string = None 101 | 102 | try: 103 | if strip_query: 104 | async with self.session.head( 105 | url, allow_redirects=True, headers=HTTP_CLIENT_HEADERS 106 | ) as head: 107 | url = str(head.url.with_query("")) 108 | 109 | if url.endswith(".AppImage"): 110 | with tempfile.NamedTemporaryFile("w+b") as tmpfile: 111 | new_version = await utils.get_extra_data_info_from_url( 112 | url, session=self.session, dest_io=tmpfile 113 | ) 114 | version_string = await utils.extract_appimage_version( 115 | tmpfile, 116 | ) 117 | elif url.endswith(".deb"): 118 | with tempfile.NamedTemporaryFile("w+b") as tmpfile: 119 | new_version = await utils.get_extra_data_info_from_url( 120 | url, session=self.session, dest_io=tmpfile 121 | ) 122 | version_string = utils.extract_deb_version( 123 | tmpfile, 124 | ) 125 | else: 126 | new_version = await utils.get_extra_data_info_from_url( 127 | url, session=self.session 128 | ) 129 | except NETWORK_ERRORS as err: 130 | if not is_rotating: 131 | external_data.state |= external_data.State.BROKEN 132 | raise CheckerFetchError from err 133 | 134 | if is_rotating and not version_string: 135 | version_string = extract_version( 136 | external_data.checker_data, 137 | new_version.url, 138 | ) 139 | 140 | if version_string is not 
None: 141 | log.debug("%s is version %s", external_data.filename, version_string) 142 | new_version = new_version._replace( # pylint: disable=no-member 143 | version=version_string 144 | ) 145 | 146 | if not is_rotating: 147 | new_version = new_version._replace(url=url) # pylint: disable=no-member 148 | 149 | external_data.set_new_version( 150 | new_version, 151 | is_update=( 152 | is_rotating and external_data.current_version.url != new_version.url 153 | ), 154 | ) 155 | -------------------------------------------------------------------------------- /src/lib/__init__.py: -------------------------------------------------------------------------------- 1 | import re 2 | import operator 3 | 4 | import aiohttp 5 | 6 | 7 | TIMEOUT_CONNECT = 10 8 | TIMEOUT_TOTAL = 60 * 10 9 | 10 | # With the default urllib User-Agent, dl.discordapp.net returns 403 11 | USER_AGENT = ( 12 | "flatpak-external-data-checker/1.0 " 13 | "(+https://github.com/flathub-infra/flatpak-external-data-checker)" 14 | ) 15 | 16 | HTTP_CLIENT_HEADERS = {"User-Agent": USER_AGENT} 17 | 18 | HTTP_CHUNK_SIZE = 1024 * 64 19 | 20 | NETWORK_ERRORS = ( 21 | aiohttp.ClientError, 22 | aiohttp.ServerConnectionError, 23 | aiohttp.ServerDisconnectedError, 24 | aiohttp.ServerTimeoutError, 25 | ) 26 | 27 | WRONG_CONTENT_TYPES_FILE = [ 28 | re.compile(r"^text/html$"), 29 | re.compile(r"^application/xhtml(\+.+)?$"), 30 | ] 31 | WRONG_CONTENT_TYPES_ARCHIVE = [ 32 | re.compile(r"^text/.*$"), 33 | ] + WRONG_CONTENT_TYPES_FILE 34 | 35 | FILE_URL_SCHEMES = ["http", "https"] 36 | 37 | OPERATORS = { 38 | "<": operator.lt, 39 | "<=": operator.le, 40 | ">": operator.gt, 41 | ">=": operator.ge, 42 | "==": operator.eq, 43 | "!=": operator.ne, 44 | } 45 | OPERATORS_SCHEMA = { 46 | "type": "object", 47 | "properties": {o: {"type": "string"} for o in list(OPERATORS)}, 48 | "additionalProperties": False, 49 | "minProperties": 1, 50 | } 51 | -------------------------------------------------------------------------------- /src/lib/appdata.py: -------------------------------------------------------------------------------- 1 | # Copyright © 2019 Endless Mobile, Inc. 2 | # 3 | # Authors: 4 | # Will Thompson 5 | # 6 | # This program is free software; you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation; either version 2 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License along 17 | # with this program; if not, write to the Free Software Foundation, Inc., 18 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 19 | 20 | """ 21 | Add a new <release> at the start of the <releases> element in an appdata file, 22 | preserving as much formatting as is feasible and inserting that element if it is 23 | missing. 
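
As a quick illustration of the behaviour this docstring describes, here is a minimal sketch of driving `add_release` (defined in this module) with in-memory streams — an editor's addition; the input XML is hypothetical test data, not a file from this repository:

```python
# Hypothetical usage sketch for src/lib/appdata.py's add_release().
import io

from src.lib.appdata import add_release

src = io.BytesIO(
    b'<?xml version="1.0" encoding="utf-8"?>\n'
    b"<component>\n"
    b"  <releases>\n"
    b'    <release version="1.0" date="2020-01-01"/>\n'
    b"  </releases>\n"
    b"</component>\n"
)
with io.BytesIO() as dst:
    # Prepends a <release version="1.1" date="2021-01-01"> entry
    # to the existing <releases> element.
    add_release(src, dst, version="1.1", date="2021-01-01")
    print(dst.getvalue().decode("utf-8"))
```
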
24 | """ 25 | 26 | import io 27 | import sys 28 | import typing as t 29 | 30 | # pylint: disable=wrong-import-position 31 | if sys.version_info >= (3, 10): 32 | from typing import TypeAlias 33 | else: 34 | from typing_extensions import TypeAlias 35 | # pylint: enable=wrong-import-position 36 | 37 | import lxml.etree as ElementTree 38 | 39 | 40 | XMLElement: TypeAlias = ElementTree._Element # pylint: disable=protected-access 41 | 42 | DEFAULT_INDENT = " " 43 | 44 | 45 | def _fill_padding(ele: XMLElement): 46 | parent = ele.getparent() 47 | assert parent is not None 48 | index = parent.index(ele) 49 | level = sum(1 for _ in ele.iterancestors()) 50 | if len(parent) > 1: 51 | if index == len(parent) - 1: 52 | if len(parent) > 2: 53 | ele.tail = parent[index - 1].tail 54 | parent[index - 1].tail = parent[index - 2].tail 55 | else: 56 | ele.tail = "\n" + DEFAULT_INDENT * (level - 1) 57 | parent[index - 1].tail = parent.text 58 | else: 59 | ele.tail = parent.text 60 | else: 61 | parent.text = "\n" + DEFAULT_INDENT * level 62 | ele.tail = "\n" + DEFAULT_INDENT * (level - 1) 63 | 64 | 65 | def add_release( 66 | src: t.Union[t.IO, str], 67 | dst: t.Union[t.IO, str], 68 | version: str, 69 | date: str, 70 | ): 71 | parser = ElementTree.XMLParser(load_dtd=False, resolve_entities=False) 72 | tree = ElementTree.parse(src, parser=parser) 73 | root = tree.getroot() 74 | 75 | releases = root.find("releases") 76 | 77 | if releases is None: 78 | releases = ElementTree.Element("releases") 79 | root.append(releases) 80 | _fill_padding(releases) 81 | 82 | release = ElementTree.Element("release", version=version, date=date) 83 | releases.insert(0, release) 84 | _fill_padding(release) 85 | 86 | description = ElementTree.Element("description") 87 | 88 | # Give <description> a closing </description> rather than it being 89 | # self-closing 90 | description.text = "" 91 | 92 | # Indent the opening <description> tag one level 93 | # deeper than the <release> tag. 
94 | if releases.text: 95 | release.text = "\n" + ((releases.text[1::2]) * 3) 96 | 97 | # Indent the closing </release> tag by the same amount as the opening 98 | # <release> tag (which we know to be the first child of <releases> since 99 | # we just prepended it above) 100 | description.tail = releases.text 101 | release.append(description) 102 | 103 | tree.write( 104 | dst, 105 | # XXX: lxml uses single quotes for doctype line if generated with 106 | # xml_declaration=True, 107 | doctype='<?xml version="1.0" encoding="utf-8"?>', # type: ignore[call-arg] 108 | encoding="utf-8", 109 | pretty_print=True, 110 | ) 111 | 112 | 113 | def add_release_to_file(appdata_path: str, version: str, date: str): 114 | with io.BytesIO() as buf: 115 | add_release(appdata_path, buf, version, date) 116 | 117 | with open(appdata_path, "wb") as f: 118 | f.write(buf.getvalue()) 119 | -------------------------------------------------------------------------------- /src/lib/checksums.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import hashlib 4 | import logging 5 | import typing as t 6 | 7 | from .errors import SourceUpdateError 8 | 9 | 10 | log = logging.getLogger(__name__) 11 | 12 | 13 | class MultiDigest(t.NamedTuple): 14 | md5: t.Optional[str] = None 15 | sha1: t.Optional[str] = None 16 | sha256: t.Optional[str] = None 17 | sha512: t.Optional[str] = None 18 | 19 | @classmethod 20 | def from_source(cls, source: t.Dict) -> MultiDigest: 21 | # pylint: disable=no-member 22 | digests = {k: source[k] for k in cls._fields if k in source} 23 | assert digests, source 24 | return cls(**digests) 25 | 26 | @property 27 | def digests(self) -> t.Set[str]: 28 | # pylint: disable=no-member 29 | return {k for k in self._fields if getattr(self, k) is not None} 30 | 31 | def __eq__(self, other): 32 | assert isinstance(other, type(self)), other 33 | # Iterate digest types from strongest to weakest, 34 | # if both sides have a type in common - compare it and return result 35 | for kind in reversed(self._fields): # pylint: disable=no-member 36 | self_digest = getattr(self, kind) 37 | other_digest = getattr(other, kind) 38 | if self_digest is not None and other_digest is not None: 39 | return self_digest == other_digest 40 | # If no common digest type found, we can't compare, raise an error 41 | raise ValueError(f"No common digest type for {self} and {other}") 42 | 43 | def __ne__(self, other): 44 | return not self == other 45 | 46 | def update_source(self, source: t.Dict): 47 | # Find digest types that are both not null in self and set in the source 48 | to_update = { 49 | kind: digest 50 | for kind, digest in self._asdict().items() # pylint: disable=no-member 51 | if kind in source and digest is not None 52 | } 53 | if not to_update: 54 | # We don't have a common digest type with the source, bail out 55 | raise SourceUpdateError(f"No matching digest type for {self} in {source}") 56 | log.debug("Updating %s in %s", to_update.keys(), source) 57 | source.update(to_update) 58 | 59 | 60 | class MultiHash: 61 | __slots__ = ("md5", "sha1", "sha256", "sha512") 62 | 63 | def __init__(self, *args, **kwargs): 64 | self.md5 = hashlib.md5(*args, **kwargs) # nosec 65 | self.sha1 = hashlib.sha1(*args, **kwargs) # nosec 66 | self.sha256 = hashlib.sha256(*args, **kwargs) 67 | self.sha512 = hashlib.sha512(*args, **kwargs) 68 | 69 | def update(self, data): 70 | self.md5.update(data) 71 | self.sha1.update(data) 72 | self.sha256.update(data) 73 | self.sha512.update(data) 74 | 75 | def hexdigest(self): 76 | return MultiDigest( 77 | 
md5=self.md5.hexdigest(), 78 | sha1=self.sha1.hexdigest(), 79 | sha256=self.sha256.hexdigest(), 80 | sha512=self.sha512.hexdigest(), 81 | ) 82 | -------------------------------------------------------------------------------- /src/lib/errors.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | 3 | 4 | class FlatpakExternalDataCheckerError(Exception): 5 | """Base class for errors in the program""" 6 | 7 | def __init__(self, message: t.Optional[str] = None): 8 | super().__init__(message) 9 | self.message = message or self.__doc__ 10 | 11 | def __str__(self): 12 | if self.__cause__ is not None: 13 | return f"{self.message}: {self.__cause__}" 14 | return self.message 15 | 16 | 17 | class ManifestError(FlatpakExternalDataCheckerError): 18 | """Error processing flatpak-builder manifest""" 19 | 20 | 21 | class ManifestLoadError(ManifestError): 22 | """Error loading flatpak-builder manifest""" 23 | 24 | 25 | class ManifestFileOpenError(ManifestLoadError): 26 | """Can't open manifest file""" 27 | 28 | 29 | class ManifestFileTooLarge(ManifestLoadError): 30 | """Manifest file size is too big""" 31 | 32 | 33 | class ManifestUpdateError(ManifestError): 34 | """Error updating flatpak-builder manifest""" 35 | 36 | 37 | class SourceLoadError(ManifestLoadError): 38 | """Error loading flatpak-builder source item""" 39 | 40 | 41 | class SourceUnsupported(SourceLoadError): 42 | """Don't know how to handle flatpak-builder source item""" 43 | 44 | 45 | class SourceUpdateError(ManifestUpdateError): 46 | """Error updating flatpak-builder source""" 47 | 48 | 49 | class AppdataError(ManifestError): 50 | """Error processing metainfo.xml""" 51 | 52 | 53 | class AppdataLoadError(AppdataError, ManifestLoadError): 54 | """Error loading metainfo.xml""" 55 | 56 | 57 | class AppdataNotFound(AppdataLoadError): 58 | """Can't find metainfo.xml""" 59 | 60 | 61 | class AppdataUpdateError(AppdataError, ManifestUpdateError): 62 | """Error updating metainfo.xml""" 63 | 64 | 65 | class CheckerError(FlatpakExternalDataCheckerError): 66 | """Error checking a flatpak-builder source""" 67 | 68 | 69 | class CheckerMetadataError(CheckerError): 70 | """Error processing checker metadata""" 71 | 72 | 73 | class CheckerRemoteError(CheckerError): 74 | """Error processing remote data""" 75 | 76 | 77 | class CheckerQueryError(CheckerRemoteError): 78 | """Error querying for new versions""" 79 | 80 | 81 | class CheckerFetchError(CheckerRemoteError): 82 | """Error downloading upstream source""" 83 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flathub-infra/flatpak-external-data-checker/f7f982bd184a42c33d233d63214770d7479baf4f/tests/__init__.py -------------------------------------------------------------------------------- /tests/com.google.Chrome.yaml: -------------------------------------------------------------------------------- 1 | app-id: com.google.Chrome 2 | modules: 3 | - name: chrome 4 | sources: 5 | - type: extra-data 6 | filename: chrome.deb 7 | url: https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb 8 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 9 | size: 0 10 | -------------------------------------------------------------------------------- /tests/com.jetbrains.PhpStorm.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "com.jetbrains.PhpStorm", 3 | "modules": [ 4 | { 5 | "name": "phpstorm", 6 | "buildsystem": "simple", 7 | "build-commands": [], 8 | "sources": [ 9 | { 10 | "type": "extra-data", 11 | "filename": "phpstorm.tar.gz", 12 | "url": "https://download.jetbrains.com/webide/PhpStorm-2019.2.5.tar.gz", 13 | "sha256": "f26c6c81e161ba1ab2b9c78316d3689ba53a69c327bb74a1bfb31332c98e3d1d", 14 | "size": 357405037, 15 | "x-checker-data": { 16 | "type": "jetbrains", 17 | "code": "PS" 18 | } 19 | } 20 | ] 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /tests/com.nordpass.NordPass.yaml: -------------------------------------------------------------------------------- 1 | id: com.nordpass.NordPass 2 | modules: 3 | - name: nordpass 4 | buildsystem: simple 5 | build-commands: [] 6 | sources: 7 | - type: extra-data 8 | filename: nordpass.snap 9 | url: https://api.snapcraft.io/api/v1/snaps/download/00CQ2MvSr0Ex7zwdGhCYTa0ZLMw3H6hf_23.snap 10 | sha256: 9b05211728caaabb170844aca43285079dea991358116ccb297840f84dde5464 11 | size: 64237568 12 | x-checker-data: 13 | type: snapcraft 14 | name: nordpass 15 | channel: stable 16 | -------------------------------------------------------------------------------- /tests/com.unity.UnityHub.yaml: -------------------------------------------------------------------------------- 1 | app-id: com.unity.UnityHub 2 | modules: 3 | - name: unityhub 4 | sources: 5 | - type: extra-data 6 | filename: UnityHubSetup.AppImage 7 | url: https://public-cdn.cloud.unity3d.com/hub/prod/UnityHub.AppImage 8 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 9 | size: 0 10 | -------------------------------------------------------------------------------- /tests/com.valvesoftware.Steam.yml: -------------------------------------------------------------------------------- 1 | id: com.valvesoftware.Steam 2 | modules: 3 | 4 | - name: python-modules 5 | sources: 6 | 7 | - type: file 8 | url: https://files.pythonhosted.org/packages/6d/38/c21ef5034684ffc0412deefbb07d66678332290c14bb5269c85145fbd55e/setuptools-50.3.2-py3-none-any.whl 9 | sha256: 2c242a0856fbad7efbe560df4a7add9324f340cf48df43651e9604924466794a 10 | x-checker-data: 11 | type: pypi 12 | name: setuptools 13 | packagetype: bdist_wheel 14 | 15 | - type: file 16 | url: https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz 17 | sha256: b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d 18 | x-checker-data: 19 | type: pypi 20 | name: PyYAML 21 | packagetype: sdist 22 | 23 | - type: file 24 | url: https://files.pythonhosted.org/packages/7a/c2/bf87cef932c45cb7b7a79a0a954e3307fcff209c7639182a2b9ae0127959/vdf-3.1-py2.py3-none-any.whl 25 | sha256: a5da182b3ef888d45f39862725bc7bb2836515c9fc329843001e506e73bb5cd4 26 | x-checker-data: 27 | type: pypi 28 | name: vdf 29 | versions: 30 | ==: "3.2" 31 | packagetype: bdist_wheel 32 | 33 | 34 | - type: file 35 | url: "https://files.pythonhosted.org/packages/3e/02/b09732ca4b14405ff159c470a612979acfc6e8645dc32f83ea0129709f7a/Pillow-7.2.0.tar.gz" 36 | sha256: "97f9e7953a77d5a70f49b9a48da7776dc51e9b738151b22dacf101641594a626" 37 | x-checker-data: 38 | type: pypi 39 | name: Pillow 40 | packagetype: bdist_wheel 41 | 42 | - type: file 43 | url: http://example.com/allow-prerelease 44 | sha256: x 45 | x-checker-data: 46 | type: pypi 47 | name: borgbackup 48 | 
versions: { "<": "1.2" } 49 | stable-only: false 50 | 51 | - type: file 52 | url: http://example.com/disallow-prerelease 53 | sha256: x 54 | x-checker-data: 55 | type: pypi 56 | name: borgbackup 57 | versions: { "<": "1.2" } 58 | stable-only: true 59 | -------------------------------------------------------------------------------- /tests/com.virustotal.Uploader.yml: -------------------------------------------------------------------------------- 1 | id: com.virustotal.Uploader 2 | modules: 3 | - name: extra-cmake-modules 4 | sources: 5 | - type: git 6 | url: https://invent.kde.org/frameworks/extra-cmake-modules.git 7 | tag: v5.77.0 8 | commit: 760a013c3ff553b23d456c7787ed76981ec5142f 9 | x-checker-data: 10 | type: git 11 | tag-pattern: ^v(\d[\d.]+\d)$ 12 | 13 | - name: yara 14 | sources: 15 | # Invalid (non-existent) tag specified 16 | - type: git 17 | url: https://github.com/VirusTotal/yara.git 18 | tag: "4.0.4" 19 | commit: 814b6296f4ce389c8c16b5508b56f1f3d9af554d 20 | 21 | - name: yara-python 22 | sources: 23 | # Not pinned to commit or tag, should be skipped 24 | - type: git 25 | url: https://github.com/VirusTotal/yara-python.git 26 | branch: v4.0.x 27 | 28 | - name: vt-py 29 | sources: 30 | # Current is valid, no updates 31 | - type: git 32 | url: https://github.com/VirusTotal/vt-py.git 33 | tag: "0.5.4" 34 | commit: 61ddafaa1f6fc9eaea4cbef32f6636495de22623 35 | x-checker-data: 36 | type: git 37 | tag-pattern: ^(0.5.4)$ 38 | sort-tags: false 39 | 40 | - name: jansson 41 | sources: 42 | # No commit specified, should be skipped 43 | - type: git 44 | url: "https://github.com/akheron/jansson.git" 45 | tag: v2.13.1 46 | - type: script 47 | dest-filename: autogen.sh 48 | commands: 49 | - autoreconf -ifv 50 | 51 | - name: protobuf-c 52 | sources: 53 | # Only commit specified, should be skipped 54 | - type: git 55 | url: https://github.com/VirusTotal/protobuf-c.git 56 | commit: 1390409f4ee4e26d0635310995b516eb702c3f9e 57 | 58 | - name: c-vtapi 59 | sources: 60 | # Outdated branch tip (invalid commit) 61 | - type: git 62 | url: "https://github.com/VirusTotal/c-vtapi.git" 63 | branch: master 64 | commit: 0e525b24fa028b9203fdab46478531e910702b0d 65 | 66 | - name: qt-virustotal-uploader 67 | sources: 68 | # Valid tag/commit 69 | - type: git 70 | url: "https://github.com/VirusTotal/qt-virustotal-uploader.git" 71 | tag: v1.2 72 | commit: be03674181c512b8c8935be104a42b90ccd3935a 73 | 74 | - name: bluez-qt 75 | sources: 76 | # find specific version 77 | - type: git 78 | url: https://invent.kde.org/frameworks/bluez-qt.git 79 | tag: v5.77.0 80 | commit: 760a013c3ff553b23d456c7787ed76981ec5142f 81 | x-checker-data: 82 | type: git 83 | tag-pattern: ^v(\d[\d.]+\d)$ 84 | versions: 85 | ==: "5.90.0" 86 | 87 | - name: easyeffects 88 | sources: 89 | # filter versions 90 | - type: git 91 | url: https://github.com/wwmm/easyeffects.git 92 | tag: v4.8.4 93 | commit: 9bc7dd83bf3958b171ea4b9cc0710ef34c8d50fc 94 | x-checker-data: 95 | type: git 96 | tag-pattern: ^v([\d.]+)$ 97 | versions: 98 | <: '4.8.6' 99 | -------------------------------------------------------------------------------- /tests/com.visualstudio.code.yaml: -------------------------------------------------------------------------------- 1 | id: com.visualstudio.code 2 | modules: 3 | - name: vscode 4 | sources: 5 | - type: extra-data 6 | filename: code.rpm 7 | only-arches: [x86_64] 8 | url: http://packages.microsoft.com/repos/code/pool/main/c/code/code_1.55.2-1618307277_amd64.deb 9 | sha256: 
e95b4a40c7fd77fbf197a30531101a7fc296b1f87a206e7062322404243caace 10 | size: 71023804 11 | x-checker-data: 12 | type: rpm-repo 13 | package-name: code 14 | root: https://packages.microsoft.com/yumrepos/vscode/ 15 | - type: extra-data 16 | filename: code.rpm 17 | only-arches: [aarch64] 18 | url: http://packages.microsoft.com/repos/code/pool/main/c/code/code_1.55.2-1618306574_arm64.deb 19 | sha256: d2e3fab8d4213feb583e580ab1b1022ad8a903c299ea4c03e657b0f3e55df500 20 | size: 69962978 21 | x-checker-data: 22 | type: rpm-repo 23 | package-name: code 24 | root: https://packages.microsoft.com/yumrepos/vscode/ 25 | -------------------------------------------------------------------------------- /tests/fedc.test.ElectronChecker.yml: -------------------------------------------------------------------------------- 1 | id: fedc.test.ElectronChecker 2 | modules: 3 | - name: etcher 4 | sources: 5 | - type: file 6 | url: https://github.com/balena-io/etcher/releases/download/v1.7.2/balenaEtcher-1.7.2-x64.AppImage 7 | sha512: 360142fb3e6a0b67f17f2cd1bf90e8c9663a5e4658bae9ed754081315fa8034d68cddffa6419ae4bde5eb60a9ccfed8c8236c9a939cd5b1f9131e28bdf3924c2 8 | x-checker-data: 9 | type: electron-updater 10 | url: https://github.com/balena-io/etcher/releases/download/v1.18.11/latest-linux.yml 11 | 12 | - name: Lunar-Client 13 | sources: 14 | - type: extra-data 15 | url: https://launcherupdates.lunarclientcdn.com/Lunar%20Client-2.9.2.AppImage 16 | sha256: a005879b62395fe71d7ad5b01ee7fc7fe43b59629bd83f23cee35380a13ba8c8 17 | x-checker-data: 18 | type: electron-updater 19 | url: https://launcherupdates.lunarclientcdn.com/latest-linux.yml 20 | -------------------------------------------------------------------------------- /tests/firefox-sources.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "extra-data", 4 | "filename": "firefox.tar.bz2", 5 | "url": "https://archive.mozilla.org/pub/firefox/releases/1.2.3/linux-x86_64/en-US/firefox-1.2.3.tar.bz2", 6 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 7 | "size": 0 8 | }, 9 | { 10 | "filename": "foo.xpi", 11 | "type": "extra-data", 12 | "url": "https://archive.mozilla.org/pub/firefox/releases/1.2.3/linux-x86_64/xpi/foo.xpi", 13 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 14 | "size": 0 15 | }, 16 | { 17 | "filename": "bar.xpi", 18 | "type": "extra-data", 19 | "url": "https://archive.mozilla.org/pub/firefox/releases/1.2.3/linux-x86_64/xpi/bar.xpi", 20 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 21 | "size": 0 22 | } 23 | ] 24 | -------------------------------------------------------------------------------- /tests/io.github.stedolan.jq.yml: -------------------------------------------------------------------------------- 1 | id: io.github.stedolan.jq 2 | modules: 3 | - name: jq 4 | sources: 5 | - type: archive 6 | url: https://github.com/stedolan/jq/releases/download/jq-1.4/jq-1.4.tar.gz 7 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 8 | x-checker-data: 9 | type: json 10 | url: https://api.github.com/repos/stedolan/jq/releases/latest 11 | version-query: '.tag_name | sub("^jq-"; "")' 12 | url-query: '.assets[] | select(.name=="jq-" + $version + ".tar.gz") | .browser_download_url' 13 | 14 | - type: archive 15 | dest-filename: jq-1.4.tarball.tar.gz 16 | url: https://api.github.com/repos/stedolan/jq/tarball/jq-1.4 17 | sha256: 
"0000000000000000000000000000000000000000000000000000000000000000" 18 | x-checker-data: 19 | type: json 20 | url: https://api.github.com/repos/stedolan/jq/releases/13660432 21 | version-query: '.tag_name | sub("^jq-"; "")' 22 | url-query: '.tarball_url' 23 | timestamp-query: '.published_at' 24 | modules: 25 | 26 | - name: oniguruma 27 | buildsystem: cmake-ninja 28 | sources: 29 | - type: git 30 | url: https://github.com/kkos/oniguruma.git 31 | tag: v6.9.4 32 | commit: e03900b038a274ee2f1341039e9003875c11e47d 33 | x-checker-data: 34 | type: json 35 | url: https://api.github.com/repos/kkos/oniguruma/releases/latest 36 | tag-query: '.tag_name' 37 | version-query: '$tag | sub("^[vV]"; "")' 38 | timestamp-query: '.published_at' 39 | 40 | - name: yasm 41 | sources: 42 | - type: git 43 | url: https://github.com/yasm/yasm.git 44 | x-checker-data: 45 | type: json 46 | url: https://api.github.com/repos/yasm/yasm/releases/latest 47 | tag-query: '.tag_name' 48 | version-query: '.tag_name | sub("^[vV]"; "")' 49 | 50 | - name: openal-soft 51 | sources: 52 | - type: git 53 | url: https://github.com/kcat/openal-soft.git 54 | x-checker-data: 55 | type: json 56 | url: https://api.github.com/repos/kcat/openal-soft/git/refs/tags 57 | commit-query: last | .object.sha 58 | tag-query: last | .ref | split("/") | last 59 | timestamp-data-url: last | .object.url 60 | timestamp-query: .committer.date 61 | 62 | - name: tdesktop 63 | sources: 64 | - type: git 65 | url: https://github.com/telegramdesktop/tdesktop.git 66 | tag: v2.6.0 67 | commit: 740ffb3c6426d62ac1a54e68d5a13f91479baf9a 68 | x-checker-data: 69 | type: json 70 | url: https://api.github.com/repos/telegramdesktop/tdesktop/releases/tags/v3.7.3 71 | tag-query: '.tag_name' 72 | version-query: '.tag_name | sub("^[vV]"; "")' 73 | 74 | - name: tg_owt 75 | sources: 76 | - type: git 77 | url: https://github.com/desktop-app/tg_owt.git 78 | x-checker-data: 79 | type: json 80 | parent-id: tdesktop-git-0 81 | commit-data-url: >- 82 | "https://github.com/telegramdesktop/tdesktop/raw/\($parent.new.tag)/snap/snapcraft.yaml" 83 | commit-query: .parts.webrtc."source-commit" 84 | 85 | - name: lib_webrtc 86 | sources: 87 | - type: git 88 | url: https://github.com/desktop-app/lib_webrtc.git 89 | commit: 810973807a7f492393e7f9b8ceb3a1e89dc7eb16 90 | x-checker-data: 91 | type: json 92 | url: https://httpbingo.org/json 93 | tag-query: .some_gibberish_query 94 | version-query: '$tag | sub("^[vV]"; "")' 95 | 96 | - name: tg_angle 97 | sources: 98 | - type: git 99 | url: https://github.com/desktop-app/tg_angle.git 100 | commit: 4b798c3fd44d48eb36eaee7d8ebfaf7c2b8a0229 101 | x-checker-data: 102 | type: json 103 | url: https://httpbingo.org/404 104 | tag-query: .some_gibberish_query 105 | 106 | -------------------------------------------------------------------------------- /tests/net.invisible_island.xterm.appdata.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | net.invisible_island.xterm 4 | 5 | xterm 6 | The standard terminal emulator for the X Window System 7 | 8 | CC0-1.0 9 | MIT 10 | 11 | 12 |

<description> <p> 13 | The xterm program is a terminal emulator for the X Window System. 14 | It provides DEC VT102 and Tektronix 4014 compatible terminals for programs that 15 | can't use the window system directly. 16 | </p>
17 | </description>
18 | </component>
19 | -------------------------------------------------------------------------------- /tests/net.invisible_island.xterm.yml: -------------------------------------------------------------------------------- 1 | id: net.invisible_island.xterm 2 | modules: 3 | - name: xterm 4 | sources: 5 | - type: git 6 | url: https://github.com/ThomasDickey/xterm-snapshots.git 7 | tag: xterm-364 8 | commit: 6b07755324d62a64cfcf11ef750670c2ede583ca 9 | x-checker-data: 10 | type: anitya 11 | project-id: 5272 12 | tag-template: xterm-$version 13 | is-main-source: true 14 | modules: 15 | - name: libXaw 16 | sources: &libXaw-sources 17 | - type: archive 18 | url: https://xorg.freedesktop.org/releases/individual/lib/libXaw-1.0.12.tar.bz2 19 | sha256: 96fc314874fce9979556321d1d6ee00b5baf32fb333b7278853b4983bc3cdbf6 20 | x-checker-data: 21 | type: anitya 22 | project-id: 1766 23 | url-template: https://xorg.freedesktop.org/releases/individual/lib/libXaw-$version.tar.gz 24 | 25 | - name: libXaw-32bit 26 | sources: *libXaw-sources 27 | -------------------------------------------------------------------------------- /tests/org.chromium.Chromium.yaml: -------------------------------------------------------------------------------- 1 | app-id: org.chromium.Chromium 2 | modules: 3 | - name: chromium 4 | sources: 5 | - type: archive 6 | url: https://commondatastorage.googleapis.com/chromium-browser-official/chromium-100.0.4845.0.tar.xz 7 | sha256: a68d31f77a6b7700a5161d82f5932c2822f85f7ae68ad51be3d3cf689a3fe2b0 8 | x-checker-data: 9 | type: chromium 10 | component: chromium 11 | is-main-source: true 12 | - type: archive 13 | url: https://commondatastorage.googleapis.com/chromium-browser-clang/Linux_x64/clang-libs-llvmorg-14-init-12246-g7787a8f1-2.tgz 14 | sha256: cf6b516a4e410d79439a150927fc8b450b325e2a6349395ae153c9d2dd6c6ed2 15 | x-checker-data: 16 | type: chromium 17 | component: llvm-prebuilt 18 | - type: git 19 | url: https://github.com/llvm/llvm-project 20 | commit: 01b87444cb02c38147dccc7049b32675de860d47 21 | x-checker-data: 22 | type: chromium 23 | component: llvm-git 24 | -------------------------------------------------------------------------------- /tests/org.debian.tracker.pkg.apt.yml: -------------------------------------------------------------------------------- 1 | id: org.debian.tracker.pkg.apt 2 | modules: 3 | - name: python-apt 4 | sources: 5 | - type: archive 6 | url: "http://deb.debian.org/debian/python-apt-source.tar.xz" 7 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 8 | x-checker-data: 9 | type: debian-repo 10 | root: http://deb.debian.org/debian/ 11 | dist: bookworm 12 | component: main 13 | package-name: python-apt 14 | source: true 15 | modules: 16 | 17 | - name: apt 18 | sources: 19 | - type: file 20 | only-arches: [ "aarch64" ] 21 | url: "http://deb.debian.org/debian/apt-aarch64.deb" 22 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 23 | x-checker-data: 24 | type: debian-repo 25 | root: http://deb.debian.org/debian 26 | dist: bookworm 27 | component: main 28 | package-name: apt 29 | -------------------------------------------------------------------------------- /tests/org.externaldatachecker.Manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | /* A comment, as accepted by json-glib and hence flatpak-builder */ 3 | "app-id": "org.externaldata.Checker", 4 | "branch": "stable", 5 | "runtime": "org.freedesktop.Platform", 6 | "runtime-version": "1.6", 7 | "sdk": 
"org.freedesktop.Sdk", 8 | "command": "checker", 9 | "_unused": "a multi-line string 10 | which is accepted by json-glib 11 | even though it's not valid JSON", 12 | "modules": [ 13 | "phony-shared-module.json", 14 | "non-existent-shared-module.json", 15 | "non-existent-shared-module.yml", 16 | { 17 | "name": "phony", 18 | "sources": [ 19 | { 20 | "type": "svn", 21 | "url": "nowhere" 22 | }, 23 | { 24 | "type": "extra-data", 25 | "url": "file:///etc/passwd", 26 | "sha256": "000000000000000000000000000000000000000000000000000000000000000000" 27 | }, 28 | { 29 | "type": "file", 30 | "url": "data:nothing" 31 | }, 32 | { 33 | "type": "extra-data", 34 | "filename": "invalid-checker-data", 35 | "url": "nowhere", 36 | "x-checker-data": [{}] 37 | }, 38 | { 39 | "type": "file", 40 | "dest-filename": "ignoreme.txt", 41 | "url": "https://httpbingo.org/status/200", 42 | "sha256": "000000000000000000000000000000000000000000000000000000000000000000" 43 | }, 44 | { 45 | "type": "extra-data", 46 | "only-arches": ["i386", "x86_64"], 47 | "dest-filename": "another-file.txt", 48 | "url": "https://some-gibberish-phony-url.phony/phony-text-file.txt", 49 | "sha256": "000000000000000000000000000000000000000000000000000000000000000000" 50 | }, 51 | { 52 | "type": "extra-data", 53 | "filename": "dropbox.tgz", 54 | "only-arches": ["x86_64"], 55 | "url": "https://clientupdates.dropboxstatic.com/BROKEN_LINK", 56 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 57 | "size": 1234567, 58 | "x-checker-data": { 59 | "type": "rotating-url", 60 | "url": "https://httpbingo.org/redirect-to?url=https%3A%2F%2Fhttpbingo.org%2Fbase64%2F4puE&status_code=302", 61 | "pattern": ".*base(..).*" 62 | } 63 | }, 64 | { 65 | "type": "extra-data", 66 | "filename": "relative-redirect.txt", 67 | "url": "https://httpbingo.org/status/404", 68 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 69 | "size": 0, 70 | "x-checker-data": { 71 | "type": "rotating-url", 72 | "url": "https://httpbingo.org/redirect-to?url=/base64/MzAtNTAgZmVyYWwgaG9ncyEK" 73 | } 74 | }, 75 | { 76 | "type": "extra-data", 77 | "filename": "hogs.txt", 78 | "url": "https://httpbingo.org/base64/MzAtNTAgZmVyYWwgaG9ncyEK", 79 | "sha256": "e4d67702da4eeeb2f15629b65bf6767c028a511839c14ed44d9f34479eaa2b94", 80 | "size": 18 81 | }, 82 | { 83 | "type": "extra-data", 84 | "filename": "strip-query.txt", 85 | "url": "https://httpbingo.org/response-headers", 86 | "//": "curl -s https://httpbingo.org/response-headers|sha256sum", 87 | "sha256": "93a8f337fa182b33b88e66aecb41dde58f49bfec2e059fe9e17ed227786750a0", 88 | "size": 66, 89 | "x-checker-data": { 90 | "type": "rotating-url", 91 | "url": "https://httpbingo.org/redirect-to?url=/response-headers?This-Should-Be=Removed", 92 | "strip-query": true 93 | } 94 | }, 95 | { 96 | "type": "extra-data", 97 | "filename": "http-404.txt", 98 | "url": "https://httpbingo.org/status/404", 99 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000" 100 | 101 | }, 102 | { 103 | "type": "extra-data", 104 | "filename": "http-500.txt", 105 | "url": "https://httpbingo.org/status/500", 106 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000" 107 | 108 | }, 109 | { 110 | "type": "extra-data", 111 | "filename": "incomplete-read.txt", 112 | "url": "https://httpbingo.org/response-headers?Content-Length=1000", 113 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000" 114 | 115 | }, 116 | { 117 | "type": "extra-data", 118 | "filename": 
"incomplete-read-2.txt", 119 | "url": "https://httpbin.org/response-headers?Content-Length=1000", 120 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000" 121 | 122 | }, 123 | "phony-external-source.json", 124 | "phony-external-source-single-item.json", 125 | "phony-too-large-generated-sources.json" 126 | ] 127 | }, 128 | { 129 | "name": "same-source-ref", 130 | "sources": [ 131 | /* Make sure we don't collect the same source multiple times */ 132 | "phony-external-source.json" 133 | ] 134 | } 135 | ] 136 | } 137 | -------------------------------------------------------------------------------- /tests/org.flatpak.Flatpak.yml: -------------------------------------------------------------------------------- 1 | id: org.flatpak.Flatpak 2 | modules: 3 | - name: ostree 4 | sources: 5 | - type: git 6 | url: https://github.com/ostreedev/ostree.git 7 | tag: v2020.7 8 | commit: 32a3a1297312e566df3141c6c7e3b99709e474b1 9 | x-checker-data: 10 | type: anitya 11 | project-id: 10899 12 | stable-only: true 13 | tag-template: v$version 14 | 15 | - name: flatpak 16 | sources: 17 | - type: archive 18 | url: https://github.com/flatpak/flatpak/releases/download/1.8.2/flatpak-1.8.2.tar.xz 19 | sha256: 7926625df7c2282a5ee1a8b3c317af53d40a663b1bc6b18a2dc8747e265085b0 20 | x-checker-data: 21 | type: anitya 22 | project-id: 6377 23 | stable-only: false 24 | versions: 25 | ==: "1.10.1" 26 | url-template: https://github.com/flatpak/flatpak/releases/download/$version/flatpak-$version.tar.xz 27 | 28 | - name: boost 29 | sources: 30 | - type: archive 31 | url: https://archives.boost.io/release/1.74.0/source/boost_1_74_0.tar.bz2 32 | sha256: 83bfc1507731a0906e387fc28b7ef5417d591429e51e788417fe9ff025e116b1 33 | x-checker-data: 34 | type: anitya 35 | project-id: 6845 36 | stable-only: true 37 | url-template: https://archives.boost.io/release/$version/source/boost_${major}_${minor}_$patch.tar.bz2 38 | 39 | - name: glib-networking 40 | sources: 41 | - type: archive 42 | url: https://download.gnome.org/sources/glib-networking/2.74/glib-networking-2.74.0.tar.xz 43 | sha256: 1f185aaef094123f8e25d8fa55661b3fd71020163a0174adb35a37685cda613b 44 | x-checker-data: 45 | type: anitya 46 | project-id: 21353 47 | # Leaving stable-only unset to at least exercise the default path 48 | url-template: https://download.gnome.org/sources/glib-networking/$version0.$version1/glib-networking-$version.tar.xz 49 | 50 | - name: gnuradio-iqbal 51 | buildsystem: cmake 52 | sources: 53 | - type: git 54 | url: https://github.com/osmocom/gr-iqbal.git 55 | tag: v0.38.2 56 | commit: fbee239a6fb36dd2fb564f6e6a0d393c4bc844db 57 | x-checker-data: 58 | type: anitya 59 | project-id: 14456 60 | stable-only: true 61 | # Version filter that can't match any version 62 | versions: 63 | '>': '0.38.2' 64 | '<': '0.38.1' 65 | tag-template: v$version 66 | -------------------------------------------------------------------------------- /tests/org.freedesktop.Sdk.Extension.rust-nightly.yml: -------------------------------------------------------------------------------- 1 | id: org.freedesktop.Sdk.Extension.rust-nightly 2 | modules: 3 | - name: rust 4 | buildsystem: simple 5 | sources: 6 | - type: archive 7 | only-arches: 8 | - x86_64 9 | url: https://static.rust-lang.org/dist/2020-08-15/rust-nightly-x86_64-unknown-linux-gnu.tar.gz 10 | sha256: 24b4681187654778817652273a68a4d55f5090604cd14b1f1c3ff8785ad24b99 11 | x-checker-data: 12 | type: rust 13 | package: rust 14 | channel: nightly 15 | target: x86_64-unknown-linux-gnu 16 | 
-------------------------------------------------------------------------------- /tests/org.gnome.baobab.json: -------------------------------------------------------------------------------- 1 | { 2 | "app-id" : "org.gnome.baobab", 3 | "runtime" : "org.gnome.Platform", 4 | "runtime-version" : "3.38", 5 | "sdk" : "org.gnome.Sdk", 6 | "command" : "baobab", 7 | "modules" : [ 8 | { 9 | "name" : "alleyoop", 10 | "sources" : [ 11 | { 12 | "type": "archive", 13 | "url": "https://download.gnome.org/sources/alleyoop/0.9/alleyoop-0.9.8.tar.xz", 14 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 15 | "x-checker-data": { 16 | "type": "gnome", 17 | "name": "alleyoop" 18 | } 19 | } 20 | ] 21 | }, 22 | { 23 | "name" : "pygobject", 24 | "sources" : [ 25 | { 26 | "type": "archive", 27 | "url": "https://download.gnome.org/sources/pygobject/3.36/pygobject-3.36.0.tar.xz", 28 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 29 | "x-checker-data": { 30 | "type": "gnome", 31 | "name": "pygobject", 32 | "stable-only": false, 33 | "version-scheme": "odd-minor-is-unstable", 34 | "versions": { 35 | "<": "3.38" 36 | } 37 | } 38 | } 39 | ] 40 | }, 41 | { 42 | "name" : "baobab", 43 | "sources" : [ 44 | { 45 | "type": "archive", 46 | "url": "https://download.gnome.org/sources/baobab/3.34/baobab-3.34.0.tar.xz", 47 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 48 | "x-checker-data": { 49 | "type": "gnome", 50 | "name": "baobab" 51 | } 52 | } 53 | ] 54 | }, 55 | { 56 | "name" : "gedit", 57 | "sources" : [ 58 | { 59 | "type": "archive", 60 | "url": "https://download.gnome.org/sources/gedit/40/gedit-40.1.tar.xz", 61 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 62 | "x-checker-data": { 63 | "type": "gnome", 64 | "name": "gedit" 65 | } 66 | } 67 | ] 68 | }, 69 | { 70 | "name" : "tracker", 71 | "sources" : [ 72 | { 73 | "type": "archive", 74 | "url": "https://download.gnome.org/sources/tracker/3.4/tracker-3.4.2.tar.xz", 75 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 76 | "x-checker-data": { 77 | "type": "gnome", 78 | "name": "tracker", 79 | "version-scheme": "odd-minor-is-unstable" 80 | } 81 | } 82 | ] 83 | }, 84 | { 85 | "name" : "cairo-static", 86 | "sources" : [ 87 | { 88 | "type": "archive", 89 | "url": "https://gitlab.freedesktop.org/cairo/cairo/-/archive/1.17.6/cairo-1.17.6.tar.gz", 90 | "sha256": "0000000000000000000000000000000000000000000000000000000000000000", 91 | "x-checker-data": { 92 | "type": "gnome", 93 | "name": "cairo", 94 | "version-scheme": "odd-minor-is-unstable", 95 | "versions": { 96 | ">": "9999.0.0" 97 | } 98 | } 99 | } 100 | ] 101 | } 102 | ] 103 | } 104 | -------------------------------------------------------------------------------- /tests/org.x.xeyes.yml: -------------------------------------------------------------------------------- 1 | id: org.x.xeyes 2 | modules: 3 | - name: ico 4 | sources: 5 | - type: archive 6 | url: https://www.x.org/releases/individual/app/ico-1.0.4.tar.bz2 7 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 8 | x-checker-data: 9 | type: html 10 | url: https://www.x.org/releases/individual/app/ 11 | version-pattern: ico-(1\.0\.5)\.tar\.bz2 12 | url-template: https://www.x.org/releases/individual/app/ico-$version.tar.bz2 13 | 14 | - name: libXScrnSaver 15 | sources: 16 | - type: archive 17 | url: https://www.x.org/releases/individual/lib/libXScrnSaver-1.2.2.tar.bz2 18 | sha256: 
"0000000000000000000000000000000000000000000000000000000000000000" 19 | x-checker-data: 20 | type: html 21 | url: https://www.x.org/releases/individual/lib/ 22 | pattern: (libXScrnSaver-([\d\.]+\d).tar.bz2) 23 | 24 | - name: qrupdate 25 | sources: 26 | - type: archive 27 | url: https://sourceforge.net/projects/qrupdate/files/qrupdate/1.1/qrupdate-1.1.0.tar.gz 28 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 29 | x-checker-data: 30 | type: html 31 | url: https://sourceforge.net/projects/qrupdate/rss 32 | pattern: (https://sourceforge.net/.+/qrupdate-([\d\.]+\d)\.tar\.gz)/download 33 | sort-matches: false 34 | 35 | - name: libX11 36 | sources: 37 | - type: archive 38 | url: http://some-incorrect.url/libX11.tar.gz 39 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 40 | x-checker-data: 41 | type: html 42 | url: https://www.x.org/releases/individual/lib/ 43 | version-pattern: libX11-([\d\.]+).tar.gz 44 | url-template: libX11-$version.tar.gz 45 | versions: 46 | ==: 1.7.5 47 | 48 | - name: semver 49 | sources: 50 | - type: file 51 | url: http://example.com/semver.txt 52 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 53 | x-checker-data: 54 | type: html 55 | # printf '%s\n' v1.0.0 v1.0.0+patch1 v2.0.0-rc1 v2.0.0 | base64 56 | url: http://httpbingo.org/base64/djEuMC4wCnYxLjAuMCtwYXRjaDEKdjIuMC4wLXJjMQp2Mi4wLjAK 57 | version-pattern: v(\d.*) 58 | url-template: http://httpbingo.org/base64/encode/$version 59 | versions: 60 | <: 2.0.0-alpha 61 | version-scheme: semantic 62 | 63 | - name: libFS 64 | sources: 65 | - type: archive 66 | url: http://some-incorrect.url/libFS-1.0.7.tar.bz2 67 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 68 | x-checker-data: 69 | type: html 70 | url: https://www.x.org/releases/individual/lib/ 71 | url-template: http://some-incorrect.url/libFS-$version.tar.bz2 72 | version-pattern: http://some-incorrect.url/libFS-([\d\.]+).tar.bz2 73 | 74 | - name: libdoesntexist 75 | sources: 76 | - type: extra-data 77 | filename: libdoesntexist.tar 78 | url: https://httpbingo.org/status/500 79 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 80 | x-checker-data: 81 | type: html 82 | url: https://httpbingo.org/base64/MS4wLjAK 83 | version-pattern: (\d[\d\.]+\d) 84 | url-template: https://httpbingo.org/status/404 85 | 86 | - name: parent-child 87 | sources: 88 | - type: file 89 | url: http://example.com/parent.txt 90 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 91 | x-checker-data: 92 | type: html 93 | # echo 'Version: 1.0.0' | base64 -w 0 94 | url: http://httpbingo.org/base64/VmVyc2lvbjogMS4wLjAK 95 | version-pattern: Version:\s+([\d\.]+) 96 | url-template: https://httpbingo.org/response-headers?version=$version 97 | source-id: html-parent 98 | 99 | - type: file 100 | url: http://example.com/child.txt 101 | sha256: "0000000000000000000000000000000000000000000000000000000000000000" 102 | x-checker-data: 103 | type: html 104 | url: https://httpbingo.org/response-headers?version=$parent_version 105 | version-pattern: (\d\.\d\.\d) 106 | url-template: https://httpbingo.org/response-headers?version=$version 107 | parent-id: html-parent 108 | -------------------------------------------------------------------------------- /tests/phony-external-source-single-item.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "extra-data", 3 | "url": 
"https://some-gibberish-phony-url.phony/phony-archive.tar.gz", 4 | "sha256": "000000000000000000000000000000000000000000000000000000000000000000" 5 | } 6 | -------------------------------------------------------------------------------- /tests/phony-external-source.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "extra-data", 4 | "url": "https://some-gibberish-phony-url.phony/phony-archive.tar.gz", 5 | "sha256": "000000000000000000000000000000000000000000000000000000000000000000" 6 | } 7 | ] 8 | -------------------------------------------------------------------------------- /tests/phony-shared-module.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "phony-shared-module", 3 | "sources": [ 4 | { 5 | "type": "archive", 6 | "url": "https://some-gibberish-phony-url.phony/phony-archive.tar.gz", 7 | "sha256": "000000000000000000000000000000000000000000000000000000000000000000" 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /tests/test_anityachecker.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from distutils.version import LooseVersion 4 | 5 | from src.manifest import ManifestChecker 6 | from src.lib.externaldata import ExternalFile, ExternalGitRef 7 | from src.lib.checksums import MultiDigest 8 | from src.lib.utils import init_logging 9 | 10 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "org.flatpak.Flatpak.yml") 11 | 12 | 13 | class TestAnityaChecker(unittest.IsolatedAsyncioTestCase): 14 | def setUp(self): 15 | init_logging() 16 | 17 | async def test_check(self): 18 | checker = ManifestChecker(TEST_MANIFEST) 19 | ext_data = await checker.check() 20 | 21 | self.assertEqual(len(ext_data), 5) 22 | for data in ext_data: 23 | if data.filename == "glib-networking-2.74.0.tar.xz": 24 | self.assertIsNotNone(data.new_version) 25 | self.assertIsInstance(data.new_version, ExternalFile) 26 | self.assertRegex( 27 | data.new_version.url, 28 | r"^https://download.gnome.org/sources/glib-networking/\d+.\d+/glib-networking-[\d.]+.tar.xz$", # noqa: E501 29 | ) 30 | self.assertIsNotNone(data.new_version.version) 31 | self.assertGreater( 32 | LooseVersion(data.new_version.version), LooseVersion("2.76") 33 | ) 34 | self.assertIsInstance(data.new_version.size, int) 35 | self.assertGreater(data.new_version.size, 0) 36 | self.assertIsNotNone(data.new_version.checksum) 37 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 38 | self.assertNotEqual( 39 | data.new_version.checksum, 40 | MultiDigest( 41 | sha256="1f185aaef094123f8e25d8fa55661b3fd71020163a0174adb35a37685cda613b", # noqa: E501 42 | ), 43 | ) 44 | elif data.filename == "boost_1_74_0.tar.bz2": 45 | self.assertIsNotNone(data.new_version) 46 | self.assertIsInstance(data.new_version, ExternalFile) 47 | self.assertRegex( 48 | data.new_version.url, 49 | r"^https://archives\.boost\.io/release/[\d.]+/source/boost_[\d]+_[\d]+_[\d]+.tar.bz2$", # noqa: E501 50 | ) 51 | self.assertIsNotNone(data.new_version.version) 52 | self.assertGreater( 53 | LooseVersion(data.new_version.version), LooseVersion("1.74.0") 54 | ) 55 | self.assertIsInstance(data.new_version.size, int) 56 | self.assertGreater(data.new_version.size, 0) 57 | self.assertIsNotNone(data.new_version.checksum) 58 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 59 | self.assertNotEqual( 60 | data.new_version.checksum, 61 | 
MultiDigest( 62 | sha256="83bfc1507731a0906e387fc28b7ef5417d591429e51e788417fe9ff025e116b1" # noqa: E501 63 | ), 64 | ) 65 | elif data.filename == "flatpak-1.8.2.tar.xz": 66 | self.assertIsNotNone(data.new_version) 67 | self.assertIsInstance(data.new_version, ExternalFile) 68 | self.assertRegex( 69 | data.new_version.url, 70 | r"^https://github.com/flatpak/flatpak/releases/download/[\w\d.]+/flatpak-[\w\d.]+.tar.xz$", # noqa: E501 71 | ) 72 | self.assertIsNotNone(data.new_version.version) 73 | self.assertEqual( 74 | LooseVersion(data.new_version.version), LooseVersion("1.10.1") 75 | ) 76 | self.assertIsInstance(data.new_version.size, int) 77 | self.assertGreater(data.new_version.size, 0) 78 | self.assertIsNotNone(data.new_version.checksum) 79 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 80 | self.assertNotEqual( 81 | data.new_version.checksum, 82 | MultiDigest( 83 | sha256="7926625df7c2282a5ee1a8b3c317af53d40a663b1bc6b18a2dc8747e265085b0" # noqa: E501 84 | ), 85 | ) 86 | elif data.filename == "ostree.git": 87 | self.assertIsNotNone(data.new_version) 88 | self.assertIsInstance(data.new_version, ExternalGitRef) 89 | self.assertIsNotNone(data.new_version.commit) 90 | self.assertIsNotNone(data.new_version.tag) 91 | self.assertNotEqual( 92 | data.new_version.commit, data.current_version.commit 93 | ) 94 | self.assertNotEqual(data.new_version.tag, data.current_version.tag) 95 | self.assertIsNotNone(data.new_version.version) 96 | self.assertGreater( 97 | LooseVersion(data.new_version.version), LooseVersion("2020.7") 98 | ) 99 | self.assertNotEqual( 100 | data.new_version.commit, data.current_version.commit 101 | ) 102 | elif data.filename == "gr-iqbal.git": 103 | self.assertIsNone(data.new_version) 104 | else: 105 | self.fail(f"Unknown data {data.filename}") 106 | 107 | 108 | if __name__ == "__main__": 109 | unittest.main() 110 | -------------------------------------------------------------------------------- /tests/test_appdata.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright © 2019 Endless Mobile, Inc. 3 | # 4 | # Authors: 5 | # Will Thompson 6 | # 7 | # This program is free software; you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation; either version 2 of the License, or 10 | # (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU General Public License along 18 | # with this program; if not, write to the Free Software Foundation, Inc., 19 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
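# The cases below round-trip an appdata document through add_release() with a
# fixed new version ("4.5.6") and date ("2020-02-02") and compare the
# serialized output with assertMultiLineEqual, so indentation, whitespace and
# attribute ordering are all part of what is being asserted.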
20 | 21 | import unittest 22 | from io import BytesIO 23 | 24 | from src.lib.appdata import add_release 25 | 26 | 27 | class TestAddRelease(unittest.TestCase): 28 | def _do_test(self, before, expected): 29 | in_ = BytesIO(before.encode()) 30 | out = BytesIO() 31 | add_release(in_, out, "4.5.6", "2020-02-02") 32 | # FIXME lxml pretty print always adds trailing newline 33 | self.assertMultiLineEqual(expected + "\n", out.getvalue().decode()) 34 | 35 | def test_simple(self): 36 | self._do_test( 37 | """ 38 | 39 | 40 | 41 | 42 | 43 | 44 | """.strip(), 45 | """ 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | """.strip(), 56 | ) 57 | 58 | @unittest.expectedFailure 59 | def test_four_space_no_releases_element(self): 60 | # FIXME: This ends up indenting correctly, but 61 | # and incorrectly get the default 2-space 62 | # indent. 63 | self._do_test( 64 | """ 65 | 66 | 67 | com.example.Workaround 68 | My history is a mystery 69 | 70 | """.strip(), 71 | """ 72 | 73 | 74 | com.example.Workaround 75 | My history is a mystery 76 | 77 | 78 | 79 | 80 | 81 | 82 | """.strip(), 83 | ) 84 | 85 | def test_four_space_one_prior_release(self): 86 | self._do_test( 87 | """ 88 | 89 | 90 | 91 | 92 | 93 | 94 | """.strip(), 95 | """ 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | """.strip(), 106 | ) 107 | 108 | def test_four_space_many_prior_releases(self): 109 | self._do_test( 110 | """ 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | """.strip(), 119 | """ 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | """.strip(), 131 | ) 132 | 133 | def test_mixed_indentation(self): 134 | """This input uses 3-space indentation for one existing and 4-space 135 | for another. Match the top one.""" 136 | self._do_test( 137 | """ 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | """.strip(), 146 | """ 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | """.strip(), 158 | ) 159 | 160 | @unittest.expectedFailure 161 | def test_release_attribute_ordering(self): 162 | """It would be nice to follow the attribute order on any existing s. 163 | Currently we always emit version then date. 
I checked 18 repos and it was a 164 | 10-8 split.""" 165 | self._do_test( 166 | """ 167 | 168 | 169 | 170 | 171 | 172 | 173 | """.strip(), 174 | """ 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | """.strip(), 183 | ) 184 | 185 | def test_comment(self): 186 | self._do_test( 187 | """ 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | """.strip(), 196 | """ 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | """.strip(), 208 | ) 209 | 210 | def test_no_releases(self): 211 | self._do_test( 212 | """ 213 | 214 | 215 | 216 | """.strip(), 217 | """ 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | """.strip(), 227 | ) 228 | 229 | def test_empty_releases(self): 230 | """No whitespace is generated between and .""" 231 | self._do_test( 232 | """ 233 | 234 | 235 | 236 | 237 | """.strip(), 238 | """ 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | """.strip(), 248 | # but we can live with it in this edge case for now 249 | ) 250 | 251 | def test_double_comment_within_root(self): 252 | self._do_test( 253 | """ 254 | 255 | 256 | 257 | 261 | First element needed 262 | 263 | """.strip(), 264 | """ 265 | 266 | 267 | 268 | 272 | First element needed 273 | 274 | 275 | 276 | 277 | 278 | 279 | """.strip(), 280 | ) 281 | 282 | def test_comment_outside_root(self): 283 | # appdata files often include a copyright comment between the 292 | 293 | 297 | 298 | 299 | """.strip(), 300 | """ 301 | 302 | 303 | 307 | 308 | 309 | 310 | 311 | 312 | 313 | 314 | """.strip(), 315 | ) 316 | 317 | def test_amp_as_amp(self): 318 | self._do_test( 319 | """ 320 | 321 | 322 | 🍦 & 🎂 323 | 324 | """.strip(), 325 | """ 326 | 327 | 328 | 🍦 & 🎂 329 | 330 | 331 | 332 | 333 | 334 | 335 | """.strip(), 336 | ) 337 | 338 | @unittest.expectedFailure 339 | def test_amp_as_codepoint(self): 340 | """& becomes &.""" 341 | self._do_test( 342 | """ 343 | 344 | 345 | 🦝 & 🍒 346 | 347 | """.strip(), 348 | """ 349 | 350 | 351 | 🦝 & 🍒 352 | 353 | 354 | 355 | 356 | 357 | 358 | """.strip(), 359 | ) 360 | 361 | 362 | if __name__ == "__main__": 363 | unittest.main() 364 | -------------------------------------------------------------------------------- /tests/test_chromiumchecker.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import unittest 4 | from distutils.version import LooseVersion 5 | 6 | from src.manifest import ManifestChecker 7 | from src.lib.externaldata import ( 8 | ExternalData, 9 | ExternalFile, 10 | ExternalGitRef, 11 | ExternalGitRepo, 12 | ) 13 | from src.lib.checksums import MultiDigest 14 | from src.lib.utils import init_logging 15 | 16 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "org.chromium.Chromium.yaml") 17 | 18 | 19 | class TestChromiumChecker(unittest.IsolatedAsyncioTestCase): 20 | def setUp(self): 21 | init_logging(logging.INFO) 22 | 23 | async def test_check(self): 24 | checker = ManifestChecker(TEST_MANIFEST) 25 | ext_data = await checker.check() 26 | 27 | self.assertEqual(len(ext_data), 3) 28 | for data in ext_data: 29 | self.assertIsNotNone(data.new_version) 30 | self.assertIsNotNone(data.new_version.version) 31 | self.assertGreater( 32 | LooseVersion(data.new_version.version), LooseVersion("100.0.4845.0") 33 | ) 34 | 35 | if isinstance(data, ExternalData): 36 | self.assertIsInstance(data.new_version, ExternalFile) 37 | self.assertIsNotNone(data.new_version.checksum) 38 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 39 | if data.filename.startswith("chromium-"): 40 | self.assertRegex( 41 | 
data.new_version.url, 42 | r"^https://(commondatastorage.googleapis.com/chromium-browser-official|chromium-tarballs.distfiles.gentoo.org)/chromium-[\d.]+\.tar\.xz$", # noqa: E501 43 | ) 44 | self.assertNotEqual( 45 | data.new_version.checksum, 46 | MultiDigest( 47 | sha256="a68d31f77a6b7700a5161d82f5932c2822f85f7ae68ad51be3d3cf689a3fe2b0" # noqa: E501 48 | ), 49 | ) 50 | elif data.filename.startswith("clang-"): 51 | self.assertRegex( 52 | data.new_version.url, 53 | r"^https://commondatastorage.googleapis.com/chromium-browser-clang/Linux_x64/clang-.*\.tar\.xz$", # noqa: E501 54 | ) 55 | self.assertNotEqual( 56 | data.new_version.checksum, 57 | MultiDigest( 58 | sha256="cf6b516a4e410d79439a150927fc8b450b325e2a6349395ae153c9d2dd6c6ed2" # noqa: E501 59 | ), 60 | ) 61 | else: 62 | self.fail(f"unexpected extra-data filename {data.filename}") 63 | elif isinstance(data, ExternalGitRepo): 64 | self.assertEqual(data.filename, "llvm-project") 65 | self.assertIsInstance(data.new_version, ExternalGitRef) 66 | self.assertIsNotNone(data.new_version.commit) 67 | self.assertNotEqual( 68 | data.new_version.commit, data.current_version.commit 69 | ) 70 | else: 71 | self.fail(repr(type(data))) 72 | -------------------------------------------------------------------------------- /tests/test_debianrepochecker.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import datetime 4 | 5 | from src.manifest import ManifestChecker 6 | from src.lib.utils import init_logging 7 | from src.lib.checksums import MultiDigest 8 | 9 | TEST_MANIFEST = os.path.join( 10 | os.path.dirname(__file__), "org.debian.tracker.pkg.apt.yml" 11 | ) 12 | 13 | 14 | class TestDebianRepoChecker(unittest.IsolatedAsyncioTestCase): 15 | def setUp(self): 16 | init_logging() 17 | 18 | async def test_check(self): 19 | checker = ManifestChecker(TEST_MANIFEST) 20 | ext_data = await checker.check() 21 | for data in ext_data: 22 | self.assertIsNotNone(data) 23 | self.assertIsNotNone(data.new_version) 24 | self.assertIsNotNone(data.new_version.url) 25 | self.assertIsNotNone(data.new_version.checksum) 26 | self.assertIsNotNone(data.new_version.version) 27 | self.assertIsNotNone(data.new_version.timestamp) 28 | self.assertIsInstance(data.new_version.timestamp, datetime.date) 29 | self.assertNotEqual(data.new_version.url, data.current_version.url) 30 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 31 | self.assertNotEqual( 32 | data.new_version.checksum, data.current_version.checksum 33 | ) 34 | self.assertRegex( 35 | data.new_version.url, r"http://deb.debian.org/debian/pool/main/.+" 36 | ) 37 | if data.filename == "python-apt-source.tar.xz": 38 | self.assertRegex( 39 | data.new_version.url, 40 | r"http://deb.debian.org/debian/pool/main/p/python-apt/python-apt_(\d[\d\.-]+\d).tar.xz", # noqa: E501 41 | ) 42 | elif data.filename == "apt-aarch64.deb": 43 | self.assertRegex( 44 | data.new_version.url, 45 | r"http://deb.debian.org/debian/pool/main/a/apt/apt_(\d[\d\.-]+\d)_arm64.deb", # noqa: E501 46 | ) 47 | else: 48 | self.fail(f"Unknown data {data.filename}") 49 | -------------------------------------------------------------------------------- /tests/test_electronchecker.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import datetime 4 | 5 | from src.manifest import ManifestChecker 6 | from src.lib.utils import init_logging 7 | from src.lib.checksums import MultiDigest 8 | 9 | TEST_MANIFEST = 
os.path.join(os.path.dirname(__file__), "fedc.test.ElectronChecker.yml") 10 | 11 | 12 | class TestElectronChecker(unittest.IsolatedAsyncioTestCase): 13 | def setUp(self): 14 | init_logging() 15 | 16 | async def test_check(self): 17 | checker = ManifestChecker(TEST_MANIFEST) 18 | ext_data = await checker.check() 19 | for data in ext_data: 20 | self.assertIsNotNone(data) 21 | self.assertIsNotNone(data.new_version) 22 | self.assertIsInstance(data.new_version.url, str) 23 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 24 | self.assertIsInstance(data.new_version.size, int) 25 | self.assertIsInstance(data.new_version.version, str) 26 | self.assertIsInstance(data.new_version.timestamp, datetime.date) 27 | self.assertNotEqual(data.new_version.url, data.current_version.url) 28 | -------------------------------------------------------------------------------- /tests/test_gitchecker.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import os 3 | import unittest 4 | 5 | from src.manifest import ManifestChecker 6 | from src.lib.externaldata import ExternalGitRepo, ExternalGitRef 7 | from src.lib.utils import init_logging 8 | from src.checkers.gitchecker import TagWithVersion, TagWithSemver 9 | 10 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "com.virustotal.Uploader.yml") 11 | 12 | 13 | class TestGitChecker(unittest.IsolatedAsyncioTestCase): 14 | def setUp(self): 15 | init_logging() 16 | 17 | def test_sort_tags(self): 18 | t1 = TagWithVersion("x1", "v1.1", False, "1.1") 19 | t1a = TagWithVersion("x2", "v1.1", True, "1.1") 20 | t2 = TagWithVersion("y1", "v1.1.1", False, "1.1.1") 21 | t3 = TagWithVersion("z1", "v1.2", False, "1.2") 22 | t3a = TagWithVersion("z2", "v1.2", True, "1.2") 23 | self.assertTrue(t1a <= t1 < t3 and t3 >= t3a > t1) 24 | sorted_tags = [t1a, t1, t2, t3a, t3] 25 | shaked_tags = [t1, t1a, t3, t3a, t2] 26 | self.assertEqual(sorted(shaked_tags), sorted_tags) 27 | self.assertEqual(sorted(shaked_tags, reverse=True), sorted_tags[::-1]) 28 | 29 | def test_sort_tags_semver(self): 30 | ts1 = TagWithSemver("x1", "v0.3", False, "0.3.0") 31 | ts1a = TagWithSemver("x1", "v0.3", True, "0.3.0") 32 | ts2 = TagWithSemver("x1", "v0.3.1", False, "0.3.1") 33 | ts2a = TagWithSemver("x1", "v0.3.1", True, "0.3.1") 34 | ts3 = TagWithSemver("x1", "v0.4.0-beta.1", False, "0.4.0-beta.1") 35 | ts3a = TagWithSemver("x1", "v0.4.0-beta.1", True, "0.4.0-beta.1") 36 | ts4 = TagWithSemver("x1", "v0.4.0", False, "0.4.0") 37 | ts4a = TagWithSemver("x1", "v0.4.0", True, "0.4.0") 38 | self.assertTrue(ts1a <= ts1 < ts3 and ts3 >= ts3a > ts1) 39 | sorted_tags_sem = [ts1a, ts1, ts2a, ts2, ts3a, ts3, ts4a, ts4] 40 | shaked_tags_sem = [ts2, ts1, ts4, ts1a, ts4a, ts3, ts3a, ts2a] 41 | self.assertEqual(sorted(shaked_tags_sem), sorted_tags_sem) 42 | self.assertEqual(sorted(shaked_tags_sem, reverse=True), sorted_tags_sem[::-1]) 43 | 44 | async def test_check_and_update(self): 45 | checker = ManifestChecker(TEST_MANIFEST) 46 | ext_data = await checker.check() 47 | 48 | self.assertEqual(len(ext_data), 10) 49 | for data in ext_data: 50 | self.assertIsInstance(data, ExternalGitRepo) 51 | self.assertIsInstance(data.current_version, ExternalGitRef) 52 | if data.filename == "jansson.git": 53 | self.assertEqual(data.state, data.State.UNKNOWN) 54 | self.assertIsNone(data.new_version) 55 | elif data.filename == "c-vtapi.git": 56 | self.assertEqual(data.state, data.State.BROKEN) 57 | self.assertIsNotNone(data.new_version) 58 | 
self.assertEqual(data.current_version.url, data.new_version.url) 59 | self.assertNotEqual( 60 | data.current_version.commit, data.new_version.commit 61 | ) 62 | self.assertFalse(data.current_version.matches(data.new_version)) 63 | elif data.filename == "qt-virustotal-uploader.git": 64 | self.assertIn(data.State.VALID, data.state) 65 | self.assertIsNone(data.new_version) 66 | elif data.filename == "protobuf-c.git": 67 | self.assertEqual(data.state, data.State.UNKNOWN) 68 | self.assertIsNone(data.new_version) 69 | elif data.filename == "yara.git": 70 | self.assertIn(data.State.BROKEN, data.state) 71 | self.assertIsNone(data.new_version) 72 | elif data.filename == "yara-python.git": 73 | self.assertEqual(data.state, data.State.UNKNOWN) 74 | self.assertIsNone(data.new_version) 75 | elif data.filename == "vt-py.git": 76 | self.assertIn(data.State.VALID, data.state) 77 | self.assertIsNone(data.new_version) 78 | elif data.filename == "extra-cmake-modules.git": 79 | self.assertIsNotNone(data.new_version) 80 | self.assertIsNone(data.new_version.branch) 81 | self.assertIsNotNone(data.new_version.commit) 82 | self.assertIsNotNone(data.new_version.tag) 83 | self.assertIsNotNone(data.new_version.version) 84 | self.assertNotEqual(data.new_version.tag, data.current_version.tag) 85 | self.assertNotEqual( 86 | data.new_version.commit, data.current_version.commit 87 | ) 88 | self.assertRegex(data.new_version.tag, r"^[vV][\d.]+$") 89 | self.assertRegex(data.new_version.version, r"^[\d.]+$") 90 | elif data.filename == "bluez-qt.git": 91 | self.assertEqual(data.new_version.tag, "v5.90.0") 92 | elif data.filename == "easyeffects.git": 93 | self.assertEqual(data.new_version.tag, "v4.8.5") 94 | else: 95 | self.fail(f"Unknown data {data.filename}") 96 | self._test_update_data(data, copy.deepcopy(data.source)) 97 | 98 | def _test_update_data(self, data, orig_source): 99 | data.update() 100 | if data.filename == "qt-virustotal-uploader.git": 101 | self.assertEqual(data.source, orig_source) 102 | if data.filename == "protobuf-c.git": 103 | self.assertEqual(data.source, orig_source) 104 | elif data.filename == "yara.git": 105 | self.assertEqual(data.source, orig_source) 106 | elif data.filename == "yara-python.git": 107 | self.assertEqual(data.source, orig_source) 108 | elif data.filename == "jansson.git": 109 | self.assertEqual(data.source, orig_source) 110 | elif data.filename == "c-vtapi.git": 111 | self.assertNotEqual(data.source, orig_source) 112 | self.assertEqual(data.source.keys(), orig_source.keys()) 113 | self.assertIn("commit", data.source) 114 | self.assertNotIn("tag", data.source) 115 | self.assertIn("branch", data.source) 116 | self.assertEqual(data.source["commit"], data.new_version.commit) 117 | elif data.filename == "vt-py.git": 118 | self.assertEqual(data.source, orig_source) 119 | elif data.filename == "extra-cmake-modules.git": 120 | self.assertNotEqual(data.source, orig_source) 121 | self.assertIn("tag", data.source) 122 | self.assertIn("commit", data.source) 123 | self.assertNotEqual(data.source["commit"], orig_source["commit"]) 124 | self.assertNotEqual(data.source["tag"], orig_source["tag"]) 125 | 126 | 127 | if __name__ == "__main__": 128 | unittest.main() 129 | -------------------------------------------------------------------------------- /tests/test_gnomechecker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright © 2020–2021 Maximiliano Sandoval 3 | # 4 | # Authors: 5 | # Maximiliano Sandoval 6 | # 7 | # This 
program is free software; you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation; either version 2 of the License, or 10 | # (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU General Public License along 18 | # with this program; if not, write to the Free Software Foundation, Inc., 19 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 20 | 21 | import os 22 | import unittest 23 | from distutils.version import LooseVersion 24 | 25 | from src.lib.utils import init_logging 26 | from src.manifest import ManifestChecker 27 | from src.lib.checksums import MultiDigest 28 | from src.checkers.gnomechecker import _is_stable, VersionScheme 29 | 30 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "org.gnome.baobab.json") 31 | 32 | 33 | class TestGNOMEChecker(unittest.IsolatedAsyncioTestCase): 34 | def setUp(self): 35 | init_logging() 36 | 37 | def test_is_stable(self): 38 | self.assertTrue(_is_stable("3.28.0")) 39 | self.assertTrue(_is_stable("41")) 40 | self.assertTrue(_is_stable("41.1")) 41 | self.assertTrue(_is_stable("41.2")) 42 | self.assertTrue(_is_stable("4.1")) 43 | self.assertTrue(_is_stable("4.2")) 44 | self.assertTrue(_is_stable("1.7")) 45 | self.assertTrue(_is_stable("1.2")) 46 | self.assertTrue(_is_stable("2.7")) 47 | self.assertTrue(_is_stable("2.2")) 48 | 49 | self.assertFalse(_is_stable("4.rc")) 50 | self.assertFalse(_is_stable("4.2.beta")) 51 | self.assertFalse(_is_stable("4.alpha.0")) 52 | self.assertFalse(_is_stable("48.0.alpha2")) 53 | self.assertFalse(_is_stable("48.alpha2")) 54 | 55 | def test_minor_scheme_is_stable(self): 56 | scheme = VersionScheme.ODD_MINOR_IS_UNSTABLE 57 | self.assertTrue(_is_stable("1", scheme)) 58 | self.assertTrue(_is_stable("2", scheme)) 59 | 60 | self.assertTrue(_is_stable("1.2", scheme)) 61 | self.assertTrue(_is_stable("1.2.0", scheme)) 62 | self.assertTrue(_is_stable("2.2", scheme)) 63 | self.assertTrue(_is_stable("2.2.1", scheme)) 64 | self.assertTrue(_is_stable("2.2.2", scheme)) 65 | self.assertTrue(_is_stable("1.2.2", scheme)) 66 | self.assertTrue(_is_stable("1.2.1", scheme)) 67 | self.assertTrue(_is_stable("0.10.0", scheme)) 68 | 69 | self.assertFalse(_is_stable("1.1", scheme)) 70 | self.assertFalse(_is_stable("1.1.0", scheme)) 71 | self.assertFalse(_is_stable("2.3", scheme)) 72 | self.assertFalse(_is_stable("2.1.1", scheme)) 73 | self.assertFalse(_is_stable("2.3.2", scheme)) 74 | self.assertFalse(_is_stable("1.1.2", scheme)) 75 | self.assertFalse(_is_stable("1.3.1", scheme)) 76 | self.assertFalse(_is_stable("0.11.0", scheme)) 77 | 78 | async def test_check(self): 79 | checker = ManifestChecker(TEST_MANIFEST) 80 | ext_data = await checker.check() 81 | 82 | for data in ext_data: 83 | if data.filename == "cairo-1.17.6.tar.gz": 84 | self.assertIsNone(data.new_version) 85 | continue 86 | 87 | self.assertIsNotNone(data.new_version) 88 | self.assertIsNotNone(data.new_version.checksum) 89 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 90 | self.assertNotEqual( 91 | data.new_version.checksum, 92 | MultiDigest( 93 | sha256="0000000000000000000000000000000000000000000000000000000000000000" # noqa: E501 94 | ), 95 | ) 96 | 
self.assertIsNotNone(data.new_version.version) 97 | self.assertIsInstance(data.new_version.version, str) 98 | 99 | if data.filename == "baobab-3.34.0.tar.xz": 100 | self._test_stable_only(data) 101 | elif data.filename == "pygobject-3.36.0.tar.xz": 102 | self._test_include_unstable(data) 103 | self.assertLess( 104 | LooseVersion(data.new_version.version), LooseVersion("3.38.0") 105 | ) 106 | elif data.filename == "alleyoop-0.9.8.tar.xz": 107 | self._test_non_standard_version(data) 108 | elif data.filename == "tracker-3.4.2.tar.xz": 109 | self.assertIsNotNone(data.new_version) 110 | 111 | def _test_stable_only(self, data): 112 | self.assertEqual(data.filename, "baobab-3.34.0.tar.xz") 113 | self.assertRegex( 114 | data.new_version.url, 115 | r"^https://download\.gnome\.org/sources/baobab/.+/baobab-.+\.tar\.xz$", # noqa: E501 116 | ) 117 | 118 | def _test_include_unstable(self, data): 119 | self.assertEqual(data.filename, "pygobject-3.36.0.tar.xz") 120 | self.assertRegex( 121 | data.new_version.url, 122 | r"^https://download\.gnome\.org/sources/pygobject/.+/pygobject-.+\.tar\.xz$", # noqa: E501 123 | ) 124 | 125 | def _test_non_standard_version(self, data): 126 | self.assertEqual(data.filename, "alleyoop-0.9.8.tar.xz") 127 | self.assertEqual( 128 | data.new_version.version, 129 | "0.9.8", 130 | ) 131 | 132 | 133 | if __name__ == "__main__": 134 | unittest.main() 135 | -------------------------------------------------------------------------------- /tests/test_htmlchecker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright © 2019 Bastien Nocera 3 | # Copyright © 2019 Endless Mobile, Inc. 4 | # 5 | # Authors: 6 | # Bastien Nocera 7 | # Will Thompson 8 | # 9 | # This program is free software; you can redistribute it and/or modify 10 | # it under the terms of the GNU General Public License as published by 11 | # the Free Software Foundation; either version 2 of the License, or 12 | # (at your option) any later version. 13 | # 14 | # This program is distributed in the hope that it will be useful, 15 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 16 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 17 | # GNU General Public License for more details. 18 | # 19 | # You should have received a copy of the GNU General Public License along 20 | # with this program; if not, write to the Free Software Foundation, Inc., 21 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
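# TestHTMLTools below checks HTMLChecker._get_text() decoding against
# httpbingo.org base64 endpoints, while TestHTMLChecker runs the full
# org.x.xeyes.yml manifest and dispatches per-source assertions by filename.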
22 | 23 | import os 24 | import base64 25 | import unittest 26 | from distutils.version import LooseVersion 27 | 28 | import aiohttp 29 | 30 | from src.lib.utils import init_logging 31 | from src.manifest import ManifestChecker 32 | from src.lib.checksums import MultiDigest 33 | from src.checkers.htmlchecker import HTMLChecker 34 | from src.lib.errors import CheckerError 35 | 36 | 37 | class TestHTMLTools(unittest.IsolatedAsyncioTestCase): 38 | SAMPLES = { 39 | "utf-8": "🙋, 🌍!\n…" 40 | # TODO we want to test other encodings, but httbin(go)/base64/ supports only 41 | # utf-8 42 | } 43 | 44 | async def asyncSetUp(self): 45 | self.session = aiohttp.ClientSession( 46 | raise_for_status=True, timeout=aiohttp.ClientTimeout(total=5) 47 | ) 48 | 49 | async def asyncTearDown(self): 50 | await self.session.close() 51 | 52 | def _encoded_url(self, data: bytes): 53 | return "https://httpbingo.org/base64/decode/" + base64.b64encode(data).decode() 54 | 55 | async def test_get_text(self): 56 | checker = HTMLChecker(self.session) 57 | 58 | for charset, sample in self.SAMPLES.items(): 59 | self.assertEqual( 60 | await checker._get_text(self._encoded_url(sample.encode(charset))), 61 | sample, 62 | ) 63 | 64 | with self.assertRaises(CheckerError): 65 | await checker._get_text("https://httpbingo.org/image/jpeg") 66 | 67 | 68 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "org.x.xeyes.yml") 69 | 70 | 71 | class TestHTMLChecker(unittest.IsolatedAsyncioTestCase): 72 | def setUp(self): 73 | init_logging() 74 | 75 | async def test_check(self): 76 | checker = ManifestChecker(TEST_MANIFEST) 77 | ext_data = await checker.check() 78 | self._test_check_with_url_template( 79 | self._find_by_filename(ext_data, "ico-1.0.4.tar.bz2") 80 | ) 81 | self._test_combo_pattern( 82 | self._find_by_filename(ext_data, "libXScrnSaver-1.2.2.tar.bz2") 83 | ) 84 | self._test_combo_pattern_nosort( 85 | self._find_by_filename(ext_data, "qrupdate-1.1.0.tar.gz") 86 | ) 87 | self._test_version_filter(self._find_by_filename(ext_data, "libX11.tar.gz")) 88 | self._test_semver_filter(self._find_by_filename(ext_data, "semver.txt")) 89 | self._test_no_match(self._find_by_filename(ext_data, "libFS-1.0.7.tar.bz2")) 90 | self._test_invalid_url(self._find_by_filename(ext_data, "libdoesntexist.tar")) 91 | self._test_parent_child( 92 | self._find_by_filename(ext_data, "parent.txt"), 93 | self._find_by_filename(ext_data, "child.txt"), 94 | ) 95 | 96 | def _test_check_with_url_template(self, data): 97 | self.assertIsNotNone(data) 98 | self.assertEqual(data.filename, "ico-1.0.4.tar.bz2") 99 | self.assertIsNotNone(data.new_version) 100 | self.assertEqual( 101 | data.new_version.url, 102 | "https://www.x.org/releases/individual/app/ico-1.0.5.tar.bz2", 103 | ) 104 | self.assertIsInstance(data.new_version.size, int) 105 | self.assertGreater(data.new_version.size, 0) 106 | self.assertIsNotNone(data.new_version.checksum) 107 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 108 | self.assertEqual( 109 | data.new_version.checksum, 110 | MultiDigest( 111 | sha256="d73b62f29eb98d850f16b76d759395180b860b613fbe1686b18eee99a6e3773f" # noqa: E501 112 | ), 113 | ) 114 | 115 | def _test_combo_pattern(self, data): 116 | self.assertIsNotNone(data) 117 | self.assertRegex(data.filename, r"libXScrnSaver-[\d\.-]+.tar.bz2") 118 | self.assertIsNotNone(data.new_version) 119 | self.assertLessEqual( 120 | LooseVersion("1.2.2"), LooseVersion(data.new_version.version) 121 | ) 122 | self.assertRegex( 123 | data.new_version.url, 124 | 
r"^https?://www.x.org/releases/individual/lib/libXScrnSaver-[\d\.-]+.tar.bz2$", # noqa: E501 125 | ) 126 | self.assertNotEqual( 127 | data.new_version.checksum, 128 | MultiDigest( 129 | sha256="0000000000000000000000000000000000000000000000000000000000000000" # noqa: E501 130 | ), 131 | ) 132 | 133 | def _test_combo_pattern_nosort(self, data): 134 | self.assertIsNotNone(data) 135 | self.assertRegex(data.filename, r"qrupdate-[\d\.-]+.tar.gz") 136 | self.assertIsNotNone(data.new_version) 137 | self.assertLessEqual( 138 | LooseVersion("1.1.0"), LooseVersion(data.new_version.version) 139 | ) 140 | self.assertRegex( 141 | data.new_version.url, 142 | r"^https://sourceforge\.net/projects/qrupdate/.+/qrupdate-\d[\d\.]+\d\.tar\.gz$", # noqa: E501 143 | ) 144 | self.assertNotEqual( 145 | data.new_version.checksum, 146 | MultiDigest( 147 | sha256="0000000000000000000000000000000000000000000000000000000000000000" # noqa: E501 148 | ), 149 | ) 150 | 151 | def _test_version_filter(self, data): 152 | self.assertIsNotNone(data) 153 | self.assertIsNotNone(data.new_version) 154 | self.assertEqual(data.new_version.version, "1.7.5") 155 | 156 | def _test_semver_filter(self, data): 157 | self.assertIsNotNone(data) 158 | self.assertIsNotNone(data.new_version) 159 | self.assertIsNotNone(data.new_version.version) 160 | self.assertEqual(data.new_version.version, "1.0.0+patch1") 161 | 162 | def _test_no_match(self, data): 163 | self.assertIsNotNone(data) 164 | self.assertIsNone(data.new_version) 165 | 166 | def _test_invalid_url(self, data): 167 | self.assertIsNotNone(data) 168 | self.assertIsNone(data.new_version) 169 | 170 | def _test_parent_child(self, parent, child): 171 | self.assertIs(child.parent, parent) 172 | self.assertIsNotNone(parent.new_version) 173 | self.assertIsNotNone(child.new_version) 174 | self.assertEqual( 175 | child.new_version.checksum, 176 | # curl https://httpbingo.org/response-headers?version=1.0.0 | sha256sum 177 | MultiDigest( 178 | sha256="81f3779437618c7f9ff38b53ce6f5ed99e626ba82a7c31107400a2ef97592882" # noqa: E501 179 | ), 180 | ) 181 | self.assertEqual(parent.new_version.checksum, child.new_version.checksum) 182 | 183 | def _find_by_filename(self, ext_data, filename): 184 | for data in ext_data: 185 | if data.filename == filename: 186 | return data 187 | else: 188 | return None 189 | 190 | 191 | if __name__ == "__main__": 192 | unittest.main() 193 | -------------------------------------------------------------------------------- /tests/test_jetbrainschecker.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | from src.manifest import ManifestChecker 5 | from src.lib.utils import init_logging 6 | from src.lib.checksums import MultiDigest 7 | 8 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "com.jetbrains.PhpStorm.json") 9 | 10 | 11 | class TestJetBrainsChecker(unittest.IsolatedAsyncioTestCase): 12 | def setUp(self): 13 | init_logging() 14 | 15 | async def test_check(self): 16 | checker = ManifestChecker(TEST_MANIFEST) 17 | ext_data = await checker.check() 18 | 19 | data = self._find_by_filename(ext_data, "phpstorm.tar.gz") 20 | self.assertIsNotNone(data) 21 | self.assertEqual(data.filename, "phpstorm.tar.gz") 22 | self.assertIsNotNone(data.new_version) 23 | self.assertRegex( 24 | data.new_version.url, 25 | r"^https://download\.jetbrains\.com/webide/PhpStorm-.+\.tar\.gz$", # noqa: E501 26 | ) 27 | self.assertIsInstance(data.new_version.size, int) 28 | self.assertGreater(data.new_version.size, 0) 29 | 
self.assertIsNotNone(data.new_version.checksum) 30 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 31 | self.assertNotEqual( 32 | data.new_version.checksum, 33 | MultiDigest( 34 | sha256="0000000000000000000000000000000000000000000000000000000000000000" # noqa: E501 35 | ), 36 | ) 37 | 38 | def _find_by_filename(self, ext_data, filename): 39 | for data in ext_data: 40 | if data.filename == filename: 41 | return data 42 | else: 43 | return None 44 | 45 | 46 | if __name__ == "__main__": 47 | unittest.main() 48 | -------------------------------------------------------------------------------- /tests/test_jsonchecker.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import datetime 4 | 5 | from src.manifest import ManifestChecker 6 | from src.lib.utils import init_logging 7 | from src.lib.externaldata import ExternalFile, ExternalGitRef 8 | from src.lib.checksums import MultiDigest 9 | 10 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "io.github.stedolan.jq.yml") 11 | 12 | 13 | class TestJSONChecker(unittest.IsolatedAsyncioTestCase): 14 | def setUp(self): 15 | init_logging() 16 | 17 | async def test_check(self): 18 | checker = ManifestChecker(TEST_MANIFEST) 19 | ext_data = await checker.check() 20 | 21 | self.assertEqual(len(ext_data), 9) 22 | for data in ext_data: 23 | self.assertIsNotNone(data) 24 | if data.filename == "jq-1.4.tar.gz": 25 | self.assertIsInstance(data.new_version, ExternalFile) 26 | self.assertNotEqual(data.current_version.url, data.new_version.url) 27 | self.assertRegex( 28 | data.new_version.url, 29 | r"^https://github.com/jqlang/jq/releases/download/jq-[0-9\.\w]+/jq-[0-9\.\w]+\.tar.gz$", # noqa: E501 30 | ) 31 | self.assertIsInstance(data.new_version.size, int) 32 | self.assertGreater(data.new_version.size, 0) 33 | self.assertIsNotNone(data.new_version.checksum) 34 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 35 | self.assertNotEqual( 36 | data.new_version.checksum, 37 | MultiDigest( 38 | sha256="0000000000000000000000000000000000000000000000000000000000000000" # noqa: E501 39 | ), 40 | ) 41 | elif data.filename == "jq-1.4.tarball.tar.gz": 42 | self.assertEqual( 43 | data.new_version.timestamp, 44 | datetime.datetime.fromisoformat("2018-11-02T01:54:23+00:00"), 45 | ) 46 | elif data.filename == "oniguruma.git": 47 | self.assertIsInstance(data.new_version, ExternalGitRef) 48 | self.assertEqual(data.current_version.url, data.new_version.url) 49 | self.assertIsNotNone(data.new_version.tag) 50 | self.assertIsNotNone(data.new_version.commit) 51 | self.assertNotEqual(data.new_version.tag, data.current_version.tag) 52 | self.assertNotEqual( 53 | data.new_version.commit, data.current_version.commit 54 | ) 55 | self.assertNotEqual( 56 | data.new_version.commit, "e03900b038a274ee2f1341039e9003875c11e47d" 57 | ) 58 | self.assertIsNotNone(data.new_version.version) 59 | self.assertIsNotNone(data.new_version.timestamp) 60 | elif data.filename == "yasm.git": 61 | self.assertIsInstance(data.new_version, ExternalGitRef) 62 | self.assertEqual(data.current_version.url, data.new_version.url) 63 | self.assertIsNotNone(data.new_version.tag) 64 | self.assertIsNotNone(data.new_version.commit) 65 | self.assertNotEqual(data.new_version.tag, data.current_version.tag) 66 | self.assertIsNotNone(data.new_version.version) 67 | self.assertIsNone(data.new_version.timestamp) 68 | elif data.filename == "openal-soft.git": 69 | self.assertIsInstance(data.new_version, ExternalGitRef) 70 | 
self.assertEqual(data.current_version.url, data.new_version.url) 71 | self.assertIsNotNone(data.new_version.tag) 72 | self.assertIsNotNone(data.new_version.commit) 73 | self.assertIsNotNone(data.new_version.timestamp) 74 | self.assertIsInstance(data.new_version.timestamp, datetime.datetime) 75 | elif data.filename == "tdesktop.git": 76 | self.assertIsNotNone(data.new_version) 77 | self.assertEqual(data.new_version.tag, "v3.7.3") 78 | elif data.filename == "tg_owt.git": 79 | self.assertIsNotNone(data.new_version) 80 | self.assertEqual( 81 | data.new_version.commit, "63a934db1ed212ebf8aaaa20f0010dd7b0d7b396" 82 | ) 83 | elif data.filename == "lib_webrtc.git": 84 | self.assertIsNone(data.new_version) 85 | elif data.filename == "tg_angle.git": 86 | self.assertIsNone(data.new_version) 87 | else: 88 | self.fail(f"Unhandled data {data.filename}") 89 | -------------------------------------------------------------------------------- /tests/test_main.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import shutil 4 | import tempfile 5 | import unittest 6 | from unittest.mock import patch 7 | 8 | from src import main 9 | 10 | 11 | TEST_MANIFEST = os.path.join( 12 | os.path.dirname(__file__), "net.invisible_island.xterm.yml" 13 | ) 14 | TEST_APPDATA = os.path.join( 15 | os.path.dirname(__file__), "net.invisible_island.xterm.appdata.xml" 16 | ) 17 | 18 | 19 | @patch.dict(os.environ) 20 | class TestEntrypoint(unittest.IsolatedAsyncioTestCase): 21 | def setUp(self): 22 | self._clear_environment() 23 | self.test_dir = tempfile.TemporaryDirectory() 24 | self.manifest_filename = os.path.basename(TEST_MANIFEST) 25 | self.appdata_filename = os.path.basename(TEST_APPDATA) 26 | self.manifest_path = os.path.join(self.test_dir.name, self.manifest_filename) 27 | self.appdata_path = os.path.join(self.test_dir.name, self.appdata_filename) 28 | self._run_cmd(["git", "init"]) 29 | self._run_cmd(["git", "config", "user.name", "Test Runner"]) 30 | self._run_cmd(["git", "config", "user.email", "test@localhost"]) 31 | shutil.copy(TEST_MANIFEST, self.manifest_path) 32 | shutil.copy(TEST_APPDATA, self.appdata_path) 33 | self._run_cmd(["git", "add", self.manifest_filename]) 34 | self._run_cmd(["git", "add", self.appdata_filename]) 35 | self._run_cmd(["git", "commit", "-a", "-m", "Initial commit"]) 36 | 37 | def tearDown(self): 38 | self.test_dir.cleanup() 39 | 40 | def _clear_environment(self): 41 | unwanted_vars = [ 42 | "EMAIL", 43 | "GIT_AUTHOR_NAME", 44 | "GIT_AUTHOR_EMAIL", 45 | "GIT_COMMITTER_NAME", 46 | "GIT_COMMITTER_EMAIL", 47 | ] 48 | for var in unwanted_vars: 49 | os.environ.pop(var, None) 50 | 51 | def _run_cmd(self, cmd, **kwargs): 52 | return subprocess.run(cmd, cwd=self.test_dir.name, check=True, **kwargs) 53 | 54 | def _get_commit_data(self, rev="HEAD"): 55 | data = {} 56 | for name, fmt in [ 57 | ("commit", "%H"), 58 | ("subject", "%s"), 59 | ("body", "%b"), 60 | ("author_name", "%an"), 61 | ("author_email", "%ae"), 62 | ("committer_name", "%cn"), 63 | ("committer_email", "%ce"), 64 | ]: 65 | cmd = ["git", "show", "--no-patch", f"--pretty=format:{fmt}", rev] 66 | proc = self._run_cmd(cmd, stdout=subprocess.PIPE) 67 | output = proc.stdout.decode("utf-8") 68 | data[name] = output 69 | 70 | return data 71 | 72 | async def test_full_run(self): 73 | args1 = main.parse_cli_args(["--update", "--commit-only", self.manifest_path]) 74 | self.assertEqual(await main.run_with_args(args1), (2, 0, True)) 75 | 76 | commit_data = 
self._get_commit_data() 77 | self.assertEqual(commit_data["subject"], "Update libXaw and xterm modules") 78 | self.assertEqual(commit_data["author_name"], "Test Runner") 79 | self.assertEqual(commit_data["author_email"], "test@localhost") 80 | self.assertEqual(commit_data["committer_name"], "Test Runner") 81 | self.assertEqual(commit_data["committer_email"], "test@localhost") 82 | 83 | body_lines = commit_data["body"].splitlines() 84 | self.assertEqual(len(body_lines), 2) 85 | self.assertRegex(body_lines[0], r"^libXaw: Update libXaw-1.0.12.tar.bz2 to ") 86 | self.assertRegex(body_lines[1], r"^xterm: Update xterm-snapshots.git to ") 87 | 88 | args2 = main.parse_cli_args([self.manifest_path]) 89 | self.assertEqual(await main.run_with_args(args2), (0, 0, False)) 90 | 91 | async def test_git_envvars(self): 92 | os.environ["GIT_AUTHOR_NAME"] = "Some Guy" 93 | os.environ["GIT_AUTHOR_EMAIL"] = "someguy@localhost" 94 | args1 = main.parse_cli_args(["--update", "--commit-only", self.manifest_path]) 95 | self.assertEqual(await main.run_with_args(args1), (2, 0, True)) 96 | 97 | commit_data = self._get_commit_data() 98 | self.assertEqual(commit_data["subject"], "Update libXaw and xterm modules") 99 | self.assertEqual(commit_data["author_name"], "Some Guy") 100 | self.assertEqual(commit_data["author_email"], "someguy@localhost") 101 | self.assertEqual(commit_data["committer_name"], "Test Runner") 102 | self.assertEqual(commit_data["committer_email"], "test@localhost") 103 | 104 | 105 | class TestForceForkTristate(unittest.TestCase): 106 | def test_neither_fork_arg(self): 107 | args = main.parse_cli_args([TEST_MANIFEST]) 108 | self.assertIsNone(args.fork) 109 | 110 | def test_always_fork_arg(self): 111 | args = main.parse_cli_args(["--always-fork", TEST_MANIFEST]) 112 | self.assertTrue(args.fork) 113 | 114 | def test_never_fork_arg(self): 115 | args = main.parse_cli_args(["--never-fork", TEST_MANIFEST]) 116 | self.assertFalse(args.fork) 117 | 118 | def test_both_fork_args(self): 119 | with self.assertRaises(SystemExit): 120 | main.parse_cli_args(["--always-fork", "--never-fork", TEST_MANIFEST]) 121 | -------------------------------------------------------------------------------- /tests/test_newlinepreservation.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright (C) 2021 Carles Pastor Badosa 3 | # 4 | # Authors: 5 | # Carles Pastor Badosa 6 | # 7 | # This program is free software; you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation; either version 2 of the License, or 10 | # (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU General Public License along 18 | # with this program; if not, write to the Free Software Foundation, Inc., 19 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
20 | 21 | import unittest 22 | import os 23 | import tempfile 24 | 25 | from src.lib.utils import ( 26 | _check_newline, 27 | dump_manifest, 28 | read_manifest, 29 | ) 30 | 31 | 32 | MANIFEST_WITH_NEWLINE = """{ 33 | "ends in newline": true 34 | } 35 | """ 36 | 37 | MANIFEST_NO_NEWLINE = """{ 38 | "ends in newline": false 39 | }""" 40 | 41 | 42 | class TestNewlinePreservation(unittest.TestCase): 43 | def test_newline(self): 44 | with tempfile.TemporaryDirectory() as d: 45 | fp = os.path.join(d, "trailingnewline.json") 46 | with open(fp, "w") as f: 47 | f.write(MANIFEST_WITH_NEWLINE) 48 | with open(fp, "r") as f: 49 | self.assertTrue(_check_newline(f)) 50 | manifest = read_manifest(fp) 51 | dump_manifest(manifest, fp) 52 | with open(fp, "r") as f: 53 | self.assertTrue(_check_newline(f)) 54 | 55 | def test_no_newline(self): 56 | with tempfile.TemporaryDirectory() as d: 57 | fp = os.path.join(d, "notrailingnewline.json") 58 | with open(fp, "w") as f: 59 | f.write(MANIFEST_NO_NEWLINE) 60 | with open(fp, "r") as f: 61 | self.assertFalse(_check_newline(f)) 62 | manifest = read_manifest(fp) 63 | dump_manifest(manifest, fp) 64 | with open(fp, "r") as f: 65 | self.assertFalse(_check_newline(f)) 66 | 67 | 68 | if __name__ == "__main__": 69 | unittest.main() 70 | -------------------------------------------------------------------------------- /tests/test_pypichecker.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | from packaging.version import Version 5 | 6 | from src.manifest import ManifestChecker 7 | from src.lib.utils import init_logging 8 | from src.lib.checksums import MultiDigest 9 | 10 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "com.valvesoftware.Steam.yml") 11 | 12 | 13 | class TestPyPIChecker(unittest.IsolatedAsyncioTestCase): 14 | def setUp(self): 15 | init_logging() 16 | 17 | async def test_check(self): 18 | checker = ManifestChecker(TEST_MANIFEST) 19 | ext_data = await checker.check() 20 | 21 | self.assertEqual(len(ext_data), 6) 22 | for data in ext_data: 23 | if data.filename != "Pillow-7.2.0.tar.gz": 24 | self.assertIsNotNone(data.new_version) 25 | self.assertIsNotNone(data.new_version.url) 26 | self.assertIsNotNone(data.new_version.checksum) 27 | self.assertIsNotNone(data.new_version.version) 28 | self.assertNotEqual(data.new_version.url, data.current_version.url) 29 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 30 | self.assertNotEqual( 31 | data.new_version.checksum, data.current_version.checksum 32 | ) 33 | if data.filename == "setuptools-50.3.2-py3-none-any.whl": 34 | self.assertRegex( 35 | data.new_version.url, 36 | r"https://files.pythonhosted.org/packages/[a-f0-9/]+/setuptools-[\d\.]+-[\S\.]+-none-any.whl", # noqa: E501 37 | ) 38 | elif data.filename == "PyYAML-5.3.1.tar.gz": 39 | self.assertRegex( 40 | data.new_version.url, 41 | r"https://files.pythonhosted.org/packages/[a-f0-9/]+/(?i:PyYAML-)[\d\.]+.(tar.(gz|xz|bz2)|zip)", # noqa: E501 42 | ) 43 | elif data.filename == "vdf-3.1-py2.py3-none-any.whl": 44 | self.assertRegex( 45 | data.new_version.url, 46 | r"https://files.pythonhosted.org/packages/[a-f0-9/]+/vdf-[\d\.]+-[\S\.]+-none-any.whl", # noqa: E501 47 | ) 48 | self.assertEqual(data.new_version.version, "3.2") 49 | elif data.filename == "Pillow-7.2.0.tar.gz": 50 | self.assertIsNone(data.new_version) 51 | elif data.filename == "allow-prerelease": 52 | # Avoid false-success on the `disallow-prerelease` source assertions 53 | # in case there wasn't any prerelease on 
PyPI 54 | self.assertIsNotNone(data.new_version) 55 | self.assertIsNotNone(data.new_version.version) 56 | self.assertIsNotNone(Version(data.new_version.version).pre) 57 | elif data.filename == "disallow-prerelease": 58 | self.assertIsNotNone(data.new_version) 59 | self.assertIsNotNone(data.new_version.version) 60 | self.assertIsNone(Version(data.new_version.version).pre) 61 | else: 62 | self.fail(f"Unknown data {data.filename}") 63 | -------------------------------------------------------------------------------- /tests/test_rpmrepochecker.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | 4 | from src.manifest import ManifestChecker 5 | from src.lib.utils import init_logging 6 | from src.lib.checksums import MultiDigest 7 | 8 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "com.visualstudio.code.yaml") 9 | 10 | 11 | class TestRPMRepoChecker(unittest.IsolatedAsyncioTestCase): 12 | def setUp(self): 13 | init_logging() 14 | 15 | async def test_check(self): 16 | checker = ManifestChecker(TEST_MANIFEST) 17 | ext_data = await checker.check() 18 | for data in ext_data: 19 | self.assertIsNotNone(data) 20 | self.assertIsNotNone(data.new_version) 21 | self.assertIsNotNone(data.new_version.url) 22 | self.assertIsNotNone(data.new_version.checksum) 23 | self.assertIsNotNone(data.new_version.version) 24 | self.assertNotEqual(data.new_version.url, data.current_version.url) 25 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 26 | self.assertNotEqual( 27 | data.new_version.checksum, data.current_version.checksum 28 | ) 29 | self.assertRegex( 30 | data.new_version.url, 31 | r"https://packages\.microsoft\.com/yumrepos/.+?/code-.+\.{0}\.rpm".format( # noqa: E501 32 | data.arches[0] 33 | ), 34 | ) 35 | -------------------------------------------------------------------------------- /tests/test_rustchecker.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | from src.manifest import ManifestChecker 5 | from src.lib.utils import init_logging 6 | from src.lib.checksums import MultiDigest 7 | 8 | TEST_MANIFEST = os.path.join( 9 | os.path.dirname(__file__), "org.freedesktop.Sdk.Extension.rust-nightly.yml" 10 | ) 11 | 12 | 13 | class TestRustChecker(unittest.IsolatedAsyncioTestCase): 14 | def setUp(self): 15 | init_logging() 16 | 17 | async def test_check(self): 18 | checker = ManifestChecker(TEST_MANIFEST) 19 | ext_data = await checker.check() 20 | 21 | self.assertEqual(len(ext_data), 1) 22 | data = ext_data[0] 23 | self.assertIsNotNone(data.new_version) 24 | self.assertRegex( 25 | data.new_version.url, 26 | r"^https://static.rust-lang.org/dist/[\-\d]+/rust-nightly-x86_64-unknown-linux-gnu.tar.xz$", # noqa: E501 27 | ) 28 | self.assertIsNone(data.new_version.size) 29 | self.assertIsNotNone(data.new_version.checksum) 30 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 31 | self.assertNotEqual( 32 | data.new_version.checksum, 33 | MultiDigest( 34 | sha256="24b4681187654778817652273a68a4d55f5090604cd14b1f1c3ff8785ad24b99" # noqa: E501 35 | ), 36 | ) 37 | 38 | 39 | if __name__ == "__main__": 40 | unittest.main() 41 | -------------------------------------------------------------------------------- /tests/test_snapcraftchecker.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | from src.manifest import ManifestChecker 5 | from src.lib.utils import init_logging 6 | 
from src.lib.checksums import MultiDigest 7 | 8 | TEST_MANIFEST = os.path.join(os.path.dirname(__file__), "com.nordpass.NordPass.yaml") 9 | 10 | 11 | class TestSnapcraftChecker(unittest.IsolatedAsyncioTestCase): 12 | def setUp(self): 13 | init_logging() 14 | 15 | async def test_check(self): 16 | checker = ManifestChecker(TEST_MANIFEST) 17 | ext_data = await checker.check() 18 | 19 | data = self._find_by_filename(ext_data, "nordpass.snap") 20 | self.assertIsNotNone(data) 21 | self.assertEqual(data.filename, "nordpass.snap") 22 | self.assertIsNotNone(data.new_version) 23 | self.assertRegex( 24 | data.new_version.url, 25 | r"^https://api\.snapcraft\.io/api/v1/snaps/download/[A-Za-z0-9]{32}_[0-9]+.snap$", # noqa: E501 26 | ) 27 | self.assertIsInstance(data.new_version.size, int) 28 | self.assertGreater(data.new_version.size, 0) 29 | self.assertIsNotNone(data.new_version.checksum) 30 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 31 | self.assertNotEqual( 32 | data.new_version.checksum, 33 | MultiDigest( 34 | sha256="0000000000000000000000000000000000000000000000000000000000000000" # noqa: E501 35 | ), 36 | ) 37 | 38 | def _find_by_filename(self, ext_data, filename): 39 | for data in ext_data: 40 | if data.filename == filename: 41 | return data 42 | else: 43 | return None 44 | 45 | 46 | if __name__ == "__main__": 47 | unittest.main() 48 | -------------------------------------------------------------------------------- /tests/test_urlchecker.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | 4 | from src.manifest import ManifestChecker 5 | from src.lib.utils import init_logging 6 | from src.lib.checksums import MultiDigest 7 | 8 | 9 | class TestURLChecker(unittest.IsolatedAsyncioTestCase): 10 | def setUp(self): 11 | init_logging() 12 | 13 | async def test_check_appimage(self): 14 | checker = ManifestChecker( 15 | os.path.join(os.path.dirname(__file__), "com.unity.UnityHub.yaml") 16 | ) 17 | ext_data = await checker.check() 18 | 19 | data = self._find_by_filename(ext_data, "UnityHubSetup.AppImage") 20 | self.assertIsNotNone(data) 21 | self.assertEqual(data.filename, "UnityHubSetup.AppImage") 22 | self.assertIsNotNone(data.new_version) 23 | self.assertIsInstance(data.new_version.size, int) 24 | self.assertGreater(data.new_version.size, 0) 25 | self.assertIsNotNone(data.new_version.checksum) 26 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 27 | self.assertNotEqual( 28 | data.new_version.checksum, 29 | MultiDigest( 30 | sha256="0000000000000000000000000000000000000000000000000000000000000000" # noqa: E501 31 | ), 32 | ) 33 | self.assertIsNotNone(data.new_version.version) 34 | 35 | async def test_check_deb(self): 36 | checker = ManifestChecker( 37 | os.path.join(os.path.dirname(__file__), "com.google.Chrome.yaml") 38 | ) 39 | ext_data = await checker.check() 40 | 41 | data = self._find_by_filename(ext_data, "chrome.deb") 42 | self.assertIsNotNone(data) 43 | self.assertEqual(data.filename, "chrome.deb") 44 | self.assertIsNotNone(data.new_version) 45 | self.assertIsInstance(data.new_version.size, int) 46 | self.assertGreater(data.new_version.size, 0) 47 | self.assertIsNotNone(data.new_version.checksum) 48 | self.assertIsInstance(data.new_version.checksum, MultiDigest) 49 | self.assertNotEqual( 50 | data.new_version.checksum, 51 | MultiDigest( 52 | sha256="0000000000000000000000000000000000000000000000000000000000000000" # noqa: E501 53 | ), 54 | ) 55 |
self.assertIsNotNone(data.new_version.version) 56 | 57 | def _find_by_filename(self, ext_data, filename): 58 | for data in ext_data: 59 | if data.filename == filename: 60 | return data 61 | else: 62 | return None 63 | 64 | 65 | if __name__ == "__main__": 66 | unittest.main() 67 | --------------------------------------------------------------------------------
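
The checker tests above all follow the same shape: load a test manifest with ManifestChecker, await check(), look a source up by filename, and assert that the discovered update carries a real MultiDigest checksum rather than the all-zeros placeholder. As a minimal sketch of how that repeated scaffolding could be factored out, the mixin below collects the shared assertions; the names (CheckerTestMixin, NULL_DIGEST, assert_valid_new_version) and the idea of a shared test helper module are illustrative assumptions, not files in the repository.

# Hypothetical shared test helper (not an actual file in this repository).
from src.lib.checksums import MultiDigest

# The placeholder digest the tests above compare against: 64 hex zeros.
NULL_DIGEST = MultiDigest(sha256="0" * 64)


class CheckerTestMixin:
    """Shared assertions; mix into a unittest.IsolatedAsyncioTestCase."""

    def find_by_filename(self, ext_data, filename):
        # Linear search, equivalent to the per-file _find_by_filename helpers.
        for data in ext_data:
            if data.filename == filename:
                return data
        return None

    def assert_valid_new_version(self, data):
        # Common postcondition across checkers: a source was matched, an
        # update was found, and its checksum is a real digest.
        self.assertIsNotNone(data)
        self.assertIsNotNone(data.new_version)
        self.assertIsInstance(data.new_version.checksum, MultiDigest)
        self.assertNotEqual(data.new_version.checksum, NULL_DIGEST)

With such a mixin, a test like TestSnapcraftChecker.test_check would shrink to the checker-specific parts, i.e. the URL regex and the size assertions.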