├── __init__.py
├── config
│   ├── __init__.py
│   └── config.toml.default
├── modules
│   ├── __init__.py
│   ├── exceptions.py
│   ├── updaters
│   │   ├── util_update_checker.py
│   │   ├── __init__.py
│   │   ├── Rescuezilla.py
│   │   ├── ShredOS.py
│   │   ├── GPartedLive.py
│   │   ├── ArchLinux.py
│   │   ├── OpenSUSERolling.py
│   │   ├── TrueNAS.py
│   │   ├── Tails.py
│   │   ├── Clonezilla.py
│   │   ├── Manjaro.py
│   │   ├── RockyLinux.py
│   │   ├── OpenSUSE.py
│   │   ├── LinuxMint.py
│   │   ├── KaliLinux.py
│   │   ├── TempleOS.py
│   │   ├── ChromeOS.py
│   │   ├── Debian.py
│   │   ├── OPNsense.py
│   │   ├── UltimateBootCD.py
│   │   ├── Ubuntu.py
│   │   ├── Windows11.py
│   │   ├── Proxmox.py
│   │   ├── Windows10.py
│   │   ├── HirensBootCDPE.py
│   │   ├── Fedora.py
│   │   ├── SystemRescue.py
│   │   ├── MemTest86Plus.py
│   │   ├── HDAT2.py
│   │   ├── SuperGrub2.py
│   │   ├── FreeDOS.py
│   │   └── GenericUpdater.py
│   ├── WindowsConsumerDownload.py
│   └── utils.py
├── MANIFEST.in
├── requirements.txt
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── feature_request.md
│   │   └── bug_report.md
│   └── workflows
│       └── python-publish.yml
├── setup.py
├── .gitignore
├── README.md
├── sisou.py
└── LICENSE
/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/config/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/modules/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include config.toml.default
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | beautifulsoup4==4.12.2
2 | requests==2.31.0
3 | tqdm==4.65.0
4 | PGPy13==0.6.1rc1
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 | 
8 | ---
9 | 
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 | 
13 | **Describe the feature and solution you'd like**
14 | A clear and concise description of what you want to happen.
15 | 
16 | **Additional context**
17 | Add any other context or screenshots about the feature request here.
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 | 
8 | ---
9 | 
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 | 
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. ...
17 | 
18 | **Expected behavior**
19 | A clear and concise description of what you expected to happen.
20 | 
21 | **Screenshots**
22 | If applicable, add screenshots to help explain your problem.
23 | 
24 | **Information (please complete the following information):**
25 | - Installer [e.g. Ubuntu, Windows10]
26 | - Python version [e.g. 3.11.4]
27 | - Traceback if applicable
28 | 
29 | **Additional context**
30 | Add any other context about the problem here.
--------------------------------------------------------------------------------
/modules/exceptions.py:
--------------------------------------------------------------------------------
1 | class DownloadLinkNotFoundError(Exception):
2 |     """
3 |     Custom exception to represent an error when the download link for a file is not found.
4 |     """
5 | 
6 |     pass
7 | 
8 | 
9 | class VersionNotFoundError(Exception):
10 |     """
11 |     Custom exception to represent an error when a specific version is not found.
12 |     This exception can be raised when a program is unable to find a specific version of a resource.
13 | 
14 |     Example:
15 |         try:
16 |             version = GenericUpdater._get_latest_version()
17 |         except VersionNotFoundError as e:
18 |             print(f"Error: {e}")
19 |     """
20 | 
21 |     pass
22 | 
23 | 
24 | class IntegrityCheckError(Exception):
25 |     """
26 |     Custom exception to represent an error when an integrity check fails.
27 |     This exception can be raised when a program performs an integrity check (e.g., hash verification) on a file,
28 |     and the check fails, indicating that the file's content may have been altered or corrupted.
29 | 
30 |     Example:
31 |         if not md5_hash_check(file, expected_md5_hash):
32 |             raise IntegrityCheckError("MD5 hash verification failed.")
33 |     """
34 | 
35 |     pass
36 | 
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
3 | 
4 | # This workflow uses actions that are not certified by GitHub.
5 | # They are provided by a third-party and are governed by
6 | # separate terms of service, privacy policy, and support
7 | # documentation.
8 | 
9 | name: Upload Python Package
10 | 
11 | on:
12 |   release:
13 |     types: [published]
14 | 
15 | permissions:
16 |   contents: read
17 | 
18 | jobs:
19 |   deploy:
20 | 
21 |     runs-on: ubuntu-latest
22 | 
23 |     steps:
24 |     - uses: actions/checkout@v3
25 |     - name: Set up Python
26 |       uses: actions/setup-python@v3
27 |       with:
28 |         python-version: '3.x'
29 |     - name: Install dependencies
30 |       run: |
31 |         python -m pip install --upgrade pip
32 |         pip install build
33 |     - name: Build package
34 |       run: python -m build
35 |     - name: Publish package
36 |       uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
37 |       with:
38 |         user: __token__
39 |         password: ${{ secrets.PYPI_API_TOKEN }}
--------------------------------------------------------------------------------
/modules/updaters/util_update_checker.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from functools import cache
3 | 
4 | import requests
5 | 
6 | 
7 | @cache
8 | def github_get_latest_version(owner: str, repo: str) -> dict:
9 |     """Gets the latest version of a software via its GitHub repository
10 | 
11 |     Args:
12 |         owner (str): Owner of the repository
13 |         repo (str): Name of the repository
14 | 
15 |     Returns:
16 |         dict: the full release information
17 |     """
18 |     api_url = f"https://api.github.com/repos/{owner}/{repo}"
19 | 
20 |     logging.debug(f"Fetching latest release from {api_url}")
21 | 
22 |     release = requests.get(f"{api_url}/releases/latest").json()
23 | 
24 |     logging.debug(f"GitHub release fetched from {api_url}: {release}")
25 | 
26 |     return release
27 | 
28 | 
29 | def parse_github_release(release: dict) -> dict:
30 |     """Parses a GitHub release into a shorter, easier to read format
31 | 
32 |     Args:
33 |         release (dict): Release information
34 | 
35 |     Returns:
36 |         dict: The tag name, a {"filename": "url"} mapping of the release assets, the release body text, and the source code URL
37 |     """
38 |     res = {
39 |         "tag": release["tag_name"],
40 |         "files": {},
41 |         "text": release["body"],
42 |         "source_code": release["zipball_url"],
43 |     }
44 | 
45 |     for asset in release["assets"]:
46 |         res["files"][asset["name"]] = asset["browser_download_url"]
47 | 
48 |     logging.debug(f"GitHub release parsed: {res}")
49 | 
50 |     return res
51 | 
--------------------------------------------------------------------------------
/modules/updaters/__init__.py:
--------------------------------------------------------------------------------
1 | from .ArchLinux import ArchLinux
2 | from .ChromeOS import ChromeOS
3 | from .Clonezilla import Clonezilla
4 | from .Debian import Debian
5 | from .Fedora import Fedora
6 | from .FreeDOS import FreeDOS
7 | from .GenericUpdater import GenericUpdater
8 | from .GPartedLive import GPartedLive
9 | from .HDAT2 import HDAT2
10 | from .HirensBootCDPE import HirensBootCDPE
11 | from .KaliLinux import KaliLinux
12 | from .LinuxMint import LinuxMint
13 | from .Manjaro import Manjaro
14 | from .MemTest86Plus import MemTest86Plus
15 | from .OpenSUSE import OpenSUSE
16 | from .OpenSUSERolling import OpenSUSERolling
17 | from .OPNsense import OPNsense
18 | from .Proxmox import Proxmox
19 | from .Rescuezilla import Rescuezilla
20 | from .RockyLinux import RockyLinux
21 | from .ShredOS import ShredOS
22 | from .SuperGrub2 import SuperGrub2
23 | from .SystemRescue import SystemRescue
24 | from .Tails import Tails
25 | from .TempleOS import TempleOS
26 | from .TrueNAS import TrueNAS
27 | from .Ubuntu import Ubuntu
28 | from .UltimateBootCD import UltimateBootCD
29 | from .Windows10 import Windows10
30 | from .Windows11 import Windows11
31 | 
32 | __all__ = [
33 |     "ArchLinux",
34 |     "Debian",
35 |     "ChromeOS",
36 |     "Clonezilla",
37 |     "Fedora",
38 |     "FreeDOS",
39 |     "GenericUpdater",
40 |     "GPartedLive",
41 |     "HDAT2",
42 |     "ShredOS",
43 |     "HirensBootCDPE",
44 |     "KaliLinux",
45 |     "LinuxMint",
46 |     "Manjaro",
47 |     "MemTest86Plus",
48 |     "OpenSUSE",
49 |     "OpenSUSERolling",
50 |     "OPNsense",
51 |     "Proxmox",
52 |     "RockyLinux",
53 |     "Tails",
54 |     "Rescuezilla",
55 |     "SuperGrub2",
56 |     "SystemRescue",
57 |     "TempleOS",
58 |     "Ubuntu",
59 |     "UltimateBootCD",
60 |     "TrueNAS",
61 |     "Windows10",
62 |     "Windows11",
63 | ]
64 | 
--------------------------------------------------------------------------------
/modules/updaters/Rescuezilla.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | 
4 | import requests
5 | 
6 | from modules.updaters.GenericUpdater import GenericUpdater
7 | from modules.updaters.util_update_checker import (
8 |     github_get_latest_version,
9 |     parse_github_release,
10 | )
11 | from modules.utils import parse_hash, sha256_hash_check
12 | 
13 | FILE_NAME = "rescuezilla-[[VER]]-64bit.[[EDITION]].iso"
14 | 
15 | 
16 | class Rescuezilla(GenericUpdater):
17 |     """
18 |     A class representing an updater for Rescuezilla.
19 | 
20 |     Attributes:
21 |         valid_editions (list[str]): List of valid editions to use
22 |         edition (str): Edition to download
23 |         release_info (dict): GitHub release information
24 | 
25 |     Note:
26 |         This class inherits from the abstract base class GenericUpdater.
27 |     """
28 | 
29 |     def __init__(self, folder_path: Path, edition: str) -> None:
30 |         self.valid_editions = ["bionic", "focal", "jammy", "noble"]
31 |         self.edition = edition.lower()
32 | 
33 |         file_path = folder_path / FILE_NAME
34 |         super().__init__(file_path)
35 | 
36 |         release = github_get_latest_version("rescuezilla", "rescuezilla")
37 | 
38 |         self.release_info = parse_github_release(release)
39 | 
40 |     @cache
41 |     def _get_download_link(self) -> str:
42 |         return self.release_info["files"][
43 |             str(self._get_complete_normalized_file_path(absolute=False))
44 |         ]
45 | 
46 |     def check_integrity(self) -> bool:
47 |         sha256_url = self.release_info["files"]["SHA256SUM"]
48 | 
49 |         sha256_sums = requests.get(sha256_url).text
50 | 
51 |         sha256_sum = parse_hash(
52 |             sha256_sums,
53 |             [str(self._get_complete_normalized_file_path(absolute=False))],
54 |             0,
55 |         )
56 | 
57 |         return sha256_hash_check(
58 |             self._get_complete_normalized_file_path(absolute=True),
59 |             sha256_sum,
60 |         )
61 | 
62 |     @cache
63 |     def _get_latest_version(self) -> list[str]:
64 |         return self._str_to_version(self.release_info["tag"])
--------------------------------------------------------------------------------
/modules/updaters/ShredOS.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | 
4 | from modules.updaters.GenericUpdater import GenericUpdater
5 | from modules.updaters.util_update_checker import (
6 |     github_get_latest_version,
7 |     parse_github_release,
8 | )
9 | from modules.utils import parse_hash, sha1_hash_check
10 | 
11 | FILE_NAME = "shredos-[[VER]].img"
12 | 
13 | 
14 | class ShredOS(GenericUpdater):
15 |     """
16 |     A class representing an updater for ShredOS.
17 | 
18 |     Attributes:
19 |         release_info (dict): GitHub release information
20 | 
21 |     Note:
22 |         This class inherits from the abstract base class GenericUpdater.
23 |     """
24 | 
25 |     def __init__(self, folder_path: Path) -> None:
26 |         file_path = folder_path / FILE_NAME
27 |         super().__init__(file_path)
28 | 
29 |         release = github_get_latest_version("PartialVolume", "shredos.x86_64")
30 | 
31 |         self.release_info = parse_github_release(release)
32 | 
33 |     @cache
34 |     def _get_download_link(self) -> str:
35 |         return next(
36 |             download_link
37 |             for filename, download_link in self.release_info["files"].items()
38 |             if filename.endswith(".img") and "x86-64" in filename
39 |         )
40 | 
41 |     def check_integrity(self) -> bool:
42 |         sha1_sums = self.release_info["text"]
43 | 
44 |         sha1_sum = parse_hash(
45 |             sha1_sums,
46 |             [
47 |                 "sha1",
48 |                 self._version_to_str(self._get_latest_version()),
49 |                 "x86-64",
50 |                 ".img",
51 |             ],
52 |             1,
53 |         )
54 | 
55 |         return sha1_hash_check(
56 |             self._get_complete_normalized_file_path(absolute=True),
57 |             sha1_sum,
58 |         )
59 | 
60 |     @cache
61 |     def _get_latest_version(self) -> list[str]:
62 |         version = self.release_info["tag"]
63 | 
64 |         start_index = version.find("v")
65 |         end_index = version.find("_")
66 | 
67 |         version = version[start_index + 1 : end_index]
68 | 
69 |         return self._str_to_version(version)
70 | 
--------------------------------------------------------------------------------
/modules/updaters/GPartedLive.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | 
4 | import requests
5 | 
6 | from modules.exceptions import IntegrityCheckError
7 | from modules.updaters.GenericUpdater import GenericUpdater
8 | from modules.utils import parse_hash, sha256_hash_check
9 | 
10 | DOMAIN = "https://gparted.org"
11 | FILE_NAME = "gparted-live-[[VER]]-amd64.iso"
12 | 
13 | 
14 | class GPartedLive(GenericUpdater):
15 |     """
16 |     A class representing an updater for GParted Live.
17 | 
18 |     Note:
19 |         This class inherits from the abstract base class GenericUpdater.
20 |     """
21 | 
22 |     def __init__(self, folder_path: Path) -> None:
23 |         file_path = folder_path / FILE_NAME
24 |         super().__init__(file_path)
25 | 
26 |         self.checksum_file: str = requests.get(
27 |             "https://gparted.org/gparted-live/stable/CHECKSUMS.TXT"
28 |         ).text.strip()
29 | 
30 |     @cache
31 |     def _get_download_link(self) -> str:
32 |         ver = self._version_to_str(self._get_latest_version())
33 |         return f"https://downloads.sourceforge.net/gparted/gparted-live-{GPartedLive._get_gparted_version_style(ver)}-amd64.iso"
34 | 
35 |     def check_integrity(self) -> bool:
36 |         checksums: list[str] = self.checksum_file.split("###")
37 |         for checksum in checksums:
38 |             if "SHA256" in checksum:
39 |                 sha256_sums = checksum
40 |                 break
41 |         else:
42 |             raise IntegrityCheckError("Could not find SHA256 sum")
43 | 
44 |         sha256_hash = parse_hash(sha256_sums, ["amd64.iso"], 0)
45 | 
46 |         return sha256_hash_check(
47 |             self._get_complete_normalized_file_path(absolute=True), sha256_hash
48 |         )
49 | 
50 |     @cache
51 |     def _get_latest_version(self) -> list[str]:
52 |         # Get last line of the checksum file
53 |         version = self.checksum_file.splitlines()[-1]
54 |         # Get last "word" of the line (which is the file name)
55 |         version = version.split()[-1]
56 |         # Only keep the version numbers and join them with a dot in between each of them
57 |         version = ".".join(version.split("-")[2:4])
58 | 
59 |         return self._str_to_version(version)
60 | 
61 |     @cache
62 |     @staticmethod
63 |     def _get_gparted_version_style(version: str):
64 |         """
65 |         Convert the version string from "x.y.z.a" to "x.y.z-a" format, as used by GParted Live.
66 | 
67 |         Parameters:
68 |             version (str): The version string in "x.y.z.a" format.
69 | 
70 |         Returns:
71 |             str: The version string in "x.y.z-a" format.
72 |         """
73 |         return "-".join(version.rsplit(".", 1))
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """Based off of pypa/sampleproject
2 | https://raw.githubusercontent.com/pypa/sampleproject/db5806e0a3204034c51b1c00dde7d5eb3fa2532e/setup.py
3 | """
4 | 
5 | import pathlib
6 | 
7 | # Always prefer setuptools over distutils
8 | from setuptools import find_packages, setup
9 | 
10 | here = pathlib.Path(__file__).parent.resolve()
11 | 
12 | # Get the long description from the README file
13 | long_description = (here / "README.md").read_text(encoding="utf-8")
14 | 
15 | setup(
16 |     name="sisou", # Required
17 |     version="1.4.1", # Required
18 |     description="A powerful tool to conveniently update all of your ISO files!", # Optional
19 |     long_description=long_description, # Optional
20 |     long_description_content_type="text/markdown", # Optional
21 |     url="https://github.com/JoshuaVandaele/SuperISOUpdater", # Optional
22 |     author="Joshua Vandaele", # Optional
23 |     author_email="joshua@vandaele.software", # Optional
24 |     classifiers=[ # Optional
25 |         # https://pypi.org/classifiers/
26 |         "Development Status :: 5 - Production/Stable",
27 |         "Intended Audience :: End Users/Desktop",
28 |         "Topic :: Internet",
29 |         "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
30 |         # Specify the Python versions you support here. In particular, ensure
31 |         # that you indicate you support Python 3. These classifiers are *not*
32 |         # checked by 'pip install'. See instead 'python_requires' below.
33 |         "Programming Language :: Python :: 3.10",
34 |         "Programming Language :: Python :: 3.11",
35 |         "Programming Language :: Python :: 3.12",
36 |         "Programming Language :: Python :: 3.13",
37 |         "Programming Language :: Python :: 3 :: Only",
38 |     ],
39 |     keywords="ventoy, updater, os, iso, updater, sisou, cli", # Optional
40 |     packages=find_packages(), # Required
41 |     py_modules=["sisou"], # Required
42 |     include_package_data=True,
43 |     package_data={"": ["config.toml.default"]},
44 |     python_requires=">=3.10, <4",
45 |     install_requires=[
46 |         "beautifulsoup4==4.12.2",
47 |         "requests==2.31.0",
48 |         "tqdm==4.65.0",
49 |         "PGPy13==0.6.1rc1",
50 |     ], # Optional
51 |     # extras_require={
52 |     #     "dev": [""],
53 |     #     "test": [""],
54 |     # },
55 |     entry_points={ # Optional
56 |         "console_scripts": [
57 |             "sisou = sisou:main",
58 |         ],
59 |     },
60 |     project_urls={ # Optional
61 |         "Bug Reports": "https://github.com/JoshuaVandaele/SuperISOUpdater/issues",
62 |         "Source": "https://github.com/JoshuaVandaele/SuperISOUpdater/",
63 |     },
64 |     zip_safe=False,
65 | )
--------------------------------------------------------------------------------
/modules/updaters/ArchLinux.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | 
4 | import requests
5 | from bs4 import BeautifulSoup
6 | 
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import parse_hash, sha256_hash_check
10 | 
11 | DOMAIN = "https://geo.mirror.pkgbuild.com"
12 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/iso/latest"
13 | FILE_NAME = "archlinux-[[VER]]-x86_64.iso"
14 | 
15 | 
16 | class ArchLinux(GenericUpdater):
17 |     """
18 |     A class representing an updater for Arch Linux.
19 | 
20 |     Attributes:
21 |         download_page (requests.Response): The HTTP response containing the download page HTML.
22 |         soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
23 | 
24 |     Note:
25 |         This class inherits from the abstract base class GenericUpdater.
26 |     """
27 | 
28 |     def __init__(self, folder_path: Path) -> None:
29 |         file_path = folder_path / FILE_NAME
30 |         super().__init__(file_path)
31 | 
32 |         self.download_page = requests.get(DOWNLOAD_PAGE_URL)
33 | 
34 |         if self.download_page.status_code != 200:
35 |             raise ConnectionError(
36 |                 f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
37 |             )
38 | 
39 |         self.soup_download_page = BeautifulSoup(
40 |             self.download_page.content, features="html.parser"
41 |         )
42 | 
43 |     @cache
44 |     def _get_download_link(self) -> str:
45 |         latest_version_str = self._version_to_str(self._get_latest_version())
46 |         return f"{DOWNLOAD_PAGE_URL}/{FILE_NAME.replace('[[VER]]', latest_version_str)}"
47 | 
48 |     def check_integrity(self) -> bool:
49 |         sha256_url = "https://geo.mirror.pkgbuild.com/iso/latest/sha256sums.txt"
50 | 
51 |         sha256_sums = requests.get(sha256_url).text
52 | 
53 |         sha256_sum = parse_hash(
54 |             sha256_sums,
55 |             [str(self._get_complete_normalized_file_path(absolute=False))],
56 |             0,
57 |         )
58 | 
59 |         return sha256_hash_check(
60 |             self._get_complete_normalized_file_path(absolute=True),
61 |             sha256_sum,
62 |         )
63 | 
64 |     @cache
65 |     def _get_latest_version(self) -> list[str]:
66 |         download_a_tags = self.soup_download_page.find_all("a", href=True)
67 |         if not download_a_tags:
68 |             raise VersionNotFoundError("We were not able to parse the download page")
69 |         download_a_tag = next(
70 |             a_tag for a_tag in download_a_tags if "archlinux" in a_tag.get("href")
71 |         )
72 |         if not download_a_tag:
73 |             raise VersionNotFoundError("We were not able to parse the download links")
74 | 
75 |         return self._str_to_version(download_a_tag.getText().split("-")[1])
--------------------------------------------------------------------------------
/modules/updaters/OpenSUSERolling.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | 
4 | import logging
5 | 
6 | import re
7 | import requests
8 | 
9 | from modules.updaters.GenericUpdater import GenericUpdater
10 | from modules.utils import parse_hash, sha256_hash_check
11 | 
12 | DOMAIN = "https://download.opensuse.org"
13 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/download/tumbleweed/iso"
14 | FILE_NAME = "openSUSE-[[EDITION]]-x86_64-[[VER]].iso"
15 | 
16 | 
17 | class OpenSUSERolling(GenericUpdater):
18 |     """
19 |     A class representing an updater for openSUSE's rolling releases (Tumbleweed and MicroOS).
20 | 
21 |     Attributes:
22 |         valid_editions (list[str]): List of valid editions to use
23 |         edition (str): Edition to download
24 | 
25 |     Note:
26 |         This class inherits from the abstract base class GenericUpdater.
27 |     """
28 | 
29 |     def __init__(self, folder_path: Path, edition: str) -> None:
30 |         self.valid_editions = ["MicroOS-DVD", "Tumbleweed-DVD", "Tumbleweed-NET", "Tumbleweed-GNOME-Live", "Tumbleweed-KDE-Live", "Tumbleweed-XFCE-Live", "Tumbleweed-Rescue-CD"]
31 |         self.edition = edition
32 | 
33 |         self.download_page_url = DOWNLOAD_PAGE_URL
34 |         file_path = folder_path / FILE_NAME
35 |         super().__init__(file_path)
36 | 
37 |     def _capitalize_edition(self) -> str:
38 |         for capEdition in self.valid_editions:
39 |             if capEdition.lower() == self.edition.lower():
40 |                 return capEdition
41 |         # shouldn't get here
42 |         return self.edition
43 | 
44 |     @cache
45 |     def _get_download_link(self) -> str:
46 |         isoFile = FILE_NAME.replace("[[EDITION]]", self._capitalize_edition()).replace("[[VER]]", "Current")
47 |         return f"{self.download_page_url}/{isoFile}"
48 | 
49 | 
50 |     def check_integrity(self) -> bool:
51 |         sha256_url = f"{self._get_download_link()}.sha256"
52 | 
53 |         sha256_sums = requests.get(sha256_url).text
54 | 
55 |         sha256_sum = parse_hash(sha256_sums, [], 0)
56 | 
57 |         return sha256_hash_check(
58 |             self._get_complete_normalized_file_path(absolute=True),
59 |             sha256_sum,
60 |         )
61 | 
62 |     def _get_latest_version(self) -> list[str]:
63 |         sha256_url = f"{self._get_download_link()}.sha256"
64 |         sha256_sums = requests.get(sha256_url).text
65 |         return self._str_to_version(sha256_sums.split(" ")[-1])
66 | 
67 |     def _str_to_version(self, version_str: str):
68 |         version = "0"
69 |         pattern = r'^.*Snapshot(\d*)-.*$'
70 | 
71 |         match = re.search(pattern, version_str)
72 |         if match:
73 |             version = match.group(1)
74 | 
75 |         logging.debug(
76 |             f"[OpenSUSERolling._str_to_version] parsing:{version_str}, found version:{version}"
77 |         )
78 |         return [version]
79 | 
80 |     def _version_to_str(self, version):
81 |         return f"Snapshot{version[0]}-Media"
--------------------------------------------------------------------------------
/modules/updaters/TrueNAS.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | 
4 | import requests
5 | from bs4 import BeautifulSoup
6 | 
7 | from modules.updaters.GenericUpdater import GenericUpdater
8 | from modules.utils import sha256_hash_check
9 | 
10 | DOMAIN = "https://www.truenas.com"
11 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/download-truenas-[[EDITION]]"
12 | FILE_NAME = "TrueNAS-[[EDITION]]-[[VER]].iso"
13 | 
14 | 
15 | class TrueNAS(GenericUpdater):
16 |     """
17 |     A class representing an updater for TrueNAS.
18 | 
19 |     Attributes:
20 |         valid_editions (list[str]): List of valid editions to use
21 |         edition (str): Edition to download
22 |         download_page (requests.Response): The HTTP response containing the download page HTML.
23 |         soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
24 | 
25 |     Note:
26 |         This class inherits from the abstract base class GenericUpdater.
27 |     """
28 | 
29 |     def __init__(self, folder_path: Path, edition: str) -> None:
30 |         self.valid_editions = ["core", "scale"]
31 |         self.edition = edition.lower()
32 | 
33 |         file_path = folder_path / FILE_NAME
34 |         super().__init__(file_path)
35 | 
36 |         self.download_page_url = DOWNLOAD_PAGE_URL.replace("[[EDITION]]", self.edition)
37 | 
38 |         self.download_page = requests.get(self.download_page_url)
39 | 
40 |         if self.download_page.status_code != 200:
41 |             raise ConnectionError(
42 |                 f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
43 |             )
44 | 
45 |         self.soup_download_page = BeautifulSoup(
46 |             self.download_page.content, features="html.parser"
47 |         )
48 | 
49 |     @cache
50 |     def _get_download_link(self) -> str:
51 |         a_tag = self.soup_download_page.find("a", attrs={"id": "downloadTrueNAS"})
52 | 
53 |         if not a_tag:
54 |             raise LookupError("Could not find HTML tag containing download URL")
55 | 
56 |         return a_tag["href"] # type: ignore
57 | 
58 |     def check_integrity(self) -> bool:
59 |         sha256_url = f"{self._get_download_link()}.sha256"
60 | 
61 |         # Only 1 sum in file
62 |         sha256_sums = requests.get(sha256_url).text.split()
63 | 
64 |         # For some reason, TrueNAS has two different formats for their sha256 file
65 |         if sha256_sums[0] == "SHA256":
66 |             sha256_sum = sha256_sums[-1]
67 |         else:
68 |             sha256_sum = sha256_sums[0]
69 | 
70 |         return sha256_hash_check(
71 |             self._get_complete_normalized_file_path(absolute=True),
72 |             sha256_sum,
73 |         )
74 | 
75 |     @cache
76 |     def _get_latest_version(self) -> list[str]:
77 |         download_link = self._get_download_link()
78 |         version = download_link.split("-")[-1]
79 | 
80 |         return self._str_to_version(version.replace(".iso", ""))
--------------------------------------------------------------------------------
/modules/updaters/Tails.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | 
4 | import requests
5 | from bs4 import BeautifulSoup
6 | 
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import pgp_check
10 | 
11 | DOMAIN = "https://mirrors.edge.kernel.org"
12 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/tails/stable"
13 | FILE_NAME = "tails-amd64-[[VER]].iso"
14 | PUB_KEY_URL = "https://tails.net/tails-signing.key"
15 | 
16 | 
17 | class Tails(GenericUpdater):
18 |     """
19 |     A class representing an updater for Tails.
20 | 
21 |     Attributes:
22 |         download_page (requests.Response): The HTTP response containing the download page HTML.
23 |         soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
24 | 
25 |     Note:
26 |         This class inherits from the abstract base class GenericUpdater.
27 |     """
28 | 
29 |     def __init__(self, folder_path: Path) -> None:
30 |         file_path = folder_path / FILE_NAME
31 |         super().__init__(file_path)
32 | 
33 |         self.download_page = requests.get(DOWNLOAD_PAGE_URL)
34 | 
35 |         if self.download_page.status_code != 200:
36 |             raise ConnectionError(
37 |                 f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
38 |             )
39 | 
40 |         self.soup_download_page = BeautifulSoup(
41 |             self.download_page.content, features="html.parser"
42 |         )
43 | 
44 |     @cache
45 |     def _get_download_link(self) -> str:
46 |         latest_version_str = self._version_to_str(self._get_latest_version())
47 |         return f"{DOWNLOAD_PAGE_URL}/tails-amd64-{latest_version_str}/{self._get_complete_normalized_file_path(absolute=False)}"
48 | 
49 |     def check_integrity(self) -> bool:
50 |         sig_url = f"{self._get_download_link()}.sig"
51 | 
52 |         sig = requests.get(sig_url).content
53 |         pub_key = requests.get(PUB_KEY_URL).content
54 | 
55 |         return pgp_check(
56 |             self._get_complete_normalized_file_path(absolute=True), sig, pub_key
57 |         )
58 | 
59 |     @cache
60 |     def _get_latest_version(self) -> list[str]:
61 |         download_a_tags = self.soup_download_page.find_all("a", href=True)
62 |         if not download_a_tags:
63 |             raise VersionNotFoundError("We were not able to parse the download page")
64 | 
65 |         local_version = self._get_local_version()
66 |         latest = local_version or []
67 | 
68 |         for a_tag in download_a_tags:
69 |             href = a_tag.get("href")
70 |             if "tails-amd64" not in href:
71 |                 continue
72 |             version = href.split("-")[-1]
73 |             if not version[0].isnumeric():
74 |                 continue
75 |             version_number = self._str_to_version(version[:-1])
76 |             if self._compare_version_numbers(latest, version_number):
77 |                 latest = version_number
78 | 
79 |         return latest
--------------------------------------------------------------------------------
/modules/updaters/Clonezilla.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | 
4 | import requests
5 | from bs4 import BeautifulSoup, Tag
6 | 
7 | from modules.exceptions import IntegrityCheckError, VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import parse_hash, sha256_hash_check
10 | 
11 | DOMAIN = "https://clonezilla.org"
12 | FILE_NAME = "clonezilla-live-[[VER]]-amd64.iso"
13 | 
14 | 
15 | class Clonezilla(GenericUpdater):
16 |     """
17 |     A class representing an updater for Clonezilla.
18 | 
19 |     Note:
20 |         This class inherits from the abstract base class GenericUpdater.
21 |     """
22 | 
23 |     def __init__(self, folder_path: Path) -> None:
24 |         file_path = folder_path / FILE_NAME
25 |         super().__init__(file_path)
26 | 
27 |     @cache
28 |     def _get_download_link(self) -> str:
29 |         ver = self._version_to_str(self._get_latest_version())
30 |         repo = "https://downloads.sourceforge.net"
31 |         return f"{repo}/clonezilla/clonezilla-live-{Clonezilla._get_clonezilla_version_style(ver)}-amd64.iso"
32 | 
33 |     def check_integrity(self) -> bool:
34 |         r = requests.get(f"{DOMAIN}/downloads/stable/checksums-contents.php")
35 |         soup = BeautifulSoup(r.content, features="html.parser")
36 |         pre: Tag | None = soup.find("pre") # type: ignore
37 |         if not pre:
38 |             raise IntegrityCheckError(
39 |                 "Unable to extract `<pre>` elements from checksum"
40 | )
41 |
42 | checksums: list[str] = pre.text.split("###")
43 | for checksum in checksums:
44 | if "SHA256" in checksum:
45 | sha256_sums = checksum
46 | break
47 | else:
48 | raise IntegrityCheckError("Could not find SHA256 sum")
49 |
50 | sha256_hash = parse_hash(sha256_sums, ["amd64.iso"], 0)
51 |
52 | return sha256_hash_check(
53 | self._get_complete_normalized_file_path(absolute=True), sha256_hash
54 | )
55 |
56 | @cache
57 | def _get_latest_version(self) -> list[str]:
58 | r = requests.get(f"{DOMAIN}/downloads/stable/changelog-contents.php")
59 | soup = BeautifulSoup(r.content, features="html.parser")
60 | first_paragraph: Tag | None = soup.find("p") # type: ignore
61 | if not first_paragraph:
62 | raise VersionNotFoundError(
63 |                 "Unable to extract `<p>` elements from changelog"
64 | )
65 | version = first_paragraph.getText().split()[-1]
66 | return self._str_to_version(version.replace("-", "."))
67 |
68 | @cache
69 | @staticmethod
70 | def _get_clonezilla_version_style(version: str):
71 | """
72 |         Convert the version string from "x.y.z.a" to "x.y.z-a" format, as used by Clonezilla.
73 |
74 | Parameters:
75 | version (str): The version string in "x.y.z.a" format.
76 |
77 | Returns:
78 | str: The version string in "x.y.z-a" format.
79 | """
80 | return "-".join(version.rsplit(".", 1))
81 |
--------------------------------------------------------------------------------
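As a quick sanity check, the conversion performed by Clonezilla._get_clonezilla_version_style can be run standalone; the version value below is illustrative, not a real release:

# Worked example of the rsplit-based conversion; "3.1.0.22" is an
# illustrative value in the internal "x.y.z.a" form.
version = "3.1.0.22"
assert "-".join(version.rsplit(".", 1)) == "3.1.0-22"  # published "x.y.z-a" form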
/modules/updaters/Manjaro.py:
--------------------------------------------------------------------------------
1 | import re
2 | from functools import cache
3 | from pathlib import Path
4 |
5 | import requests
6 |
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import (
10 | md5_hash_check,
11 | parse_hash,
12 | sha256_hash_check,
13 | sha512_hash_check,
14 | )
15 |
16 | DOMAIN = "https://gitlab.manjaro.org"
17 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/web/iso-info/-/raw/master/file-info.json"
18 | FILE_NAME = "manjaro-[[EDITION]]-[[VER]]-linux.iso"
19 |
20 |
21 | class Manjaro(GenericUpdater):
22 | """
23 | A class representing an updater for Manjaro.
24 |
25 | Attributes:
26 | valid_editions (list[str]): List of valid editions to use
27 | edition (str): Edition to download
28 |         file_info_json (dict[Any, Any]): Parsed JSON data describing the files available for each edition
29 |
30 | Note:
31 | This class inherits from the abstract base class GenericUpdater.
32 | """
33 |
34 | def __init__(self, folder_path: Path, edition: str) -> None:
35 | self.valid_editions = [
36 | "plasma",
37 | "xfce",
38 | "gnome",
39 | "cinnamon",
40 | "i3",
41 | ]
42 | self.edition = edition.lower()
43 | file_path = folder_path / FILE_NAME
44 | super().__init__(file_path)
45 |
46 | self.file_info_json = requests.get(DOWNLOAD_PAGE_URL).json()
47 | self.file_info_json["releases"] = (
48 | self.file_info_json["official"] | self.file_info_json["community"]
49 | )
50 |
51 | @cache
52 | def _get_download_link(self) -> str:
53 | return self.file_info_json["releases"][self.edition]["image"]
54 |
55 | def check_integrity(self) -> bool:
56 | checksum_url = self.file_info_json["releases"][self.edition]["checksum"]
57 |
58 | checksums = requests.get(checksum_url).text
59 |
60 | checksum = parse_hash(checksums, [], 0)
61 |
62 | if checksum_url.endswith(".sha512"):
63 | return sha512_hash_check(
64 | self._get_complete_normalized_file_path(absolute=True),
65 | checksum,
66 | )
67 | elif checksum_url.endswith(".sha256"):
68 | return sha256_hash_check(
69 | self._get_complete_normalized_file_path(absolute=True),
70 | checksum,
71 | )
72 | elif checksum_url.endswith(".md5"):
73 | return md5_hash_check(
74 | self._get_complete_normalized_file_path(absolute=True),
75 | checksum,
76 | )
77 | else:
78 | raise ValueError("Unknown checksum type")
79 |
80 | @cache
81 | def _get_latest_version(self) -> list[str]:
82 | download_link = self._get_download_link()
83 |
84 | latest_version_regex = re.search(
85 | r"manjaro-\w+-(.+?)-",
86 | download_link,
87 | )
88 |
89 | if latest_version_regex:
90 | return self._str_to_version(latest_version_regex.group(1))
91 |
92 | raise VersionNotFoundError("Could not find the latest available version")
93 |
--------------------------------------------------------------------------------
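The if/elif chain in Manjaro.check_integrity maps a checksum-file suffix to a hash routine. A behavior-equivalent, table-driven sketch (not the project's code; it reuses the same helpers from modules.utils):

from modules.utils import md5_hash_check, sha256_hash_check, sha512_hash_check

# Table-driven equivalent of the suffix dispatch above.
SUFFIX_CHECKS = {
    ".sha512": sha512_hash_check,
    ".sha256": sha256_hash_check,
    ".md5": md5_hash_check,
}

def check_by_suffix(checksum_url: str, file_path, checksum: str) -> bool:
    for suffix, check in SUFFIX_CHECKS.items():
        if checksum_url.endswith(suffix):
            return check(file_path, checksum)
    raise ValueError("Unknown checksum type")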
/modules/updaters/RockyLinux.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 | from bs4 import BeautifulSoup
6 |
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import parse_hash, sha256_hash_check
10 |
11 | DOMAIN = "https://download.rockylinux.org"
12 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/pub/rocky"
13 | FILE_NAME = "Rocky-[[VER]]-x86_64-[[EDITION]].iso"
14 |
15 |
16 | class RockyLinux(GenericUpdater):
17 | """
18 | A class representing an updater for Rocky Linux.
19 |
20 | Attributes:
21 | valid_editions (list[str]): List of valid editions to use
22 | edition (str): Edition to download
23 | download_page (requests.Response): The HTTP response containing the download page HTML.
24 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
25 |
26 | Note:
27 | This class inherits from the abstract base class GenericUpdater.
28 | """
29 |
30 | def __init__(self, folder_path: Path, edition: str) -> None:
31 | self.valid_editions = ["dvd", "boot", "minimal"]
32 | self.edition = edition.lower()
33 |
34 | file_path = folder_path / FILE_NAME
35 | super().__init__(file_path)
36 |
37 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
38 |
39 | if self.download_page.status_code != 200:
40 | raise ConnectionError(
41 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
42 | )
43 |
44 | self.soup_download_page = BeautifulSoup(
45 | self.download_page.content, features="html.parser"
46 | )
47 |
48 | @cache
49 | def _get_download_link(self) -> str:
50 | latest_version_str = self._version_to_str(self._get_latest_version())
51 | return f"{DOWNLOAD_PAGE_URL}/{latest_version_str}/isos/x86_64/{self._get_complete_normalized_file_path(absolute=False)}"
52 |
53 | def check_integrity(self) -> bool:
54 | sha256_url = f"{self._get_download_link()}.CHECKSUM"
55 |
56 | sha256_sums = requests.get(sha256_url).text
57 |
58 | sha256_sum = parse_hash(
59 | sha256_sums,
60 | [str(self._get_complete_normalized_file_path(absolute=False)), "="],
61 | -1,
62 | )
63 |
64 | return sha256_hash_check(
65 | self._get_complete_normalized_file_path(absolute=True),
66 | sha256_sum,
67 | )
68 |
69 | @cache
70 | def _get_latest_version(self) -> list[str]:
71 | download_a_tags = self.soup_download_page.find_all("a", href=True)
72 | if not download_a_tags:
73 | raise VersionNotFoundError("We were not able to parse the download page")
74 |
75 | local_version = self._get_local_version()
76 | latest = local_version or []
77 |
78 | for a_tag in download_a_tags:
79 | href = a_tag.get("href")
80 | if not href[0].isnumeric():
81 | continue
82 | version_number = self._str_to_version(href[:-1])
83 | if self._compare_version_numbers(latest, version_number):
84 | latest = version_number
85 |
86 | return latest
87 |
--------------------------------------------------------------------------------
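Rocky Linux publishes BSD-style digest lines, which is why parse_hash above is given both the file name and "=" as match strings and asked for token -1 (the last whitespace-separated field). A minimal illustration, with a made-up file name and digest:

# Illustrative BSD-style line from a .CHECKSUM file:
line = "SHA256 (Rocky-9-x86_64-dvd.iso) = 9f1fc0de9f1fc0de"
# Matching on the file name and "=" selects this line; token -1 is the digest.
assert line.split()[-1] == "9f1fc0de9f1fc0de"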
/modules/updaters/OpenSUSE.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 |
6 | from modules.updaters.GenericUpdater import GenericUpdater
7 | from modules.utils import parse_hash, sha256_hash_check
8 |
9 | DOMAIN = "https://download.opensuse.org"
10 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/download/distribution/[[EDITION]]"
11 | FILE_NAME = "openSUSE-[[EDITION]]-[[VER]]-DVD-x86_64-Current.iso"
12 |
13 |
14 | class OpenSUSE(GenericUpdater):
15 | """
16 | A class representing an updater for OpenSUSE.
17 |
18 | Attributes:
19 | valid_editions (list[str]): List of valid editions to use
20 | edition (str): Edition to download
21 |
22 | Note:
23 | This class inherits from the abstract base class GenericUpdater.
24 | """
25 |
26 | def __init__(self, folder_path: Path, edition: str) -> None:
27 | self.valid_editions = ["leap", "leap-micro", "jump"]
28 | self.edition = edition.lower()
29 |
30 | self.download_page_url = DOWNLOAD_PAGE_URL.replace("[[EDITION]]", self.edition)
31 |
32 | file_path = folder_path / FILE_NAME
33 | super().__init__(file_path)
34 |
35 | def _capitalize_edition(self) -> str:
36 | return "-".join([s.capitalize() for s in self.edition.split("-")])
37 |
38 | @cache
39 | def _get_download_link(self) -> str:
40 | latest_version_str = self._version_to_str(self._get_latest_version())
41 | url = f"{self.download_page_url}/{latest_version_str}"
42 |
43 | edition_page = requests.get(f"{url}?jsontable").json()["data"]
44 |
45 | if any("product" in item["name"] for item in edition_page):
46 | url += "/product"
47 |
48 | if self.edition != "leap-micro":
49 | latest_version_str += "-NET"
50 |
51 |         return f"{url}/iso/openSUSE-{self._capitalize_edition()}-{latest_version_str}-x86_64{'-Current' if self.edition != 'leap-micro' else ''}.iso"
52 |
53 | def check_integrity(self) -> bool:
54 | sha256_url = f"{self._get_download_link()}.sha256"
55 |
56 | sha256_sums = requests.get(sha256_url).text
57 |
58 | sha256_sum = parse_hash(sha256_sums, [], 0)
59 |
60 | return sha256_hash_check(
61 | self._get_complete_normalized_file_path(absolute=True),
62 | sha256_sum,
63 | )
64 |
65 | @cache
66 | def _get_latest_version(self) -> list[str]:
67 | r = requests.get(f"{self.download_page_url}?jsontable")
68 |
69 | data = r.json()["data"]
70 |
71 | local_version = self._get_local_version()
72 | latest = local_version or []
73 |
74 |         for entry in data:
75 |             # Skip the legacy openSUSE Leap 42.x directories
76 |             if "42" in entry["name"]:
77 |                 continue
78 |             version_number = self._str_to_version(entry["name"][:-1])
79 |             if self._compare_version_numbers(latest, version_number):
80 |                 # Only keep a newer version if its directory actually lists images
81 |                 sub_r = requests.get(f"{self.download_page_url}/{self._version_to_str(version_number)}?jsontable")
82 |                 sub_data = sub_r.json()["data"]
83 |                 if not any("iso" in item["name"] or "product" in item["name"] for item in sub_data):
84 |                     continue
85 | 
86 |                 latest = version_number
87 | 
88 |         return latest
89 | 
--------------------------------------------------------------------------------
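The ?jsontable query used above is assumed to return a JSON directory listing whose entry names carry a trailing slash; a sketch of that assumption and the slicing applied to it (entry names illustrative):

# Assumed (abridged) shape of the ?jsontable payload:
payload = {"data": [{"name": "15.5/"}, {"name": "15.6/"}]}

for entry in payload["data"]:
    version_str = entry["name"][:-1]  # strip the trailing "/" -> "15.5", "15.6"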
/modules/updaters/LinuxMint.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 | from bs4 import BeautifulSoup
6 |
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import parse_hash, sha256_hash_check
10 |
11 | DOMAIN = "https://mirrors.edge.kernel.org"
12 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/linuxmint/stable/"
13 | FILE_NAME = "linuxmint-[[VER]]-[[EDITION]]-64bit.iso"
14 |
15 |
16 | class LinuxMint(GenericUpdater):
17 | """
18 | A class representing an updater for Linux Mint.
19 |
20 | Attributes:
21 | valid_editions (list[str]): List of valid editions to use
22 | edition (str): Edition to download
23 | download_page (requests.Response): The HTTP response containing the download page HTML.
24 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
25 |
26 | Note:
27 | This class inherits from the abstract base class GenericUpdater.
28 | """
29 |
30 | def __init__(self, folder_path: Path, edition: str) -> None:
31 | self.valid_editions = ["cinnamon", "mate", "xfce"]
32 | self.edition = edition.lower()
33 |
34 | file_path = folder_path / FILE_NAME
35 | super().__init__(file_path)
36 |
37 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
38 |
39 | if self.download_page.status_code != 200:
40 | raise ConnectionError(
41 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
42 | )
43 |
44 | self.soup_download_page = BeautifulSoup(
45 | self.download_page.content, features="html.parser"
46 | )
47 |
48 | @cache
49 | def _get_download_link(self) -> str:
50 | latest_version_str = self._version_to_str(self._get_latest_version())
51 | return f"{DOWNLOAD_PAGE_URL}/{latest_version_str}/{self._get_complete_normalized_file_path(absolute=False)}"
52 |
53 | def check_integrity(self) -> bool:
54 | latest_version_str = self._version_to_str(self._get_latest_version())
55 |
56 | sha256_url = f"https://mirrors.edge.kernel.org/linuxmint/stable/{latest_version_str}/sha256sum.txt"
57 |
58 | sha256_sums = requests.get(sha256_url).text
59 |
60 | sha256_sum = parse_hash(
61 | sha256_sums,
62 | [str(self._get_complete_normalized_file_path(absolute=False))],
63 | 0,
64 | )
65 |
66 | return sha256_hash_check(
67 | self._get_complete_normalized_file_path(absolute=True),
68 | sha256_sum,
69 | )
70 |
71 | @cache
72 | def _get_latest_version(self) -> list[str]:
73 | download_a_tags = self.soup_download_page.find_all("a", href=True)
74 | if not download_a_tags:
75 | raise VersionNotFoundError("We were not able to parse the download page")
76 |
77 | local_version = self._get_local_version()
78 | latest = local_version or []
79 |
80 | for a_tag in download_a_tags:
81 | href = a_tag.get("href")
82 | if not href[0].isnumeric():
83 | continue
84 | version_number = self._str_to_version(href[:-1])
85 | if self._compare_version_numbers(latest, version_number):
86 | latest = version_number
87 |
88 | return latest
89 |
--------------------------------------------------------------------------------
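For orientation, a hypothetical driver for one of these updater classes; the folder path and edition are examples, and the real entry point (sisou.py) wires updaters up from the TOML config instead:

from pathlib import Path

from modules.updaters import LinuxMint

# Hypothetical, simplified driver; values are examples.
updater = LinuxMint(Path("isos/linuxmint"), "cinnamon")
updater.install_latest_version()  # downloads the ISO and verifies its hash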
/modules/updaters/KaliLinux.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | from urllib.parse import urljoin
4 |
5 | import requests
6 | from bs4 import BeautifulSoup
7 |
8 | from modules.exceptions import VersionNotFoundError
9 | from modules.updaters.GenericUpdater import GenericUpdater
10 | from modules.utils import parse_hash, sha256_hash_check
11 |
12 | DOMAIN = "https://cdimage.kali.org"
13 | DOWNLOAD_PAGE_URL = urljoin(DOMAIN, "current/")
14 | FILE_NAME = "kali-linux-[[VER]]-[[EDITION]]-amd64.iso"
15 |
16 |
17 | class KaliLinux(GenericUpdater):
18 | """
19 | A class representing an updater for Kali Linux.
20 |
21 | Attributes:
22 | valid_editions (list[str]): List of valid editions to use
23 | edition (str): Edition to download
24 | download_page (requests.Response): The HTTP response containing the download page HTML.
25 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
26 |
27 | Note:
28 | This class inherits from the abstract base class GenericUpdater.
29 | """
30 |
31 | def __init__(self, folder_path: Path, edition: str) -> None:
32 | self.valid_editions = [
33 | "installer",
34 | "installer-netinst",
35 | "installer-purple",
36 | "live",
37 | ]
38 | self.edition = edition.lower()
39 |
40 | file_path = folder_path / FILE_NAME
41 | super().__init__(file_path)
42 |
43 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
44 |
45 | if self.download_page.status_code != 200:
46 | raise ConnectionError(
47 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
48 | )
49 |
50 | self.soup_download_page = BeautifulSoup(
51 | self.download_page.content, features="html.parser"
52 | )
53 |
54 | @cache
55 | def _get_download_link(self) -> str:
56 | return urljoin(
57 | DOWNLOAD_PAGE_URL,
58 | str(self._get_complete_normalized_file_path(absolute=False)),
59 | )
60 |
61 | def check_integrity(self) -> bool:
62 | sha256_url = urljoin(DOWNLOAD_PAGE_URL, "SHA256SUMS")
63 |
64 | sha256_sums = requests.get(sha256_url).text
65 |
66 | sha256_sum = parse_hash(
67 | sha256_sums,
68 | [str(self._get_complete_normalized_file_path(absolute=False))],
69 | 0,
70 | )
71 |
72 | return sha256_hash_check(
73 | self._get_complete_normalized_file_path(absolute=True),
74 | sha256_sum,
75 | )
76 |
77 | @cache
78 | def _get_latest_version(self) -> list[str]:
79 | download_a_tags = self.soup_download_page.find_all("a", href=True)
80 | if not download_a_tags:
81 | raise VersionNotFoundError("We were not able to parse the download page")
82 |
83 | latest = next(
84 | href
85 | for a_tag in download_a_tags
86 | if str(
87 | self._get_normalized_file_path(
88 | absolute=False,
89 | version=None,
90 | edition=self.edition if self.has_edition() else None, # type: ignore
91 | lang=self.lang if self.has_lang() else None, # type: ignore
92 | )
93 | ).split("[[VER]]")[-1]
94 | in (href := a_tag.get("href"))
95 | )
96 |
97 | return self._str_to_version(latest.split("-")[2])
98 |
--------------------------------------------------------------------------------
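The next(...) expression in _get_latest_version above is dense; unrolled into a plain loop it reads as follows (hrefs and suffix are illustrative stand-ins for the page's anchor hrefs and for the file-name tail after the [[VER]] placeholder):

# Unrolled equivalent of the walrus-based next(...) above.
hrefs = ["SHA256SUMS", "kali-linux-2024.2-live-amd64.iso"]
suffix = "-live-amd64.iso"

latest = None
for href in hrefs:
    if suffix in href:
        latest = href
        break
# next() without a default raises StopIteration when nothing matches.
assert latest is not None
assert latest.split("-")[2] == "2024.2"  # the [[VER]] component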
/modules/updaters/TempleOS.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from functools import cache
3 | from pathlib import Path
4 |
5 | import requests
6 | from bs4 import BeautifulSoup, Tag
7 |
8 | from modules.exceptions import VersionNotFoundError
9 | from modules.updaters.GenericUpdater import GenericUpdater
10 | from modules.utils import md5_hash_check, parse_hash
11 |
12 | DOMAIN = "https://templeos.org"
13 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/Downloads"
14 | FILE_NAME = "TempleOS_[[EDITION]]_[[VER]].ISO"
15 |
16 |
17 | class TempleOS(GenericUpdater):
18 | """
19 | A class representing an updater for TempleOS.
20 |
21 | Attributes:
22 | valid_editions (list[str]): List of valid editions to use
23 | edition (str): Edition to download
24 | download_page (requests.Response): The HTTP response containing the download page HTML.
25 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
26 | server_file_name (str): The name of the file to download on the server
27 |
28 | Note:
29 | This class inherits from the abstract base class GenericUpdater.
30 | """
31 |
32 | def __init__(self, folder_path: Path, edition: str) -> None:
33 | self.valid_editions = ["Distro", "Lite"]
34 | self.edition = edition
35 |
36 | file_path = folder_path / FILE_NAME
37 | super().__init__(file_path)
38 |
39 |         # Make the parameter case-insensitive, and recover the canonical casing from valid_editions
40 | self.edition = next(
41 | valid_ed
42 | for valid_ed in self.valid_editions
43 | if valid_ed.lower() == self.edition.lower()
44 | )
45 |
46 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
47 |
48 | if self.download_page.status_code != 200:
49 | raise ConnectionError(
50 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
51 | )
52 |
53 | self.soup_download_page = BeautifulSoup(
54 | self.download_page.content, features="html.parser"
55 | )
56 |
57 | self.server_file_name = (
58 | f"TempleOS{'Lite' if self.edition == 'Lite' else ''}.ISO"
59 | )
60 |
61 | @cache
62 | def _get_download_link(self) -> str:
63 | return f"{DOWNLOAD_PAGE_URL}/{self.server_file_name}"
64 |
65 | def check_integrity(self) -> bool:
66 | md5_url = f"{DOWNLOAD_PAGE_URL}/md5sums.txt"
67 |
68 | md5_sums = requests.get(md5_url).text
69 |
70 | md5_sum = parse_hash(md5_sums, [self.server_file_name], 0)
71 |
72 | return md5_hash_check(
73 | self._get_complete_normalized_file_path(absolute=True),
74 | md5_sum,
75 | )
76 |
77 | @cache
78 | def _get_latest_version(self) -> list[str]:
79 | file_list_soup: Tag | None = self.soup_download_page.find("pre") # type: ignore
80 | if not file_list_soup:
81 | raise VersionNotFoundError("Could not find download links list")
82 |
83 | page_text = file_list_soup.getText()
84 |
85 | next_line_has_date = False
86 | date: str | None = None
87 | for line in page_text.splitlines():
88 | if self.server_file_name in line:
89 | next_line_has_date = True
90 | continue
91 | if next_line_has_date:
92 |                 # Keep only the date and time fields: drop the first field and the last one (size)
93 | date = " ".join(line.strip().split()[1:-1])
94 | break
95 | if not date:
96 | raise VersionNotFoundError("Could not find date on download page")
97 |
98 | datetime_date = datetime.strptime(date, r"%d-%b-%Y %H:%M")
99 |
100 | return self._str_to_version(str(datetime.timestamp(datetime_date)))
101 |
--------------------------------------------------------------------------------
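TempleOS has no version numbers, so the updater turns the listing's modification date into a sortable pseudo-version; a minimal standalone illustration (the date value is made up):

from datetime import datetime

# "02-Jan-2024 10:30" is an illustrative listing date in the format parsed above.
stamp = datetime.strptime("02-Jan-2024 10:30", r"%d-%b-%Y %H:%M")
pseudo_version = str(datetime.timestamp(stamp))  # what _str_to_version receives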
/modules/updaters/ChromeOS.py:
--------------------------------------------------------------------------------
1 | import zipfile
2 | from functools import cache
3 | from pathlib import Path
4 |
5 | import requests
6 |
7 | from modules.exceptions import IntegrityCheckError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import download_file, sha1_hash_check
10 |
11 | DOMAIN = "https://dl.google.com"
12 | FILE_NAME = "chromeos_[[VER]]_[[EDITION]].img"
13 |
14 |
15 | class ChromeOS(GenericUpdater):
16 | """
17 | A class representing an updater for ChromeOS.
18 |
19 | Attributes:
20 | valid_editions (list[str]): List of valid editions to use
21 | edition (str): Edition to download
22 | chromium_releases_info (list[dict]): List of release info for each edition
23 | cur_edition_info: Release info for the selected edition
24 |
25 | Note:
26 | This class inherits from the abstract base class GenericUpdater.
27 | """
28 |
29 | def __init__(self, folder_path: Path, edition: str) -> None:
30 | self.valid_editions = ["ltc", "ltr", "stable"]
31 | self.edition = edition.lower()
32 |
33 | file_path = Path(folder_path) / FILE_NAME
34 | super().__init__(file_path)
35 |
36 | self.chromium_releases_info: list[dict] = requests.get(
37 | f"{DOMAIN}/dl/edgedl/chromeos/recovery/cloudready_recovery2.json"
38 | ).json()
39 |
40 | self.cur_edition_info: dict = next(
41 | d
42 | for d in self.chromium_releases_info
43 | if d["channel"].lower() == self.edition
44 | )
45 |
46 | @cache
47 | def _get_download_link(self) -> str:
48 | return self.cur_edition_info["url"]
49 |
50 | def check_integrity(self) -> bool:
51 | sha1_sum = self.cur_edition_info["sha1"]
52 |
53 | return sha1_hash_check(
54 | self._get_complete_normalized_file_path(absolute=True).with_suffix(".zip"),
55 | sha1_sum,
56 | )
57 |
58 | def install_latest_version(self) -> None:
59 | """
60 | Download and install the latest version of the software.
61 |
62 | Raises:
63 | IntegrityCheckError: If the integrity check of the downloaded file fails.
64 | """
65 | download_link = self._get_download_link()
66 |
67 | new_file = self._get_complete_normalized_file_path(absolute=True)
68 |
69 | archive_path = Path(new_file).with_suffix(".zip")
70 |
71 | local_file = self._get_local_file()
72 |
73 | download_file(download_link, archive_path)
74 |
75 | try:
76 | integrity_check = self.check_integrity()
77 | except Exception as e:
78 | raise IntegrityCheckError(
79 | "Integrity check failed: An error occurred"
80 | ) from e
81 |
82 | if not integrity_check:
83 | archive_path.unlink()
84 | raise IntegrityCheckError("Integrity check failed: Hashes do not match")
85 |
86 | with zipfile.ZipFile(archive_path) as z:
87 | file_list = z.namelist()
88 |
89 | file_ext = "bin"
90 | to_extract = next(
91 | file for file in file_list if file.lower().endswith(file_ext)
92 | )
93 |
94 | extracted_file = Path(z.extract(to_extract, path=new_file.parent))
95 | try:
96 | extracted_file.rename(new_file)
97 | except FileExistsError:
98 | # On Windows, files are not overwritten by default, so we need to remove the old file first
99 | new_file.unlink()
100 | extracted_file.rename(new_file)
101 |
102 | archive_path.unlink()
103 | if local_file:
104 | local_file.unlink()
105 |
106 | @cache
107 | def _get_latest_version(self) -> list[str]:
108 | return self._str_to_version(self.cur_edition_info["version"])
109 |
--------------------------------------------------------------------------------
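The rename-with-fallback at the end of install_latest_version is worth seeing in isolation, since Path.rename refuses to overwrite an existing file on Windows (paths below are illustrative):

from pathlib import Path

extracted = Path("isos/extracted.bin")  # illustrative paths
target = Path("isos/chromeos_stable.img")
try:
    extracted.rename(target)
except FileExistsError:  # Path.rename will not clobber an existing file on Windows
    target.unlink()
    extracted.rename(target)

Path.replace() is the usual single-call way to overwrite on both platforms; the explicit unlink here mirrors the updater's code and keeps the intent obvious.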
/.gitignore:
--------------------------------------------------------------------------------
1 | # ISO/Image files
2 | *.iso
3 | *.img
4 | *.wim
5 |
6 | # Byte-compiled / optimized / DLL files
7 | __pycache__/
8 | *.py[cod]
9 | *$py.class
10 |
11 | # C extensions
12 | *.so
13 |
14 | # Distribution / packaging
15 | .Python
16 | build/
17 | develop-eggs/
18 | dist/
19 | downloads/
20 | eggs/
21 | .eggs/
22 | lib/
23 | lib64/
24 | parts/
25 | sdist/
26 | var/
27 | wheels/
28 | share/python-wheels/
29 | *.egg-info/
30 | .installed.cfg
31 | *.egg
32 | MANIFEST
33 |
34 | # PyInstaller
35 | # Usually these files are written by a python script from a template
36 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
37 | *.manifest
38 | *.spec
39 |
40 | # Installer logs
41 | pip-log.txt
42 | pip-delete-this-directory.txt
43 |
44 | # Unit test / coverage reports
45 | htmlcov/
46 | .tox/
47 | .nox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *.cover
54 | *.py,cover
55 | .hypothesis/
56 | .pytest_cache/
57 | cover/
58 |
59 | # Translations
60 | *.mo
61 | *.pot
62 |
63 | # Django stuff:
64 | *.log
65 | local_settings.py
66 | db.sqlite3
67 | db.sqlite3-journal
68 |
69 | # Flask stuff:
70 | instance/
71 | .webassets-cache
72 |
73 | # Scrapy stuff:
74 | .scrapy
75 |
76 | # Sphinx documentation
77 | docs/_build/
78 |
79 | # PyBuilder
80 | .pybuilder/
81 | target/
82 |
83 | # Jupyter Notebook
84 | .ipynb_checkpoints
85 |
86 | # IPython
87 | profile_default/
88 | ipython_config.py
89 |
90 | # pyenv
91 | # For a library or package, you might want to ignore these files since the code is
92 | # intended to run in multiple environments; otherwise, check them in:
93 | # .python-version
94 |
95 | # pipenv
96 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
97 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
98 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
99 | # install all needed dependencies.
100 | #Pipfile.lock
101 |
102 | # poetry
103 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
104 | # This is especially recommended for binary packages to ensure reproducibility, and is more
105 | # commonly ignored for libraries.
106 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
107 | #poetry.lock
108 |
109 | # pdm
110 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
111 | #pdm.lock
112 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
113 | # in version control.
114 | # https://pdm.fming.dev/#use-with-ide
115 | .pdm.toml
116 |
117 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
118 | __pypackages__/
119 |
120 | # Celery stuff
121 | celerybeat-schedule
122 | celerybeat.pid
123 |
124 | # SageMath parsed files
125 | *.sage.py
126 |
127 | # Environments
128 | .env
129 | .venv
130 | env/
131 | venv/
132 | ENV/
133 | env.bak/
134 | venv.bak/
135 |
136 | # Spyder project settings
137 | .spyderproject
138 | .spyproject
139 |
140 | # Rope project settings
141 | .ropeproject
142 |
143 | # mkdocs documentation
144 | /site
145 |
146 | # mypy
147 | .mypy_cache/
148 | .dmypy.json
149 | dmypy.json
150 |
151 | # Pyre type checker
152 | .pyre/
153 |
154 | # pytype static type analyzer
155 | .pytype/
156 |
157 | # Cython debug symbols
158 | cython_debug/
159 |
160 | # PyCharm
161 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
162 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
163 | # and can be added to the global gitignore or merged into this file. For a more nuclear
164 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
165 | #.idea/
166 |
--------------------------------------------------------------------------------
/modules/updaters/Debian.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 | from bs4 import BeautifulSoup, Tag
6 |
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import parse_hash, sha256_hash_check
10 |
11 | DOMAIN = "https://cdimage.debian.org"
12 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/debian-cd/current-live/amd64/iso-hybrid"
13 | FILE_NAME = "debian-live-[[VER]]-amd64-[[EDITION]].iso"
14 |
15 |
16 | class Debian(GenericUpdater):
17 | """
18 | A class representing an updater for Debian.
19 |
20 | Attributes:
21 | valid_editions (list[str]): List of valid editions to use
22 | edition (str): Edition to download
23 | download_page (requests.Response): The HTTP response containing the download page HTML.
24 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
25 | soup_index_list (Tag): The index list containing the downloadable files.
26 |
27 | Note:
28 | This class inherits from the abstract base class GenericUpdater.
29 | """
30 |
31 | def __init__(self, folder_path: Path, edition: str) -> None:
32 | self.valid_editions = [
33 | "cinnamon",
34 | "gnome",
35 | "kde",
36 | "lxde",
37 | "lxqt",
38 | "mate",
39 | "standard",
40 | "xfce",
41 | ]
42 |
43 | self.edition = edition.lower()
44 |
45 | file_path = folder_path / FILE_NAME
46 | super().__init__(file_path)
47 |
48 |         # All valid Debian editions are lowercase, so the lowercased parameter is used directly
49 |
50 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
51 |
52 | if self.download_page.status_code != 200:
53 | raise ConnectionError(
54 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
55 | )
56 |
57 | self.soup_download_page = BeautifulSoup(
58 | self.download_page.content, features="html.parser"
59 | )
60 |
61 | self.soup_index_list: Tag = self.soup_download_page.find(
62 | "table", attrs={"id": "indexlist"}
63 | ) # type: ignore
64 |
65 | if not self.soup_index_list:
66 | raise ConnectionError(
67 | "We couldn't find the list of indexes containing the download URLs"
68 | )
69 |
70 | @cache
71 | def _get_download_link(self) -> str:
72 | return f"{DOWNLOAD_PAGE_URL}/{self._get_complete_normalized_file_path(absolute=False)}"
73 |
74 | def check_integrity(self) -> bool:
75 | sha256_url = f"{DOWNLOAD_PAGE_URL}/SHA256SUMS"
76 |
77 | sha256_sums = requests.get(sha256_url).text
78 |
79 | sha256_sum = parse_hash(
80 | sha256_sums,
81 | [str(self._get_complete_normalized_file_path(absolute=False))],
82 | 0,
83 | )
84 |
85 | return sha256_hash_check(
86 | self._get_complete_normalized_file_path(absolute=True),
87 | sha256_sum,
88 | )
89 |
90 | @cache
91 | def _get_latest_version(self) -> list[str]:
92 | download_a_tags = self.soup_index_list.find_all("a", href=True)
93 | if not download_a_tags:
94 | raise VersionNotFoundError("We were not able to parse the download page")
95 |
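        # Match the first link whose file name ends with the edition-specific
        # suffix that follows the [[VER]] placeholder (e.g. "-amd64-kde.iso").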
96 | latest = next(
97 | href
98 | for a_tag in download_a_tags
99 | if str(
100 | self._get_normalized_file_path(
101 | absolute=False,
102 | version=None,
103 | edition=self.edition if self.has_edition() else None, # type: ignore
104 | lang=self.lang if self.has_lang() else None, # type: ignore
105 | )
106 | ).split("[[VER]]")[-1]
107 | in (href := a_tag.get("href"))
108 | )
109 |
110 | return self._str_to_version(latest.split("-")[2])
111 |
--------------------------------------------------------------------------------
/modules/updaters/OPNsense.py:
--------------------------------------------------------------------------------
1 | import bz2
3 | from functools import cache
4 | from pathlib import Path
5 |
6 | import requests
7 | from bs4 import BeautifulSoup
8 |
9 | from modules.exceptions import VersionNotFoundError
10 | from modules.updaters.GenericUpdater import GenericUpdater
11 | from modules.utils import parse_hash, sha256_hash_check
12 |
13 | DOMAIN = "https://pkg.opnsense.org"
14 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/releases/mirror"
15 | FILE_NAME = "OPNsense-[[VER]]-[[EDITION]]-amd64.[[EXT]]"
16 |
17 |
18 | class OPNsense(GenericUpdater):
19 | """
20 | A class representing an updater for OPNsense.
21 |
22 | Attributes:
23 | valid_editions (list[str]): List of valid editions to use
24 | edition (str): Edition to download
25 | download_page (requests.Response): The HTTP response containing the download page HTML.
26 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
27 |
28 | Note:
29 | This class inherits from the abstract base class GenericUpdater.
30 | """
31 |
32 | def __init__(self, folder_path: Path, edition: str) -> None:
33 | self.valid_editions = ["dvd", "nano", "serial", "vga"]
34 | self.edition = edition.lower()
35 |
36 | file_extension = "iso" if self.edition == "dvd" else "img"
37 |
38 | file_path = folder_path / FILE_NAME.replace("[[EXT]]", file_extension)
39 | super().__init__(file_path)
40 |
41 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
42 |
43 | if self.download_page.status_code != 200:
44 | raise ConnectionError(
45 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
46 | )
47 |
48 | self.soup_download_page = BeautifulSoup(
49 | self.download_page.content, features="html.parser"
50 | )
51 |
52 | @cache
53 | def _get_download_link(self) -> str:
54 | return f"{DOWNLOAD_PAGE_URL}/{self._get_complete_normalized_file_path(absolute=False)}.bz2"
55 |
56 | def check_integrity(self) -> bool:
57 | latest_version_str = self._version_to_str(self._get_latest_version())
58 |
59 | sha256_url = (
60 | f"{DOWNLOAD_PAGE_URL}/OPNsense-{latest_version_str}-checksums-amd64.sha256"
61 | )
62 |
63 | sha256_sums = requests.get(sha256_url).text
64 |
65 | sha256_sum = parse_hash(
66 | sha256_sums,
67 | [self.edition],
68 | -1,
69 | )
70 |
71 | return sha256_hash_check(
72 | self._get_complete_normalized_file_path(absolute=True),
73 | sha256_sum,
74 | )
75 |
76 | def install_latest_version(self) -> None:
77 | super().install_latest_version()
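        # The generic updater downloads the compressed image under the final
        # file name, so move it aside to a .bz2 path before decompressing.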
78 | bz2_path = self._get_complete_normalized_file_path(absolute=True).with_suffix(
79 | ".bz2"
80 | )
81 |
82 | if bz2_path.exists():
83 | bz2_path.unlink()
84 |
85 | self._get_complete_normalized_file_path(absolute=True).rename(bz2_path)
86 |
87 |         with bz2.BZ2File(bz2_path) as bz2_file:
88 |             data = bz2_file.read()
89 |
90 | extracted_filepath = bz2_path.with_suffix(self.file_path.suffix)
91 | with open(extracted_filepath, "wb") as new_file:
92 | new_file.write(data)
93 |
94 | bz2_path.unlink()
95 |
96 | @cache
97 | def _get_latest_version(self) -> list[str]:
98 | download_a_tags = self.soup_download_page.find_all("a", href=True)
99 | if not download_a_tags:
100 | raise VersionNotFoundError("We were not able to parse the download page")
101 |
102 | local_version = self._get_local_version()
103 | latest = local_version or []
104 |
105 | for a_tag in download_a_tags:
106 | href = a_tag.get("href")
107 |             if self.edition not in href:
108 | continue
109 | version_number = self._str_to_version(href.split("-")[1])
110 | if self._compare_version_numbers(latest, version_number):
111 | latest = version_number
112 |
113 | return latest
114 |
--------------------------------------------------------------------------------
/modules/updaters/UltimateBootCD.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | from random import shuffle
4 |
5 | import requests
6 | from bs4 import BeautifulSoup, Tag
7 |
8 | from modules.exceptions import VersionNotFoundError
9 | from modules.updaters.GenericUpdater import GenericUpdater
10 | from modules.utils import parse_hash, sha256_hash_check
11 |
12 | DOMAIN = "https://www.ultimatebootcd.com"
13 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/download.html"
14 | MIRRORS = [
15 | "https://mirror.clientvps.com/ubcd",
16 | "http://mirror.koddos.net/ubcd",
17 | "https://mirror.lyrahosting.com/ubcd",
18 | ]
19 | FILE_NAME = "ubcd[[VER]].iso"
20 |
21 |
22 | class UltimateBootCD(GenericUpdater):
23 | """
24 | A class representing an updater for Ultimate Boot CD.
25 |
26 | Attributes:
27 | download_page (requests.Response): The HTTP response containing the download page HTML.
28 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
29 | mirrors (list[str])
30 | mirror (str)
31 | download_table (Tag)
32 |
33 | Note:
34 | This class inherits from the abstract base class GenericUpdater.
35 | """
36 |
37 | def __init__(self, folder_path: Path) -> None:
38 | file_path = folder_path / FILE_NAME
39 | super().__init__(file_path)
40 |
41 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
42 |
43 | if self.download_page.status_code != 200:
44 | raise ConnectionError(
45 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
46 | )
47 |
48 | self.soup_download_page = BeautifulSoup(
49 | self.download_page.content, features="html.parser"
50 | )
51 |
52 |         self.mirrors = MIRRORS.copy()  # Copy so shuffle() does not reorder the module-level MIRRORS list
53 | shuffle(self.mirrors)
54 |
55 | self.download_table: Tag | None = None
56 | for mirror in self.mirrors:
57 | self.mirror_page = requests.get(mirror)
58 |
59 | if self.mirror_page.status_code != 200:
60 | continue
61 |
62 | self.soup_mirror_page = BeautifulSoup(
63 | self.mirror_page.content, features="html.parser"
64 | )
65 |
66 | self.download_table = self.soup_mirror_page.find("table") # type: ignore
67 | if self.download_table:
68 | self.mirror = mirror
69 | break
70 |
71 |         if not self.mirror_page:
72 |             raise ConnectionError("Could not connect to any mirror!")
73 |
74 |         if not self.download_table:
75 |             raise LookupError("Could not find the table of downloads on any mirror")
76 |
77 | @cache
78 | def _get_download_link(self) -> str:
79 | latest_version: list[str] = self._get_latest_version()
80 | return f"{self.mirror}/ubcd{self._version_to_str(latest_version)}.iso"
81 |
82 | def check_integrity(self) -> bool:
83 | nowrap_tds: list[Tag] = self.soup_download_page.find_all(
84 | "td", attrs={"nowrap": "true"}
85 | )
86 |
87 | tts: list[Tag] = next(td.find_all("tt") for td in nowrap_tds if td.find("tt"))
88 |
89 | sha256_sum: str = next(
90 | parse_hash(tt.getText(), [], -1) for tt in tts if "SHA-256" in tt.getText()
91 | )
92 |
93 | return sha256_hash_check(
94 | self._get_complete_normalized_file_path(absolute=True), sha256_sum
95 | )
96 |
97 | @cache
98 | def _get_latest_version(self) -> list[str]:
99 | download_a_tags = self.download_table.find_all("a", href=True) # type: ignore
100 | if not download_a_tags:
101 | raise VersionNotFoundError("We were not able to parse the download page")
102 |
103 | versions_href = [
104 | href
105 | for a_tag in download_a_tags
106 | if FILE_NAME.split("[[VER]]")[0] in (href := a_tag.get("href"))
107 | and (href.endswith(".iso"))
108 | ]
109 |
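        # UBCD version strings are purely numeric (e.g. "ubcd539.iso" parses to
        # 539), so keep the largest number found among the matching ISO links.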
110 | version = 0
111 | for version_href in versions_href:
112 | version_href_number = int("".join(filter(str.isdigit, version_href)))
113 | if version_href_number > version:
114 | version = version_href_number
115 |
116 | return self._str_to_version(str(version))
117 |
--------------------------------------------------------------------------------
/modules/updaters/Ubuntu.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 | from bs4 import BeautifulSoup
6 |
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import parse_hash, sha256_hash_check
10 |
11 | DOMAIN = "https://releases.ubuntu.com"
12 | DOWNLOAD_PAGE_URL = DOMAIN
13 | FILE_NAME = "ubuntu-[[EDITION]]-[[VER]]-desktop-amd64.iso"
14 |
15 |
16 | class Ubuntu(GenericUpdater):
17 | """
18 | A class representing an updater for Ubuntu.
19 |
20 | Attributes:
21 | valid_editions (list[str]): List of valid editions to use
22 | edition (str): Edition to download
23 | download_page (requests.Response): The HTTP response containing the download page HTML.
24 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
25 |
26 | Note:
27 | This class inherits from the abstract base class GenericUpdater.
28 | """
29 |
30 | def __init__(self, folder_path: Path, edition: str) -> None:
31 | self.valid_editions = ["LTS", "Interim"]
32 | self.edition = edition
33 |
34 | file_path = folder_path / FILE_NAME
35 | super().__init__(file_path)
36 |
37 | # Make the parameter case insensitive, and find back the correct case using valid_editions
38 | self.edition = next(
39 | valid_ed
40 | for valid_ed in self.valid_editions
41 | if valid_ed.lower() == self.edition.lower()
42 | )
43 |
44 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
45 |
46 | if self.download_page.status_code != 200:
47 | raise ConnectionError(
48 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
49 | )
50 |
51 | self.soup_download_page = BeautifulSoup(
52 | self.download_page.content, features="html.parser"
53 | )
54 |
55 | @cache
56 | def _get_download_link(self) -> str:
57 | latest_version_str = self._version_to_str(self._get_latest_version())
58 |         return f"{DOMAIN}/{latest_version_str}/ubuntu-{latest_version_str}-desktop-amd64.iso"
59 |
60 | def check_integrity(self) -> bool:
61 | latest_version_str = self._version_to_str(self._get_latest_version())
62 |
63 | sha256_url = f"{DOWNLOAD_PAGE_URL}/{latest_version_str}/SHA256SUMS"
64 |
65 | sha256_sums = requests.get(sha256_url).text
66 |
67 | sha256_sum = parse_hash(
68 | sha256_sums, [f"ubuntu-{latest_version_str}-desktop-amd64.iso"], 0
69 | )
70 |
71 | return sha256_hash_check(
72 | self._get_complete_normalized_file_path(absolute=True),
73 | sha256_sum,
74 | )
75 |
76 | @cache
77 | def _get_latest_version(self) -> list[str]:
78 | download_categories = self.soup_download_page.find_all(
79 | "div", attrs={"class": "col-4"}
80 | )
81 | if not download_categories:
82 | raise VersionNotFoundError(
83 | "We were not able to parse the download categories"
84 | )
85 |         downloads = next(
86 |             (download_category
87 |              for download_category in download_categories
88 |              if download_category.find("h4", string=f"{self.edition} Releases")),
89 |             None)  # Default to None so the check below raises VersionNotFoundError instead of StopIteration
90 | if not downloads:
91 | raise VersionNotFoundError(
92 | f"We were not able to parse the {self.edition} downloads"
93 | )
94 | latest = downloads.find("a", href=True)
95 | if not latest:
96 | raise VersionNotFoundError(
97 | f"We were not able to find {self.edition} downloads"
98 | )
99 | # This is `x.y`, however to get `x.y.z` we need to go to `/x.y`
100 | xy_version = latest.getText().split()[1]
101 |
102 | version_page = requests.get(f"{DOWNLOAD_PAGE_URL}/{xy_version}")
103 |
104 | soup_version_page = BeautifulSoup(version_page.content, features="html.parser")
105 |
106 | title = soup_version_page.find("title")
107 |
108 | if not title:
109 | raise VersionNotFoundError(
110 | "We were not able to find the title of the version page"
111 | )
112 |
113 | title_text = title.getText()
114 |
115 | # Extract version from string "Ubuntu x.y.z (Name of Release)"
116 | return self._str_to_version(title_text.split()[1])
117 |
--------------------------------------------------------------------------------
/modules/updaters/Windows11.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 | from bs4 import BeautifulSoup, Tag
6 |
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import sha256_hash_check
10 | from modules.WindowsConsumerDownload import WindowsConsumerDownloader
11 |
12 | DOMAIN = "https://www.microsoft.com"
13 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/en-us/software-download/windows11"
14 | FILE_NAME = "Win11_[[VER]]_EnglishInternational_x64v2.iso"
15 |
16 |
17 | class Windows11(GenericUpdater):
18 | """
19 | A class representing an updater for Windows 11.
20 |
21 | Attributes:
22 | download_page (requests.Response): The HTTP response containing the download page HTML.
23 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
24 |
25 | Note:
26 | This class inherits from the abstract base class GenericUpdater.
27 | """
28 |
29 | def __init__(self, folder_path: Path, lang: str) -> None:
30 | self.valid_langs = [
31 | "Arabic",
32 | "Brazilian Portuguese",
33 | "Bulgarian",
34 |             "Chinese",
36 | "Croatian",
37 | "Czech",
38 | "Danish",
39 | "Dutch",
40 | "English",
41 | "English International",
42 | "Estonian",
43 | "Finnish",
44 | "French",
45 | "French Canadian",
46 | "German",
47 | "Greek",
48 | "Hebrew",
49 | "Hungarian",
50 | "Italian",
51 | "Japanese",
52 | "Korean",
53 | "Latvian",
54 | "Lithuanian",
55 | "Norwegian",
56 | "Polish",
57 | "Portuguese",
58 | "Romanian",
59 | "Russian",
60 | "Serbian Latin",
61 | "Slovak",
62 | "Slovenian",
63 | "Spanish",
64 | "Spanish (Mexico)",
65 | "Swedish",
66 | "Thai",
67 | "Turkish",
68 | "Ukrainian",
69 | ]
70 | self.lang = lang
71 | file_path = folder_path / FILE_NAME
72 | super().__init__(file_path)
73 | # Make the parameter case insensitive, and find back the correct case using valid_editions
74 | self.lang = next(
75 | valid_lang
76 | for valid_lang in self.valid_langs
77 | if valid_lang.lower() == self.lang.lower()
78 | )
79 | self.version_splitter = "H"
80 | self.headers = {
81 | "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:100.0) Gecko/20100101 Firefox/100.0",
82 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
83 | "referer": "folfy.blue",
84 | }
85 |
86 | self.download_page = requests.get(DOWNLOAD_PAGE_URL, headers=self.headers)
87 |
88 | if self.download_page.status_code != 200:
89 | raise ConnectionError(
90 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
91 | )
92 |
93 | self.soup_download_page = BeautifulSoup(
94 | self.download_page.content, features="html.parser"
95 | )
96 |
97 | self.hash: str | None = None
98 |
99 | @cache
100 | def _get_download_link(self) -> str:
101 | return WindowsConsumerDownloader.windows_consumer_download("11", self.lang)
102 |
103 | def check_integrity(self) -> bool:
104 | if not self.hash:
105 | return False
106 |
107 | return sha256_hash_check(
108 | self._get_complete_normalized_file_path(absolute=True),
109 | WindowsConsumerDownloader.windows_consumer_file_hash("11", self.lang),
110 | )
111 |
112 | @cache
113 | def _get_latest_version(self) -> list[str]:
114 | header: Tag | None = self.soup_download_page.find("div", class_="row").find("div").find("p", string=lambda text: "Version" in text) # type: ignore
115 | if not header:
116 | raise VersionNotFoundError(
117 | "Could not find header containing version information"
118 | )
119 |
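        # e.g. "Version 23H2)" -> ["23", "2"]: the stray ")" is stripped and the
        # string is split on "H", which doubles as this updater's version splitter.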
120 | return [
121 | version_number.strip()
122 | for version_number in header.getText()
123 | .split("Version")[1]
124 | .replace(")", "")
125 | .split("H")
126 | ]
127 |
--------------------------------------------------------------------------------
/modules/updaters/Proxmox.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 | from bs4 import BeautifulSoup, Tag
6 |
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import parse_hash, sha256_hash_check
10 |
11 | DOMAIN = "https://enterprise.proxmox.com"
12 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/iso"
13 | FILE_NAME = "proxmox-[[EDITION]]_[[VER]].iso"
14 |
15 |
16 | class Proxmox(GenericUpdater):
17 | """
18 | A class representing an updater for Proxmox.
19 |
20 | Attributes:
21 | valid_editions (list[str]): List of valid editions to use
22 | edition (str): Edition to download
23 | download_page (requests.Response): The HTTP response containing the download page HTML.
24 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
25 |
26 | Note:
27 | This class inherits from the abstract base class GenericUpdater.
28 | """
29 |
30 | def __init__(self, folder_path: Path, edition: str) -> None:
31 | self.valid_editions = [
32 | "ve",
33 | "mail-gateway",
34 | "backup-server",
35 | ]
36 | self.edition = edition
37 |
38 | file_path = folder_path / FILE_NAME
39 | super().__init__(file_path)
40 |
41 | # Make the parameter case insensitive, and find back the correct case using valid_editions
42 | self.edition = next(
43 | valid_ed
44 | for valid_ed in self.valid_editions
45 | if valid_ed.lower() == self.edition.lower()
46 | )
47 |
48 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
49 |
50 | if self.download_page.status_code != 200:
51 | raise ConnectionError(
52 | f"Failed to fetch the download page from '{self.download_page.url}'"
53 | )
54 |
55 | self.soup_download_page = BeautifulSoup(
56 | self.download_page.content, features="html.parser"
57 | )
58 |
59 | @cache
60 | def _get_download_link(self) -> str:
61 | latest_version_str = self._version_to_str(self._get_latest_version())
62 |
63 |         return f"{DOWNLOAD_PAGE_URL}/{FILE_NAME.replace('[[VER]]', latest_version_str).replace('[[EDITION]]', self.edition)}"
64 |
65 | def check_integrity(self) -> bool:
66 | sha256_url = f"{DOWNLOAD_PAGE_URL}/SHA256SUMS"
67 |
68 | sha256_sums = requests.get(sha256_url).text
69 |
70 | sha256_sum = parse_hash(
71 | sha256_sums,
72 | [str(self._get_complete_normalized_file_path(absolute=False))],
73 | 0,
74 | )
75 |
76 | return sha256_hash_check(
77 | self._get_complete_normalized_file_path(absolute=True),
78 | sha256_sum,
79 | )
80 |
81 | def _get_latest_version(self) -> list[str]:
82 | def parse_version(href: str) -> list[str]:
83 | return self._str_to_version(href.split("_")[1].split(".iso")[0])
84 |
85 | downloads_list: Tag | None = self.soup_download_page.find("pre") # type: ignore
86 | if not downloads_list:
87 | raise VersionNotFoundError("We were not able to parse the download page")
88 |
89 | download_items = downloads_list.find_all("a")
90 | if not download_items:
91 | raise VersionNotFoundError(
92 | "We were not able to parse the list of download links"
93 | )
94 |
95 | download_links: list[str] = [
96 | href
97 | for download_link in download_items
98 | if self.edition in (href := download_link.get("href"))
99 | ]
100 | if not download_links:
101 | raise VersionNotFoundError(
102 | "We were not able to find links for this edition"
103 | )
104 |
105 | latest_version = []
106 | for link in download_links:
107 | version = parse_version(link)
108 | is_greater_version = self._compare_version_numbers(latest_version, version)
109 | if is_greater_version:
110 | latest_version = version
111 |
112 | return latest_version
113 |
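    # Proxmox versions carry a build suffix after a dash (e.g. "8.0-2" in
    # "proxmox-ve_8.0-2.iso"), so the conversion helpers are overridden to keep
    # that suffix as the last version component.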
114 |     def _version_to_str(self, version: list[str]) -> str:
115 |         *main_version, dash_something = version  # Unpack instead of pop() to avoid mutating the caller's list
116 |         return f"{self.version_splitter.join(str(i) for i in main_version)}-{dash_something}"
117 |
118 | def _str_to_version(self, version_str: str) -> list[str]:
119 | version: list[str] = [
120 | version_number.strip()
121 | for version_number in version_str.split(self.version_splitter)
122 | ]
123 | dash_something: list[str] = version.pop().split("-")
124 | return version + dash_something
125 |
--------------------------------------------------------------------------------
/modules/updaters/Windows10.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from functools import cache
3 | from pathlib import Path
4 |
5 | import requests
6 | from bs4 import BeautifulSoup, Tag
7 |
8 | from modules.exceptions import VersionNotFoundError
9 | from modules.updaters.GenericUpdater import GenericUpdater
10 | from modules.utils import sha256_hash_check
11 | from modules.WindowsConsumerDownload import WindowsConsumerDownloader
12 |
13 | DOMAIN = "https://www.microsoft.com"
14 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/en-us/software-download/windows10ISO"
15 | FILE_NAME = "Win10_[[VER]]_[[LANG]]_x64v1.iso"
16 |
17 |
18 | class Windows10(GenericUpdater):
19 | """
20 | A class representing an updater for Windows 10.
21 |
22 | Attributes:
23 | download_page (requests.Response): The HTTP response containing the download page HTML.
24 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
25 |
26 | Note:
27 | This class inherits from the abstract base class GenericUpdater.
28 | """
29 |
30 | def __init__(self, folder_path: Path, lang: str) -> None:
31 | self.valid_langs = [
32 | "Arabic",
33 | "Brazilian Portuguese",
34 | "Bulgarian",
35 |             "Chinese",
37 | "Croatian",
38 | "Czech",
39 | "Danish",
40 | "Dutch",
41 | "English",
42 | "English International",
43 | "Estonian",
44 | "Finnish",
45 | "French",
46 | "French Canadian",
47 | "German",
48 | "Greek",
49 | "Hebrew",
50 | "Hungarian",
51 | "Italian",
52 | "Japanese",
53 | "Korean",
54 | "Latvian",
55 | "Lithuanian",
56 | "Norwegian",
57 | "Polish",
58 | "Portuguese",
59 | "Romanian",
60 | "Russian",
61 | "Serbian Latin",
62 | "Slovak",
63 | "Slovenian",
64 | "Spanish",
65 | "Spanish (Mexico)",
66 | "Swedish",
67 | "Thai",
68 | "Turkish",
69 | "Ukrainian",
70 | ]
71 | self.lang = lang
72 | file_path = folder_path / FILE_NAME
73 | super().__init__(file_path)
74 | # Make the parameter case insensitive, and find back the correct case using valid_editions
75 | self.lang = next(
76 | valid_lang
77 | for valid_lang in self.valid_langs
78 | if valid_lang.lower() == self.lang.lower()
79 | )
80 |
81 | self.version_splitter = "H"
82 | self.headers = {
83 | "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:100.0) Gecko/20100101 Firefox/100.0",
84 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
85 | "referer": "folfy.blue",
86 | }
87 |
88 | self.download_page = requests.get(DOWNLOAD_PAGE_URL, headers=self.headers)
89 |
90 | if self.download_page.status_code != 200:
91 | raise ConnectionError(
92 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
93 | )
94 |
95 | self.soup_download_page = BeautifulSoup(
96 | self.download_page.content, features="html.parser"
97 | )
98 |
99 | self.hash: str | None = None
100 |
101 | @cache
102 | def _get_download_link(self) -> str:
103 | return WindowsConsumerDownloader.windows_consumer_download("10", self.lang)
104 |
105 | def check_integrity(self) -> bool:
106 | logging.warning(
107 | "The integrity check for Windows 10 is currently disabled due to a bug on Microsoft's end."
108 | )
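        # Short-circuits to True for now; the real hash check is kept below so
        # it can be re-enabled once the upstream hash data is fixed.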
109 | return True or sha256_hash_check(
110 | self._get_complete_normalized_file_path(absolute=True),
111 | WindowsConsumerDownloader.windows_consumer_file_hash("10", self.lang),
112 | )
113 |
114 | @cache
115 | def _get_latest_version(self) -> list[str]:
116 | software_download_tag: Tag | None = self.soup_download_page.find("div", attrs={"id": "SoftwareDownload_EditionSelection"}) # type: ignore
117 | if not software_download_tag:
118 | raise VersionNotFoundError(
119 | "Could not find the software download section containing version information"
120 | )
121 |
122 | update_header = software_download_tag.find("h2")
123 |
124 | if not update_header:
125 | raise VersionNotFoundError(
126 | "Could not find header containing version information"
127 | )
128 |
129 | return [
130 | version_number.strip()
131 | for version_number in update_header.getText().split("Version")[1].split("H")
132 | ]
133 |
--------------------------------------------------------------------------------
/modules/updaters/HirensBootCDPE.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 | from bs4 import BeautifulSoup
6 | from bs4.element import Tag
7 |
8 | from modules.exceptions import DownloadLinkNotFoundError, VersionNotFoundError
9 | from modules.updaters.GenericUpdater import GenericUpdater
10 | from modules.utils import sha256_hash_check
11 |
12 | DOMAIN = "https://www.hirensbootcd.org"
13 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/download"
14 | FILE_NAME = "HBCD_PE_[[VER]]_x64.iso"
15 |
16 |
17 | class HirensBootCDPE(GenericUpdater):
18 | """
19 | A class representing an updater for Hiren's Boot CD PE.
20 |
21 | Attributes:
22 | download_page (requests.Response): The HTTP response containing the download page HTML.
23 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
24 |
25 | Note:
26 | This class inherits from the abstract base class GenericUpdater.
27 | """
28 |
29 | def __init__(self, folder_path: Path) -> None:
30 | file_path = folder_path / FILE_NAME
31 | super().__init__(file_path)
32 |
33 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
34 |
35 | if self.download_page.status_code != 200:
36 | raise ConnectionError(
37 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
38 | )
39 |
40 | self.soup_download_page = BeautifulSoup(
41 | self.download_page.content, features="html.parser"
42 | )
43 |
44 | @cache
45 | def _get_download_link(self) -> str:
46 | download_tag: Tag | None = self._find_in_table("Filename")
47 |
48 | if not download_tag:
49 | raise DownloadLinkNotFoundError(
50 | "Failed to find the `Tag` containing the download link"
51 | )
52 |
53 | href_attributes = download_tag.find_all(href=True)
54 | if not href_attributes:
55 | raise DownloadLinkNotFoundError("No download link found in the `Tag`")
56 |
57 | return href_attributes[0].get("href")
58 |
59 | def check_integrity(self) -> bool:
60 | """
61 | Check the integrity of the downloaded file by verifying its SHA-256 hash against the one provided on the website.
62 |
63 | Returns:
64 | bool: True if the integrity check passes, False otherwise.
65 |
66 | Raises:
67 | LookupError: If the SHA-256 hash or its container Tag is not found in the download page.
68 | """
69 | sha256_tag: Tag | None = self._find_in_table("SHA-256")
70 |
71 | if not sha256_tag:
72 | raise LookupError("Failed to find the `Tag` containing the SHA-256 value")
73 |
74 | return sha256_hash_check(
75 | self._get_complete_normalized_file_path(absolute=True), sha256_tag.getText()
76 | )
77 |
78 | @cache
79 | def _get_latest_version(self) -> list[str]:
80 | s: Tag | None = self.soup_download_page.find(
81 | "div", attrs={"class": "post-content"}
82 | ) # type: ignore
83 | if not s:
84 | raise VersionNotFoundError(
85 | "Could not find the div containing version information"
86 | )
87 |
88 | s = s.find("span") # type: ignore
89 | if not s:
90 | raise VersionNotFoundError(
91 | "Could not find the span containing the version information"
92 | )
93 |
94 | return self._str_to_version(
95 | s.getText()
96 | .split("(v")[1] # Parse from Hiren’s BootCD PE x64 (v1.0.2) – ISO Content
97 | .split(")")[0]
98 | )
99 |
100 | def _find_in_table(self, row_name_contains: str) -> Tag | None:
101 | """
102 | Find the HTML Tag containing specific information in the download page table.
103 |
104 | Args:
105 | row_name_contains (str): A string that identifies the row in the table.
106 |
107 | Returns:
108 | Tag | None: The HTML Tag containing the desired information, or None if not found.
109 |
110 | Raises:
111 | LookupError: If the table or the specified row_name_contains is not found in the download page.
112 | """
113 | s: Tag | None = self.soup_download_page.find("div", attrs={"class": "table-1"}) # type: ignore
114 |
115 | if not s:
116 | raise LookupError(
117 | "Could not find the table containing download information"
118 | )
119 |
120 | next_is_result = False
121 | for tr in s.find_all("tr"):
122 | for td in tr.find_all("td"):
123 | if next_is_result:
124 | return td
125 | if row_name_contains in td.getText():
126 | next_is_result = True
127 |
128 | raise LookupError(f"Failed to find '{row_name_contains}' in the table")
129 |
--------------------------------------------------------------------------------
/modules/updaters/Fedora.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 |
4 | import requests
5 | from bs4 import BeautifulSoup, Tag
6 |
7 | from modules.exceptions import VersionNotFoundError
8 | from modules.updaters.GenericUpdater import GenericUpdater
9 | from modules.utils import parse_hash, sha256_hash_check
10 |
11 | DOMAIN = "https://fedoraproject.org"
12 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/spins/[[EDITION]]/download/"
13 | FILE_NAME = "Fedora-[[EDITION]]-Live-x86_64-[[VER]].iso"
14 |
15 |
16 | class Fedora(GenericUpdater):
17 | """
18 | A class representing an updater for Fedora.
19 |
20 | Attributes:
21 | valid_editions (list[str]): List of valid editions to use
22 | edition (str): Edition to download
23 | download_page (requests.Response): The HTTP response containing the download page HTML.
24 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
25 |
26 | Note:
27 | This class inherits from the abstract base class GenericUpdater.
28 | """
29 |
30 | def __init__(self, folder_path: Path, edition: str) -> None:
31 | self.valid_editions = [
32 | "Budgie",
33 | "Cinnamon",
34 | "KDE",
35 | "LXDE",
36 | "MATE_Compiz",
37 | "SoaS",
38 | "Sway",
39 | "Xfce",
40 | "i3",
41 | ]
42 | self.edition = edition
43 |
44 | file_path = folder_path / FILE_NAME
45 | super().__init__(file_path)
46 |
47 | # Make the parameter case insensitive, and find back the correct case using valid_editions
48 | self.edition = next(
49 | valid_ed
50 | for valid_ed in self.valid_editions
51 | if valid_ed.lower() == self.edition.lower()
52 | )
53 |
54 | # Weird exception they have
55 | url_edition = self.edition.lower() if self.edition != "MATE_Compiz" else "mate"
56 |
57 | self.download_page = requests.get(
58 | DOWNLOAD_PAGE_URL.replace("[[EDITION]]", url_edition)
59 | )
60 |
61 | if self.download_page.status_code != 200:
62 | raise ConnectionError(
63 | f"Failed to fetch the download page from '{self.download_page.url}'"
64 | )
65 |
66 | self.soup_download_page = BeautifulSoup(
67 | self.download_page.content, features="html.parser"
68 | )
69 |
70 | @cache
71 | def _get_download_link(self) -> str:
72 | latest_version = self._get_latest_version()
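        # latest_version is [release, build] or [release, build, patch]
        # (e.g. "40-1.14" -> ["40", "1", "14"]); a third component, when present,
        # is re-joined to the build number with a dot.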
73 | return f"https://download.fedoraproject.org/pub/fedora/linux/releases/{latest_version[0]}/Spins/x86_64/iso/Fedora-{self.edition}-Live-x86_64-{latest_version[0]}-{latest_version[1]}{'.'+latest_version[2] if len(latest_version)>2 else ''}.iso"
74 |
75 | def check_integrity(self) -> bool:
76 | latest_version = self._get_latest_version()
77 | sha256_url = f"https://download.fedoraproject.org/pub/fedora/linux/releases/{latest_version[0]}/Spins/x86_64/iso/Fedora-Spins-{latest_version[0]}-{latest_version[1]}{'.'+latest_version[2] if len(latest_version)>2 else ''}-x86_64-CHECKSUM"
78 |
79 | sha256_sums = requests.get(sha256_url).text
80 |
81 | sha256_sum = parse_hash(sha256_sums, [f"SHA256 (Fedora-{self.edition}"], -1)
82 |
83 | return sha256_hash_check(
84 | self._get_complete_normalized_file_path(absolute=True),
85 | sha256_sum,
86 | )
87 |
88 | @cache
89 | def _get_latest_version(self) -> list[str]:
90 | downloads_list: Tag | None = self.soup_download_page.find(
91 | "div", attrs={"class": "spins-theme"}
92 | ) # type: ignore
93 | if not downloads_list:
94 | raise VersionNotFoundError(
95 | "We were not able to parse the download categories"
96 | )
97 |
98 | download_items = downloads_list.find_all(
99 | "div", attrs={"class": "fp-download-item blue"}
100 | )
101 | if not download_items:
102 | raise VersionNotFoundError(
103 | "We were not able to parse the list of downloads"
104 | )
105 |
106 |         downloads = next(
107 |             (download
108 |              for download in download_items
109 |              if download.find("span", string="Live ISO")),
110 |             None)  # Default to None so the check below raises VersionNotFoundError instead of StopIteration
111 | if not downloads:
112 | raise VersionNotFoundError(
113 | "We were not able to parse the Live ISO download item"
114 | )
115 | download_a_tag = downloads.find("a", href=True)
116 | if not download_a_tag:
117 |             raise VersionNotFoundError("We were not able to find the Live ISO download link")
118 |
119 | return self._str_to_version(
120 | download_a_tag.get("href")
121 | .split("x86_64-")[1]
122 | .split(".iso")[0]
123 | .replace("-", self.version_splitter)
124 | )
125 |
--------------------------------------------------------------------------------
/modules/updaters/SystemRescue.py:
--------------------------------------------------------------------------------
1 | import re
2 | from functools import cache
3 | from pathlib import Path
4 |
5 | import requests
6 | from bs4 import BeautifulSoup
7 | from bs4.element import Tag
8 |
9 | from modules.exceptions import DownloadLinkNotFoundError, VersionNotFoundError
10 | from modules.updaters.GenericUpdater import GenericUpdater
11 | from modules.utils import parse_hash, sha256_hash_check
12 |
13 | DOMAIN = "https://www.system-rescue.org"
14 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/Download"
15 | FILE_NAME = "systemrescue-[[VER]]-amd64.iso"
16 |
17 |
18 | class SystemRescue(GenericUpdater):
19 | """
20 | A class representing an updater for SystemRescue.
21 |
22 | Attributes:
23 | download_page (requests.Response): The HTTP response containing the download page HTML.
24 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
25 |
26 | Note:
27 | This class inherits from the abstract base class GenericUpdater.
28 | """
29 |
30 | def __init__(self, folder_path: Path) -> None:
31 | """
32 | Initialize a SystemRescue updater object.
33 |
34 | Args:
35 | folder_path (Path): The path to the folder where the SystemRescue file is stored.
36 |
37 | Raises:
38 | ConnectionError: If the download page cannot be fetched successfully.
39 | """
40 | file_path = folder_path / FILE_NAME
41 | super().__init__(file_path)
42 |
43 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
44 |
45 | if self.download_page.status_code != 200:
46 | raise ConnectionError(
47 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
48 | )
49 |
50 | self.soup_download_page = BeautifulSoup(
51 | self.download_page.content, features="html.parser"
52 | )
53 |
54 | @cache
55 | def _get_download_link(self) -> str:
56 | download_tag: Tag | None = self._find_in_table("Fastly")
57 |
58 | if not download_tag:
59 | raise DownloadLinkNotFoundError(
60 | "Failed to find the `Tag` containing the download link"
61 | )
62 |
63 | href_attributes = download_tag.find_all(href=True)
64 | if not href_attributes:
65 | raise DownloadLinkNotFoundError("No download link found in the `Tag`")
66 |
67 | return href_attributes[0].get("href")
68 |
69 | def check_integrity(self) -> bool:
70 | version_str = self._version_to_str(self._get_latest_version())
71 | sha256_download_link = f"{DOMAIN}/releases/{version_str}/systemrescue-{version_str}-amd64.iso.sha256"
72 |
73 | r = requests.get(sha256_download_link)
74 | sha256_checksum = parse_hash(
75 | r.text,
76 | [str(self._get_normalized_file_path(False, self._get_latest_version()))],
77 | 0,
78 | )
79 |
80 | return sha256_hash_check(
81 | self._get_complete_normalized_file_path(absolute=True),
82 | sha256_checksum,
83 | )
84 |
85 | @cache
86 | def _get_latest_version(self) -> list[str]:
87 | download_link = self._get_download_link()
88 |
89 | latest_version_regex = re.search(
90 | r"releases\/(.+)\/", # Parse from https://fastly-cdn.system-rescue.org/releases/10.01/systemrescue-10.01-amd64.iso
91 | download_link,
92 | )
93 |
94 | if latest_version_regex:
95 | return self._str_to_version(latest_version_regex.group(1))
96 |
97 | raise VersionNotFoundError("Could not find the latest available version")
98 |
99 | def _find_in_table(self, row_name_contains: str) -> Tag | None:
100 | """
101 | Find the HTML Tag containing specific information in the download page table.
102 |
103 | Args:
104 | row_name_contains (str): A string that identifies the row in the table.
105 |
106 | Returns:
107 | Tag | None: The HTML Tag containing the desired information, or None if not found.
108 |
109 | Raises:
110 | LookupError: If the table or the specified row_name_contains is not found in the download page.
111 | """
112 | s: Tag | None = self.soup_download_page.find("div", attrs={"id": "colcenter"}) # type: ignore
113 |
114 | if not s:
115 | raise LookupError(
116 | "Could not find the div containing the table with download information"
117 | )
118 |
119 | s = s.find("table") # type: ignore
120 |
121 | if not s:
122 | raise LookupError(
123 | "Could not find the table containing download information"
124 | )
125 |
126 | for tr in s.find_all("tr"):
127 | for td in tr.find_all("td"):
128 | if row_name_contains in td.getText():
129 | return td
130 |
131 | raise LookupError(f"Failed to find '{row_name_contains}' in the table")
132 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Super ISO Updater
2 |
3 | Super ISO Updater is a powerful tool that provides a convenient way to check for updates and install the latest versions of various ISO files. It is specifically designed to work with a Ventoy drive and supports a wide range of ISOs.
4 |
5 | ## Getting Started
6 |
7 | ### Prerequisites
8 |
9 | - Python 3.12 installed on your system.
10 |
11 | ### Installation
12 |
13 | #### Using pip
14 |
15 | 1. Open your terminal or command prompt.
16 | 2. Install the package using the following command:
17 |
18 | ```sh
19 | python -m pip install sisou
20 | ```
21 |
22 | #### Using git
23 |
24 | 1. Clone this repository locally by running
25 |
26 | ```sh
27 | git clone https://github.com/JoshuaVandaele/SuperISOUpdater
28 | ```
29 |
30 | 2. Navigate into the newly created directory by running
31 |
32 | ```sh
33 | cd SuperISOUpdater
34 | ```
35 |
36 | 3. Install the module using the following command:
37 |
38 | ```sh
39 | python -m pip install .
40 | ```
41 |
42 | ### Updating
43 |
44 | To update the package to the latest version, run the following command:
45 |
46 | ```sh
47 | python -m pip install --upgrade sisou
48 | ```
49 |
50 | ## Usage
51 |
52 | To use SISOU, follow these steps:
53 |
54 | ### Running the script
55 |
56 | ```sh
57 | sisou <path-to-ventoy-drive>
58 | ```
59 |
60 | #### Example on Windows
61 |
62 | ```sh
63 | sisou E:
64 | ```
65 |
66 | #### Example on Linux
67 |
68 | ```sh
69 | sisou /run/media/joshua/Ventoy/
70 | ```
71 |
72 | ### Logging
73 |
74 | The script generates logs during its execution. You can control the log level using the `-l` or `--log-level` argument when running the script. The available log levels are: DEBUG, INFO, WARNING, ERROR, and CRITICAL. By default, the log level is set to INFO.
75 |
76 | To set a specific log level, use the `-l` option followed by the desired log level:
77 |
78 | ```sh
79 | sisou -l DEBUG
80 | ```
81 |
82 | You can also specify a log file using the `-f` or `--log-file` argument to save the logs to a file instead of displaying them in the console:
83 |
84 | ```sh
85 | sisou -f /path/to/log_file.log
86 | ```
87 |
88 | ## Customization
89 |
90 | The `sisou.py` script uses a configuration file (`config.toml`) to define the ISOs to be updated. You can customize this configuration file to add or remove ISOs from the update process.
91 |
92 | To customize the ISOs, open the `config.toml` file and edit the relevant sections. Each ISO is associated with an updater class (e.g., `Ubuntu`, `MemTest86Plus`, etc.). You can enable or disable ISOs by modifying the corresponding values in the configuration file.
93 |
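For example, to enable Rescuezilla and select its "noble" edition, the relevant section looks like this (taken from the bundled `config.toml.default`):

```toml
[DiagnosticTools.Rescuezilla]
enabled = true
# Available editions:
# "bionic", "focal", "jammy", "noble"
editions = ["noble"]
```
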
94 | _NOTE: Be cautious when modifying the configuration file, as incorrect changes may cause the script to malfunction._
95 |
96 | By default, the script uses the `config.toml` file located at the root of the Ventoy drive.
97 |
98 | You can specify a custom configuration file using the `-c` or `--config-file` argument when running the script:
99 |
100 | ```sh
101 | sisou -c /path/to/config.toml
102 | ```
103 |
104 | ## Supported ISOs
105 |
106 | The tool currently supports the following ISOs:
107 |
108 | - **Diagnostic Tools**
109 | - Hiren's BootCD PE
110 | - MemTest86 Plus
111 | - SystemRescue
112 | - UltimateBootCD
113 | - Rescuezilla (editions: "bionic", "focal", "jammy", "noble")
114 | - **Boot Repair**
115 | - Super Grub 2
116 | - **Disk Utilities**
117 | - Clonezilla
118 | - GParted Live
119 | - ShredOS
120 | - HDAT2 (editions: "full", "lite", "diskette")
121 | - **Operating Systems**
122 | - **Linux**
123 | - Arch Linux
124 | - Debian (editions: "standard", "cinnamon", "kde", "gnome", "lxde", "lxqt", "mate", "xfce")
125 | - Ubuntu (editions: "LTS", "interim")
126 | - Fedora (editions: "Budgie", "Cinnamon", "KDE", "LXDE", "MATE_Compiz", "SoaS", "Sway", "Xfce", "i3")
127 | - Kali Linux (editions: "installer", "installer-netinst", "installer-purple", "live")
128 | - Linux Mint (editions: "cinnamon", "mate", "xfce")
129 | - Manjaro (editions: "plasma", "xfce", "gnome", "cinnamon", "i3")
130 | - OpenSUSE (editions: "leap", "leap-micro", "jump")
131 | - OpenSUSE Rolling (editions: "MicroOS-DVD", "Tumbleweed-DVD", "Tumbleweed-NET", "Tumbleweed-GNOME-Live", "Tumbleweed-KDE-Live", "Tumbleweed-XFCE-Live", "Tumbleweed-Rescue-CD")
132 | - OPNsense (editions: "dvd", "nano", "serial", "vga")
133 | - Proxmox (editions: "ve", "mail-gateway", "backup-server")
134 | - Rocky Linux (editions: "dvd", "boot", "minimal")
135 | - Tails
136 | - ChromeOS (editions: "ltc", "ltr", "stable")
137 | - **Windows**
138 | - Windows 11 (Multi-edition ISO, Any language)
139 | - Windows 10 (Multi-edition ISO, Any language)
140 | - **BSD**
141 | - TrueNAS (editions: "scale", "core")
142 | - **Other**
143 | - FreeDOS (editions: "BonusCD", "FloppyEdition", "FullUSB", "LegacyCD", "LiteUSB", "LiveCD")
144 | - TempleOS (editions: "Distro", "Lite")
145 |
146 | ## Contribute
147 |
148 | If you have any suggestions, bug reports, or feature requests, feel free to open an issue or submit a pull request. Your contributions are highly appreciated!
149 |
150 | ## License
151 |
152 | This project is licensed under the [GPLv3 License](./LICENSE).
153 |
154 | ---
155 |
156 | Thank you for using Super ISO Updater! If you encounter any issues or need assistance, please don't hesitate to reach out. Happy updating!
157 |
--------------------------------------------------------------------------------
/config/config.toml.default:
--------------------------------------------------------------------------------
1 | # Diagnostic Tools
2 |
3 | [DiagnosticTools]
4 | enabled = true
5 | directory = "Diagnostic_Tools"
6 |
7 | [DiagnosticTools.HirensBootCDPE]
8 | enabled = true
9 |
10 | [DiagnosticTools.MemTest86Plus]
11 | enabled = true
12 |
13 | [DiagnosticTools.SystemRescue]
14 | enabled = true
15 |
16 | [DiagnosticTools.UltimateBootCD]
17 | enabled = true
18 |
19 | [DiagnosticTools.Rescuezilla]
20 | enabled = true
21 | # Available editions:
22 | # "bionic", "focal", "jammy", "noble"
23 | editions = ["noble"]
24 |
25 | # Boot Repair
26 |
27 | [BootRepair]
28 | enabled = true
29 | directory = "Boot_Repair"
30 |
31 | [BootRepair.SuperGrub2]
32 | enabled = true
33 |
34 | # Disk Utilities
35 |
36 | [DiskUtils]
37 | enabled = true
38 | directory = "Disk_Utilities"
39 |
40 | [DiskUtils.Clonezilla]
41 | enabled = true
42 |
43 | [DiskUtils.GPartedLive]
44 | enabled = true
45 |
46 | [DiskUtils.ShredOS]
47 | enabled = true
48 |
49 | [DiskUtils.HDAT2]
50 | enabled = true
51 | # Available editions:
52 | # "full", "lite", "diskette"
53 | editions = ["full"]
54 |
55 | # Operating Systems
56 |
57 | [OperatingSystems]
58 | enabled = true
59 | directory = "Operating_Systems"
60 |
61 | # Linux
62 | [OperatingSystems.Linux]
63 | enabled = true
64 | directory = "Linux"
65 |
66 | [OperatingSystems.Linux.ArchLinux]
67 | enabled = true
68 |
69 | [OperatingSystems.Linux.Debian]
70 | enabled = true
71 | # Available editions:
72 | # "standard", "cinnamon", "kde", "gnome", "lxde", "lxqt", "mate", "xfce"
73 | editions = ["kde"]
74 |
75 | [OperatingSystems.Linux.Ubuntu]
76 | enabled = true
77 | # Available editions:
78 | # "LTS", "interim"
79 | editions = ["LTS"]
80 |
81 | [OperatingSystems.Linux.Fedora]
82 | enabled = true
83 | # Available editions:
84 | # "Budgie", "Cinnamon", "KDE", "LXDE", "MATE_Compiz", "SoaS", "Sway", "Xfce", "i3"
85 | editions = ["KDE"]
86 |
87 | [OperatingSystems.Linux.KaliLinux]
88 | enabled = true
89 | # Available editions:
90 | # "installer", "installer-netinst", "installer-purple", "live"
91 | editions = ["installer", "live"]
92 |
93 | [OperatingSystems.Linux.LinuxMint]
94 | enabled = true
95 | # Available editions:
96 | # "cinnamon", "mate", "xfce"
97 | editions = ["cinnamon"]
98 |
99 | [OperatingSystems.Linux.Manjaro]
100 | enabled = true
101 | # Available editions:
102 | # "plasma", "xfce", "gnome", "cinnamon", "i3"
103 | editions = ["plasma"]
104 |
105 | [OperatingSystems.Linux.OpenSUSE]
106 | enabled = true
107 | # Available editions:
108 | # "leap", "leap-micro", "jump"
109 | editions = ["leap"]
110 |
111 | [OperatingSystems.Linux.OpenSUSERolling]
112 | enabled = true
113 | # Available editions:
114 | # "MicroOS-DVD", "Tumbleweed-DVD", "Tumbleweed-NET", "Tumbleweed-GNOME-Live", "Tumbleweed-KDE-Live", "Tumbleweed-XFCE-Live", "Tumbleweed-Rescue-CD"
115 | editions = ["Tumbleweed-NET", "Tumbleweed-DVD"]
116 |
117 | [OperatingSystems.Linux.Proxmox]
118 | enabled = true
119 | # Available editions:
120 | # "ve", "mail-gateway", "backup-server"
121 | editions = ["ve"]
122 |
123 | [OperatingSystems.Linux.RockyLinux]
124 | enabled = true
125 | # Available editions:
126 | # "dvd", "boot", "minimal"
127 | editions = ["dvd"]
128 |
129 | [OperatingSystems.Linux.Tails]
130 | enabled = true
131 |
132 | [OperatingSystems.Linux.ChromeOS]
133 | enabled = true
134 | # Available editions:
135 | # "ltc", "ltr", "stable"
136 | editions = ["stable"]
137 |
138 | # Windows
139 | [OperatingSystems.Windows]
140 | enabled = true
141 | directory = "Windows"
142 |
143 | [OperatingSystems.Windows.Windows11]
144 | enabled = true
145 | # Available languages:
146 | # "Arabic", "Brazilian Portuguese", "Bulgarian", "Chinese", "Croatian", "Czech", "Danish", "Dutch", "English", "English International", "Estonian", "Finnish", "French", "French Canadian", "German", "Greek", "Hebrew", "Hungarian", "Italian", "Japanese", "Korean", "Latvian", "Lithuanian", "Norwegian", "Polish", "Portuguese", "Romanian", "Russian", "Serbian Latin", "Slovak", "Slovenian", "Spanish", "Spanish (Mexico)", "Swedish", "Thai", "Turkish", "Ukrainian"
147 | langs = ["English International"]
148 |
149 | [OperatingSystems.Windows.Windows10]
150 | enabled = true
151 | # Available languages:
152 | # "Arabic", "Brazilian Portuguese", "Bulgarian", "Chinese", "Croatian", "Czech", "Danish", "Dutch", "English", "English International", "Estonian", "Finnish", "French", "French Canadian", "German", "Greek", "Hebrew", "Hungarian", "Italian", "Japanese", "Korean", "Latvian", "Lithuanian", "Norwegian", "Polish", "Portuguese", "Romanian", "Russian", "Serbian Latin", "Slovak", "Slovenian", "Spanish", "Spanish (Mexico)", "Swedish", "Thai", "Turkish", "Ukrainian"
153 | langs = ["English International"]
154 |
155 | [OperatingSystems.BSD]
156 | enabled = true
157 | directory = "BSD"
158 |
159 | [OperatingSystems.BSD.TrueNAS]
160 | enabled = true
161 | # Available editions:
162 | # "scale", "core"
163 | editions = ["core"]
164 |
165 | # Other
166 | [OperatingSystems.Other]
167 | enabled = true
168 | directory = "Other"
169 |
170 | [OperatingSystems.Other.OPNsense]
171 | enabled = true
172 | # Available editions:
173 | # "dvd", "nano", "serial", "vga"
174 | editions = ["vga"]
175 |
176 | [OperatingSystems.Other.FreeDOS]
177 | enabled = true
178 | # Available editions:
179 | # "BonusCD", "FloppyEdition", "FullUSB", "LegacyCD", "LiteUSB", "LiveCD"
180 | editions = ["LiveCD"]
181 |
182 | [OperatingSystems.Other.TempleOS]
183 | enabled = true
184 | # Available editions:
185 | # "Distro", "Lite"
186 | editions = ["Distro"]
187 |
--------------------------------------------------------------------------------
/modules/updaters/MemTest86Plus.py:
--------------------------------------------------------------------------------
1 | import zipfile
2 | from functools import cache
3 | from pathlib import Path
4 |
5 | import requests
6 | from bs4 import BeautifulSoup
7 | from bs4.element import Tag
8 |
9 | from modules.exceptions import (
10 | DownloadLinkNotFoundError,
11 | IntegrityCheckError,
12 | VersionNotFoundError,
13 | )
14 | from modules.updaters.GenericUpdater import GenericUpdater
15 | from modules.utils import download_file, parse_hash, sha256_hash_check
16 |
17 | DOMAIN = "https://www.memtest.org"
18 | DOWNLOAD_PAGE_URL = DOMAIN
19 | FILE_NAME = "Memtest86plus-[[VER]].iso"
20 |
21 |
22 | class MemTest86Plus(GenericUpdater):
23 | """
24 | A class representing an updater for MemTest86+.
25 |
26 | Attributes:
27 | download_page (requests.Response): The HTTP response containing the download page HTML.
28 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
29 | soup_download_card (Tag): The specific HTML Tag containing the download information card.
30 |
31 | Note:
32 | This class inherits from the abstract base class GenericUpdater.
33 | """
34 |
35 | def __init__(self, folder_path: Path) -> None:
36 | """
37 | Initialize the MemTest86Plus updater.
38 |
39 | Args:
40 | folder_path (str): The path to the folder where the MemTest86+ ISO file is stored.
41 |
42 | Raises:
43 | ConnectionError: If the download page could not be fetched successfully.
44 | DownloadLinkNotFoundError: If the card containing download information is not found.
45 | """
46 | file_path = folder_path / FILE_NAME
47 | super().__init__(file_path)
48 |
49 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
50 |
51 | if self.download_page.status_code != 200:
52 | raise ConnectionError(
53 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
54 | )
55 |
56 | self.soup_download_page = BeautifulSoup(
57 | self.download_page.content, features="html.parser"
58 | )
59 | self.soup_download_card: Tag = self.soup_download_page.find(
60 | "div", attrs={"class": "col-xxl-4"}
61 | ) # type: ignore
62 |
63 | if not self.soup_download_card:
64 | raise DownloadLinkNotFoundError(
65 | "Could not find the card containing download information"
66 | )
67 |
68 | @cache
69 | def _get_download_link(self) -> str:
70 | download_element: Tag | None = self.soup_download_card.find("a", string="Linux ISO (64 bits)") # type: ignore
71 | if not download_element:
72 | raise DownloadLinkNotFoundError("Could not find the download link")
73 | return f"{DOWNLOAD_PAGE_URL}{download_element.get('href')}"
74 |
75 | def check_integrity(self) -> bool:
76 | """
77 | Check the integrity of the downloaded file by verifying its SHA-256 hash against the one provided on the website.
78 |
79 | Returns:
80 | bool: True if the integrity check passes, False otherwise.
81 | """
82 | version_str = self._version_to_str(self._get_latest_version())
83 | sha_256_url = f"{DOWNLOAD_PAGE_URL}/download/v{version_str}/sha256sum.txt"
84 | sha_256_checksums_str: str = requests.get(sha_256_url).text
85 | sha_256_checksum: str = parse_hash(sha_256_checksums_str, ["64.iso"], 0)
86 |
87 | return sha256_hash_check(
88 | self._get_complete_normalized_file_path(absolute=True).with_suffix(".zip"),
89 | sha_256_checksum,
90 | )
91 |
92 | def install_latest_version(self) -> None:
93 | """
94 | Download and install the latest version of the software.
95 |
96 | Raises:
97 | ValueError: If no download link is available for the latest version.
98 | IntegrityCheckError: If the integrity check of the downloaded file fails.
99 | """
100 | download_link = self._get_download_link()
101 |
102 | new_file = self._get_complete_normalized_file_path(absolute=True)
103 |
104 | archive_path = new_file.with_suffix(".zip")
105 |
106 | download_file(download_link, archive_path)
107 |
108 | local_file = self._get_local_file()
109 |
110 | if not self.check_integrity():
111 | archive_path.unlink()
112 | raise IntegrityCheckError("Integrity check failed")
113 |
114 | with zipfile.ZipFile(archive_path) as z:
115 | file_list = z.namelist()
116 | iso = next(file for file in file_list if file.endswith(".iso"))
117 | extracted_file = Path(z.extract(iso, path=new_file.parent))
118 |
119 | if local_file:
120 | local_file.unlink()
121 | archive_path.unlink()
122 |
123 | try:
124 | extracted_file.rename(new_file)
125 | except FileExistsError:
126 | # On Windows, files are not overwritten by default, so we need to remove the old file first
127 | new_file.unlink()
128 | extracted_file.rename(new_file)
129 |
130 | @cache
131 | def _get_latest_version(self) -> list[str]:
132 | card_title: Tag | None = self.soup_download_card.find(
133 | "span", attrs={"class": "text-primary fs-2"}
134 | ) # type: ignore
135 |
136 | if not card_title:
137 | raise VersionNotFoundError("Could not find the latest version")
138 |
139 | return self._str_to_version(
140 | card_title.getText().split("v")[-1]  # Parse "X.Y" from "Memtest86+ vX.Y"
141 | )
142 |
--------------------------------------------------------------------------------
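For illustration, a minimal sketch of driving this updater directly instead of through sisou.py; the mount point is hypothetical.

from pathlib import Path

from modules.updaters.MemTest86Plus import MemTest86Plus

# "/mnt/ventoy/Other" is a hypothetical target folder.
updater = MemTest86Plus(Path("/mnt/ventoy/Other"))
if updater.check_for_updates():  # inherited from GenericUpdater
    updater.install_latest_version()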
/modules/updaters/HDAT2.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 | from pathlib import Path
3 | from urllib.parse import urljoin
4 |
5 | import requests
6 | from bs4 import BeautifulSoup
7 | from bs4.element import Tag
8 |
9 | from modules.updaters.GenericUpdater import GenericUpdater
10 | from modules.utils import md5_hash_check, parse_hash
11 |
12 | DOMAIN = "https://www.hdat2.com"
13 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/download.html"
14 | FILE_NAME = "HDAT2_[[EDITION]]_[[VER]].[[EXT]]"
15 |
16 |
17 | class HDAT2(GenericUpdater):
18 | """
19 | A class representing an updater for HDAT2.
20 |
21 | Attributes:
22 | valid_editions (list[str]): List of valid editions to use
23 | edition (str): Edition to download
24 | download_page (requests.Response): The HTTP response containing the download page HTML.
25 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
26 |
27 | Note:
28 | This class inherits from the abstract base class GenericUpdater.
29 | """
30 |
31 | def __init__(self, folder_path: Path, edition: str) -> None:
32 | self.valid_editions = ["full", "lite", "diskette"]
33 | self.edition = edition.lower()
34 |
35 | if self.edition == "diskette":
36 | extension = "IMG"
37 | else:
38 | extension = "ISO"
39 |
40 | self.file_name = FILE_NAME.replace("[[EXT]]", extension)
41 |
42 | file_path = folder_path / self.file_name
43 | super().__init__(file_path)
44 |
45 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
46 |
47 | if self.download_page.status_code != 200:
48 | raise ConnectionError(
49 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
50 | )
51 |
52 | self.soup_download_page = BeautifulSoup(
53 | self.download_page.content, features="html.parser"
54 | )
55 |
56 | @cache
57 | def _get_download_link(self) -> str:
58 | version_str = self._version_to_str(self._get_latest_version())
59 | match self.edition:
60 | case "full":
61 | soup = self._find_in_table([version_str], ["LITE", "IMG", "EXE"])
62 | case "lite":
63 | soup = self._find_in_table([version_str, "LITE"], ["IMG", "EXE"])
64 | case "diskette":
65 | soup = self._find_in_table([version_str, "HDAT2IMG"], ["ISO", "EXE"])
66 | case _:
67 | raise NotImplementedError(
68 | f"Edition {self.edition} is not implemented yet."
69 | )
70 |
71 | a_tag = soup.find("a", href=True)
72 |
73 | if not a_tag:
74 | raise LookupError("Could not find HTML tag containing download link")
75 |
76 | return urljoin(DOMAIN, a_tag["href"]) # type: ignore
77 |
78 | def check_integrity(self) -> bool:
79 | version_str = self._version_to_str(self._get_latest_version())
80 | match self.edition:
81 | case "full":
82 | soup = self._find_in_table([version_str], ["LITE"])
83 | case "lite":
84 | soup = self._find_in_table([version_str, "LITE"])
85 | case "diskette":
86 | soup = self._find_in_table([version_str, "HDAT2IMG"])
87 | case _:
88 | raise NotImplementedError(
89 | f"Edition {self.edition} is not implemented yet."
90 | )
91 | tag_with_hash = soup.find(lambda tag: "MD5=" in tag.text)
92 | if not tag_with_hash:
93 | raise LookupError("Could not find HTML tag containing MD5 hash")
94 |
95 | md5_sum = parse_hash(tag_with_hash.text, ["MD5=", version_str], -1).replace(
96 | "MD5=", ""
97 | )
98 |
99 | return md5_hash_check(
100 | self._get_complete_normalized_file_path(absolute=True),
101 | md5_sum,
102 | )
103 |
104 | @cache
105 | def _get_latest_version(self) -> list[str]:
106 | version_tag = self.soup_download_page.find("font", {"color": "blue"})
107 |
108 | if not version_tag:
109 | raise LookupError(
110 | "Could not find the HTML tag containing the version number"
111 | )
112 |
113 | version_text = version_tag.get_text(strip=True)
114 |
115 | return self._str_to_version(
116 | version_text.split()[2] # Get 'x.y' from 'Latest version x.y date'
117 | )
118 |
119 | def _find_in_table(
120 | self,
121 | row_name_contains: list[str],
122 | row_name_doesnt_contain: list[str] | None = None,
123 | ) -> Tag:
124 | """
125 | Find the HTML Tag containing specific information in the download page table.
126 |
127 | Args:
128 | row_name_contains (list[str]): Strings that identify the row in the table.
129 | row_name_doesnt_contain (list[str] | None): Strings that must not appear in the row.
130 |
131 | Returns:
132 | Tag: The HTML Tag containing the desired information.
133 |
134 | Raises:
135 | LookupError: If the table or a matching row is not found in the download page.
136 | """
137 | if not row_name_doesnt_contain:
138 | row_name_doesnt_contain = []
139 | s: Tag | None = self.soup_download_page.find("table", attrs={"bgcolor": "#B3B3B3"}) # type: ignore
140 |
141 | if not s:
142 | raise LookupError(
143 | "Could not find the table containing download information"
144 | )
145 |
146 | for tr in s.find_all("tr"):
147 | text = tr.getText()
148 | if any(string in text for string in row_name_doesnt_contain):
149 | continue
150 | if all(string in text for string in row_name_contains):
151 | return tr
152 |
153 | raise LookupError(f"Failed to find value in the table")
154 |
--------------------------------------------------------------------------------
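For illustration, the include/exclude row filter at the heart of HDAT2._find_in_table() above, reduced to plain strings; the row texts and version number are made up.

# Hypothetical table rows as plain text.
rows = [
    "HDAT2 7.5 ISO image",
    "HDAT2 7.5 LITE ISO image",
    "HDAT2IMG 7.5 diskette image",
]

def find_row(contains: list[str], doesnt_contain: tuple[str, ...] = ()) -> str:
    for row in rows:
        if any(s in row for s in doesnt_contain):
            continue  # skip rows matching any excluded string
        if all(s in row for s in contains):
            return row  # first row matching every wanted string
    raise LookupError("Failed to find value in the table")

print(find_row(["7.5"], ("LITE", "HDAT2IMG")))  # -> "HDAT2 7.5 ISO image"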
/modules/updaters/SuperGrub2.py:
--------------------------------------------------------------------------------
1 | import zipfile
2 | from functools import cache
3 | from pathlib import Path
4 |
5 | import requests
6 | from bs4 import BeautifulSoup
7 | from bs4.element import Tag
8 |
9 | from modules.exceptions import (
10 | DownloadLinkNotFoundError,
11 | IntegrityCheckError,
12 | VersionNotFoundError,
13 | )
14 | from modules.updaters.GenericUpdater import GenericUpdater
15 | from modules.utils import download_file, parse_hash, sha256_hash_check
16 |
17 | DOMAIN = "https://www.supergrubdisk.org"
18 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/category/download/supergrub2diskdownload/"
19 | FILE_NAME = "SuperGrub2-[[VER]].img"
20 |
21 |
22 | class SuperGrub2(GenericUpdater):
23 | """
24 | A class representing an updater for SuperGrub2.
25 |
26 | Attributes:
27 | download_page (requests.Response): The HTTP response containing the download page HTML.
28 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
29 |
30 | Note:
31 | This class inherits from the abstract base class GenericUpdater.
32 | """
33 |
34 | def __init__(self, folder_path: Path) -> None:
35 | file_path = folder_path / FILE_NAME
36 | super().__init__(file_path)
37 |
38 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
39 |
40 | if self.download_page.status_code != 200:
41 | raise ConnectionError(
42 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
43 | )
44 |
45 | self.soup_download_page = BeautifulSoup(
46 | self.download_page.content, features="html.parser"
47 | )
48 |
49 | self.soup_latest_download_article: Tag = self.soup_download_page.find("article") # type: ignore
50 | if not self.soup_latest_download_article:
51 | raise DownloadLinkNotFoundError(
52 | "Could not find the article containing download information"
53 | )
54 |
55 | @cache
56 | def _get_download_link(self) -> str:
57 | download_tag = self._find_in_table("Download supergrub2")
58 |
59 | if not download_tag:
60 | raise DownloadLinkNotFoundError(
61 | "We were not able to find the link to the SourceForge in the table"
62 | )
63 |
64 | href_attributes = download_tag.find_all(href=True)
65 |
66 | if not href_attributes:
67 | raise DownloadLinkNotFoundError("No download link found in the `Tag`")
68 |
69 | sourceforge_url = href_attributes[0].get("href")
70 | return sourceforge_url
71 |
72 | def check_integrity(self, archive_to_check: Path) -> bool:
73 | sha256_sums_tag = self.soup_latest_download_article.find_all("pre")
74 | if not sha256_sums_tag:
75 | raise IntegrityCheckError("Couldn't find the SHA256 sum")
76 | sha256_sums_tag = sha256_sums_tag[-1]
77 | sha256_checksums_str = sha256_sums_tag.getText()
78 | sha_256_checksum: str = parse_hash(
79 | sha256_checksums_str,
80 | [
81 | f"supergrub2-{self._get_latest_version()[0]}",
82 | ".img.zip",
83 | ],
84 | 0,
85 | )
86 |
87 | return sha256_hash_check(archive_to_check, sha_256_checksum)
88 |
89 | def install_latest_version(self) -> None:
90 | download_link: str = self._get_download_link()
91 |
92 | new_file = self._get_complete_normalized_file_path(absolute=True)
93 |
94 | archive_path = new_file.with_suffix(".zip")
95 |
96 | download_file(download_link, archive_path)
97 |
98 | local_file = self._get_local_file()
99 |
100 | if not self.check_integrity(archive_path):
101 | archive_path.unlink()
102 | raise IntegrityCheckError("Integrity check failed")
103 |
104 | with zipfile.ZipFile(archive_path) as z:
105 | file_list = z.namelist()
106 | img = next(file for file in file_list if file.endswith(".img"))
107 | extracted_file = Path(z.extract(img, path=new_file.parent))
108 |
109 | if local_file:
110 | local_file.unlink()
111 | archive_path.unlink()
112 |
113 | try:
114 | extracted_file.rename(new_file)
115 | except FileExistsError:
116 | # On Windows, files are not overwritten by default, so we need to remove the old file first
117 | new_file.unlink()
118 | extracted_file.rename(new_file)
119 |
120 | @cache
121 | def _get_latest_version(self) -> list[str]:
122 | download_table: Tag | None = self.soup_latest_download_article.find("table", attrs={"cellpadding": "5px"}) # type: ignore
123 | if not download_table:
124 | raise VersionNotFoundError(
125 | "We were not able to find the table of download which contains the version number"
126 | )
127 |
128 | download_table_header: Tag | None = download_table.find("h2") # type: ignore
129 | if not download_table_header:
130 | raise VersionNotFoundError(
131 | "We were not able to find the header containing the version number"
132 | )
133 |
134 | header: str = download_table_header.getText().lower()
135 | return self._str_to_version(
136 | header.replace("super grub2 disk", "")
137 | .strip() # Parse from "Super Grub2 Disk 2.06s2-beta1"
138 | .replace("s", self.version_splitter)
139 | .replace("-beta", self.version_splitter)
140 | )
141 |
142 | def _find_in_table(self, row_name_contains: str) -> Tag | None:
143 | """
144 | Find the HTML Tag containing specific information in the download page table.
145 |
146 | Args:
147 | row_name_contains (str): A string that identifies the row in the table.
148 |
149 | Returns:
150 | Tag | None: The HTML Tag containing the desired information, or None if not found.
151 | """
152 | download_table: Tag | None = self.soup_latest_download_article.find("table", attrs={"cellpadding": "5px"}) # type: ignore
153 |
154 | if not download_table:
155 | raise LookupError("Could not find the table with download information")
156 |
157 | for tr in download_table.find_all("tr"):
158 | for td in tr.find_all("td"):
159 | if row_name_contains in td.getText():
160 | return td
161 | return None
162 |
--------------------------------------------------------------------------------
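For illustration, the header normalisation performed in _get_latest_version() above, assuming GenericUpdater's version_splitter is "." (an assumption; the attribute is defined in GenericUpdater.py, not shown here).

header = "Super Grub2 Disk 2.06s2-beta1".lower()
version_str = (
    header.replace("super grub2 disk", "")
    .strip()
    .replace("s", ".")      # "2.06s2-beta1" -> "2.06.2-beta1"
    .replace("-beta", ".")  # "2.06.2-beta1" -> "2.06.2.1"
)
print(version_str)  # -> "2.06.2.1"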
/modules/updaters/FreeDOS.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import logging
3 | import re
4 | import zipfile
5 | from functools import cache
6 | from pathlib import Path
7 |
8 | import requests
9 | from bs4 import BeautifulSoup
10 |
11 | from modules.exceptions import IntegrityCheckError, VersionNotFoundError
12 | from modules.updaters.GenericUpdater import GenericUpdater
13 | from modules.utils import download_file, parse_hash, sha256_hash_check
14 |
15 | DOMAIN = "https://www.ibiblio.org"
16 | DOWNLOAD_PAGE_URL = f"{DOMAIN}/pub/micro/pc-stuff/freedos/files/distributions/"
17 | FILE_NAME = "FreeDOS-[[VER]]-[[EDITION]].[[EXT]]"
18 |
19 |
20 | class FreeDOS(GenericUpdater):
21 | """
22 | A class representing an updater for FreeDOS.
23 |
24 | Attributes:
25 | download_page (requests.Response): The HTTP response containing the download page HTML.
26 | soup_download_page (BeautifulSoup): The parsed HTML content of the download page.
27 |
28 | Note:
29 | This class inherits from the abstract base class GenericUpdater.
30 | """
31 |
32 | def __init__(self, folder_path: Path, edition: str) -> None:
33 | self.valid_editions = [
34 | "BonusCD",
35 | "FloppyEdition",
36 | "FullUSB",
37 | "LegacyCD",
38 | "LiteUSB",
39 | "LiveCD",
40 | ]
41 |
42 | self.edition = edition
43 | file_path = folder_path / FILE_NAME
44 | super().__init__(file_path)
45 |
46 | # Make the parameter case insensitive, and find back the correct case using valid_editions
47 | self.edition = next(
48 | valid_ed
49 | for valid_ed in self.valid_editions
50 | if valid_ed.lower() == self.edition.lower()
51 | )
52 |
53 | self.download_page = requests.get(DOWNLOAD_PAGE_URL)
54 |
55 | if self.download_page.status_code != 200:
56 | raise ConnectionError(
57 | f"Failed to fetch the download page from '{DOWNLOAD_PAGE_URL}'"
58 | )
59 |
60 | self.soup_download_page = BeautifulSoup(
61 | self.download_page.content, features="html.parser"
62 | )
63 |
64 | @cache
65 | def _get_download_link(self) -> str:
66 | latest_version = self._get_latest_version()
67 | latest_version_str = self._version_to_str(latest_version)
68 | return f"{DOWNLOAD_PAGE_URL}/{latest_version_str}/official/FD{''.join(latest_version)}-{self.edition}.zip"
69 |
70 | def check_integrity(self) -> bool:
71 | latest_version_str = self._version_to_str(self._get_latest_version())
72 | checksums_url = f"{DOWNLOAD_PAGE_URL}{latest_version_str}/official/verify.txt"
73 |
74 | checksums = requests.get(checksums_url).text
75 |
76 | try:
77 | sha256_sums = next(
78 | sums for sums in checksums.split("\n\n") if "sha256" in sums
79 | )
80 | except StopIteration as e:
81 | raise IntegrityCheckError(
82 | "Could not find the sha256 hash in the hash list file"
83 | ) from e
84 |
85 | sha256_sum = parse_hash(sha256_sums, [self.edition], 0)
86 |
87 | return sha256_hash_check(
88 | self._get_normalized_file_path(
89 | True, self._get_latest_version(), self.edition
90 | ).with_suffix(".zip"),
91 | sha256_sum,
92 | )
93 |
94 | def install_latest_version(self) -> None:
95 | """
96 | Download and install the latest version of the software.
97 |
98 | Raises:
99 | IntegrityCheckError: If the integrity check of the downloaded file fails.
100 | """
101 | download_link = self._get_download_link()
102 |
103 | new_file = self._get_complete_normalized_file_path(absolute=True)
104 | archive_path = new_file.with_suffix(".zip")
105 |
106 | local_file = self._get_local_file()
107 |
108 | download_file(download_link, archive_path)
109 |
110 | try:
111 | integrity_check = self.check_integrity()
112 | except IntegrityCheckError as e:
113 | raise e
114 | except Exception as e:
115 | raise IntegrityCheckError(
116 | "Integrity check failed: An error occurred"
117 | ) from e
118 |
119 | if not integrity_check:
120 | archive_path.unlink()
121 | raise IntegrityCheckError("Integrity check failed: Hashes do not match")
122 |
123 | with zipfile.ZipFile(archive_path) as z:
124 | file_list = z.namelist()
125 | try:
126 | file_ext = ".ISO"
127 | to_extract = next(
128 | file for file in file_list if file.upper().endswith(file_ext)
129 | )
130 | except StopIteration:
131 | file_ext = ".IMG"
132 | to_extract = next(
133 | file for file in file_list if file.upper().endswith(file_ext)
134 | )
135 |
136 | extracted_file = Path(z.extract(to_extract, path=new_file.parent))
137 | try:
138 | extracted_file.rename(new_file.with_suffix(file_ext))
139 | except FileExistsError:
140 | # On Windows, files are not overwritten by default, so we need to remove the old file first
141 | new_file.with_suffix(file_ext).unlink()
142 | extracted_file.rename(new_file.with_suffix(file_ext))
143 |
144 | archive_path.unlink()
145 | if local_file:
146 | local_file.unlink()
147 |
148 | def _get_local_file(self) -> Path | None:
149 | file_path = self._get_normalized_file_path(
150 | absolute=True,
151 | version=None,
152 | edition=self.edition if self.has_edition() else None, # type: ignore
153 | lang=self.lang if self.has_lang() else None, # type: ignore
154 | )
155 |
156 | local_files = glob.glob(
157 | str(file_path.with_suffix(".*")).replace("[[VER]]", "*")
158 | )
159 |
160 | if local_files:
161 | return Path(local_files[0])
162 | logging.debug(
163 | f"[FreeDOS._get_local_file] No local file found for {self.__class__.__name__}"
164 | )
165 | return None
166 |
167 | @cache
168 | def _get_latest_version(self) -> list[str]:
169 | download_a_tags = self.soup_download_page.find_all("a", href=True)
170 | if not download_a_tags:
171 | raise VersionNotFoundError("We were not able to parse the download page")
172 |
173 | latest_version = self._get_local_version()
174 | version_regex = re.compile(r"^(([0-9]+)(\.?))+$")
175 | for a_tag in download_a_tags:
176 | href = a_tag.get("href")
177 | version: str = href[:-1]  # drop the trailing "/" from directory links
178 | if version_regex.fullmatch(version):
179 | compared_version = self._str_to_version(version)
180 | if latest_version:
181 | if self._compare_version_numbers(latest_version, compared_version):
182 | latest_version = compared_version
183 | else:
184 | latest_version = compared_version
185 |
186 | if not latest_version:
187 | raise VersionNotFoundError("Could not find a valid version")
188 |
189 | return latest_version
190 |
--------------------------------------------------------------------------------
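For illustration, how _get_download_link() above assembles the download URL, assuming _version_to_str() joins the version parts with "." and the latest version is 1.3 (both assumptions made up for the example).

latest_version = ["1", "3"]
base = "https://www.ibiblio.org/pub/micro/pc-stuff/freedos/files/distributions/"
url = f"{base}{'.'.join(latest_version)}/official/FD{''.join(latest_version)}-LiveCD.zip"
print(url)  # -> .../distributions/1.3/official/FD13-LiveCD.zip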
/sisou.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import logging
3 | from abc import ABCMeta
4 | from functools import cache
5 | from pathlib import Path
6 | from typing import Type
7 |
8 | import modules.updaters
9 | from modules.updaters import GenericUpdater
10 | from modules.utils import parse_config
11 |
12 |
13 | @cache
14 | def get_available_updaters() -> list[Type[GenericUpdater]]:
15 | """Get a list of available updaters.
16 |
17 | Returns:
18 | list[Type[GenericUpdater]]: A list of available updater classes.
19 | """
20 | return [
21 | getattr(modules.updaters, updater)
22 | for updater in dir(modules.updaters)
23 | if updater != "GenericUpdater"
24 | and isinstance(getattr(modules.updaters, updater), ABCMeta)
25 | and issubclass(getattr(modules.updaters, updater), GenericUpdater)
26 | ]
27 |
28 |
29 | def setup_logging(log_level: str, log_file: Path | None):
30 | """Set up logging configurations.
31 |
32 | Args:
33 | log_level (str): The log level. Valid choices: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL".
34 | log_file (Path | None): The path to the log file. If None, log to console.
35 |
36 | Note:
37 | log_level is pre-validated by the argparse choices in main().
38 | """
39 | numeric_log_level = getattr(logging, log_level, None)
40 |
41 | logging.basicConfig(
42 | level=numeric_log_level,
43 | format="%(asctime)s - %(levelname)s - %(message)s",
44 | filename=log_file,
45 | )
46 |
47 | logging.debug("Logging started")
48 |
49 |
50 | def run_updater(updater: GenericUpdater):
51 | """Run a single updater.
52 |
53 | Args:
54 | updater (GenericUpdater): The updater instance to run.
55 | """
56 | installer_for = f"{updater.__class__.__name__}{' '+updater.edition if updater.has_edition() else ''}" # type: ignore
57 |
58 | logging.info(f"[{installer_for}] Checking for updates...")
59 |
60 | try:
61 | if updater.check_for_updates():
62 | logging.info(
63 | f"[{installer_for}] Updates available. Downloading and installing the latest version..."
64 | )
65 | updater.install_latest_version()
66 | logging.info(f"[{installer_for}] Update completed successfully!")
67 | else:
68 | logging.info(f"[{installer_for}] No updates available.")
69 | except Exception:
70 | logging.exception(
71 | f"[{installer_for}] An error occurred while updating. See traceback below."
72 | )
73 |
74 |
75 | def run_updaters(
76 | install_path: Path, config: dict, updater_list: list[Type[GenericUpdater]]
77 | ):
78 | """Run updaters based on the provided configuration.
79 |
80 | Args:
81 | install_path (Path): The installation path.
82 | config (dict): The configuration dictionary.
83 | updater_list (list[Type[GenericUpdater]]): A list of available updater classes.
84 | """
85 | for key, value in config.items():
86 | # If the key is the name of an updater, run it with the values as arguments; otherwise treat the key as a folder name and recurse
87 | if key in [updater.__name__ for updater in updater_list]:
88 | updater_class = next(
89 | updater for updater in updater_list if updater.__name__ == key
90 | )
91 |
92 | updaters: list[GenericUpdater] = []
93 |
94 | params: list[dict] = [{}]
95 |
96 | editions = value.get("editions", [])
97 | langs = value.get("langs", [])
98 |
99 | if editions and langs:
100 | params = [
101 | {"edition": edition, "lang": lang}
102 | for edition in editions
103 | for lang in langs
104 | ]
105 | elif editions:
106 | params = [{"edition": edition} for edition in editions]
107 | elif langs:
108 | params = [{"lang": lang} for lang in langs]
109 |
110 | for param in params:
111 | try:
112 | updaters.append(updater_class(install_path, **param))
113 | except Exception:
114 | installer_for = f"{key} {param}"
115 | logging.exception(
116 | f"[{installer_for}] An error occurred while trying to add the installer. See traceback below."
117 | )
118 | # Run updater(s)
119 | for updater in updaters:
120 | run_updater(updater)
121 |
122 | else:
123 | run_updaters(install_path / key, value, updater_list)
124 |
125 |
126 | def main():
127 | """Main function to run the update process."""
128 | parser = argparse.ArgumentParser(description="Update the ISOs on a Ventoy drive")
129 |
130 | # Add the positional argument for the file path
131 | parser.add_argument("ventoy_path", help="Path to the Ventoy drive")
132 |
133 | # Add the optional argument for log level
134 | parser.add_argument(
135 | "-l",
136 | "--log-level",
137 | choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
138 | default="INFO",
139 | help="Set the log level (default: INFO)",
140 | )
141 |
142 | # Add the optional argument for log file
143 | parser.add_argument(
144 | "-f", "--log-file", help="Path to the log file (default: log to console)"
145 | )
146 |
147 | # Add the optional argument for config file
148 | parser.add_argument(
149 | "-c", "--config-file", help="Path to the config file (default: config.toml)"
150 | )
151 |
152 | args = parser.parse_args()
153 |
154 | log_file = Path(args.log_file) if args.log_file else None
155 | setup_logging(args.log_level, log_file)
156 |
157 | ventoy_path = Path(args.ventoy_path).resolve()
158 |
159 | config_file = Path(args.config_file) if args.config_file else None
160 | if not config_file:
161 | logging.info(
162 | "No config file specified. Trying to find config.toml in the current directory..."
163 | )
164 | config_file = Path() / "config.toml"
165 |
166 | if not config_file.is_file():
167 | logging.info(
168 | "No config file specified. Trying to find config.toml in the ventoy drive..."
169 | )
170 | config_file = ventoy_path / "config.toml"
171 |
172 | if not config_file.is_file():
173 | logging.info(
174 | "No config.toml found in the ventoy drive. Generating one from config.toml.default..."
175 | )
176 | with open(
177 | Path(__file__).parent / "config" / "config.toml.default"
178 | ) as default_config_file:
179 | config_file.parent.mkdir(parents=True, exist_ok=True)
180 | with open(config_file, "w") as new_config_file:
181 | new_config_file.write(default_config_file.read())
182 | logging.info(
183 | "Generated config.toml in the ventoy drive. Please edit it to your liking and run sisou again."
184 | )
185 | return
186 |
187 | config = parse_config(config_file)
188 | if not config:
189 | raise ValueError("Configuration file could not be parsed or is empty")
190 |
191 | available_updaters: list[Type[GenericUpdater]] = get_available_updaters()
192 |
193 | run_updaters(ventoy_path, config, available_updaters)
194 |
195 | logging.debug("Finished execution")
196 |
197 |
198 | if __name__ == "__main__":
199 | main()
200 |
--------------------------------------------------------------------------------
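For illustration, how run_updaters() above walks a nested config: a key that matches an updater class name yields updater instances (one per edition/language combination), while any other key is treated as a sub-folder to recurse into. The dict below is hypothetical, roughly what parse_config() might hand over.

config = {
    "BSD": {
        "TrueNAS": {"editions": ["core"]},
    },
}
# run_updaters(Path("/mnt/ventoy"), config, get_available_updaters())
# recurses into "BSD", then builds
# TrueNAS(Path("/mnt/ventoy/BSD"), edition="core") and runs it.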
/modules/WindowsConsumerDownload.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import re
3 | import uuid
4 | from datetime import datetime
5 |
6 | import requests
7 |
8 |
9 | class WindowsConsumerDownloader:
10 | """
11 | A class to obtain Windows ISO download URLs and file hashes for a specific Windows version and language.
12 | """
13 |
14 | _SESSION_ID = uuid.uuid4()
15 | _PROFILE_ID = "606624d44113"
16 | _ORG_ID = "y6jn8c31"
17 |
18 | _HEADERS = {
19 | "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:134.0) Gecko/20100101 Firefox/134.0",
20 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
21 | "referer": "localhost",
22 | }
23 |
24 | _session_authorized = False
25 | _download_page_cache = {}
26 | _language_skuIDs_cache = {}
27 | _download_link_cache = {}
28 |
29 | @staticmethod
30 | def windows_consumer_file_hash(windows_version: str, lang: str) -> str:
31 | """
32 | Obtain the SHA-256 hash of a Windows ISO for a specific Windows version and language.
33 |
34 | Args:
35 | windows_version (str): The desired Windows version.
36 | Valid options are '11', '10', or '8'.
37 | lang (str): The desired language for the Windows ISO.
38 | See https://www.microsoft.com/en-us/software-download/windows11 for a list of available languages.
39 |
40 | Returns:
41 | str: SHA-256 hash of the ISO for the given Windows version and language.
42 | """
43 | matches = re.search(
44 | rf"FileHash(.+\n+)+?^<\/tr>.+{lang}.+\n(.+)<",
45 | WindowsConsumerDownloader._get_download_page(windows_version),
46 | re.MULTILINE,
47 | )
48 |
49 | if not matches or not matches.groups():
50 | raise LookupError("Could not find SHA256 hash")
51 |
52 | file_hash = matches.group(2)
53 | return file_hash
54 |
55 | @staticmethod
56 | def _get_download_page(windows_version: str) -> str:
57 | match windows_version:
58 | case "11":
59 | url_segment = f"windows{windows_version}"
60 | case "10" | "8":
61 | url_segment = f"windows{windows_version}ISO"
62 | case _:
63 | raise NotImplementedError(
64 | "The valid Windows versions are '11', '10', or '8'."
65 | )
66 |
67 | if url_segment not in WindowsConsumerDownloader._download_page_cache:
68 | download_page = requests.get(
69 | f"https://www.microsoft.com/en-us/software-download/{url_segment}",
70 | headers=WindowsConsumerDownloader._HEADERS,
71 | )
72 |
73 | if download_page.status_code != 200:
74 | raise RuntimeError(
75 | f"Could not load the Windows {windows_version} download page."
76 | )
77 |
78 | WindowsConsumerDownloader._download_page_cache[url_segment] = (
79 | download_page.text
80 | )
81 |
82 | return WindowsConsumerDownloader._download_page_cache[url_segment]
83 |
84 | @staticmethod
85 | def windows_consumer_download(windows_version: str, lang: str) -> str:
86 | """
87 | Obtain a Windows ISO download URL for a specific Windows version and language.
88 |
89 | Args:
90 | windows_version (str): The desired Windows version.
91 | Valid options are '11', '10', or '8'.
92 | lang (str): The desired language for the Windows ISO.
93 | See https://www.microsoft.com/en-us/software-download/windows11 for a list of available languages
94 |
95 | Returns:
96 | str: Download link for the given Windows version and language
97 | """
98 | matches = re.search(
99 | r'