├── .gitignore
├── .gitattributes
├── nx_apphub_cli
│   ├── __init__.py
│   ├── pyproject.toml
│   ├── exceptions.py
│   ├── extractor.py
│   ├── apprun.py
│   ├── sandbox.py
│   ├── generator.py
│   ├── utils.py
│   ├── config.py
│   ├── cli.py
│   ├── appdir_lint.py
│   ├── manager.py
│   ├── downloader.py
│   └── builder.py
├── .pylintrc
├── setup.py
├── .github
│   └── ISSUE_TEMPLATE
│       └── bug_report.md
├── LICENSE
└── README.md

/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | *.egg-info/
3 | venv/
4 | __pycache__
5 | 
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 | 
--------------------------------------------------------------------------------
/nx_apphub_cli/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: BSD-3-Clause
3 | # Copyright <2025> >
4 | 
5 | from .builder import prepare_appimage, setup_appimage_directories
6 | from .cli import main
7 | from .config import load_yaml_config
8 | from .downloader import get_latest_deb
9 | from .extractor import extract_deb
10 | from .manager import install, remove, update, downgrade, search
11 | from .utils import ensure_executable, cleanup_cache
12 | 
13 | # <---
14 | # --->
15 | __all__ = [
16 |     "main",
17 |     "load_yaml_config",
18 |     "get_latest_deb",
19 |     "extract_deb",
20 |     "prepare_appimage",
21 |     "setup_appimage_directories",
22 |     "install",
23 |     "remove",
24 |     "update",
25 |     "downgrade",
26 |     "search",
27 | ]
28 | 
29 | __version__ = "1.0.0"
30 | 
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 | ignore-patterns=venv
3 | 
4 | [MESSAGES CONTROL]
5 | # Disable warnings that are appropriate for CLI tools, not libraries.
6 | disable =
7 |     W0718,
8 |     W0703,
9 |     W1309,
10 |     C0114,
11 |     C0116,
12 |     C0301,
13 |     W1514,
14 |     W0707,
15 | 
16 | [FORMAT]
17 | max-line-length = 120
18 | 
19 | [VARIABLES]
20 | # Allow intentionally unused variables like "_ = value"
21 | dummy-variables-rgx = _
22 | 
23 | # Recognize "_" as a known name so it never triggers undefined-variable
24 | additional-builtins = _
25 | 
26 | [REPORTS]
27 | # Use simple output
28 | output-format = text
29 | reports = no
30 | 
31 | [DESIGN]
32 | # Nx-apphub-cli commands have long functions; don't warn about it
33 | max-branches = 20
34 | max-locals = 25
35 | max-returns = 10
36 | 
37 | [TYPECHECK]
38 | # Allow dynamic attributes added via SimpleNamespace
39 | ignored-classes = types.SimpleNamespace
40 | 
41 | [LOGGING]
42 | logging-format-style = new
43 | 
--------------------------------------------------------------------------------
/nx_apphub_cli/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=42", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 | 
5 | [project]
6 | name = "nx-apphub-cli"
7 | version = "1.0.0"
8 | description = "NX AppHub CLI — Lightweight command-line tool for managing and building applications in Nitrux."
9 | authors = [
10 |     { name = "Uri Herrera", email = "uri_herrera@nxos.org" }
11 | ]
12 | license = { text = "BSD-3-Clause" }
13 | readme = "README.md"
14 | requires-python = ">=3.10"
15 | dependencies = [
16 |     "requests",
17 |     "pyyaml",
18 |     "tqdm",
19 |     "python-debian",
20 |     "pyelftools"
21 | ]
22 | 
23 | [project.urls]
24 | Homepage = "https://github.com/Nitrux/nx-apphub"
25 | 
26 | [project.scripts]
27 | nx-apphub-cli = "nx_apphub_cli.cli:main"
28 | 
29 | [project.optional-dependencies]
30 | dev = ["isort"]
31 | 
32 | [tool.isort]
33 | line_length = 100
34 | multi_line_output = 3
35 | include_trailing_comma = true
36 | force_grid_wrap = 0
37 | use_parentheses = true
38 | known_first_party = ["nx_apphub_cli"]
39 | skip = [".venv", "build", "dist"]
40 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: BSD-3-Clause
3 | # Copyright <2025> >
4 | 
5 | from setuptools import setup, find_packages
6 | 
7 | # <---
8 | # --->
9 | setup(
10 |     name="nx-apphub-cli",
11 |     version="1.0.0",
12 |     packages=find_packages(),
13 |     install_requires=[
14 |         "requests",
15 |         "pyyaml",
16 |         "tqdm",
17 |         "python-debian",
18 |         "pyelftools",
19 |     ],
20 |     entry_points={
21 |         "console_scripts": [
22 |             "nx-apphub-cli=nx_apphub_cli.cli:main"
23 |         ]
24 |     },
25 |     author="Uri Herrera",
26 |     author_email="uri_herrera@nxos.org",
27 |     description="NX AppHub CLI — Lightweight command-line tool for managing and building applications in Nitrux.",
28 |     url="https://github.com/Nitrux/nx-apphub",
29 |     classifiers=[
30 |         "Programming Language :: Python :: 3",
31 |         "License :: OSI Approved :: BSD License",
32 |         "Operating System :: POSIX :: Linux"
33 |     ],
34 |     python_requires='>=3.10',
35 | )
36 | 
--------------------------------------------------------------------------------
/nx_apphub_cli/exceptions.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: BSD-3-Clause
3 | # Copyright <2025> >
4 | 
5 | # <---
6 | # --->
7 | # -- Define exception classes.
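# These form a flat hierarchy under NxAppHubError, so callers can catch the
# base class to handle any tool-specific failure; cli.py does essentially this.
# A minimal sketch of the intended pattern:
#
#     try:
#         install(["inkscape"])
#     except NxAppHubError as e:
#         print(f"Error: {e}")
#         sys.exit(1)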
8 | 9 | class NxAppHubError(Exception): 10 | """Base class for all nx-apphub-cli errors.""" 11 | 12 | 13 | class ConfigError(NxAppHubError): 14 | """Raised when the YAML configuration file is invalid.""" 15 | 16 | 17 | class DownloadError(NxAppHubError): 18 | """Raised when a download operation fails.""" 19 | 20 | 21 | class ExtractionError(NxAppHubError): 22 | """Raised when extracting packages or files fails.""" 23 | 24 | 25 | class BuildError(NxAppHubError): 26 | """Raised when building an AppDir or AppImage fails.""" 27 | 28 | 29 | class RepoError(NxAppHubError): 30 | """Raised when repository metadata or sources are invalid.""" 31 | 32 | 33 | class SandboxError(NxAppHubError): 34 | """Raised when sandbox configuration or validation fails.""" 35 | 36 | 37 | class ManagerError(NxAppHubError): 38 | """Raised when install, update, or removal operations fail.""" 39 | 40 | 41 | class GeneratorError(NxAppHubError): 42 | """Raised when generating YAML or metadata fails.""" 43 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🐞 Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | 7 | --- 8 | 9 | # Pre-submission Checklist 10 | - [ ] I have searched the issue tracker for duplicates. 11 | - [ ] I am reporting an issue specific to this distribution, not an upstream software bug (e.g., a bug in Firefox itself). 12 | - [ ] I have read the disclaimer at the bottom of this form. 13 | 14 | # Issue Description 15 | 16 | **Describe the bug** 17 | A clear and concise description of what the bug is. 18 | 19 | **Steps to reproduce** 20 | 1. Go to '...' 21 | 2. Click on '....' 22 | 3. Scroll down to '....' 23 | 4. See the error 24 | 25 | **Expected behavior** 26 | Describe what you expected to happen when the bug occurred. 27 | 28 | **Media (Screenshots / Video)** 29 | Please include screenshots or screen recordings to help explain your problem. 30 | - *Note: You can drag and drop images or video files (.mp4, .mov) directly into this text field.* 31 | 32 | **Additional context** 33 | Add any other context about the problem here, including specific solutions or workarounds you've tried already. 34 | 35 | --- 36 | 37 | # Environment Information 38 | 39 | **OS version and Desktop information** 40 | Open System Settings, go to **About System**, click "Copy to clipboard in English," and paste the content below. 41 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2025, Nitrux 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright notice, 12 | this list of conditions and the following disclaimer in the documentation 13 | and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of the copyright holder nor the names of its 16 | contributors may be used to endorse or promote products derived from 17 | this software without specific prior written permission. 
18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /nx_apphub_cli/extractor.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import shutil 6 | import subprocess 7 | from pathlib import Path 8 | 9 | from .exceptions import ExtractionError 10 | 11 | # <--- 12 | # ---> 13 | # -- Extract .deb files into the correct package directory. 14 | 15 | def extract_deb(deb_path, package_name, quiet=True): 16 | """Extracts a .deb package into its designated AppDir.""" 17 | 18 | if deb_path is None: 19 | return 20 | 21 | package_dir = Path.home() / ".cache/nx-apphub-cli" / package_name 22 | app_dir = package_dir / "AppDir" 23 | deb_dir = package_dir / "debs" 24 | 25 | app_dir.mkdir(parents=True, exist_ok=True) 26 | temp_dir = deb_dir / "temp" 27 | temp_dir.mkdir(parents=True, exist_ok=True) 28 | 29 | if not quiet: 30 | print(f"🗄️ Extracting {deb_path}...") 31 | 32 | try: 33 | subprocess.run( 34 | ["ar", "x", deb_path], 35 | cwd=temp_dir, 36 | check=True, 37 | stdout=subprocess.DEVNULL, 38 | stderr=subprocess.PIPE 39 | ) 40 | 41 | archive_files = list(temp_dir.glob("data.tar.*")) 42 | if not archive_files: 43 | raise ExtractionError(f"No valid data archive found in {deb_path}.") 44 | 45 | data_archive = archive_files[0] 46 | 47 | if data_archive.suffix in [".xz", ".gz"]: 48 | subprocess.run( 49 | ["tar", "xf", str(data_archive), "-C", str(app_dir)], 50 | check=True, 51 | stderr=subprocess.PIPE 52 | ) 53 | elif data_archive.suffix == ".zst": 54 | decompressed_archive = temp_dir / "data.tar" 55 | subprocess.run( 56 | ["unzstd", "-d", str(data_archive), "-o", str(decompressed_archive)], 57 | check=True, 58 | stdout=subprocess.DEVNULL, 59 | stderr=subprocess.PIPE 60 | ) 61 | subprocess.run( 62 | ["tar", "xf", str(decompressed_archive), "-C", str(app_dir)], 63 | check=True, 64 | stderr=subprocess.PIPE 65 | ) 66 | else: 67 | raise ExtractionError(f"Unsupported archive format in {deb_path}: {data_archive.suffix}") 68 | 69 | if not quiet: 70 | print(f"🗃️ Extracted {deb_path} successfully.") 71 | 72 | # -- Ensure that libraries are correctly moved without overwriting existing ones. 
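        # (Some packages ship their shared objects under /lib instead of /usr/lib;
        # consolidating them under usr/lib keeps the AppDir layout aligned with
        # the LD_LIBRARY_PATH entries that the generated AppRun exports.)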
73 | 74 | lib_dir = app_dir / "usr/lib" 75 | lib_dir.mkdir(parents=True, exist_ok=True) 76 | 77 | for extracted_file in (app_dir / "lib").glob("*.so*"): 78 | target_file = lib_dir / extracted_file.name 79 | if not target_file.exists(): 80 | shutil.move(str(extracted_file), str(target_file)) 81 | if not quiet: 82 | print(f"Moved {extracted_file} → {target_file}") 83 | 84 | except subprocess.CalledProcessError as e: 85 | error_msg = e.stderr.decode(errors='replace').strip() if e.stderr else str(e) 86 | raise ExtractionError(f"Extraction failed for {deb_path}: {error_msg}") from e 87 | 88 | finally: 89 | if temp_dir.exists(): 90 | shutil.rmtree(temp_dir) 91 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # NX AppHub CLI | [![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause) 2 | 3 |

4 | 5 |

6 | 
7 | # Introduction
8 | 
9 | NX AppHub CLI is a lightweight command-line tool for managing and building applications in Nitrux.
10 | 
11 | > [!WARNING]
12 | > NX AppHub CLI primarily targets Nitrux OS, and using this utility in other distributions may or may not work. Compatibility with other distributions is incidental, not intentional.
13 | 
14 | For more in-depth information about NX AppHub CLI, please see the [Wiki](https://github.com/Nitrux/nx-apphub/wiki).
15 | 
16 | ## Requirements
17 | 
18 | - Nitrux 5.0.0 and newer.
19 | > [!NOTE]
20 | > To use `nx-apphub-cli` in previous versions of Nitrux, use a container; see our tutorial on [how to use Distrobox](https://nxos.org/tutorial/how-to-use-distrobox-in-nitrux/).
21 | - Python 3.10 and newer.
22 | 
23 | ### Runtime Requirements
24 | 
25 | ```
26 | appstream
27 | binutils
28 | bubblewrap
29 | file
30 | firejail
31 | fuse3
32 | git
33 | kf6-breeze-icon-theme || luv-icon-theme
34 | libfuse2t64 || libfuse2
35 | patchelf
36 | zstd
37 | ```
38 | 
39 | # Installation
40 | 
41 | To install NX AppHub CLI, we recommend using `pipx`.
42 | 
43 | ## Single-user
44 | 
45 | ```
46 | pipx install git+https://github.com/Nitrux/nx-apphub.git
47 | ```
48 | 
49 | > [!WARNING]
50 | > pipx will install `nx-apphub-cli` to `$HOME/.local/bin`; run `pipx ensurepath` to add this directory to `$PATH`.
51 | 
52 | 
53 | ## System-wide
54 | 
55 | ```
56 | sudo pipx install --global git+https://github.com/Nitrux/nx-apphub.git
57 | ```
58 | 
59 | # Uninstallation
60 | 
61 | To uninstall NX AppHub CLI, do the following.
62 | 
63 | ## Single-user
64 | 
65 | ```
66 | pipx uninstall nx-apphub-cli
67 | ```
68 | 
69 | 
70 | ## System-wide
71 | 
72 | ```
73 | sudo pipx uninstall --global nx-apphub-cli
74 | ```
75 | 
76 | 
77 | # Usage
78 | 
79 | To use NX AppHub CLI, check the commands below.
80 | 
81 | - `install` → Install one or more applications.
82 | - `remove` → Remove one or more installed applications.
83 | - `update` → Update one or more installed applications.
84 | - `downgrade` → Downgrade one or more installed applications.
85 | - `search` → Search for specific applications.
86 | - `show` → Show installed applications.
87 | - `build` → Build an AppImage from a local YAML file.
88 |   - `--appdir-lint` → Optionally debug missing shared libraries in an AppImage.
89 | - `generate` → Generate YAML template from package metadata.
90 |   - `--package` → Specify package name.
91 |   - `--distro` → Choose the distribution from which to get metadata.
92 |   - `--release` → The release of the selected distribution.
93 |   - `--arch` → Specify the target architecture.
94 |   - `--components` → Specify the APT components to read metadata from (default: main).
95 |   - `--output` → The file name of the generated YAML file.
96 |   - `--description-output` → The file name of the generated metadata file.
97 |   - `--integration-type` → Set the integration type: cli, gui, or wm (default: gui).
98 | 
99 | 
100 | ## Examples
101 | 
102 | ```
103 | nx-apphub-cli install inkscape
104 | 
105 | nx-apphub-cli remove fiery
106 | 
107 | nx-apphub-cli update nano
108 | 
109 | nx-apphub-cli downgrade mc
110 | 
111 | nx-apphub-cli search nano mc fiery
112 | 
113 | nx-apphub-cli show
114 | 
115 | nx-apphub-cli build app.yml
116 |   ↪ (debug) nx-apphub-cli build app.yml --appdir-lint squashfs-root/
117 | 
118 | nx-apphub-cli generate \
119 |     --package mc \
120 |     --distro debian \
121 |     --release testing \
122 |     --arch amd64 \
123 |     --components main \
124 |     --output mc.yml \
125 |     --description-output app_description.md
126 | ```
127 | 
128 | # Licensing
129 | 
130 | The license for this repository and its contents is **BSD-3-Clause**.
131 | 
132 | # Issues
133 | 
134 | If you find problems with the contents of this repository, please create an issue and use the **🐞 Bug report** template.
135 | 
136 | ## Submitting a bug report
137 | 
138 | Before submitting a bug, you should look at the [existing bug reports](https://github.com/Nitrux/nx-apphub/issues) to verify that no one has reported the bug already.
139 | 
140 | ©2025 Nitrux Latinoamericana S.C.
141 | 
--------------------------------------------------------------------------------
/nx_apphub_cli/apprun.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: BSD-3-Clause
3 | # Copyright <2025> >
4 | 
5 | from datetime import datetime
6 | 
7 | from .exceptions import BuildError
8 | from .config import get_apprunconf_value
9 | from .utils import get_architecture
10 | from .sandbox import get_sandbox_exec_block
11 | 
12 | # <---
13 | # --->
14 | def generate_apprun(app_dir, config):
15 |     """Generate the AppRun script dynamically inside the AppImage."""
16 |     apprun_path = app_dir / "AppRun"
17 | 
18 |     # -- Fetch and validate settings from YAML.
19 | 
20 |     exec_cmd = get_apprunconf_value(config, "exec", expected_type=str)
21 |     setpath = get_apprunconf_value(config, "setpath", default="/usr/bin", expected_type=str)
22 |     setlibpath = get_apprunconf_value(config, "setlibpath", default="/usr/lib", expected_type=str)
23 |     envvars = get_apprunconf_value(config, "envvars", default={}, expected_type=dict)
24 | 
25 |     # -- Append to the base path in the generated AppRun.
26 | 
27 |     yaml_ld_value = envvars.pop("LD_LIBRARY_PATH", None)
28 |     ld_append_line = ""
29 |     if yaml_ld_value:
30 |         if isinstance(yaml_ld_value, list):
31 |             extra_ld = ":".join(str(v) for v in yaml_ld_value if v)
32 |         else:
33 |             extra_ld = str(yaml_ld_value).strip()
34 |         if extra_ld:
35 |             ld_append_line = f'\nexport LD_LIBRARY_PATH="$LD_LIBRARY_PATH:{extra_ld}"'
36 | 
37 |     # -- Generate environment variable exports dynamically. (str.format is used here because backslashes inside f-string expressions are a SyntaxError before Python 3.12.)
38 | 
39 |     env_exports = "\n".join(['export {0}="{1}"'.format(key, str(value).replace('"', '\\"')) for key, value in envvars.items()])
40 | 
41 |     # -- Conditionally add initialization for Qt environment variables **only if they exist in envvars**.
42 | 
43 |     qt_keys = [
44 |         "QT_QPA_PLATFORM",
45 |         "QT_PLUGIN_PATH",
46 |         "QT_QML_IMPORT_PATH",
47 |         "QML_IMPORT_PATH",
48 |         "QML2_IMPORT_PATH",
49 |         "QTWEBENGINEPROCESS_PATH",
50 |         "QTWEBENGINE_RESOURCES_PATH",
51 |         "QTWEBENGINE_LOCALES_PATH",
52 |         "QT_QUICK_CONTROLS_STYLE",
53 |         "QT_QUICK_CONTROLS_MOBILE",
54 |         "QT_DEBUG_PLUGINS",
55 |     ]
56 | 
57 |     qt_lines = []
58 |     for key in qt_keys:
59 |         if key in envvars:
60 |             qt_lines.append(f'if [ -z "${{{key}+x}}" ]; then export {key}=""; fi')
61 | 
62 |     qt_env_init = "\n".join(qt_lines)
63 | 
64 |     # -- Determine multiarch triplet dynamically.
65 | 
66 |     arch_map = {
67 |         "x86_64": "x86_64-linux-gnu",
68 |         "aarch64": "aarch64-linux-gnu",
69 |         "arm64": "aarch64-linux-gnu",
70 |     }
71 | 
72 |     arch = get_architecture()
73 |     multiarch_triplet = arch_map.get(arch)
74 |     if multiarch_triplet is None:
75 |         raise BuildError(f"Unsupported architecture '{arch}' for AppRun generation.")
76 | 
77 | 
78 |     # -- Construct the script.
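    # Illustrative example of the rendered pieces: with envvars containing
    # {"QT_QPA_PLATFORM": "xcb"}, qt_env_init holds the guard line
    #
    #     if [ -z "${QT_QPA_PLATFORM+x}" ]; then export QT_QPA_PLATFORM=""; fi
    #
    # and env_exports holds the corresponding export:
    #
    #     export QT_QPA_PLATFORM="xcb"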
79 | 80 | current_year = datetime.now().year 81 | copyright_str = f"# Copyright <{current_year}> >" 82 | 83 | sandbox_exec_block = get_sandbox_exec_block(config.get("sandbox", {}), exec_cmd) 84 | 85 | apprun_script = f"""#!/usr/bin/env bash 86 | 87 | # SPDX-License-Identifier: BSD-3-Clause 88 | {copyright_str} 89 | 90 | 91 | # -- Exit on errors. 92 | 93 | set -eu 94 | 95 | 96 | # -- Get the running directory of the AppImage. 97 | 98 | REALPATH=$(readlink -f "$0") 99 | APPDIR=$(dirname "$REALPATH") 100 | 101 | 102 | # -- Ensure environment variables are always set to avoid unbound variable errors. 103 | 104 | if [ -z "${{PATH+x}}" ]; then export PATH=""; fi 105 | if [ -z "${{LD_LIBRARY_PATH+x}}" ]; then export LD_LIBRARY_PATH=""; fi 106 | if [ -z "${{XDG_DATA_DIRS+x}}" ]; then export XDG_DATA_DIRS=""; fi 107 | 108 | 109 | # -- Initialize Qt environment variables if required. 110 | 111 | {qt_env_init} 112 | 113 | 114 | # -- Set environment variables for proper execution inside the AppImage. 115 | 116 | export PATH="$APPDIR{setpath}:$APPDIR/usr/sbin" 117 | export LD_LIBRARY_PATH="$APPDIR{setlibpath}:$APPDIR{setlibpath}/{multiarch_triplet}:$APPDIR{setlibpath}64:$APPDIR/lib:$APPDIR/lib64:$APPDIR/lib/{multiarch_triplet}:$APPDIR/lib64/{multiarch_triplet}"{ld_append_line} 118 | export XDG_DATA_DIRS="$APPDIR/usr/share:$XDG_DATA_DIRS" 119 | 120 | 121 | # -- Additional environment variables from YAML. 122 | 123 | {env_exports} 124 | 125 | 126 | # -- Run the application. 127 | 128 | {sandbox_exec_block} 129 | """ 130 | 131 | with open(apprun_path, "w", encoding="utf-8") as f: 132 | f.write(apprun_script) 133 | 134 | apprun_path.chmod(0o755) 135 | -------------------------------------------------------------------------------- /nx_apphub_cli/sandbox.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import os 6 | from pathlib import Path 7 | 8 | # <--- 9 | # ---> 10 | # -- Bubblewrap flag mappings. 
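# Illustrative mapping: a YAML sandbox section such as
#
#     sandbox:
#       type: bwrap
#       ro-home: true
#       no-net: true
#
# is rendered by get_sandbox_exec_block below into
#
#     exec /usr/bin/bwrap --ro-bind "$HOME" "$HOME" --unshare-net "$APPDIR/usr/bin/app" "$@"
#
# where /usr/bin/app stands in for the configured apprunconf exec path.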
11 | 12 | bwrap_boolean_flags = { 13 | "ro-root": ["--ro-bind", "/", "/"], 14 | "dev": ["--dev-bind", "/dev", "/dev"], 15 | "proc": ["--proc", "/proc"], 16 | "tmpfs": ["--tmpfs", "/tmp"], 17 | "mqueue": ["--mqueue", "/dev/mqueue"], 18 | "ro-home": ["--ro-bind", "$HOME", "$HOME"], 19 | "no-net": ["--unshare-net"], 20 | "no-ipc": ["--unshare-ipc"], 21 | "no-pid": ["--unshare-pid"], 22 | "unshare-user": ["--unshare-user"], 23 | "unshare-uts": ["--unshare-uts"], 24 | "unshare-cgroup": ["--unshare-cgroup"], 25 | "new-session": ["--new-session"], 26 | "cap-drop-all": ["--cap-drop", "ALL"], 27 | "die-with-parent": ["--die-with-parent"], 28 | "clearenv": ["--clearenv"] 29 | } 30 | 31 | bwrap_list_flags = { 32 | "bwrap_env": lambda _k, _v: [], 33 | "bwrap_unset-env": lambda _k, _v: [], 34 | "cap-drop": lambda _k, v: ["--cap-drop", v], 35 | "bind": lambda _k, v: ["--bind"] + v.split(":", 1), 36 | "ro-bind": lambda _k, v: ["--ro-bind"] + v.split(":", 1), 37 | "bind-try": lambda _k, v: ["--bind-try"] + v.split(":", 1), 38 | "ro-bind-try": lambda _k, v: ["--ro-bind-try"] + v.split(":", 1), 39 | "remount-ro": lambda _k, v: ["--remount-ro", v] 40 | } 41 | 42 | bwrap_key_value_flags = { 43 | "hostname": "--hostname", 44 | "chdir": "--chdir", 45 | "file-label": "--file-label", 46 | "exec-label": "--exec-label", 47 | "seccomp": "--seccomp" 48 | } 49 | 50 | 51 | def get_known_apparmor_profiles(): 52 | """Return a set of AppArmor profiles found in /etc/apparmor.d/.""" 53 | profile_dir = "/etc/apparmor.d/" 54 | try: 55 | return {f for f in os.listdir(profile_dir) if not f.startswith(".")} 56 | except FileNotFoundError: 57 | return set() 58 | 59 | 60 | def generate_firejail_profile(profile_name: str): 61 | """Generate a minimal Firejail profile and save it.""" 62 | 63 | profile_dir = Path.home() / ".local/share/nx-apphub-cli/firejail.d" 64 | profile_dir.mkdir(parents=True, exist_ok=True) 65 | profile_path = profile_dir / f"{profile_name}.profile" 66 | profile_content = f"""# Minimal Firejail profile for {profile_name} 67 | 68 | # Enable a default firewall 69 | 70 | netfilter 71 | 72 | # Restrict filesystem access 73 | 74 | private 75 | noroot 76 | restrict-namespaces 77 | seccomp 78 | disable-mnt 79 | private-cache 80 | private-cwd 81 | private-dev 82 | caps 83 | """ 84 | 85 | # -- Write the profile to the file. 
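    # (The AppRun generated for Firejail sandboxes points --profile= at this
    # exact path; see get_sandbox_exec_block below.)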
86 | 87 | with open(profile_path, "w", encoding="utf-8") as f: 88 | f.write(profile_content) 89 | 90 | print(f"🔒 Firejail profile saved to: {profile_path}\n") 91 | return profile_path 92 | 93 | 94 | def get_sandbox_exec_block(sandbox: dict, exec_cmd: str) -> str: 95 | sandbox_type = sandbox.get("type", "none") 96 | 97 | if sandbox_type == "firejail": 98 | profile_name = f"{sandbox.get('name', 'default-appbox')}-profile" 99 | firejail_profile_path = ( 100 | Path.home() 101 | / ".local/share/nx-apphub-cli/firejail.d" 102 | / f"{profile_name}.profile" 103 | ) 104 | 105 | if not firejail_profile_path.exists(): 106 | generate_firejail_profile(profile_name) 107 | 108 | apparmor_profile = sandbox.get("aa_profile", "none") 109 | cmd = f'"$APPDIR{exec_cmd}" "$@"' 110 | firejail_profile_str = str(firejail_profile_path) 111 | 112 | if apparmor_profile != "none": 113 | return ( 114 | f'exec /usr/bin/firejail --profile="{firejail_profile_str}" ' 115 | f'--apparmor="{apparmor_profile}" {cmd}' 116 | ) 117 | return f'exec /usr/bin/firejail --profile="{firejail_profile_str}" {cmd}' 118 | 119 | if sandbox_type == "bwrap": 120 | argv = ["/usr/bin/bwrap"] 121 | 122 | for key, flag in bwrap_boolean_flags.items(): 123 | if sandbox.get(key): 124 | argv.extend(flag) 125 | 126 | for key, transform in bwrap_list_flags.items(): 127 | for item in sandbox.get(key, []): 128 | if isinstance(item, str): 129 | item = item.replace("~", "$HOME") 130 | argv.extend(transform(key, item)) 131 | 132 | for item in sandbox.get("bwrap_env", []): 133 | if isinstance(item, dict): 134 | for k, v in item.items(): 135 | argv.extend(["--setenv", k, v]) 136 | 137 | for item in sandbox.get("bwrap_unset-env", []): 138 | argv.extend(["--unsetenv", item]) 139 | 140 | for key, flag in bwrap_key_value_flags.items(): 141 | if key in sandbox: 142 | argv.extend([flag, str(sandbox[key])]) 143 | 144 | argv.append(f"$APPDIR{exec_cmd}") 145 | argv.append("$@") 146 | 147 | rendered = [] 148 | for arg in argv: 149 | if arg == "/usr/bin/bwrap" or arg.startswith("--"): 150 | rendered.append(arg) 151 | elif arg == "$@": 152 | rendered.append('"$@"') 153 | else: 154 | escaped = arg.replace('"', '\\"') 155 | rendered.append(f'"{escaped}"') 156 | 157 | return "exec " + " ".join(rendered) 158 | 159 | return f'exec "$APPDIR{exec_cmd}" "$@"' 160 | -------------------------------------------------------------------------------- /nx_apphub_cli/generator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import gzip 6 | import lzma 7 | import re 8 | from io import BytesIO 9 | 10 | import requests 11 | 12 | from .exceptions import GeneratorError 13 | 14 | # <--- 15 | # ---> 16 | # -- Packages to exclude from being added to the YAML. 
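# (These are core host libraries; bundling the likes of libc6 or libstdc++6
# into an AppImage tends to break portability across hosts with different
# library versions, so they are dropped from the generated dependency list.)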
17 | 18 | excluded_packages = { 19 | "libc6", 20 | "libglib2.0-0t64", 21 | "libglib2.0-0", 22 | "libgcc-s1", 23 | "libstdc++6", 24 | "libglx0", 25 | "libegl1", 26 | "libgl1" 27 | } 28 | 29 | distro_mirrors = { 30 | "debian": [ 31 | "https://ftp.debian.org/debian", 32 | "https://uk.mirrors.clouvider.net/debian", 33 | "https://atl.mirrors.clouvider.net/debian", 34 | "https://ftp.tu-clausthal.de/debian", 35 | ], 36 | "ubuntu": [ 37 | "https://archive.ubuntu.com/ubuntu", 38 | "https://security.ubuntu.com/ubuntu", 39 | "https://mirrors.kernel.org/ubuntu", 40 | ], 41 | "devuan": [ 42 | "http://deb.devuan.org/merged", 43 | ], 44 | "kde-neon": [ 45 | "https://origin.archive.neon.kde.org/stable", 46 | ], 47 | "nitrux": [ 48 | "https://packagecloud.io/nitrux/mauikit/debian", 49 | "https://packagecloud.io/nitrux/area51/debian", 50 | ] 51 | } 52 | 53 | 54 | def fetch_repository_metadata(distro, release, arch, components): 55 | metadata = "" 56 | mirrors = distro_mirrors.get(distro) 57 | 58 | if mirrors is None: 59 | raise GeneratorError(f"Unknown distribution '{distro}'. Supported: {', '.join(distro_mirrors.keys())}") 60 | 61 | for mirror in mirrors: 62 | for component in components: 63 | base_url = f"{mirror}/dists/{release}/{component}/binary-{arch}/" 64 | urls_to_try = [base_url + "Packages.gz", base_url + "Packages.xz"] 65 | 66 | for url in urls_to_try: 67 | try: 68 | r = requests.get(url, timeout=10) 69 | 70 | if r.status_code == 404: 71 | continue 72 | 73 | r.raise_for_status() 74 | 75 | content = r.content 76 | if url.endswith(".gz"): 77 | with gzip.open(BytesIO(content), 'rt', encoding='utf-8', errors='ignore') as f: 78 | metadata += f.read() 79 | elif url.endswith(".xz"): 80 | with lzma.open(BytesIO(content), 'rt', encoding='utf-8', errors='ignore') as f: 81 | metadata += f.read() 82 | 83 | break 84 | 85 | except requests.exceptions.RequestException as e: 86 | print(f"🚧 Could not fetch metadata from the repository.") 87 | print(f" ↪ URL: {url}") 88 | 89 | if hasattr(e, 'response') and e.response is not None: 90 | print(f" ↪ Issue: The server returned a '{e.response.status_code} {e.response.reason}' error.") 91 | else: 92 | print(f" ↪ Issue: {e}") 93 | print() 94 | print("👉 Tip: A '404 Not Found' error usually means the combination of distribution, release, or component is incorrect.") 95 | print() 96 | 97 | return metadata 98 | 99 | 100 | def parse_package_info(package_name, metadata): 101 | packages = metadata.split('\n\n') 102 | for entry in packages: 103 | if f"Package: {package_name}\n" in entry: 104 | return entry 105 | return None 106 | 107 | 108 | def extract_field(entry, field): 109 | pattern = re.compile(rf"^{field}: (.+)$", re.MULTILINE) 110 | match = pattern.search(entry) 111 | return match.group(1) if match else None 112 | 113 | 114 | def parse_dependencies(dep_line): 115 | if not dep_line: 116 | return [] 117 | deps = [] 118 | for dep in dep_line.split(','): 119 | name = dep.strip().split('|')[0].strip().split(' ')[0] 120 | if name not in excluded_packages: 121 | deps.append(name) 122 | return deps 123 | 124 | 125 | def parse_fields(entry): 126 | fields = {} 127 | current_key = None 128 | buffer = [] 129 | 130 | for line in entry.splitlines(): 131 | if line.strip() == "": 132 | continue 133 | if re.match(r"^[A-Z][A-Za-z0-9-]*: ", line): 134 | if current_key: 135 | fields[current_key] = " ".join(buffer).strip() 136 | current_key, value = line.split(":", 1) 137 | buffer = [value.strip()] 138 | elif current_key: 139 | buffer.append(line.strip()) 140 | 141 | if current_key: 142 | 
fields[current_key] = " ".join(buffer).strip() 143 | 144 | return fields 145 | 146 | 147 | def generate_yaml(package_name, distro, release, arch, components, integration_key="gui", runtime="classic"): 148 | metadata = fetch_repository_metadata(distro, release, arch, components) 149 | 150 | if not metadata: 151 | raise GeneratorError( 152 | f"Could not fetch metadata for {distro}/{release}.\n" 153 | "Please check your internet connection or the distribution parameters (release, components)." 154 | ) 155 | 156 | entry = parse_package_info(package_name, metadata) 157 | if not entry: 158 | raise GeneratorError( 159 | f"Unable to find '{package_name}' in the repository metadata.\n" 160 | f" ↪ (Searched in Distro: {distro}, Release: {release}, Components: {', '.join(components)})\n" 161 | "\n" 162 | "👉 Tip: The package might be in a different component. " 163 | "Try adding them to your command, e.g.: --components main universe" 164 | ) 165 | 166 | fields = parse_fields(entry) 167 | 168 | version = fields.get("Version", "latest") 169 | depends = fields.get("Depends", "") 170 | deps = parse_dependencies(depends) 171 | 172 | deps.append(package_name) 173 | 174 | distro_entry = { 175 | "distro": distro, 176 | "release": release, 177 | "arch": arch, 178 | "components": components 179 | } 180 | 181 | yaml_data = { 182 | "buildinfo": { 183 | "name": package_name, 184 | "version": version, 185 | "binarypath": "/usr/bin/REPLACE-ME", 186 | "distrorepo": [distro_entry], 187 | "deps": deps, 188 | "runtime": runtime 189 | }, 190 | "apprunconf": { 191 | "exec": "/usr/bin/REPLACE-ME", 192 | "setpath": "/usr/bin", 193 | "setlibpath": "/usr/lib", 194 | "envvars": {}, 195 | "extra_rpaths" : [], 196 | "prebuild_commands": [] 197 | }, 198 | "sandbox": { 199 | "type": "none" 200 | }, 201 | "integration": { 202 | "type": integration_key 203 | } 204 | } 205 | return yaml_data, fields 206 | 207 | 208 | def generate_description_md(fields): 209 | name = fields.get("Package", "UNKNOWN") 210 | desc_raw = fields.get("Description", "No summary available") 211 | try: 212 | summary = desc_raw.split("--", 1)[-1].strip() 213 | except IndexError: 214 | summary = desc_raw.splitlines()[0] if desc_raw else "No summary available" 215 | 216 | full_desc = fields.get("Description", "No description provided.") 217 | apphub_category = fields.get("Category", "Not specified in metadata.") 218 | homepage = fields.get("Homepage", "https://example.com") 219 | license_name = fields.get("License", "Not specified in metadata") 220 | 221 | depends = fields.get("Depends", "").split(",") 222 | depends = [d.strip().split(" ")[0].split("|")[0] for d in depends if d] 223 | 224 | markdown = f"""# {name} 225 | 226 | ## Summary 227 | 228 | {summary} 229 | 230 | ## Description 231 | 232 | {full_desc} 233 | 234 | ## Category 235 | 236 | {apphub_category} 237 | 238 | ## Homepage 239 | 240 | [{homepage}]({homepage}) 241 | 242 | ## License 243 | 244 | {license_name} 245 | """ 246 | 247 | return markdown 248 | -------------------------------------------------------------------------------- /nx_apphub_cli/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import os 6 | import platform 7 | import re 8 | import shutil 9 | import signal 10 | from pathlib import Path 11 | from concurrent.futures import ThreadPoolExecutor, as_completed 12 | from threading import Lock, Event 13 | 14 | import requests 15 | from tqdm import tqdm 
16 | 17 | from .exceptions import ConfigError, DownloadError 18 | 19 | 20 | # -- Define base directories. 21 | 22 | cache_dir = Path.home() / ".cache/nx-apphub-cli" 23 | local_bin = Path.home() / ".local/bin" 24 | appimagetool_path = local_bin / "appimagetool" 25 | go_appimagetool_path = local_bin / "go-appimagetool" 26 | uruntime_path = local_bin / "uruntime" 27 | 28 | 29 | # -- Utility functions. 30 | 31 | def ensure_executable(path): 32 | """Ensure a file is executable.""" 33 | os.chmod(path, 0o755) 34 | 35 | 36 | def get_architecture(): 37 | """Return the system architecture for downloading the correct AppImageTool version.""" 38 | arch_map = { 39 | "x86_64": "x86_64", 40 | "aarch64": "aarch64", 41 | "arm64": "aarch64", 42 | } 43 | return arch_map.get(platform.machine(), "x86_64") 44 | 45 | 46 | def cleanup_cache(package_name=None): 47 | """Remove the cache directory for a specific package or skip full cache cleanup.""" 48 | 49 | if package_name: 50 | target_dir = cache_dir / package_name 51 | 52 | if target_dir.exists(): 53 | print(f"\n🧹 Cleaning up build cache for: {package_name}...\n") 54 | shutil.rmtree(target_dir, ignore_errors=True) 55 | else: 56 | print(f"\n🚨 Warning: No build cache found for: {package_name}. Skipping cleanup.\n") 57 | else: 58 | print("\nℹ️ Skipping full cache cleanup. Only removing package-specific cache.") 59 | 60 | 61 | def get_appimagetool(quiet=True): 62 | """Ensure appimagetool is available by downloading it if missing.""" 63 | if not appimagetool_path.exists(): 64 | if not quiet: 65 | print("appimagetool not found! Downloading from GitHub...") 66 | local_bin.mkdir(parents=True, exist_ok=True) 67 | 68 | # -- Detect system architecture and download the correct executable. 69 | 70 | arch = get_architecture() 71 | tool_url = f"https://github.com/AppImage/appimagetool/releases/latest/download/appimagetool-{arch}.AppImage" 72 | 73 | try: 74 | response = requests.get(tool_url, stream=True, timeout=20) 75 | response.raise_for_status() 76 | 77 | with open(appimagetool_path, "wb") as tool_file: 78 | for chunk in response.iter_content(1024): 79 | tool_file.write(chunk) 80 | 81 | appimagetool_path.chmod(0o755) 82 | if not quiet: 83 | print(f"✅ appimagetool downloaded and saved to {appimagetool_path}") 84 | 85 | except requests.RequestException as e: 86 | raise DownloadError(f"Error downloading appimagetool: {e}") from e 87 | 88 | return appimagetool_path 89 | 90 | 91 | def get_go_appimagetool(quiet=True): 92 | """Ensure go-appimagetool is available by downloading it if missing.""" 93 | if not go_appimagetool_path.exists(): 94 | if not quiet: 95 | print("go-appimagetool not found! 
Downloading from GitHub...") 96 | local_bin.mkdir(parents=True, exist_ok=True) 97 | 98 | arch = get_architecture() 99 | 100 | latest_url = "https://github.com/probonopd/go-appimage/releases/expanded_assets/continuous" 101 | try: 102 | response = requests.get(latest_url, timeout=20) 103 | response.raise_for_status() 104 | 105 | pattern = rf'href="([^"]*appimagetool-.*-{arch}\.AppImage)"' 106 | match = re.search(pattern, response.text) 107 | 108 | if match: 109 | download_url = f"https://github.com{match.group(1)}" 110 | response = requests.get(download_url, stream=True, timeout=20) 111 | response.raise_for_status() 112 | 113 | with open(go_appimagetool_path, "wb") as tool_file: 114 | for chunk in response.iter_content(1024): 115 | tool_file.write(chunk) 116 | 117 | go_appimagetool_path.chmod(0o755) 118 | if not quiet: 119 | print(f"✅ go-appimagetool downloaded and saved to {go_appimagetool_path}") 120 | 121 | else: 122 | raise DownloadError(f"Could not find a matching go-appimagetool build for architecture: {arch}") 123 | 124 | except requests.RequestException as e: 125 | raise DownloadError(f"Error downloading Go-based appimagetool: {e}") from e 126 | 127 | return go_appimagetool_path 128 | 129 | 130 | def get_uruntime(quiet=True): 131 | """Ensure uruntime is available by downloading it if missing.""" 132 | 133 | if not uruntime_path.exists(): 134 | if not quiet: 135 | print("❌ Error: uruntime not found! Downloading from GitHub...") 136 | 137 | local_bin.mkdir(parents=True, exist_ok=True) 138 | 139 | arch = get_architecture() 140 | uruntime_filename = f"uruntime-appimage-dwarfs-{arch}" 141 | 142 | tool_url = f"https://github.com/VHSgunzo/uruntime/releases/latest/download/{uruntime_filename}" 143 | 144 | try: 145 | response = requests.get(tool_url, stream=True, timeout=20) 146 | response.raise_for_status() 147 | 148 | with open(uruntime_path, "wb") as f: 149 | for chunk in response.iter_content(1024): 150 | f.write(chunk) 151 | 152 | uruntime_path.chmod(0o755) 153 | if not quiet: 154 | print(f"✅ uruntime downloaded and saved to {uruntime_path}") 155 | 156 | except requests.RequestException as e: 157 | raise DownloadError(f"Error downloading uruntime: {e}") from e 158 | 159 | return uruntime_path 160 | 161 | 162 | def concurrent_downloads(dependencies, base_repos, ppa_repos, cache_name): 163 | from .downloader import get_latest_deb 164 | from .extractor import extract_deb 165 | 166 | if not dependencies: 167 | print("📦 No dependencies listed.") 168 | return 169 | 170 | print(f"📥 Downloading {len(dependencies)} dependencies:\n") 171 | 172 | download_tasks = [] 173 | for dep in dependencies: 174 | if isinstance(dep, dict): 175 | pkg_name = dep["name"] 176 | repo_id = dep.get("repo") 177 | if repo_id: 178 | repo_list = [ppa_repos.get(repo_id)] 179 | if repo_list[0] is None: 180 | raise ConfigError(f"Unknown repo ID: '{repo_id}' for package: '{pkg_name}'.") 181 | else: 182 | repo_list = base_repos 183 | else: 184 | pkg_name = dep 185 | repo_list = base_repos 186 | 187 | download_tasks.append((pkg_name, repo_list)) 188 | 189 | terminal_width = shutil.get_terminal_size((80, 20)).columns 190 | 191 | try: 192 | with tqdm( 193 | total=len(download_tasks), 194 | desc=" ⏬ Fetching PKGs", 195 | unit="pkg", 196 | ncols=terminal_width, 197 | dynamic_ncols=False, 198 | bar_format="{desc} {percentage:3.0f}%|{bar}| {n_fmt}/{total_fmt} • {rate_fmt:<14}", 199 | leave=True 200 | ) as progress: 201 | with ThreadPoolExecutor(max_workers=3) as executor: 202 | log_lock = Lock() 203 | stop_event = Event() 204 | 205 
| future_to_pkg = { 206 | executor.submit(get_latest_deb, pkg_name, repo_list, cache_name, log_lock, stop_event=stop_event): pkg_name 207 | for pkg_name, repo_list in download_tasks 208 | } 209 | 210 | has_failed = False 211 | first_exception = None 212 | 213 | for future in as_completed(future_to_pkg): 214 | try: 215 | deb_path = future.result() 216 | 217 | if has_failed: 218 | continue 219 | 220 | if deb_path: 221 | extract_deb(deb_path, cache_name) 222 | 223 | progress.update(1) 224 | 225 | except Exception as e: 226 | if not has_failed: 227 | has_failed = True 228 | first_exception = e 229 | stop_event.set() 230 | executor.shutdown(wait=False, cancel_futures=True) 231 | 232 | os.kill(os.getpid(), signal.SIGINT) 233 | 234 | if has_failed: 235 | progress.close() 236 | cleanup_cache(cache_name) 237 | raise DownloadError(f"AppImage build failed! {first_exception}") from first_exception 238 | 239 | except KeyboardInterrupt: 240 | try: 241 | executor.shutdown(wait=False, cancel_futures=True) 242 | except NameError: 243 | pass 244 | 245 | cleanup_cache(cache_name) 246 | raise 247 | -------------------------------------------------------------------------------- /nx_apphub_cli/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import os 6 | 7 | from pathlib import Path 8 | 9 | import yaml 10 | 11 | from .exceptions import ConfigError 12 | from .sandbox import get_known_apparmor_profiles, bwrap_boolean_flags, bwrap_list_flags, bwrap_key_value_flags 13 | 14 | # <--- 15 | # ---> 16 | # -- Base cache directory. 17 | 18 | cache_dir = Path.home() / ".cache/nx-apphub-cli" 19 | 20 | 21 | # -- Load YAML configuration. 22 | 23 | def load_yaml_config(config_path): 24 | """Load and parse a YAML configuration file and return its contents as a dict.""" 25 | 26 | if not os.path.isfile(config_path): 27 | raise ConfigError( 28 | f"'{config_path}' is not a valid YAML file. " 29 | "Are you sure you passed the full path to a YAML file, not a directory?" 30 | ) 31 | 32 | try: 33 | with open(config_path, "r", encoding="utf-8") as f: 34 | data = yaml.safe_load(f) 35 | 36 | if not data: 37 | raise ConfigError(f"'{config_path}' is empty or invalid.") 38 | 39 | return data 40 | 41 | except yaml.YAMLError as e: 42 | raise ConfigError(f"YAML parsing error in '{config_path}': {e}") from e 43 | except Exception as e: 44 | raise ConfigError(f"Unexpected error while loading '{config_path}': {e}") from e 45 | 46 | 47 | def get_apprunconf_value(config, key, default=None, expected_type=None): 48 | """Fetch values from the 'apprunconf' section of the YAML configuration with type validation.""" 49 | value = config.get("apprunconf", {}).get(key, default) 50 | 51 | if expected_type and not isinstance(value, expected_type): 52 | raise ConfigError( 53 | f"Invalid type for 'apprunconf.{key}'. " 54 | f"Expected {expected_type.__name__}, got {type(value).__name__}. " 55 | "Please correct the YAML configuration before proceeding." 
56 | ) 57 | 58 | return value.strip() if isinstance(value, str) else value 59 | 60 | 61 | def validate_yaml_config(config): 62 | """Validate the structure and types of the YAML configuration.""" 63 | 64 | if not isinstance(config, dict): 65 | raise ConfigError("Top-level YAML structure must be a mapping (dictionary).") 66 | 67 | required_sections = { 68 | "buildinfo": { 69 | "name": str, 70 | "version": str, 71 | "binarypath": str 72 | }, 73 | "apprunconf": { 74 | "exec": str, 75 | "setpath": str, 76 | "setlibpath": str, 77 | "envvars": dict, 78 | } 79 | } 80 | 81 | # -- Validate required sections and keys. 82 | 83 | for section, keys in required_sections.items(): 84 | if section not in config: 85 | raise ConfigError(f"Missing required section '{section}' in YAML.") 86 | 87 | for key, expected_type in keys.items(): 88 | value = config[section].get(key) 89 | if value is None: 90 | raise ConfigError(f"Missing required key '{key}' in section '{section}' of YAML.") 91 | if not isinstance(value, expected_type): 92 | raise ConfigError( 93 | f"Invalid type for '{section}.{key}'. " 94 | f"Expected {expected_type.__name__}, got {type(value).__name__}." 95 | ) 96 | 97 | # -- Validate apprunconf section. 98 | 99 | apprunconf = config.get("apprunconf", {}) 100 | 101 | extra = apprunconf.get("extra_rpaths") 102 | if extra is None: 103 | apprunconf["extra_rpaths"] = [] 104 | elif isinstance(extra, str): 105 | apprunconf["extra_rpaths"] = [extra] 106 | elif isinstance(extra, list): 107 | if not all(isinstance(x, str) for x in extra): 108 | raise ConfigError("'apprunconf.extra_rpaths' list must contain only strings.") 109 | else: 110 | raise ConfigError("'apprunconf.extra_rpaths' must be a string or a list of strings.") 111 | 112 | prebuild = apprunconf.get("prebuild_commands") 113 | if prebuild is None: 114 | apprunconf["prebuild_commands"] = [] 115 | elif isinstance(prebuild, list): 116 | if not all(isinstance(x, str) for x in prebuild): 117 | raise ConfigError("'apprunconf.prebuild_commands' list must contain only strings.") 118 | else: 119 | raise ConfigError("'apprunconf.prebuild_commands' must be a list of strings.") 120 | 121 | config["apprunconf"] = apprunconf 122 | 123 | # -- Validate sandbox section. 124 | 125 | sandbox = config.get("sandbox", {}) 126 | if not isinstance(sandbox, dict): 127 | raise ConfigError("'sandbox' must be a dictionary.") 128 | 129 | sandbox_type = sandbox.get("type", "none") 130 | if sandbox_type not in ("bwrap", "firejail", "none"): 131 | raise ConfigError("'sandbox.type' must be one of: bwrap, firejail, none.") 132 | 133 | # -- Validate integration section early (needed for Firejail). 134 | 135 | integration = config.get("integration", {}) 136 | if not isinstance(integration, dict): 137 | raise ConfigError("'integration' must be a dictionary.") 138 | 139 | integration_type = integration.get("type") 140 | if integration_type not in ("cli", "gui", "wm"): 141 | raise ConfigError("'integration.type' must be one of: cli, gui, wm.") 142 | 143 | if integration_type == "wm" and sandbox_type != "none": 144 | raise ConfigError( 145 | "Window manager integration must not use a sandbox. " 146 | "Set 'sandbox.type' to 'none' when using integration.type: wm." 147 | ) 148 | 149 | # -- Validate distrorepo architecture consistency and allowed values. 
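    # (For example, mixing entries with arch: amd64 and arch: arm64 in one
    # build file is rejected below, as is distro: ubuntu with arch: arm64,
    # which must use ubuntu-ports instead.)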
150 | 151 | distrorepo = config["buildinfo"].get("distrorepo", {}) 152 | if isinstance(distrorepo, list): 153 | arches = set() 154 | for entry in distrorepo: 155 | arch = entry.get("arch") 156 | distro = entry.get("distro") 157 | 158 | if not arch: 159 | raise ConfigError("Missing 'arch' key in distrorepo entry.") 160 | 161 | if not distro: 162 | raise ConfigError("Missing 'distro' key in distrorepo entry.") 163 | 164 | if distro == "ubuntu" and arch != "amd64": 165 | raise ConfigError("'distrorepo.arch' for 'ubuntu' must be: amd64.") 166 | 167 | if distro == "ubuntu-ports" and arch not in ("arm64", "riscv64"): 168 | raise ConfigError("'distrorepo.arch' for 'ubuntu-ports' must be: arm64 or riscv64.") 169 | 170 | arches.add(arch) 171 | 172 | if len(arches) > 1: 173 | raise ConfigError("'distrorepo.arch' must not have mixed architectures.") 174 | 175 | def validate_firejail(sandbox): 176 | required = {"name"} 177 | optional = {"aa_profile"} 178 | 179 | for key in required: 180 | if key not in sandbox: 181 | raise ConfigError(f"Missing required 'sandbox.{key}' key for Firejail.") 182 | 183 | if "aa_profile" in sandbox: 184 | if not isinstance(sandbox["aa_profile"], str): 185 | raise ConfigError("'sandbox.aa_profile' must be a string.") 186 | 187 | profile = sandbox["aa_profile"] 188 | if profile != "none": 189 | known_profiles = get_known_apparmor_profiles() 190 | if profile not in known_profiles: 191 | print(f"🚨 Warning: aa_profile '{profile}' does not match any profile in /etc/apparmor.d/") 192 | print("\n 👉 To fix this, create or rename the profile file or set 'aa_profile: none'.\n") 193 | 194 | for key in sandbox.keys(): 195 | if key not in ({"type"} | required | optional): 196 | raise ConfigError(f"Unknown key 'sandbox.{key}' for Firejail.") 197 | 198 | def validate_bwrap(sandbox): 199 | allowed_keys = {"type"} | bwrap_boolean_flags.keys() | bwrap_list_flags.keys() | bwrap_key_value_flags.keys() 200 | 201 | for key in sandbox.keys(): 202 | if key not in allowed_keys: 203 | raise ConfigError(f"Unknown key 'sandbox.{key}' in Bwrap config.") 204 | 205 | for key in bwrap_boolean_flags: 206 | if key in sandbox and not isinstance(sandbox[key], bool): 207 | raise ConfigError(f"'sandbox.{key}' must be a boolean.") 208 | 209 | for key in bwrap_list_flags: 210 | if key in sandbox: 211 | if not isinstance(sandbox[key], list): 212 | raise ConfigError(f"'sandbox.{key}' must be a list.") 213 | 214 | if key == "bwrap_env": 215 | for item in sandbox[key]: 216 | if not isinstance(item, dict) or len(item) != 1: 217 | raise ConfigError("Each item in 'sandbox.bwrap_env' must be a dictionary with a single key-value pair.") 218 | for k, v in item.items(): 219 | if not isinstance(k, str) or not isinstance(v, str): 220 | raise ConfigError("'sandbox.bwrap_env' entries must have string key-value pairs.") 221 | 222 | elif key == "bwrap_unset-env": 223 | if not all(isinstance(v, str) for v in sandbox[key]): 224 | raise ConfigError("'sandbox.bwrap_unset-env' entries must be strings.") 225 | 226 | for key in bwrap_key_value_flags: 227 | if key in sandbox and not isinstance(sandbox[key], (str, int)): 228 | raise ConfigError(f"'sandbox.{key}' must be a string or integer.") 229 | 230 | # -- Firejail validation (disallow GUI apps). 231 | 232 | if sandbox_type == "firejail": 233 | validate_firejail(sandbox) 234 | if integration_type in ("gui", "wm"): 235 | raise ConfigError( 236 | "Firejail sandboxing is only supported for CLI apps. " 237 | "Use Bubblewrap (bwrap) for GUI applications instead." 
238 | ) 239 | elif sandbox_type == "bwrap": 240 | validate_bwrap(sandbox) 241 | 242 | # -- Validate runtime. 243 | 244 | allowed_runtimes = {"classic", "go", "uruntime"} 245 | runtime = config["buildinfo"].get("runtime", "classic") 246 | 247 | if not isinstance(runtime, str) or runtime not in allowed_runtimes: 248 | raise ConfigError(f"'buildinfo.runtime' must be one of: {', '.join(sorted(allowed_runtimes))}.") 249 | 250 | print("✅ YAML validation passed successfully.\n") 251 | -------------------------------------------------------------------------------- /nx_apphub_cli/cli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import argparse 6 | import re 7 | import subprocess 8 | import sys 9 | import types 10 | from datetime import datetime 11 | from io import StringIO 12 | from pathlib import Path 13 | 14 | import yaml 15 | 16 | from .exceptions import NxAppHubError, ConfigError, BuildError 17 | from .appdir_lint import run_linter 18 | from .builder import prepare_appimage, setup_appimage_directories 19 | from .config import load_yaml_config, validate_yaml_config 20 | from .generator import generate_yaml, generate_description_md 21 | from .manager import install, remove, search, show, update, downgrade 22 | from .utils import get_architecture, concurrent_downloads 23 | 24 | # <--- 25 | # ---> 26 | def main(): 27 | """Entry point for the nx-apphub-cli command-line interface.""" 28 | try: 29 | parser = argparse.ArgumentParser( 30 | prog="nx-apphub-cli", 31 | description="NX AppHub CLI — Lightweight command-line tool for managing and building applications in Nitrux." 32 | ) 33 | 34 | subparsers = parser.add_subparsers( 35 | dest="command", 36 | title="Commands", 37 | metavar="" 38 | ) 39 | 40 | # -- Management commands. 41 | 42 | subparser_install = subparsers.add_parser("install", help="Install one or more applications") 43 | subparser_install.add_argument("app_names", nargs="+", type=str, help="Name(s) of application(s) to install") 44 | 45 | subparser_remove = subparsers.add_parser("remove", help="Remove one or more installed applications") 46 | subparser_remove.add_argument("app_names", nargs="+", type=str, help="Name(s) of application(s) to remove") 47 | 48 | subparser_update = subparsers.add_parser("update", help="Update one or more installed applications") 49 | subparser_update.add_argument("app_names", nargs="+", type=str, help="Name(s) of application(s) to update") 50 | 51 | subparser_downgrade = subparsers.add_parser("downgrade", help="Downgrade one or more installed applications") 52 | subparser_downgrade.add_argument("app_names", nargs="+", type=str, help="Name(s) of application(s) to downgrade") 53 | 54 | subparser_search = subparsers.add_parser("search", help="Search for specific applications") 55 | subparser_search.add_argument("app_names", nargs="+", type=str, help="Name(s) of application(s) to search for") 56 | 57 | subparsers.add_parser("show", help="Show installed applications") 58 | 59 | # -- Building command (requires YAML file). 
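        # A minimal build file looks roughly like the template emitted by the
        # "generate" command (sketch; the name and paths below are placeholders):
        #
        #     buildinfo:
        #       name: mc
        #       version: latest
        #       binarypath: /usr/bin/mc
        #       distrorepo:
        #         - distro: debian
        #           release: testing
        #           arch: amd64
        #           components:
        #             - main
        #       deps:
        #         - mc
        #       runtime: classic
        #
        #     apprunconf:
        #       exec: /usr/bin/mc
        #       setpath: /usr/bin
        #       setlibpath: /usr/lib
        #       envvars: {}
        #
        #     sandbox:
        #       type: none
        #
        #     integration:
        #       type: gui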
60 | 61 | subparser_build = subparsers.add_parser("build", help="Build an AppImage from a local YAML file") 62 | subparser_build.add_argument("config", metavar="CONFIG", type=str, help="Path to YAML configuration file") 63 | subparser_build.add_argument("--appdir-lint", metavar="APPDIR", type=str, help="Run appdir-lint after build on the specified extracted AppDir") 64 | 65 | subparser_generate = subparsers.add_parser("generate", help="Generate YAML template from package metadata") 66 | subparser_generate.add_argument("--package", required=True, help="Package name") 67 | subparser_generate.add_argument("--distro", required=True, help="Distribution name (e.g., ubuntu)") 68 | subparser_generate.add_argument("--release", required=True, help="Release codename (e.g., oracular)") 69 | subparser_generate.add_argument("--arch", default="amd64", help="Architecture (default: amd64)") 70 | subparser_generate.add_argument("--components", nargs="*", default=["main"], help="APT components (default: main)") 71 | subparser_generate.add_argument("--output", default="app.yml", help="Output YAML file") 72 | subparser_generate.add_argument("--description-output", help="Output application metadata file") 73 | subparser_generate.add_argument("--integration-type", choices=["cli", "gui", "wm"], default="gui", help="Integration type: cli, gui, or wm (default: gui)") 74 | 75 | args = parser.parse_args() 76 | 77 | if not args.command: 78 | parser.print_help() 79 | sys.exit(1) 80 | 81 | if args.command == "install": 82 | install(args.app_names) 83 | elif args.command == "remove": 84 | remove(args.app_names) 85 | elif args.command == "update": 86 | update(args.app_names) 87 | elif args.command == "downgrade": 88 | downgrade(args.app_names) 89 | elif args.command == "search": 90 | search(args.app_names) 91 | elif args.command == "show": 92 | show() 93 | elif args.command == "build": 94 | print("\n[ 🛠 Building local AppImage... 
]\n") 95 | 96 | config = load_yaml_config(args.config) 97 | validate_yaml_config(config) 98 | 99 | package_name = config["buildinfo"]["name"] 100 | 101 | setup_appimage_directories(package_name, config["buildinfo"]["binarypath"]) 102 | 103 | distrorepo = config.get("buildinfo", {}).get("distrorepo", {}) 104 | 105 | if isinstance(distrorepo, list): 106 | repo_groups = [distrorepo] 107 | elif isinstance(distrorepo, dict): 108 | repo_groups = distrorepo.values() 109 | else: 110 | repo_groups = [] 111 | 112 | for repo_group in repo_groups: 113 | for repo in repo_group: 114 | for key in ["distro", "release", "arch"]: 115 | if key not in repo: 116 | raise ConfigError(f"Missing required key '{key}' in repo: {repo}") 117 | 118 | distrorepo = config["buildinfo"].get("distrorepo", {}) 119 | 120 | if isinstance(distrorepo, list): 121 | base_repos = distrorepo 122 | ppa_repos = {} 123 | elif isinstance(distrorepo, dict): 124 | base_repos = distrorepo.get("base", []) 125 | ppa_repos = {ppa["id"]: ppa for ppa in distrorepo.get("ppas", [])} 126 | else: 127 | base_repos = [] 128 | ppa_repos = {} 129 | 130 | dependencies = config["buildinfo"].get("deps", []) 131 | 132 | concurrent_downloads(dependencies, base_repos, ppa_repos, package_name) 133 | 134 | print() 135 | prepare_appimage(config) 136 | 137 | print("✅ AppImage creation complete!\n") 138 | 139 | if args.appdir_lint: 140 | app_name = config["buildinfo"]["name"] 141 | app_version = config["buildinfo"].get("version", "latest") 142 | arch = get_architecture() 143 | appimage_path = Path.cwd() / f"{app_name}-{app_version}-{arch}.AppImage" 144 | 145 | lint_target = Path(args.appdir_lint).expanduser() 146 | 147 | if not lint_target.exists(): 148 | if not appimage_path.exists(): 149 | raise BuildError(f"AppImage not found: {appimage_path}") 150 | 151 | print("📦 Extracting AppImage to squashfs-root/...") 152 | 153 | try: 154 | subprocess.run( 155 | [str(appimage_path), "--appimage-extract"], 156 | check=True, 157 | stdout=subprocess.DEVNULL, 158 | stderr=subprocess.DEVNULL 159 | ) 160 | except PermissionError: 161 | print(f"\n🚨 Warning: AppImage '{appimage_path.name}' is not executable. 
Attempting to fix permissions...") 162 | try: 163 | appimage_path.chmod(0o755) 164 | subprocess.run( 165 | [str(appimage_path), "--appimage-extract"], 166 | check=True, 167 | stdout=subprocess.DEVNULL, 168 | stderr=subprocess.DEVNULL 169 | ) 170 | print("✅ Successfully fixed and extracted AppImage.\n") 171 | except Exception as e: 172 | raise BuildError(f"Failed to extract AppImage after fixing permissions: {e}") 173 | except subprocess.CalledProcessError as e: 174 | raise BuildError(f"Failed to extract AppImage: {e}") 175 | 176 | lint_target = Path("squashfs-root") 177 | 178 | lint_args = types.SimpleNamespace( 179 | appdir=str(lint_target), 180 | yaml=args.config 181 | ) 182 | 183 | try: 184 | run_linter(lint_args) 185 | except Exception as e: 186 | print(f"❌ appdir-lint failed: {e}") 187 | 188 | elif args.command == "generate": 189 | integration_key = args.integration_type 190 | 191 | yaml_data, fields = generate_yaml( 192 | args.package, 193 | args.distro, 194 | args.release, 195 | args.arch, 196 | args.components, 197 | integration_key=integration_key 198 | ) 199 | if yaml_data: 200 | current_year = datetime.now().year 201 | header_lines = [ 202 | "# YAML build file", 203 | f"# nx-apphub-cli {current_year} (c) Nitrux Latinoamericana S.C.", 204 | "" 205 | ] 206 | 207 | with open(args.output, "w", encoding="utf-8") as f: 208 | f.write("\n".join(header_lines) + "\n") 209 | 210 | yaml_buffer = StringIO() 211 | yaml.dump( 212 | yaml_data, 213 | yaml_buffer, 214 | sort_keys=False, 215 | allow_unicode=True, 216 | default_flow_style=False, 217 | indent=2, 218 | width=100, 219 | ) 220 | yaml_str = yaml_buffer.getvalue() 221 | 222 | yaml_str = yaml_str.replace("\napprunconf:", "\n\napprunconf:") 223 | yaml_str = yaml_str.replace("\nsandbox:", "\n\nsandbox:") 224 | yaml_str = yaml_str.replace("\nintegration:", "\n\nintegration:") 225 | 226 | yaml_str = re.sub( 227 | r'(deps:\n)((?: - .*\n)+)', 228 | lambda m: m.group(1) + re.sub(r'^ ', ' ', m.group(2), flags=re.MULTILINE), 229 | yaml_str 230 | ) 231 | 232 | yaml_str = re.sub( 233 | r'(base:\n)((?: - .*\n)+)', 234 | lambda m: m.group(1) + re.sub(r'^ ', ' ', m.group(2), flags=re.MULTILINE), 235 | yaml_str 236 | ) 237 | 238 | yaml_str = re.sub( 239 | r'^ {2}(- distro:)', 240 | r' \1', 241 | yaml_str, 242 | flags=re.MULTILINE 243 | ) 244 | 245 | yaml_str = re.sub( 246 | r'(components:\n)((?: - .*\n)+)', 247 | lambda m: m.group(1) + re.sub(r'^ ', ' ', m.group(2), flags=re.MULTILINE), 248 | yaml_str 249 | ) 250 | 251 | yaml_str = re.sub( 252 | r'^( {4})(release|arch|components):', 253 | r' \2:', 254 | yaml_str, 255 | flags=re.MULTILINE 256 | ) 257 | 258 | f.write(yaml_str) 259 | 260 | print(f"✅ YAML template written to: {args.output}") 261 | 262 | if args.description_output and fields: 263 | md = generate_description_md(fields) 264 | with open(args.description_output, "w", encoding="utf-8") as desc: 265 | desc.write(md) 266 | print(f"📝 Description template written to: {args.description_output}") 267 | print() 268 | else: 269 | parser.print_help() 270 | sys.exit(1) 271 | except KeyboardInterrupt: 272 | print("🛑 Interrupted by SIGINT. 
Exiting cleanly.\n") 273 | sys.exit(130) 274 | except NxAppHubError as e: 275 | print(f"\n❌ Error: {e}\n") 276 | sys.exit(1) 277 | 278 | if __name__ == "__main__": 279 | main() 280 | -------------------------------------------------------------------------------- /nx_apphub_cli/appdir_lint.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import os 6 | import subprocess 7 | import gzip 8 | import argparse 9 | from datetime import datetime 10 | from io import BytesIO 11 | from pathlib import Path 12 | import re 13 | 14 | import requests 15 | import yaml 16 | from elftools.elf.elffile import ELFFile 17 | 18 | from .exceptions import BuildError 19 | 20 | # <--- 21 | # ---> 22 | def detect_appdir(path): 23 | """Normalize a user-provided path and detect whether it points to a squashfs-root directory.""" 24 | path = Path(path).expanduser().resolve() 25 | if path.name == "squashfs-root": 26 | return path 27 | if (path / "squashfs-root").is_dir(): 28 | return path / "squashfs-root" 29 | return path 30 | 31 | 32 | def is_elf(path): 33 | """Return True if the given file starts with an ELF header.""" 34 | try: 35 | with open(path, "rb") as f: 36 | return f.read(4) == b"\x7fELF" 37 | except Exception: 38 | return False 39 | 40 | 41 | def library_exists_in_appdir(libname, appdir): 42 | """Check whether a library file with the given name exists somewhere inside the AppDir.""" 43 | for _, _, files in os.walk(appdir): 44 | for file in files: 45 | if file == libname or file.startswith(libname + "."): 46 | return True 47 | return False 48 | 49 | 50 | def find_missing_libs(appdir): 51 | """Scan ELF files in the AppDir and return a mapping of missing libraries to the binaries requiring them.""" 52 | missing = {} 53 | for root, _, files in os.walk(appdir): 54 | for file in files: 55 | full_path = Path(root) / file 56 | if not is_elf(full_path): 57 | continue 58 | try: 59 | result = subprocess.check_output(['ldd', str(full_path)], 60 | stderr=subprocess.DEVNULL, 61 | text=True) 62 | except subprocess.CalledProcessError: 63 | continue 64 | for line in result.splitlines(): 65 | if '=> not found' in line: 66 | lib = line.split('=>')[0].strip() 67 | if library_exists_in_appdir(lib, appdir): 68 | continue 69 | missing.setdefault(lib, []).append(str(full_path)) 70 | return missing 71 | 72 | 73 | def is_valid_appdir(appdir_path): 74 | """Return True if the given path appears to be a minimally valid AppDir.""" 75 | if not appdir_path.is_dir(): 76 | return False 77 | 78 | app_run = appdir_path / "AppRun" 79 | usr_dir = appdir_path / "usr" 80 | 81 | if not app_run.is_file(): 82 | return False 83 | 84 | if not usr_dir.is_dir(): 85 | return False 86 | 87 | return True 88 | 89 | 90 | def suggest_providing_packages(missing_libs, repos, quiet=True): 91 | """ 92 | Suggest Debian/Ubuntu packages that may provide the given missing libraries 93 | by scanning Contents-*.gz files from the appropriate repositories. 
94 | """ 95 | suggestions = {} 96 | seen_urls = set() 97 | 98 | lib_patterns = { 99 | lib: re.compile(rf"{re.escape(lib)}(\s|$)") for lib in missing_libs 100 | } 101 | 102 | if isinstance(repos, dict): 103 | repos = repos.get('base', []) + repos.get('ppas', []) 104 | 105 | for repo in repos: 106 | distro = repo.get("distro", "").lower() 107 | release = repo.get("release") 108 | arch = repo.get("arch") 109 | components = repo.get("components", ["main"]) 110 | 111 | if not (distro and release and arch): 112 | if not quiet: 113 | print(f"⚠️ Skipping invalid repo definition: {repo}") 114 | continue 115 | 116 | if distro == "debian": 117 | mirrors = ["https://ftp.debian.org/debian"] 118 | subpath = "dists" 119 | elif distro == "ubuntu": 120 | mirrors = ["https://archive.ubuntu.com/ubuntu"] 121 | subpath = "dists" 122 | elif distro == "ubuntu-ports": 123 | mirrors = ["https://ports.ubuntu.com/ubuntu-ports"] 124 | subpath = "dists" 125 | elif distro == "devuan": 126 | mirrors = ["http://deb.devuan.org/merged"] 127 | subpath = "dists" 128 | elif distro == "kde-neon": 129 | mirrors = ["https://origin.archive.neon.kde.org/stable"] 130 | subpath = "dists" 131 | elif distro == "nitrux": 132 | if not quiet: 133 | print("⏩ Skipping Nitrux repository (no Contents file provided).") 134 | continue 135 | else: 136 | if not quiet: 137 | print(f"⏩ Unknown distro '{distro}', skipping.") 138 | continue 139 | 140 | for mirror in mirrors: 141 | for component in components: 142 | if distro in ("debian", "devuan"): 143 | url = f"{mirror}/{subpath}/{release}/{component}/Contents-{arch}.gz" 144 | else: 145 | url = f"{mirror}/{subpath}/{release}/Contents-{arch}.gz" 146 | 147 | if url in seen_urls: 148 | continue 149 | seen_urls.add(url) 150 | 151 | if not quiet: 152 | print(f"📥 Downloading: {url}") 153 | 154 | try: 155 | response = requests.get(url, timeout=20) 156 | response.raise_for_status() 157 | 158 | if not quiet: 159 | print(f"📑 Parsing: {url}\n") 160 | 161 | with gzip.open(BytesIO(response.content), 'rt', 162 | encoding='utf-8', 163 | errors='ignore') as f: 164 | for line in f: 165 | line = line.strip() 166 | if not line: 167 | continue 168 | parts = line.rsplit(None, 1) 169 | if len(parts) != 2: 170 | continue 171 | path, pkg = parts 172 | for lib, pattern in lib_patterns.items(): 173 | if pattern.search(path): 174 | if not quiet: 175 | print(f"✅ Matched {lib} → {pkg} in {url}") 176 | suggestions.setdefault(lib, set()).add(pkg) 177 | if not quiet: 178 | print() 179 | except Exception as e: 180 | if not quiet: 181 | print(f"⚠️ Failed to process {url}: {e}") 182 | continue 183 | 184 | return {lib: sorted(set(pkgs)) for lib, pkgs in suggestions.items()} 185 | 186 | 187 | def _read_dynamic_elf(path): 188 | """Extract DT_NEEDED, RPATH, and RUNPATH entries from an ELF binary.""" 189 | with open(path, "rb") as f: 190 | elf = ELFFile(f) 191 | dyn = elf.get_section_by_name(".dynamic") 192 | if dyn is None: 193 | return [], None, None 194 | needed = [] 195 | rpath = None 196 | runpath = None 197 | for tag in dyn.iter_tags(): 198 | t = tag.entry.d_tag 199 | if t == "DT_NEEDED": 200 | needed.append(tag.needed) 201 | elif t == "DT_RPATH": 202 | r = tag.rpath 203 | rpath = r.decode() if isinstance(r, bytes) else r 204 | elif t == "DT_RUNPATH": 205 | r = tag.runpath 206 | runpath = r.decode() if isinstance(r, bytes) else r 207 | return needed, rpath, runpath 208 | 209 | 210 | def _expand_origin_paths(raw, origin): 211 | """Expand $ORIGIN variables inside RPATH/RUNPATH entries.""" 212 | if not raw: 213 | return [] 214 | out 
= [] 215 | for entry in raw.split(":"): 216 | out.append(entry.replace("$ORIGIN", str(origin)).replace("${ORIGIN}", str(origin))) 217 | return out 218 | 219 | 220 | def _elf_search_paths(binary_path, appdir_path, rpath, runpath, extra=None): 221 | """Construct a list of library search paths for an ELF binary.""" 222 | origin = Path(binary_path).parent.resolve() 223 | paths = [] 224 | env = os.environ.get("LD_LIBRARY_PATH") 225 | if env: 226 | paths.extend([p for p in env.split(":") if p]) 227 | paths.extend(_expand_origin_paths(runpath, origin)) 228 | paths.extend(_expand_origin_paths(rpath, origin)) 229 | if appdir_path: 230 | paths.extend([ 231 | str(appdir_path / "usr/lib"), 232 | str(appdir_path / "usr/lib64"), 233 | str(appdir_path / "usr/lib/x86_64-linux-gnu"), 234 | str(appdir_path / "usr/lib/aarch64-linux-gnu"), 235 | str(appdir_path / "lib"), 236 | str(appdir_path / "lib64"), 237 | ]) 238 | if extra: 239 | paths.extend(extra) 240 | seen = set() 241 | uniq = [] 242 | for p in paths: 243 | if p and p not in seen: 244 | seen.add(p) 245 | uniq.append(p) 246 | return uniq 247 | 248 | 249 | def verify_elf_dependencies(binary, appdir): 250 | """Return detailed dependency resolution information for one ELF binary.""" 251 | b = Path(binary).resolve() 252 | needed, rpath, runpath = _read_dynamic_elf(b) 253 | cpaths = _elf_search_paths(b, Path(appdir).resolve(), rpath, runpath) 254 | resolved = {} 255 | missing = [] 256 | for soname in needed: 257 | hit = None 258 | for base in cpaths: 259 | cand = Path(base) / soname 260 | if cand.exists(): 261 | hit = str(cand) 262 | break 263 | if hit: 264 | resolved[soname] = hit 265 | else: 266 | missing.append(soname) 267 | return { 268 | "binary": str(b), 269 | "needed": needed, 270 | "rpath": rpath, 271 | "runpath": runpath, 272 | "search_paths": cpaths, 273 | "resolved": resolved, 274 | "missing": missing 275 | } 276 | 277 | 278 | def run_elf_checks(appdir): 279 | """Perform ELF dependency checks on all ELF binaries inside the AppDir.""" 280 | results = [] 281 | for root, _, files in os.walk(appdir): 282 | for file in files: 283 | full_path = Path(root) / file 284 | if is_elf(full_path): 285 | try: 286 | results.append(verify_elf_dependencies(str(full_path), appdir)) 287 | except Exception: 288 | pass 289 | return results 290 | 291 | 292 | def write_elf_report(appdir, details, report_path=None): 293 | """Write a textual report detailing ELF dependency issues.""" 294 | if report_path is None: 295 | ts = datetime.now().strftime("%Y%m%d-%H%M%S") 296 | report_dir = Path.home() / ".cache/nx-apphub-cli/reports" 297 | report_dir.mkdir(parents=True, exist_ok=True) 298 | report_path = report_dir / f"elf-check-{Path(appdir).name}-{ts}.txt" 299 | 300 | with open(report_path, "w", encoding="utf-8") as f: 301 | for d in details: 302 | f.write(d["binary"] + "\n") 303 | if d["runpath"] is not None: 304 | f.write(f" RUNPATH: {d['runpath']}\n") 305 | if d["rpath"] is not None: 306 | f.write(f" RPATH: {d['rpath']}\n") 307 | if d["needed"]: 308 | f.write(f" NEEDED: {', '.join(d['needed'])}\n") 309 | if d["missing"]: 310 | f.write(f" MISSING: {', '.join(d['missing'])}\n") 311 | f.write("\n") 312 | return str(report_path) 313 | 314 | 315 | def run_linter(args=None): 316 | """Entry point for running AppDir lint checks and reporting missing libraries.""" 317 | if args is None: 318 | parser = argparse.ArgumentParser(description="Check missing shared libraries in an AppDir.") 319 | parser.add_argument("appdir", type=str, help="Path to the AppDir or squashfs-root directory") 
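        # -- Editor's note (illustrative): `nx-apphub-cli build --appdir-lint`
        # -- drives this function with a plain namespace instead of argv, e.g.:
        # --
        # --   import types
        # --   run_linter(types.SimpleNamespace(appdir="squashfs-root", yaml="app.yml"))
        # --
        # -- The standalone parser defined here has no "yaml" option, which is
        # -- why the config lookup further below uses getattr(args, "yaml", None).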
320 | args = parser.parse_args() 321 | 322 | appdir_path = detect_appdir(args.appdir) 323 | 324 | # --- Handle uruntime symlink (squashfs-root -> AppDir). 325 | 326 | if appdir_path.is_symlink(): 327 | resolved = appdir_path.resolve() 328 | print(f"ℹ️ squashfs-root is a symlink — resolved to: {resolved}") 329 | appdir_path = resolved 330 | 331 | if not is_valid_appdir(appdir_path): 332 | raise BuildError(f"Invalid or incomplete AppDir: {appdir_path}") 333 | 334 | print() 335 | print(f"🔍 Scanning AppDir: {appdir_path}\n") 336 | missing = find_missing_libs(appdir_path) 337 | 338 | details = run_elf_checks(appdir_path) 339 | any_missing = [d for d in details if d["missing"]] 340 | if any_missing: 341 | report_path = write_elf_report(appdir_path, any_missing) 342 | print(f"🧩 ELF dependency details: {report_path}\n") 343 | 344 | if not missing: 345 | print("✅ No missing shared libraries found.\n") 346 | return 347 | 348 | print("🚨 Missing shared libraries:\n") 349 | for lib, sources in sorted(missing.items()): 350 | print(f"{lib} — required by:") 351 | for src in sorted(set(sources)): 352 | print(f" ↪ {src}") 353 | print() 354 | 355 | # -- Load YAML config to retrieve repositories. 356 | 357 | yaml_path = getattr(args, "yaml", None) 358 | if yaml_path and os.path.isfile(yaml_path): 359 | with open(yaml_path, "r", encoding="utf-8") as f: 360 | config = yaml.safe_load(f) or {} 361 | 362 | repos = config.get("buildinfo", {}).get("distrorepo", []) 363 | if isinstance(repos, dict): 364 | repos = repos.get("base", []) or [] 365 | 366 | print("💡 Suggesting Debian packages that may provide the missing libraries...\n") 367 | suggestions = suggest_providing_packages(missing.keys(), repos) 368 | for lib in missing: 369 | pkgs = suggestions.get(lib) 370 | if pkgs: 371 | print(f" ➤ {lib}: suggested packages → {', '.join(sorted(pkgs))}") 372 | else: 373 | print(f" ➤ {lib}: no suggestion found") 374 | print() 375 | -------------------------------------------------------------------------------- /nx_apphub_cli/manager.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # SPDX-License-Identifier: BSD-3-Clause 4 | 5 | import platform 6 | import shutil 7 | import subprocess 8 | import tarfile 9 | from pathlib import Path 10 | 11 | from .builder import prepare_appimage 12 | from .config import load_yaml_config 13 | from .utils import cleanup_cache, concurrent_downloads, get_architecture 14 | from .exceptions import ManagerError, NxAppHubError 15 | 16 | # <--- 17 | # ---> 18 | # -- Ensure directories exist. 19 | 20 | system_arch = get_architecture() 21 | 22 | repo_base_dir = Path.home() / ".local/share/nx-apphub-cli" 23 | repo_dir = repo_base_dir / "apps" 24 | backup_dir = repo_base_dir / "backups" 25 | install_dir = Path.home() / ".local/bin/nx-apphub" 26 | 27 | # -- Create all necessary directories. 28 | 29 | for directory in [repo_base_dir, repo_dir, backup_dir, install_dir]: 30 | directory.mkdir(parents=True, exist_ok=True) 31 | 32 | 33 | def ensure_repo_updated(): 34 | """Ensure the application repository is cloned and up-to-date.""" 35 | 36 | git_repo_url = "https://github.com/Nitrux/nx-apphub-apps.git" 37 | 38 | if repo_dir.exists() and not (repo_dir / ".git").exists(): 39 | print(f"🚨️ Warning: {repo_dir} is not a valid Git repository. 
Removing...\n") 40 | shutil.rmtree(repo_dir) 41 | repo_dir.mkdir(parents=True, exist_ok=True) 42 | 43 | if (repo_dir / ".git").exists(): 44 | try: 45 | status_result = subprocess.run( 46 | ["git", "-C", str(repo_dir), "status", "--porcelain"], 47 | check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True 48 | ) 49 | untracked = [ 50 | line[3:] 51 | for line in status_result.stdout.splitlines() 52 | if line.startswith("??") 53 | ] 54 | if untracked: 55 | print(f"🚨 Warning: Repository has untracked files: {', '.join(untracked)}") 56 | print(" 🔹 Changes might be overwritten or cause conflicts.\n") 57 | 58 | pull_result = subprocess.run( 59 | ["git", "-C", str(repo_dir), "pull"], 60 | stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, 61 | check=False 62 | ) 63 | if pull_result.returncode == 0: 64 | print("🔄 Applications repository updated.\n") 65 | else: 66 | print("🚨 Warning: Failed to update repository. Continuing with existing version.\n") 67 | 68 | except Exception as e: 69 | print(f"🚨 Warning: Git update check failed ({e}). Continuing...\n") 70 | 71 | if not (repo_dir / ".git").exists(): 72 | print("🔄 Applications repository is missing or empty. Cloning fresh copy...\n") 73 | try: 74 | if any(repo_dir.iterdir()): 75 | shutil.rmtree(repo_dir) 76 | repo_dir.mkdir(parents=True, exist_ok=True) 77 | 78 | subprocess.run( 79 | ["git", "clone", "--depth=1", git_repo_url, str(repo_dir)], 80 | check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL 81 | ) 82 | except subprocess.CalledProcessError as e: 83 | raise ManagerError("Failed to clone app repository. Try again.") from e 84 | except Exception as e: 85 | raise ManagerError(f"Unexpected failure during clone: {e}") from e 86 | 87 | 88 | def install(app_names): 89 | """Fetch YAML metadata, build AppImage, and store metadata for multiple applications.""" 90 | 91 | if not isinstance(app_names, list): 92 | app_names = [app_names] 93 | 94 | print(f"\n[ ⚡ Installing: {', '.join(app_names)} ]\n") 95 | 96 | ensure_repo_updated() 97 | 98 | to_build = [] 99 | printed_installed_msg = False 100 | 101 | for app_name in app_names: 102 | app_yaml_path = repo_dir / "apps" / system_arch / app_name / "app.yml" 103 | 104 | if not app_yaml_path.exists(): 105 | print(f" ❌ Error: No YAML found for {app_name} ({system_arch}) in repository.\n") 106 | continue 107 | 108 | config = load_yaml_config(app_yaml_path) 109 | app_version = config["buildinfo"].get("version", "unknown") 110 | 111 | if not app_version or app_version == "unknown": 112 | print(f" ❌ Error: No valid version found for {app_name}. Skipping installation.\n") 113 | continue 114 | 115 | installed_appbox = next(install_dir.glob(f"{app_name}-*-{system_arch}.AppBox"), None) 116 | if installed_appbox: 117 | parts = installed_appbox.stem.split("-") 118 | installed_version = "-".join(parts[1:-1]) if len(parts) > 2 else "unknown" 119 | 120 | print(f" ℹ️ {app_name} is already installed (version {installed_version}). Skipping installation.") 121 | printed_installed_msg = True 122 | continue 123 | 124 | to_build.append((app_name, config)) 125 | 126 | if printed_installed_msg: 127 | print() 128 | 129 | for index, (app_name, config) in enumerate(to_build): 130 | repos_config = config["buildinfo"].get("distrorepo", {}) 131 | if not repos_config: 132 | print(f" ❌ Error: No 'distrorepo' specified for {app_name}. 
Skipping installation.\n") 133 | continue 134 | 135 | base_repos = repos_config if isinstance(repos_config, list) else repos_config.get("base", []) 136 | ppa_repos = {} if isinstance(repos_config, list) else { 137 | ppa["id"]: ppa for ppa in repos_config.get("ppas", []) 138 | } 139 | 140 | dependencies = config["buildinfo"].get("deps", []) 141 | 142 | concurrent_downloads(dependencies, base_repos, ppa_repos, app_name) 143 | 144 | print("\n🛠 Building AppBox...\n") 145 | prepare_appimage(config, install_mode=True) 146 | 147 | built_appbox = install_dir / f"{app_name}-{config['buildinfo'].get('version')}-{system_arch}.AppBox" 148 | if not built_appbox.exists(): 149 | cleanup_cache(app_name) 150 | raise ManagerError(f"Failed to find the built {built_appbox} file.") 151 | 152 | print(f"✅ Installation successful!\n\n 📦 Available at: {built_appbox}") 153 | print() 154 | 155 | if index < len(to_build) - 1: 156 | print() 157 | 158 | print("🎉 All requested applications have been processed!\n") 159 | 160 | 161 | def remove(app_names): 162 | """Remove one or more installed AppBoxes.""" 163 | 164 | if isinstance(app_names, str): 165 | app_names = [app_names] 166 | 167 | print(f"\n[ 🗑 Removing: {', '.join(app_names)} ]\n") 168 | 169 | removed_apps = [] 170 | missing_apps = [] 171 | firejail_profiles_deleted = [] 172 | 173 | for app_name in app_names: 174 | app_file = next(install_dir.glob(f"{app_name}-*-{system_arch}.AppBox"), None) 175 | 176 | if not app_file: 177 | missing_apps.append(f" ❌ {app_name} (Not Installed)") 178 | continue 179 | 180 | try: 181 | app_file.unlink() 182 | removed_apps.append(f" ✅ {app_name} (Deleted)") 183 | 184 | firejail_profile = Path.home() / ".local/share/nx-apphub-cli/firejail.d" / f"{app_name}-profile.profile" 185 | if firejail_profile.exists(): 186 | firejail_profile.unlink() 187 | firejail_profiles_deleted.append(firejail_profile.name) 188 | 189 | except PermissionError: 190 | missing_apps.append(f" ❌ {app_name} (Permission Denied)") 191 | continue 192 | 193 | if removed_apps: 194 | print("🟢 Uninstalled:\n\n" + "\n".join(removed_apps)) 195 | 196 | if firejail_profiles_deleted: 197 | print(f"\n🔒 Firejail profile(s) deleted: {', '.join(sorted(firejail_profiles_deleted))}") 198 | 199 | if missing_apps: 200 | if removed_apps or firejail_profiles_deleted: 201 | print() 202 | print("🔴 Skipped:\n\n" + "\n".join(missing_apps)) 203 | 204 | print() 205 | print("🎉 All requested applications have been processed!\n") 206 | 207 | 208 | def search(app_names): 209 | """Search for specific applications in the local repository.""" 210 | 211 | print(f"\n[ 🔍 Searching for: {', '.join(app_names)} ]\n") 212 | 213 | ensure_repo_updated() 214 | 215 | found_apps = [] 216 | missing_apps = [] 217 | 218 | for app_name in app_names: 219 | search_path = repo_dir / "apps" / system_arch 220 | 221 | if not search_path.exists(): 222 | missing_apps.append(f" ❌ {app_name} (Architecture directory '{system_arch}' missing in repository)") 223 | continue 224 | 225 | matched_paths = [ 226 | p for p in search_path.glob("*") 227 | if app_name in p.name 228 | ] 229 | 230 | if not matched_paths: 231 | missing_apps.append(f" ❌ {app_name} (Unknown application)") 232 | continue 233 | 234 | for p in matched_paths: 235 | app_yaml = p / "app.yml" 236 | if app_yaml.exists(): 237 | config = load_yaml_config(app_yaml) 238 | version = config["buildinfo"].get("version", "unknown") 239 | found_apps.append(f" ✅ {p.name} - Version: {version} - Arch: {system_arch}") 240 | else: 241 | missing_apps.append(f" ❌ {p.name} (Missing 
YAML)") 242 | 243 | if found_apps: 244 | print("\n🟢 Found Applications:\n") 245 | print("\n".join(found_apps), "\n") 246 | 247 | if missing_apps: 248 | print("\n🔴 Not Found:\n") 249 | print("\n".join(missing_apps), "\n") 250 | 251 | 252 | def update(app_names): 253 | """Update one or more AppBoxes only if a newer version is available.""" 254 | 255 | if isinstance(app_names, str): 256 | app_names = [app_names] 257 | 258 | app_names = list(dict.fromkeys(app_names)) 259 | 260 | print(f"\n[ 📤 Updating: {', '.join(app_names)} ]\n") 261 | 262 | ensure_repo_updated() 263 | 264 | for app_name in app_names: 265 | print(f"\n[ 🔄 Checking updates for: {app_name} ]\n") 266 | 267 | installed_app = next(install_dir.glob(f"{app_name}-*-{system_arch}.AppBox"), None) 268 | 269 | if not installed_app: 270 | print(f" ❌ Error: {app_name} is not installed. Cannot update.\n") 271 | continue 272 | 273 | installed_parts = installed_app.stem.split("-") 274 | installed_version = "-".join(installed_parts[1:-1]) if len(installed_parts) > 2 else "unknown" 275 | 276 | app_yaml_path = repo_dir / "apps" / system_arch / app_name / "app.yml" 277 | 278 | if not app_yaml_path.exists(): 279 | print(f" ❌ Error: No YAML found for {app_name} ({system_arch}) in repository.\n") 280 | print() 281 | continue 282 | 283 | config = load_yaml_config(app_yaml_path) 284 | latest_version = config["buildinfo"].get("version", "unknown") 285 | 286 | if not latest_version or latest_version == "unknown": 287 | print(f" ❌ Error: No valid version information found for {app_name}. Aborting update.\n") 288 | continue 289 | 290 | if installed_version == latest_version: 291 | print(f" ✅ {app_name} is already up to date (version {installed_version}).\n") 292 | continue 293 | 294 | print(f" 🔄 New version available: {latest_version} (Installed: {installed_version})\n") 295 | 296 | try: 297 | installed_app.chmod(0o644) 298 | except OSError as e: 299 | print(f"🚨️ Warning: Failed to modify permissions of {installed_app}. Reason: {e}") 300 | 301 | backup_name = backup_dir / f"{app_name}-{installed_version}-{system_arch}.tar" 302 | try: 303 | with tarfile.open(backup_name, "w") as tar: 304 | tar.add(installed_app, arcname=installed_app.name) 305 | print(f"📦 Backup created: {backup_name}") 306 | except Exception as e: 307 | print(f"❌ Error creating backup for {app_name}: {e}") 308 | continue 309 | 310 | try: 311 | installed_app.unlink() 312 | except OSError as e: 313 | print(f"❌ Error deleting {installed_app}: {e}") 314 | continue 315 | 316 | try: 317 | install([app_name]) 318 | except NxAppHubError as e: 319 | print(f"❌ Update failed: {e}") 320 | print(" Restoring backup...") 321 | 322 | try: 323 | with tarfile.open(backup_name, "r") as tar: 324 | tar.extractall(path=install_dir) 325 | restored_appbox = install_dir / f"{app_name}-{installed_version}-{system_arch}.AppBox" 326 | if restored_appbox.exists(): 327 | restored_appbox.chmod(0o755) 328 | print(f"♻️ Restored {app_name} to version {installed_version}\n") 329 | else: 330 | print(f"❌ Failed to restore {app_name}.") 331 | except Exception as restore_err: 332 | print(f"❌ Critical error: Could not restore backup for {app_name}. Reason: {restore_err}") 333 | continue 334 | 335 | new_appbox = install_dir / f"{app_name}-{latest_version}-{system_arch}.AppBox" 336 | if new_appbox.exists(): 337 | print(f"✅ {app_name} successfully updated to version {latest_version}!\n") 338 | else: 339 | print(f"❌ Update failed: No new AppImage found. 
Restoring backup...") 340 | try: 341 | with tarfile.open(backup_name, "r") as tar: 342 | tar.extractall(path=install_dir) 343 | restored_appbox = install_dir / f"{app_name}-{installed_version}-{system_arch}.AppBox" 344 | if restored_appbox.exists(): 345 | restored_appbox.chmod(0o755) 346 | print(f"♻️ Restored {app_name} to version {installed_version}\n") 347 | except Exception as e: 348 | print(f"❌ Critical error: Could not restore backup for {app_name}. Reason: {e}") 349 | 350 | 351 | def downgrade(app_names): 352 | """Restore specific backups of multiple AppBoxes.""" 353 | 354 | if isinstance(app_names, str): 355 | app_names = [app_names] 356 | 357 | print(f"\n[ ⏳ Downgrading: {', '.join(app_names)} ]\n") 358 | 359 | for app_name in app_names: 360 | print(f"\n🔽 Processing downgrade for: {app_name}...\n") 361 | 362 | backups = sorted(backup_dir.glob(f"{app_name}-*-{system_arch}.tar"), reverse=True) 363 | 364 | if not backups: 365 | print(f" ❌ No backups found for {app_name}.\n") 366 | continue 367 | 368 | print("📦 Available backups:\n") 369 | for i, backup in enumerate(backups, 1): 370 | print(f" {i}. {backup.name}") 371 | 372 | while True: 373 | choice = input(f"\n🔢 Enter the number of the backup to restore for {app_name} (default = latest): ").strip() 374 | if choice == "": 375 | index = 0 376 | break 377 | if choice.isdigit() and 1 <= int(choice) <= len(backups): 378 | index = int(choice) - 1 379 | break 380 | print("\n ⛔ Invalid selection. Please enter a valid number.") 381 | 382 | selected_backup = backups[index] 383 | 384 | print(f"\n🔄 Restoring backup: {selected_backup.name}...\n") 385 | 386 | try: 387 | with tarfile.open(selected_backup, "r") as tar: 388 | extracted_files = tar.getnames() 389 | tar.extractall(path=install_dir) 390 | 391 | restored_appbox = None 392 | for file in extracted_files: 393 | restored_path = install_dir / file 394 | if restored_path.suffix == ".AppBox" and restored_path.exists(): 395 | restored_appbox = restored_path 396 | break 397 | 398 | if not restored_appbox: 399 | print(f"❌ Error: Restoration failed! No valid AppBox found in {install_dir}.\n") 400 | continue 401 | 402 | restored_appbox.chmod(0o755) 403 | print(f"✅ Successfully restored {app_name} to {restored_appbox.name}\n") 404 | 405 | for newer_version in install_dir.glob(f"{app_name}-*-{system_arch}.AppBox"): 406 | if newer_version != restored_appbox: 407 | try: 408 | newer_version.unlink() 409 | except OSError as e: 410 | print(f"🚨️ Warning: Failed to remove newer version {newer_version}. Reason: {e}") 411 | 412 | except (tarfile.TarError, OSError) as e: 413 | print(f"❌ Error: Could not restore {app_name} from backup. 
Reason: {e}") 414 | 415 | print("🎉 All requested applications have been processed!\n") 416 | 417 | 418 | def format_size(size_bytes): 419 | for unit in ["B", "KiB", "MiB", "GiB", "TiB"]: 420 | if size_bytes < 1024: 421 | return f"{size_bytes:.2f} {unit}" 422 | size_bytes /= 1024 423 | return f"{size_bytes:.2f} PiB" 424 | 425 | 426 | def show(): 427 | """Show installed AppBoxes.""" 428 | print("\n[ 📦 Installed AppBoxes ]\n") 429 | 430 | installed_apps = list(install_dir.glob(f"*-{system_arch}.AppBox")) 431 | 432 | if not installed_apps: 433 | print("❌ No applications installed.\n") 434 | return 435 | 436 | installed_apps.sort(key=lambda app: app.stat().st_size, reverse=True) 437 | 438 | total_size = 0 439 | 440 | for app in installed_apps: 441 | size = app.stat().st_size 442 | total_size += size 443 | print(f" ✅ {app.name} ({format_size(size)})") 444 | 445 | print(f"\n📁 Total: {len(installed_apps)} installed in {install_dir}\n") 446 | print(f"📦 Size: {format_size(total_size)}\n") 447 | 448 | 449 | # -- Export functions. 450 | 451 | __all__ = ["install", "remove", "update", "downgrade", "search", "show"] 452 | -------------------------------------------------------------------------------- /nx_apphub_cli/downloader.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import gzip 6 | import lzma 7 | import random 8 | import time 9 | from io import BytesIO 10 | from urllib.parse import urljoin, urlparse 11 | from collections import defaultdict 12 | from pathlib import Path 13 | from threading import Lock 14 | from concurrent.futures import ThreadPoolExecutor, as_completed 15 | 16 | import requests 17 | from debian import debian_support 18 | from tqdm import tqdm 19 | from requests.adapters import HTTPAdapter 20 | from urllib3.util.retry import Retry 21 | 22 | from .exceptions import DownloadError 23 | 24 | # <--- 25 | # ---> 26 | # -- Base cache directory for downloads. 27 | 28 | cache_dir = Path.home() / ".cache/nx-apphub-cli" 29 | 30 | 31 | # -- Mirrors for supported distributions. 32 | 33 | debian_mirrors = [ 34 | "https://ftp.debian.org/debian", 35 | "https://uk.mirrors.clouvider.net/debian", 36 | "https://atl.mirrors.clouvider.net/debian", 37 | "https://ftp.tu-clausthal.de/debian", 38 | ] 39 | 40 | ubuntu_mirrors = [ 41 | "https://archive.ubuntu.com/ubuntu", 42 | "https://security.ubuntu.com/ubuntu", 43 | "https://mirrors.kernel.org/ubuntu", 44 | ] 45 | 46 | ubuntu_ports_mirrors = [ 47 | "https://ports.ubuntu.com/ubuntu-ports", 48 | ] 49 | 50 | devuan_mirrors = [ 51 | "http://deb.devuan.org/merged", 52 | ] 53 | 54 | kde_neon_mirrors = [ 55 | "https://origin.archive.neon.kde.org/stable", 56 | ] 57 | 58 | nitrux_mirrors = [ 59 | "https://packagecloud.io/nitrux/mauikit/debian", 60 | "https://packagecloud.io/nitrux/area51/debian", 61 | ] 62 | 63 | zbkit_mirrors = [ 64 | "https://packagecloud.io/nitrux/zbkit/debian", 65 | ] 66 | 67 | 68 | # -- Caching and Locks 69 | 70 | cache_lock = Lock() 71 | metadata_cache = {} 72 | 73 | 74 | # -- Use retry strategy and session reuse for connection pooling. 
75 | 76 | retry_strategy = Retry( 77 | total=3, 78 | status_forcelist=[429, 500, 502, 503, 504], 79 | backoff_factor=0.3, 80 | ) 81 | adapter = HTTPAdapter(max_retries=retry_strategy) 82 | 83 | session = requests.Session() 84 | session.mount("http://", adapter) 85 | session.mount("https://", adapter) 86 | 87 | 88 | def get_mirrors_for_distro(distro): 89 | return { 90 | "debian": debian_mirrors, 91 | "ubuntu": ubuntu_mirrors, 92 | "ubuntu-ports": ubuntu_ports_mirrors, 93 | "devuan": devuan_mirrors, 94 | "kde-neon": kde_neon_mirrors, 95 | "nitrux": nitrux_mirrors, 96 | "zbkit": zbkit_mirrors 97 | }.get(distro, None) 98 | 99 | 100 | def build_probe_tasks(repos, pkg_name, quiet): 101 | """ 102 | Build probe tasks that randomly distribute mirrors to balance load, 103 | avoiding hitting the same mirror with multiple concurrent requests unnecessarily. 104 | """ 105 | tasks = [] 106 | for repo in repos: 107 | if "ppa" in repo: 108 | continue 109 | 110 | distro = repo.get("distro", "").lower() 111 | release = repo.get("release") 112 | arch = repo.get("arch") 113 | components = repo.get("components", ["main"]) 114 | 115 | if not (distro and release and arch): 116 | if not quiet: 117 | print(f"❌ Error: Missing required repo keys for {pkg_name}: {repo}") 118 | continue 119 | 120 | mirror_list = get_mirrors_for_distro(distro) 121 | if not mirror_list: 122 | if not quiet: 123 | print(f"⚠️ Skipping unknown distro: {distro}") 124 | continue 125 | 126 | # -- Randomize the mirror list to spread load across mirrors. 127 | 128 | mirror_list = mirror_list[:] 129 | random.shuffle(mirror_list) 130 | 131 | # -- Only add one mirror per component at a time to reduce load. 132 | 133 | for component in components: 134 | for mirror in mirror_list: 135 | tasks.append((mirror, release, arch, pkg_name, component)) 136 | 137 | return tasks 138 | 139 | 140 | def get_latest_deb(pkg_name, repos, package_name, log_lock, stop_event=None, quiet=True): 141 | """Download the latest .deb package for the given pkg_name by probing all mirrors concurrently using threads.""" 142 | 143 | excluded_packages = { 144 | "dbus-user-session", 145 | "libc6", 146 | "libdrm2", 147 | "libegl-mesa0", 148 | "libegl1", 149 | "libgbm1", 150 | "libgcc-s1", 151 | "libgl1", 152 | "libgl1-mesa-dri", 153 | "libgl1-mesa-glx", 154 | "libglapi-mesa", 155 | "libgles2", 156 | "libglib2.0-0", 157 | "libglib2.0-0t64", 158 | "libglib2.0-bin", 159 | "libglx-mesa0", 160 | "libglx0", 161 | "libopengl0", 162 | "libstdc++6", 163 | "libsystemd0", 164 | "libsystemd-shared", 165 | "libwayland-client0", 166 | "libwayland-cursor0", 167 | "libwayland-egl1", 168 | "libwayland-server0", 169 | "mesa-libgallium", 170 | "mesa-vulkan-drivers", 171 | "sudo", 172 | "systemd", 173 | "systemd-sysv", 174 | "udev" 175 | } 176 | 177 | if pkg_name in excluded_packages: 178 | if not quiet: 179 | print(f"\n\n ⚠️ Skipping {pkg_name}: This package is a core system library and should not be bundled in the AppImage.\n") 180 | return None 181 | 182 | if stop_event and stop_event.is_set(): 183 | raise DownloadError("Download cancelled.") 184 | 185 | package_dir = cache_dir / package_name 186 | deb_dir = package_dir / "debs" 187 | deb_dir.mkdir(parents=True, exist_ok=True) 188 | 189 | if not repos: 190 | raise DownloadError(f"No valid repositories provided for {pkg_name}.") 191 | 192 | probe_tasks = build_probe_tasks(repos, pkg_name, quiet) 193 | 194 | fetch_failures = [] 195 | no_metadata = [] 196 | candidates = [] 197 | mirror_logs = [] 198 | 199 | for repo in repos: 200 | if "ppa" in repo: 201 
| candidate = fetch_from_ppa(pkg_name, repo, deb_dir, quiet) 202 | if candidate: 203 | candidates.append(candidate) 204 | 205 | def probe_mirror(task): 206 | if stop_event and stop_event.is_set(): 207 | return (task, None, None, Exception("Download cancelled")) 208 | 209 | mirror, release, arch, _, component = task 210 | try: 211 | result, status_msg = fetch_package_metadata(mirror, release, arch, pkg_name, component, stop_event=stop_event) 212 | return (task, result, status_msg, None) 213 | except Exception as e: 214 | return (task, None, None, e) 215 | 216 | with ThreadPoolExecutor(max_workers=5) as executor: 217 | future_to_task = {executor.submit(probe_mirror, task): task for task in probe_tasks} 218 | for future in as_completed(future_to_task): 219 | if stop_event and stop_event.is_set(): 220 | executor.shutdown(wait=False, cancel_futures=True) 221 | raise DownloadError("Download cancelled during mirror probing.") 222 | 223 | mirror, _, _, pkg_name_task, component = future_to_task[future] 224 | try: 225 | _, result, status_msg, exception = future.result() 226 | if result: 227 | filename, version_str = result 228 | version = debian_support.Version(version_str) 229 | deb_url = f"{mirror}/{filename}" 230 | candidates.append({ 231 | "version": version, 232 | "version_str": version_str, 233 | "url": deb_url, 234 | "path": deb_dir / f"{pkg_name_task}.deb", 235 | "source": f"{mirror} [{component}]" 236 | }) 237 | elif status_msg and not quiet: 238 | if "Unable to fetch metadata" in status_msg: 239 | fetch_failures.append(status_msg) 240 | elif "No metadata" in status_msg: 241 | no_metadata.append(status_msg) 242 | else: 243 | mirror_logs.append(status_msg) 244 | elif exception and not quiet: 245 | if "Download cancelled" not in str(exception): 246 | mirror_logs.append(f"⛔ Unhandled error for: {pkg_name_task} from: {mirror} [{component}]: {exception}") 247 | except Exception as e: 248 | if not quiet: 249 | mirror_logs.append(f"⛔ Unexpected error for: {pkg_name}: {e}") 250 | 251 | if not quiet: 252 | if fetch_failures: 253 | tqdm.write("\n" + "\n".join(f" {msg}" for msg in fetch_failures)) 254 | if no_metadata: 255 | tqdm.write("\n" + "\n".join(f" {msg}" for msg in no_metadata)) 256 | if mirror_logs: 257 | tqdm.write("\n" + "\n".join(f" {msg}" for msg in mirror_logs)) 258 | 259 | if not candidates: 260 | msg = f"Unable to find '{pkg_name}' after probing {len(probe_tasks)} mirror/component pairs. Aborting." 
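    # -- Editor's note (illustrative): when candidates were found, the
    # -- selection logic further below groups them by Debian version, prefers
    # -- the highest version, and shuffles mirrors carrying that same version
    # -- to spread load. python-debian handles epochs and revisions, e.g.
    # -- debian_support.Version("2:1.0-1") > debian_support.Version("1:9.9-9")
    # -- evaluates True because the epoch dominates the comparison.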
261 | if log_lock: 262 | with log_lock: 263 | tqdm.write(f"❌ {msg}") 264 | tqdm.write("") 265 | raise DownloadError(msg) 266 | 267 | version_groups = defaultdict(list) 268 | for c in candidates: 269 | version_groups[c["version"]].append(c) 270 | 271 | sorted_versions = sorted(version_groups.keys(), reverse=True) 272 | 273 | shuffled_candidates = [] 274 | for version in sorted_versions: 275 | mirrors = version_groups[version] 276 | random.shuffle(mirrors) 277 | shuffled_candidates.extend(mirrors) 278 | 279 | best = shuffled_candidates[0] 280 | 281 | if not quiet: 282 | with log_lock: 283 | tqdm.write("") 284 | tqdm.write(f" 📦 Package: {pkg_name}") 285 | tqdm.write(f" 🔹 Version: {best['version_str']}") 286 | tqdm.write(f" 🔹 Source: {best['source']}\n") 287 | tqdm.write(f" 📥 Downloading: {pkg_name} from: {best['url']}...\n") 288 | 289 | if stop_event and stop_event.is_set(): 290 | raise DownloadError("Download cancelled.") 291 | 292 | download_errors = [] 293 | 294 | for candidate in shuffled_candidates: 295 | if stop_event and stop_event.is_set(): break 296 | url = candidate["url"] 297 | path = candidate["path"] 298 | try: 299 | return download_file(url, path, quiet=quiet) 300 | except DownloadError as e: 301 | download_errors.append(f"{pkg_name}: {e} ← {url}") 302 | 303 | for candidate in shuffled_candidates: 304 | url = candidate["url"] 305 | path = candidate["path"] 306 | try: 307 | if not quiet: 308 | tqdm.write(f" 🔁 Retrying download for: {pkg_name} from: {url}") 309 | return download_file(url, path, quiet=quiet) 310 | except DownloadError as e: 311 | download_errors.append(f"{pkg_name} (retry): {e} ← {url}") 312 | 313 | if not quiet and download_errors and log_lock: 314 | with log_lock: 315 | tqdm.write("\n" + "\n".join(f" ⚠️ {msg}" for msg in download_errors) + "\n") 316 | 317 | msg = f"All mirrors failed to download: {pkg_name}." 
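    # -- Editor's note: the two loops above give every candidate mirror one
    # -- initial attempt and one retry pass, always walking candidates in
    # -- best-version-first order, before the DownloadError below aborts the
    # -- download for this package.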
318 | 319 | if log_lock: 320 | with log_lock: 321 | tqdm.write(f"❌ {msg}") 322 | tqdm.write("") 323 | raise DownloadError(msg) 324 | 325 | 326 | def fetch_package_metadata(mirror, release, arch, pkg_name, component="main", stop_event=None, retries=3): 327 | """Fetch the package filename and version from repository metadata, with retry and .xz fallback.""" 328 | 329 | base_url = f"{mirror}/dists/{release}/{component}/binary-{arch}/" 330 | urls_to_try = [base_url + "Packages.gz", base_url + "Packages.xz"] 331 | 332 | delay_range = (0.2, 0.6) 333 | cache_key = (mirror, release, arch, component) 334 | 335 | with cache_lock: 336 | if cache_key in metadata_cache: 337 | lines = metadata_cache[cache_key] 338 | else: 339 | lines = None 340 | 341 | if lines is None: 342 | for url in urls_to_try: 343 | if stop_event and stop_event.is_set(): 344 | return None, "Download cancelled" 345 | 346 | for attempt in range(1, retries + 1): 347 | if stop_event and stop_event.is_set(): 348 | return None, "Download cancelled" 349 | 350 | try: 351 | response = session.get(url, timeout=20, stream=True) 352 | 353 | if response.status_code == 404: 354 | break 355 | 356 | response.raise_for_status() 357 | content = response.content 358 | if url.endswith(".gz"): 359 | with gzip.open(BytesIO(content), "rt", encoding="utf-8", errors="ignore") as f: 360 | lines = f.readlines() 361 | elif url.endswith(".xz"): 362 | with lzma.open(BytesIO(content), "rt", encoding="utf-8", errors="ignore") as f: 363 | lines = f.readlines() 364 | if lines: 365 | with cache_lock: 366 | metadata_cache[cache_key] = lines 367 | break 368 | 369 | except requests.exceptions.RequestException as e: 370 | if attempt < retries: 371 | time.sleep(random.uniform(*delay_range)) 372 | continue 373 | 374 | if isinstance(e, requests.exceptions.Timeout): 375 | reason = "⌛ Timeout" 376 | elif isinstance(e, requests.exceptions.ConnectionError): 377 | reason = "🔌 Connection error" 378 | elif isinstance(e, requests.exceptions.HTTPError) and e.response is not None: 379 | reason = f"HTTP {e.response.status_code}" 380 | else: 381 | reason = e.__class__.__name__ 382 | 383 | mirror_host = urlparse(url).hostname 384 | return None, f"⭢ 🚧 Unable to fetch metadata from: {mirror_host}: {reason} (after {retries} attempts)" 385 | 386 | if lines: 387 | break 388 | 389 | if not lines: 390 | return None, f"⛔ No metadata for: '{pkg_name}' from: '{mirror}' in [{component}]" 391 | 392 | current_package = None 393 | filename = None 394 | version = None 395 | 396 | for line in lines: 397 | line = line.strip() 398 | 399 | if line.startswith("Package: "): 400 | current_package = line.split("Package: ")[1] 401 | filename = None 402 | version = None 403 | 404 | elif line.startswith("Version: ") and current_package == pkg_name: 405 | version = line.split("Version: ")[1] 406 | 407 | elif line.startswith("Filename: ") and current_package == pkg_name: 408 | filename = line.split("Filename: ")[1] 409 | 410 | if current_package == pkg_name and filename and version: 411 | return (filename, version), None 412 | 413 | return None, f"⛔ No metadata for: '{pkg_name}' from: '{mirror}' in [{component}]" 414 | 415 | 416 | def fetch_from_ppa(pkg_name, repo, deb_dir, quiet=True): 417 | 418 | ppa = repo["ppa"].strip() 419 | if not ppa or "/" not in ppa: 420 | if not quiet: 421 | tqdm.write(f"⛔ Invalid PPA format: {ppa}. 
Expected format: '/'.") 422 | return None 423 | 424 | distro = repo.get("distro", "ubuntu").lower() 425 | release = repo.get("release") 426 | arch = repo.get("arch") 427 | 428 | if not (distro and release and arch): 429 | if not quiet: 430 | tqdm.write(f"❌ Error: Missing required repo keys for {pkg_name}: {repo}") 431 | return None 432 | 433 | base_url = f"https://ppa.launchpadcontent.net/{ppa}/{distro}".rstrip('/') 434 | 435 | try: 436 | result, _ = fetch_package_metadata(base_url, release, arch, pkg_name) 437 | if result: 438 | filename, version_str = result 439 | version = debian_support.Version(version_str) 440 | full_url = urljoin(base_url + "/", filename) 441 | deb_path = deb_dir / f"{pkg_name}.deb" 442 | 443 | return { 444 | "version": version, 445 | "version_str": version_str, 446 | "url": full_url, 447 | "path": deb_path, 448 | "source": f"{base_url} [ppa]" 449 | } 450 | 451 | except Exception as e: 452 | if not quiet: 453 | tqdm.write(f"⛔ Failed to fetch metadata for: {pkg_name} from: {base_url}: {e}") 454 | 455 | return None 456 | 457 | 458 | def download_file(url, destination, quiet=True): 459 | try: 460 | response = session.get(url, timeout=20, stream=True) 461 | response.raise_for_status() 462 | 463 | dl_chunk_size = 1024 * 1024 464 | 465 | with open(destination, "wb") as f: 466 | for chunk in response.iter_content(chunk_size=dl_chunk_size): 467 | if chunk: 468 | f.write(chunk) 469 | 470 | if not quiet: 471 | tqdm.write(f" 🎉 Successfully downloaded: {destination}\n") 472 | 473 | return destination 474 | 475 | except requests.exceptions.RequestException as e: 476 | if isinstance(e, requests.exceptions.HTTPError) and e.response is not None: 477 | raise DownloadError(f"🧾 HTTP {e.response.status_code}") from e 478 | elif isinstance(e, requests.exceptions.SSLError): 479 | raise DownloadError("🔒 SSL error") from e 480 | elif isinstance(e, requests.exceptions.Timeout): 481 | raise DownloadError("⌛ Timeout") from e 482 | elif isinstance(e, requests.exceptions.ConnectionError): 483 | if "NameResolutionError" in str(e): 484 | raise DownloadError("🌐 DNS resolution failed") from e 485 | raise DownloadError("🔌 Connection failed") from e 486 | else: 487 | raise DownloadError(f"⚠️ {e.__class__.__name__}") from e 488 | -------------------------------------------------------------------------------- /nx_apphub_cli/builder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: BSD-3-Clause 3 | # Copyright <2025> > 4 | 5 | import os 6 | import sys 7 | import platform 8 | import shutil 9 | import subprocess 10 | from pathlib import Path 11 | 12 | from .exceptions import BuildError 13 | from .config import get_apprunconf_value 14 | from .utils import cleanup_cache, get_appimagetool, get_go_appimagetool, get_uruntime, get_architecture 15 | from .apprun import generate_apprun 16 | 17 | # <--- 18 | # ---> 19 | # -- Base working directory for all packages. 
20 | 21 | app_base_dir = Path.home() / ".cache/nx-apphub-cli" 22 | local_bin = Path.home() / ".local/bin" 23 | 24 | 25 | def setup_appimage_directories(app_name, binary_path): 26 | """Ensure required directories exist for AppImage building.""" 27 | package_dir = app_base_dir / app_name 28 | app_dir = package_dir / "AppDir" 29 | bin_dir = app_dir / "usr/bin" 30 | 31 | app_dir.mkdir(parents=True, exist_ok=True) 32 | bin_dir.mkdir(parents=True, exist_ok=True) 33 | 34 | extracted_binary_path = app_dir / binary_path.lstrip("/") 35 | return extracted_binary_path, app_dir 36 | 37 | 38 | # -- Get an icon from the default icon themes. Search in /usr/share/icons and use /usr/share/pixmaps as a fallback. 39 | 40 | icon_themes = ["breeze-dark", "breeze", "Adwaita", "Luv", "hicolor"] 41 | 42 | 43 | def find_system_icon(icon_name, app_dir, preferred_theme=None): 44 | """Search for the system icon in the specified or standard themes, preferring exact matches.""" 45 | search_themes = [preferred_theme] + icon_themes if preferred_theme else icon_themes 46 | icon_exts = [".png", ".svg", ".xpm"] 47 | 48 | for ext in icon_exts: 49 | for theme in search_themes: 50 | theme_path = app_dir / f"usr/share/icons/{theme}" 51 | if theme_path.exists(): 52 | exact_match = theme_path / f"{icon_name}{ext}" 53 | if exact_match.exists(): 54 | return exact_match 55 | for icon_file in theme_path.rglob(f"{icon_name}{ext}"): 56 | return icon_file 57 | 58 | pixmaps_path = app_dir / "usr/share/pixmaps" 59 | for ext in icon_exts: 60 | exact_match = pixmaps_path / f"{icon_name}{ext}" 61 | if exact_match.exists(): 62 | return exact_match 63 | for icon_file in pixmaps_path.glob(f"{icon_name}{ext}"): 64 | return icon_file 65 | 66 | return None 67 | 68 | 69 | def get_icon_name_from_desktop(app_dir): 70 | """Attempt to extract icon name from a .desktop file.""" 71 | for file in app_dir.glob("*.desktop"): 72 | with file.open(encoding="utf-8") as f: 73 | for line in f: 74 | if line.startswith("Icon="): 75 | return line.strip().split("=", 1)[1] 76 | return None 77 | 78 | 79 | def copy_system_icon(app_name, app_dir, config, icon_path, quiet=True): 80 | """Copy the icon referenced in the .desktop file to the root of AppDir with the correct name and extension.""" 81 | 82 | icon_name = get_icon_name_from_desktop(app_dir) or app_name 83 | 84 | if icon_path: 85 | icon_path = Path(icon_path) 86 | if icon_path.exists(): 87 | icon_dest = app_dir / f"{icon_name}{icon_path.suffix}" 88 | shutil.copy(icon_path, icon_dest) 89 | if not quiet: 90 | print(f"✔️ Using provided icon: {icon_dest.name}") 91 | return 92 | 93 | integration_type = config.get("integration", {}).get("type", "gui") 94 | 95 | if integration_type == "wm": 96 | system_icon = find_system_icon("preferences-system-windows", Path("/")) 97 | else: 98 | system_icon = ( 99 | find_system_icon(icon_name, app_dir) 100 | or find_system_icon("utilities-terminal", Path("/")) 101 | ) 102 | 103 | if system_icon: 104 | icon_dest = app_dir / f"{icon_name}{system_icon.suffix}" 105 | shutil.copy(system_icon, icon_dest) 106 | if not quiet: 107 | print(f"✔️ Using icon from AppDir: {icon_dest.name}") 108 | return 109 | 110 | raise BuildError( 111 | f"No icon found for '{icon_name}'. " 112 | "This usually happens when the icon theme is missing in the AppDir or build system." 
113 | ) 114 | 115 | 116 | def fix_desktop_entry(app_name, app_dir, binary_path, hide_from_menu=False): 117 | """Ensure the AppImage contains a valid .desktop file.""" 118 | 119 | desktop_dir = app_dir / "usr/share/applications" 120 | existing_desktops = list(desktop_dir.glob("*.desktop")) if desktop_dir.exists() else [] 121 | 122 | def is_valid_desktop(path): 123 | try: 124 | with path.open(encoding="utf-8") as f: 125 | lines = f.readlines() 126 | has_name = any(line.strip().startswith("Name=") for line in lines) 127 | has_exec = any(line.strip().startswith("Exec=") for line in lines) 128 | return has_name and has_exec 129 | except Exception: 130 | return False 131 | 132 | desktop_file_path = None 133 | 134 | # -- Prefer exact match. 135 | 136 | exact_match = desktop_dir / f"{app_name}.desktop" 137 | if exact_match.exists() and is_valid_desktop(exact_match): 138 | desktop_file_path = exact_match 139 | else: 140 | 141 | # -- Look for partial matches. 142 | 143 | for file in existing_desktops: 144 | if app_name in file.name and is_valid_desktop(file): 145 | desktop_file_path = file 146 | break 147 | 148 | # -- Fallback to any valid one. 149 | 150 | if not desktop_file_path: 151 | for file in existing_desktops: 152 | if is_valid_desktop(file): 153 | desktop_file_path = file 154 | break 155 | 156 | if desktop_file_path: 157 | 158 | # -- Copy to top-level AppDir. 159 | 160 | target_path = app_dir / desktop_file_path.name 161 | if desktop_file_path != target_path: 162 | shutil.copy(desktop_file_path, target_path) 163 | desktop_file_path = target_path 164 | 165 | # -- Patch Exec if necessary. 166 | 167 | lines = desktop_file_path.read_text(encoding="utf-8").splitlines() 168 | updated_lines = [] 169 | found_nodisplay = False 170 | 171 | for line in lines: 172 | if line.startswith("Exec="): 173 | if f"/usr/bin/{binary_path.name}" not in line: 174 | updated_lines.append(f"Exec=/usr/bin/{binary_path.name}") 175 | else: 176 | updated_lines.append(line) 177 | elif line.startswith("NoDisplay="): 178 | found_nodisplay = True 179 | updated_lines.append(line) 180 | else: 181 | updated_lines.append(line) 182 | 183 | if hide_from_menu and not found_nodisplay: 184 | updated_lines.append("NoDisplay=true") 185 | 186 | desktop_file_path.write_text("\n".join(updated_lines) + "\n", encoding="utf-8") 187 | 188 | else: 189 | 190 | # -- Generate new minimal .desktop. 191 | 192 | desktop_file_path = app_dir / f"{app_name}.desktop" 193 | desktop_content = f"""[Desktop Entry] 194 | Type=Application 195 | Name={app_name} 196 | Exec=/usr/bin/{binary_path.name} 197 | Terminal=true 198 | Categories=Utility; 199 | Icon={app_name} 200 | {'NoDisplay=true' if hide_from_menu else ''} 201 | """ 202 | desktop_file_path.write_text(desktop_content.strip() + "\n", encoding="utf-8") 203 | 204 | 205 | def prepare_window_manager_launcher(app_dir): 206 | """Locate and prepare a session launcher (.desktop) for a window manager inside the AppDir. 207 | 208 | Prioritize Wayland session launchers over X11. 
209 | """ 210 | session_dirs = [ 211 | app_dir / "usr/share/wayland-sessions", 212 | app_dir / "usr/share/xsessions" 213 | ] 214 | 215 | for session_dir in session_dirs: 216 | if session_dir.is_dir(): 217 | matches = sorted(session_dir.glob("*.desktop")) 218 | if matches: 219 | launcher_path = matches[0] 220 | target_path = app_dir / launcher_path.name 221 | 222 | shutil.copy(launcher_path, target_path) 223 | 224 | with target_path.open("r", encoding="utf-8") as f: 225 | lines = f.readlines() 226 | 227 | updated_lines = [] 228 | icon_found = False 229 | nodisplay_found = False 230 | 231 | for line in lines: 232 | stripped = line.strip() 233 | if stripped.startswith("Type="): 234 | updated_lines.append("Type=Application\n") 235 | elif stripped.startswith("Icon="): 236 | updated_lines.append("Icon=preferences-system-windows\n") 237 | icon_found = True 238 | elif stripped.startswith("NoDisplay="): 239 | updated_lines.append("NoDisplay=true\n") 240 | nodisplay_found = True 241 | else: 242 | updated_lines.append(line if line.endswith("\n") else line + "\n") 243 | 244 | if not icon_found: 245 | updated_lines.append("Icon=preferences-system-windows\n") 246 | if not nodisplay_found: 247 | updated_lines.append("NoDisplay=true\n") 248 | 249 | with target_path.open("w", encoding="utf-8") as f: 250 | f.writelines(updated_lines) 251 | 252 | return target_path 253 | 254 | return None 255 | 256 | 257 | def patch_binary_rpath(binary_path, config): 258 | """Patch the RPATH of the application binary to use $ORIGIN with the correct paths.""" 259 | 260 | # -- Fetch settings from YAML. 261 | 262 | setlibpath = get_apprunconf_value(config, "setlibpath", default="/usr/lib", expected_type=str) 263 | 264 | # -- Determine multiarch triplet dynamically. 265 | 266 | arch_map = { 267 | "x86_64": "x86_64-linux-gnu", 268 | "aarch64": "aarch64-linux-gnu", 269 | "arm64": "aarch64-linux-gnu", 270 | } 271 | 272 | arch = get_architecture() 273 | multiarch_triplet = arch_map.get(arch) 274 | 275 | if not multiarch_triplet: 276 | raise BuildError(f"Unsupported architecture detected: {arch}. Aborting.") 277 | 278 | # -- Patch the RPATH of the executable; the paths are relative to the path of the binary. 279 | 280 | try: 281 | rpath_parts = [ 282 | 283 | # -- Prefer AppDir/usr first. 284 | 285 | f"$ORIGIN/../..{setlibpath}", 286 | f"$ORIGIN/../..{setlibpath}/{multiarch_triplet}", 287 | f"$ORIGIN/../..{setlibpath}64", 288 | 289 | # -- Qt-specific plugin locations under /usr. 290 | 291 | f"$ORIGIN/../../..{setlibpath}/{multiarch_triplet}/qt5/qml", 292 | f"$ORIGIN/../../..{setlibpath}/{multiarch_triplet}/qt6/qml", 293 | f"$ORIGIN/../../..{setlibpath}/{multiarch_triplet}/qt5/plugins", 294 | f"$ORIGIN/../../..{setlibpath}/{multiarch_triplet}/qt6/plugins", 295 | f"$ORIGIN/../../..{setlibpath}/qt5/libexec", 296 | f"$ORIGIN/../../..{setlibpath}/qt5/bin", 297 | f"$ORIGIN/../../..{setlibpath}/qt6/libexec", 298 | f"$ORIGIN/../../..{setlibpath}/qt6/bin", 299 | 300 | # -- /lib multiarch. 301 | 302 | f"$ORIGIN/../../lib/{multiarch_triplet}", 303 | f"$ORIGIN/../../lib64/{multiarch_triplet}", 304 | 305 | # -- /lib non-multiarch (add these). 
306 | 307 | f"$ORIGIN/../../lib", 308 | f"$ORIGIN/../../lib64", 309 | ] 310 | extras = config.get("apprunconf", {}).get("extra_rpaths", []) 311 | if isinstance(extras, str): 312 | extras = [extras] 313 | elif isinstance(extras, tuple): 314 | extras = list(extras) 315 | extras = [p for p in extras if isinstance(p, str) and p.strip()] 316 | ordered = [] 317 | seen = set() 318 | for p in rpath_parts + extras: 319 | if p not in seen: 320 | ordered.append(p) 321 | seen.add(p) 322 | 323 | rpath_value = ":".join(ordered) 324 | 325 | subprocess.run( 326 | ["patchelf", "--set-rpath", rpath_value, "--force-rpath", str(binary_path)], 327 | check=True 328 | ) 329 | print(f"🩹 Patched RPATH for: {binary_path}") 330 | except subprocess.CalledProcessError as e: 331 | raise BuildError(f"Failed to patch RPATH for {binary_path}: {e}") from e 332 | 333 | 334 | def package_appdir(app_name, app_dir, output_file, appimagetool_binary, runtime, config, quiet=True): 335 | if not quiet: 336 | print(f"\n🛠 Packaging AppDir: {output_file} ...") 337 | 338 | try: 339 | env = os.environ.copy() 340 | cmd = [] 341 | 342 | if runtime == "go": 343 | env["VERSION"] = config["buildinfo"]["version"] 344 | cmd = [str(appimagetool_binary), str(app_dir)] 345 | elif runtime == "uruntime": 346 | cmd = [ 347 | str(appimagetool_binary), 348 | "--appimage-mkdwarfs", 349 | "-f", 350 | "--set-owner", "0", 351 | "--set-group", "0", 352 | "--no-history", 353 | "--no-create-timestamp", 354 | "--compression", "zstd:level=22", 355 | "-S22", 356 | "-B6", 357 | "--header", str(appimagetool_binary), 358 | "-i", str(app_dir), 359 | "-o", str(output_file) 360 | ] 361 | else: 362 | cmd = [str(appimagetool_binary), str(app_dir), str(output_file)] 363 | 364 | with open(os.devnull, 'w') as devnull: 365 | subprocess.run( 366 | cmd, 367 | check=True, 368 | stdout=sys.stdout if not quiet else devnull, 369 | stderr=sys.stderr if not quiet else devnull, 370 | env=env 371 | ) 372 | 373 | if runtime == "go": 374 | output_dir = output_file.parent 375 | appimages = list(output_dir.glob("*.AppImage")) 376 | 377 | if not appimages: 378 | cleanup_cache(app_name) 379 | raise BuildError("No AppImage found after Go appimagetool build.") 380 | 381 | built_appimage = appimages[0] 382 | built_appimage.rename(output_file) 383 | output_file.chmod(0o755) 384 | 385 | if runtime == "uruntime": 386 | output_file.chmod(0o755) 387 | 388 | if not output_file.exists(): 389 | cleanup_cache(app_name) 390 | raise BuildError(f"Expected AppImage not found: {output_file}") 391 | 392 | if not quiet: 393 | print(f"✅ AppImage built successfully: {output_file}") 394 | 395 | cleanup_cache(app_name) 396 | 397 | except subprocess.CalledProcessError as e: 398 | cleanup_cache(app_name) 399 | raise BuildError(f"AppImage build failed! {e}") from e 400 | 401 | 402 | def prepare_appimage(config, install_mode=False, quiet=True): 403 | """Prepare and build an AppImage with the version in the filename.""" 404 | 405 | app_name = config["buildinfo"]["name"] 406 | version = config["buildinfo"].get("version", "unknown") 407 | binary_path = config["buildinfo"].get("binarypath") 408 | 409 | if not binary_path: 410 | cleanup_cache(app_name) 411 | raise BuildError(f"No binary path specified for {app_name}. Aborting.") 412 | 413 | # -- Ensure AppDir is properly set up before running any commands. 

    # -- Ensure AppDir is properly set up before running any commands.

    extracted_binary_path, app_dir = setup_appimage_directories(app_name, binary_path)

    if not extracted_binary_path.exists():
        cleanup_cache(app_name)
        raise BuildError(f"Binary {extracted_binary_path} not found. Aborting.")

    # -- Run prebuild commands inside the AppDir.

    prebuild_commands = config.get("apprunconf", {}).get("prebuild_commands", [])
    if prebuild_commands:
        print(f"🔧 Running prebuild commands for {app_name} inside {app_dir}...\n")
        env = os.environ.copy()
        env["APPDIR"] = str(app_dir)

        for cmd in prebuild_commands:
            cmd_resolved = cmd.replace("$APPDIR", str(app_dir))
            try:
                subprocess.run(
                    cmd_resolved,
                    shell=True,
                    check=True,
                    env=env,
                    cwd=app_dir,
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.PIPE
                )
                print(f" 🤖 Command executed: {cmd_resolved}")
                print()
            except subprocess.CalledProcessError as e:
                cleanup_cache(app_name)
                err_output = e.stderr.decode(errors='replace')
                raise BuildError(
                    f"Failed to execute prebuild command: '{cmd_resolved}'.\n"
                    f"Output: {err_output}"
                ) from e

    # -- Select the AppImage tool based on the configured runtime.

    runtime = config.get("buildinfo", {}).get("runtime", "classic")

    if runtime == "classic":
        appimagetool_binary = get_appimagetool(quiet=quiet)
    elif runtime == "go":
        appimagetool_binary = get_go_appimagetool(quiet=quiet)
    elif runtime == "uruntime":
        appimagetool_binary = get_uruntime(quiet=quiet)
    else:
        cleanup_cache(app_name)
        raise BuildError(f"Unknown runtime '{runtime}' specified in buildinfo.")

    # -- Move binary to correct location BEFORE generating AppRun.

    bin_dir = app_dir / "usr/bin"
    new_binary_path = bin_dir / extracted_binary_path.name

    if extracted_binary_path != new_binary_path:
        shutil.move(str(extracted_binary_path), str(new_binary_path))
        if not quiet:
            print(f"📂 Moved binary: {extracted_binary_path} → {new_binary_path}")

    # -- Generate metadata & AppRun.

    print(f"🧳 Generating AppRun and metadata for: {app_name}...")
    print()
    generate_apprun(app_dir, config)

    integration = config.get("integration", {})
    integration_type = integration.get("type", "gui")

    hide_from_menu = integration_type == "cli"

    if integration_type == "wm":
        wm_launcher = prepare_window_manager_launcher(app_dir)
        if not wm_launcher:
            cleanup_cache(app_name)
            raise BuildError("No session launcher (.desktop) found in Wayland/X11 session directories.")
    else:
        fix_desktop_entry(app_name, app_dir, new_binary_path, hide_from_menu=hide_from_menu)
        try:
            copy_system_icon(app_name, app_dir, config, config["buildinfo"].get("iconpath"))
        except (FileNotFoundError, BuildError) as e:
            cleanup_cache(app_name)
            raise BuildError(str(e)) from e

    # -- Determine the final AppImage location.

    output_dir = Path.home() / ".local/bin/nx-apphub" if install_mode else Path.cwd()
    output_dir.mkdir(parents=True, exist_ok=True)

    # -- Use a versioned filename for tracking updates.

    file_ext = "AppBox" if install_mode else "AppImage"
    output_file = output_dir / f"{app_name}-{version}-{platform.machine().lower()}.{file_ext}"
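
    # -- Illustrative example: building "myapp" 1.0.0 on x86_64 in install mode
    # -- yields ~/.local/bin/nx-apphub/myapp-1.0.0-x86_64.AppBox, while a plain
    # -- build writes myapp-1.0.0-x86_64.AppImage to the current working directory.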

    # -- Patch the binary RPATH before building the AppImage.

    patch_binary_rpath(str(new_binary_path), config)

    # -- Build the final AppImage.

    package_appdir(app_name, app_dir, output_file, appimagetool_binary, runtime, config, quiet)

    if not quiet:
        print(f"📦 AppImage ready: {output_file}\n")

--------------------------------------------------------------------------------