├── appimagelint ├── __init__.py ├── __main__.py ├── reports │ ├── __init__.py │ ├── report_base.py │ └── json_report.py ├── symbols.py ├── models │ ├── __init__.py │ ├── test_result.py │ └── appimage.py ├── colors.py ├── cache │ ├── exceptions.py │ ├── __init__.py │ ├── paths.py │ ├── cache_base.py │ ├── codebase_hasher.py │ ├── distro_codenames.py │ ├── io.py │ ├── runtime_cache.py │ ├── package_version_maps.py │ ├── json_cache_impl_base.py │ └── common.py ├── services │ ├── __init__.py │ ├── binarywalker.py │ ├── result_formatter.py │ ├── gnu_lib_versions_symbol_finder.py │ └── appimagemounter.py ├── checks │ ├── __init__.py │ ├── check_base.py │ ├── glibc_abi.py │ ├── glibcxx_abi.py │ ├── libkeyutils_abi.py │ ├── gnu_abi_check_base.py │ └── icons.py ├── _logging.py ├── _util.py └── cli.py ├── resources ├── screenshot.png ├── appimagelint.desktop ├── AppRun.sh ├── appimagelint.appdata.xml └── appimagelint.svg ├── .gitignore ├── README.md ├── setup.py ├── .travis.yml └── ci └── build-appimage.sh /appimagelint/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /appimagelint/__main__.py: -------------------------------------------------------------------------------- 1 | from .cli import run 2 | 3 | 4 | run() 5 | -------------------------------------------------------------------------------- /resources/screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probonopd/appimagelint/master/resources/screenshot.png -------------------------------------------------------------------------------- /appimagelint/reports/__init__.py: -------------------------------------------------------------------------------- 1 | from .report_base import ReportBase 2 | from .json_report import JSONReport 3 | -------------------------------------------------------------------------------- /appimagelint/symbols.py: -------------------------------------------------------------------------------- 1 | class Symbols: 2 | CHECK = '\u2714' # might also use 2713 3 | CROSS = '\u2716' # might also use 2715 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .appimage-build*/ 3 | __pycache__/ 4 | *.py[c|o] 5 | *build*/ 6 | *dist/ 7 | *.egg-info/ 8 | *.AppImage 9 | *.spec 10 | -------------------------------------------------------------------------------- /appimagelint/models/__init__.py: -------------------------------------------------------------------------------- 1 | from .appimage import AppImage 2 | from .test_result import TestResult 3 | 4 | 5 | __all__ = ("AppImage", "TestResult") 6 | -------------------------------------------------------------------------------- /resources/appimagelint.desktop: -------------------------------------------------------------------------------- 1 | [Desktop Entry] 2 | Name=appimagelint 3 | Exec=appimagelint 4 | Icon=appimagelint 5 | Type=Application 6 | Terminal=true 7 | Categories=Utility; 8 | -------------------------------------------------------------------------------- /appimagelint/colors.py: -------------------------------------------------------------------------------- 1 | class Colors: 2 | PINK = "\033[95m" 3 | BLUE = "\033[94m" 4 | GREEN = "\033[92m" 5 | YELLOW = "\033[93m" 6 | RED = "\033[91m" 7 | ENDC = "\033[0m" 8 | BOLD = 
"\033[1m" 9 | UNDERLINE = "\033[4m" 10 | 11 | -------------------------------------------------------------------------------- /appimagelint/cache/exceptions.py: -------------------------------------------------------------------------------- 1 | from typing import Union, Iterable, Mapping 2 | 3 | 4 | class OutOfDateError(Exception): 5 | def __init__(self, message: str, cached_data: Union[Iterable, Mapping] = None): 6 | self.args = (message,) 7 | self.cached_data = cached_data 8 | -------------------------------------------------------------------------------- /appimagelint/services/__init__.py: -------------------------------------------------------------------------------- 1 | from .appimagemounter import AppImageMounter 2 | from .binarywalker import BinaryWalker 3 | from .gnu_lib_versions_symbol_finder import GnuLibVersionSymbolsFinder 4 | 5 | 6 | __all__ = ("AppImageMounter", "BinaryWalker", "GnuLibVersionSymbolsFinder",) 7 | -------------------------------------------------------------------------------- /appimagelint/reports/report_base.py: -------------------------------------------------------------------------------- 1 | class ReportBase: 2 | def __init__(self, results: dict): 3 | self._results = results 4 | 5 | def to_str(self): 6 | raise NotImplementedError 7 | 8 | def write(self, path: str): 9 | raise NotImplementedError 10 | -------------------------------------------------------------------------------- /resources/AppRun.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | this_dir=$(dirname "$0") 4 | 5 | # add own bin dir as fallback 6 | # might come in handy if readelf binary is missing on the system (not sure if that's even possible, though) 7 | export PATH="$PATH":"$this_dir"/usr/bin 8 | 9 | "$this_dir"/usr/bin/python -m appimagelint "$@" 10 | -------------------------------------------------------------------------------- /appimagelint/checks/__init__.py: -------------------------------------------------------------------------------- 1 | from .check_base import CheckBase 2 | from .gnu_abi_check_base import GnuAbiCheckBase 3 | from .glibc_abi import GlibcABICheck 4 | from .glibcxx_abi import GlibcxxABICheck 5 | from .icons import IconsCheck 6 | 7 | 8 | __all__ = ("CheckBase", "GnuAbiCheckBase", "GlibcABICheck", "GlibcxxABICheck", "IconsCheck",) 9 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # appimagelint 2 | 3 | appimagelint is a tool to check [AppImage](https://appimage.org/) files for common issues. 4 | 5 | appimagelint runs a variety of checks on AppImages and reports the 6 | results in human-readable form in the console log. 
7 | 8 |  9 | -------------------------------------------------------------------------------- /appimagelint/models/test_result.py: -------------------------------------------------------------------------------- 1 | class TestResult: 2 | def __init__(self, success: bool, id: str, message: str): 3 | self._success = success 4 | self._message = message 5 | self._id = id 6 | 7 | def success(self): 8 | return self._success 9 | 10 | def message(self): 11 | return self._message 12 | 13 | def id(self): 14 | return self._id 15 | 16 | def __repr__(self): 17 | return "TestResult({}, {})".format(self._success, repr(self._message)) 18 | -------------------------------------------------------------------------------- /appimagelint/checks/check_base.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from typing import Iterator 4 | 5 | from ..models import AppImage, TestResult 6 | 7 | 8 | class CheckBase: 9 | def __init__(self, appimage: AppImage): 10 | self._appimage = appimage 11 | 12 | def run(self) -> Iterator[TestResult]: 13 | raise NotImplementedError 14 | 15 | @staticmethod 16 | def get_logger() -> logging.Logger: 17 | raise NotImplementedError 18 | 19 | @staticmethod 20 | def name(): 21 | raise NotImplementedError 22 | -------------------------------------------------------------------------------- /appimagelint/cache/__init__.py: -------------------------------------------------------------------------------- 1 | from .._logging import make_logger 2 | 3 | 4 | def _get_logger(): 5 | return make_logger("cache") 6 | 7 | 8 | from .exceptions import OutOfDateError 9 | from .io import load_json, store_json 10 | from .cache_base import CacheBase 11 | from .json_cache_impl_base import JSONCacheImplBase 12 | from .distro_codenames import DebianCodenameMapCache 13 | from .package_version_maps import PackageVersionMapsCache 14 | 15 | 16 | __all__ = ("OutOfDateError", "store_json", "load_json", "CacheBase", "DebianCodenameMapCache",) 17 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from setuptools import setup, find_packages 4 | 5 | 6 | setup( 7 | name="appimagelint", 8 | version="0.0.1", 9 | packages=find_packages(), 10 | license="MIT", 11 | long_description=open(os.path.join(os.path.dirname(__file__), "README.md")).read(), 12 | install_requires=[ 13 | "coloredlogs", 14 | "packaging", 15 | "requests", 16 | "xdg", 17 | "pillow", 18 | ], 19 | entry_points={ 20 | "console_scripts": [ 21 | "appimagelint = appimagelint.cli:run", 22 | ], 23 | }, 24 | ) 25 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | dist: xenial 3 | language: python 4 | 5 | python: 6 | - 3.6 7 | 8 | script: 9 | - bash ci/build-appimage.sh 10 | 11 | after_success: 12 | - ls -lh 13 | - wget -c https://github.com/probonopd/uploadtool/raw/master/upload.sh 14 | # make sure only pushes to rewrite create a new release, otherwise pretend PR and upload to transfer.sh 15 | - if [ "$TRAVIS_BRANCH" != "master" ]; then export TRAVIS_EVENT_TYPE=pull_request; fi 16 | - bash ./upload.sh appimagelint*.AppImage* 17 | 18 | branches: 19 | except: 20 | - # Do not build tags that we create when we upload to GitHub Releases 21 | - /^(?i:continuous)$/ 22 | 
-------------------------------------------------------------------------------- /appimagelint/models/appimage.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | class AppImage: 5 | def __init__(self, path: str, custom_runtime: str = None): 6 | if not os.path.exists(path): 7 | raise FileNotFoundError("file not found: {}".format(path)) 8 | 9 | self._path = os.path.abspath(path) 10 | 11 | self._custom_runtime = custom_runtime 12 | 13 | def path(self): 14 | return self._path 15 | 16 | def mount(self): 17 | # must not import near top of file to avoid problems with the circular dependency this helper method creates 18 | from ..services import AppImageMounter 19 | 20 | return AppImageMounter(self, self._custom_runtime) 21 | -------------------------------------------------------------------------------- /appimagelint/cache/paths.py: -------------------------------------------------------------------------------- 1 | import os 2 | import xdg 3 | 4 | 5 | def data_cache_path(): 6 | path = os.path.abspath(os.path.join(xdg.XDG_CACHE_HOME, "appimagelint")) 7 | os.makedirs(path, exist_ok=True) 8 | return path 9 | 10 | 11 | def debian_glibc_versions_data_path(): 12 | return os.path.join(data_cache_path(), "debian_glibc_versions.json") 13 | 14 | 15 | def ubuntu_glibc_versions_data_path(): 16 | return os.path.join(data_cache_path(), "ubuntu_glibc_versions.json") 17 | 18 | 19 | def debian_codename_map_path(): 20 | return os.path.join(data_cache_path(), "debian_codenames.json") 21 | 22 | 23 | def debian_glibcxx_versions_data_path(): 24 | return os.path.join(data_cache_path(), "debian_glibcxx_versions.json") 25 | 26 | 27 | def ubuntu_glibcxx_versions_data_path(): 28 | return os.path.join(data_cache_path(), "ubuntu_glibcxx_versions.json") 29 | -------------------------------------------------------------------------------- /resources/appimagelint.appdata.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | appimagelint 4 | MIT 5 | MIT 6 | appimagelint 7 | Check AppImage files for common issues 8 | 9 | Check AppImages for compatibility, best practices etc. Powerful functionality combined with simple usage and human-friendly feedback. 10 | 11 | appimagelint.desktop 12 | https://github.com/TheAssassin/appimagelint/ 13 | 14 | 15 | https://github.com/TheAssassin/appimagelint/blob/944f85f74ede650a86ce01a18217d8834e2b3bb1/resources/screenshot.png 16 | 17 | 18 | 19 | appimagelint.desktop 20 | 21 | 22 | -------------------------------------------------------------------------------- /appimagelint/checks/glibc_abi.py: -------------------------------------------------------------------------------- 1 | from . 
import GnuAbiCheckBase 2 | from ..cache.package_version_maps import DebianGlibcVersionsCache, UbuntuGlibcVersionsCache 3 | from ..models import AppImage 4 | from ..services import GnuLibVersionSymbolsFinder 5 | 6 | 7 | class GlibcABICheck(GnuAbiCheckBase): 8 | def __init__(self, appimage: AppImage): 9 | super().__init__(appimage) 10 | 11 | @staticmethod 12 | def _library_id(): 13 | return "glibc" 14 | 15 | @staticmethod 16 | def name(): 17 | return "GNU libc ABI check" 18 | 19 | @classmethod 20 | def _detect_versions_in_file(cls, path): 21 | return cls._gnu_lib_versions_symbol_finder.detect_gnu_lib_versions("GLIBC_", path) 22 | 23 | @classmethod 24 | def _get_debian_versions_map(cls): 25 | return DebianGlibcVersionsCache.get_data() 26 | 27 | @classmethod 28 | def _get_ubuntu_versions_map(cls): 29 | return UbuntuGlibcVersionsCache.get_data() 30 | -------------------------------------------------------------------------------- /appimagelint/checks/glibcxx_abi.py: -------------------------------------------------------------------------------- 1 | from . import GnuAbiCheckBase 2 | from ..cache.package_version_maps import DebianGlibcxxVersionsCache, UbuntuGlibcxxVersionsCache 3 | from ..models import AppImage 4 | from ..services import GnuLibVersionSymbolsFinder 5 | 6 | 7 | class GlibcxxABICheck(GnuAbiCheckBase): 8 | def __init__(self, appimage: AppImage): 9 | super().__init__(appimage) 10 | 11 | @staticmethod 12 | def _library_id(): 13 | return "glibcxx" 14 | 15 | @staticmethod 16 | def name(): 17 | return "GNU libstdc++ ABI check" 18 | 19 | @classmethod 20 | def _detect_versions_in_file(cls, path): 21 | return cls._gnu_lib_versions_symbol_finder.detect_gnu_lib_versions("GLIBCXX_", path) 22 | 23 | @classmethod 24 | def _get_debian_versions_map(cls): 25 | return DebianGlibcxxVersionsCache.get_data() 26 | 27 | @classmethod 28 | def _get_ubuntu_versions_map(cls): 29 | return UbuntuGlibcxxVersionsCache.get_data() 30 | -------------------------------------------------------------------------------- /appimagelint/cache/cache_base.py: -------------------------------------------------------------------------------- 1 | # interface for caches 2 | from typing import Union, Mapping, Iterable 3 | 4 | 5 | class CacheBase: 6 | """ 7 | Interface for caches. 8 | """ 9 | 10 | @classmethod 11 | def force_update(cls): 12 | """ 13 | Force cache update. 14 | :return: 15 | """ 16 | 17 | raise NotImplementedError() 18 | 19 | @classmethod 20 | def get_data(cls, raise_on_error=False): 21 | """ 22 | Returns cached data. 23 | If the data is out of date, the method will attempt to update them (if possible). 24 | In case fetching the latest data fails, the method will return the cached data anyway unless raise_on_error 25 | is set to True. 26 | If this is impossible, an :class:`OutOfDateError` is thrown even if raise_on_error is set to False. 27 | 28 | :return: data represented by cache class 29 | :raises OutOfDateError: in case data is out of date, see method docstring for details 30 | """ 31 | 32 | raise NotImplementedError() 33 | -------------------------------------------------------------------------------- /appimagelint/checks/libkeyutils_abi.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from . 
import GnuAbiCheckBase 4 | from .._logging import make_logger 5 | from ..cache.package_version_maps import DebianGlibcVersionsCache, UbuntuGlibcVersionsCache 6 | from ..models import AppImage 7 | from ..services import GnuLibVersionSymbolsFinder 8 | 9 | 10 | class LibkeyfileABICheck(GnuAbiCheckBase): 11 | def __init__(self, appimage: AppImage): 12 | super().__init__(appimage) 13 | 14 | @staticmethod 15 | def get_logger() -> logging.Logger: 16 | return make_logger("libkeyfile_abi_check") 17 | 18 | @staticmethod 19 | def name(): 20 | return "libkeyfile ABI check" 21 | 22 | @classmethod 23 | def _detect_versions_in_file(cls, path): 24 | return GnuLibVersionSymbolsFinder.detect_gnu_lib_versions("KEYFILE_", path) 25 | 26 | @classmethod 27 | def _get_debian_versions_map(cls): 28 | return DebianGlibcVersionsCache.get_data() 29 | 30 | @classmethod 31 | def _get_ubuntu_versions_map(cls): 32 | return UbuntuGlibcVersionsCache.get_data() 33 | -------------------------------------------------------------------------------- /appimagelint/cache/codebase_hasher.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import os 3 | 4 | from typing import Iterable 5 | 6 | from appimagelint import cache 7 | 8 | 9 | class CodebaseHasher: 10 | def __init__(self, modules: Iterable=None): 11 | if modules is None: 12 | modules = [cache] 13 | 14 | self._modules = modules 15 | 16 | @staticmethod 17 | def _get_module_path(module): 18 | return module.__file__ 19 | 20 | def _calculate_hash(self, digest_impl): 21 | d = digest_impl() 22 | 23 | for module in self._modules: 24 | dirpath = os.path.dirname(self._get_module_path(module)) 25 | for root, dirs, files in os.walk(dirpath): 26 | for file in files: 27 | path = os.path.join(root, file) 28 | if not path.endswith(".py"): 29 | continue 30 | 31 | with open(path, "rb") as f: 32 | d.update(f.read()) 33 | 34 | return d.hexdigest() 35 | 36 | def digest_md5(self): 37 | return self._calculate_hash(hashlib.md5) 38 | -------------------------------------------------------------------------------- /appimagelint/services/binarywalker.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | class BinaryWalker: 5 | """ 6 | Walks a directory using os.walk() and yields all ELF binaries within. 
7 | """ 8 | 9 | def __init__(self, path: str): 10 | self._root_path = path 11 | self._walk_res = os.walk(self._root_path) 12 | self._last_res = next(self._walk_res) 13 | 14 | def __iter__(self): 15 | return self 16 | 17 | def __next__(self): 18 | def is_elf(path): 19 | with open(path, "rb") as f: 20 | sig = f.read(4) 21 | return sig == b"\x7fELF" 22 | 23 | while self._last_res is not None: 24 | while self._last_res[2]: 25 | to_test = self._last_res[2].pop() 26 | 27 | abspath = os.path.join(self._last_res[0], to_test) 28 | 29 | if os.path.isfile(abspath): 30 | if not os.path.islink(abspath): 31 | if is_elf(abspath): 32 | return abspath 33 | 34 | self._last_res = next(self._walk_res) 35 | 36 | raise StopIteration 37 | -------------------------------------------------------------------------------- /appimagelint/_logging.py: -------------------------------------------------------------------------------- 1 | import coloredlogs 2 | import logging 3 | 4 | 5 | def setup(loglevel=logging.INFO, with_timestamps=False, force_colors=False, log_locations=False): 6 | fmt = "%(name)s[%(process)s] [%(levelname)s] %(message)s" 7 | 8 | if with_timestamps: 9 | fmt = "%(asctime)s " + fmt 10 | 11 | if log_locations: 12 | fmt = "%(pathname)s:%(lineno)d:\n" + fmt 13 | 14 | # basic logging setup 15 | styles = coloredlogs.DEFAULT_FIELD_STYLES 16 | styles["pathname"] = { 17 | "color": "magenta", 18 | } 19 | styles["levelname"] = { 20 | "color": "cyan", 21 | } 22 | 23 | kwargs = dict(fmt=fmt, styles=styles) 24 | 25 | if force_colors: 26 | kwargs["isatty"] = True 27 | 28 | coloredlogs.install(loglevel, **kwargs) 29 | 30 | # set up logger 31 | logger = logging.getLogger("main") 32 | logger.setLevel(loglevel) 33 | 34 | 35 | def make_logger(context: str = ""): 36 | logger_prefix = "appimagelint" 37 | 38 | logger_name = logger_prefix 39 | 40 | if context: 41 | logger_name += "." + str(context) 42 | 43 | return logging.getLogger(logger_name) 44 | -------------------------------------------------------------------------------- /appimagelint/cache/distro_codenames.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import requests 4 | 5 | from . 
import JSONCacheImplBase 6 | from .paths import debian_codename_map_path 7 | 8 | 9 | class DebianCodenameMapCache(JSONCacheImplBase): 10 | @classmethod 11 | def _cache_file_path(cls): 12 | return debian_codename_map_path() 13 | 14 | @classmethod 15 | def _fetch_data(cls): 16 | # avoids circular import issues 17 | from .common import get_debian_releases 18 | 19 | rv = {} 20 | 21 | for suite in get_debian_releases(): 22 | headers = {"Range": "bytes=0-512"} 23 | url = "https://ftp.fau.de/debian/dists/{}/Release".format(suite) 24 | response = requests.get(url, headers=headers) 25 | response.raise_for_status() 26 | 27 | for line in response.text.splitlines(): 28 | prefix = "Codename:" 29 | 30 | if line.startswith(prefix): 31 | rv[suite] = line.split(prefix)[-1].strip() 32 | break 33 | else: 34 | raise ValueError("could not find Release file for suite {} on Debian mirror".format(suite)) 35 | 36 | return rv 37 | -------------------------------------------------------------------------------- /appimagelint/_util.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | 4 | from typing import Iterable 5 | 6 | 7 | def make_tempdir(): 8 | # outside CI environments it's wise to use a ramdisk for storing the temporary data 9 | temp_base = None 10 | if os.path.isdir("/dev/shm"): 11 | temp_base = "/dev/shm" 12 | 13 | return tempfile.TemporaryDirectory(suffix=".tmp", prefix="appimagelint-", dir=temp_base) 14 | 15 | 16 | def get_version_key(version) -> Iterable[int]: 17 | """ 18 | Converts a version into keys used for sorting, max() etc, by human standards (e.g., to recognize that 0.1.10 is 19 | larger than 0.1.9). 20 | 21 | :param version: version to parse and convert 22 | :return: keys for version 23 | """ 24 | return [int(i) for i in version.split(".")] 25 | 26 | 27 | def max_version(data: Iterable[str]) -> str: 28 | """ 29 | Get maximum version by human standards (semver like: 0.1.10 is larger than 0.1.9). 30 | 31 | Basically a wrapper of max() with a custom key command. 32 | 33 | :param data: versions in x.y.z... 
format (where x,y,z are ints) 34 | :return: max version 35 | """ 36 | 37 | try: 38 | return max(data, key=get_version_key) 39 | except ValueError as e: 40 | raise ValueError("passed empty sequence") from e 41 | -------------------------------------------------------------------------------- /appimagelint/services/result_formatter.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | from appimagelint.colors import Colors 5 | from appimagelint.models.test_result import TestResult 6 | from appimagelint.symbols import Symbols 7 | 8 | 9 | class ResultFormatter: 10 | _default_format = "[{symbol}] {message}" 11 | 12 | def __init__(self, fmt: str = None, use_colors: bool = None): 13 | self._fmt = fmt or self._default_format 14 | 15 | if use_colors is None: 16 | use_colors = sys.stdin.isatty() 17 | self._use_colors = use_colors 18 | 19 | def format(self, result: TestResult): 20 | symbol = "" 21 | 22 | if self._use_colors: 23 | if result.success(): 24 | symbol += Colors.GREEN 25 | symbol += Symbols.CHECK 26 | else: 27 | symbol += Colors.RED 28 | symbol += Symbols.CROSS 29 | 30 | symbol += Colors.ENDC 31 | 32 | else: 33 | if result.success(): 34 | symbol += Symbols.CHECK 35 | else: 36 | symbol += Symbols.CROSS 37 | 38 | return self._fmt.format(symbol=symbol, message=result.message()) 39 | 40 | def __repr__(self): 41 | return "ResultFormatter({})".format(repr(self._default_format)) 42 | -------------------------------------------------------------------------------- /appimagelint/reports/json_report.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from appimagelint._logging import make_logger 4 | from . import ReportBase 5 | 6 | 7 | class JSONReport(ReportBase): 8 | @staticmethod 9 | def _get_logger(): 10 | return make_logger("json_report") 11 | 12 | def _make_json(self): 13 | obj = { 14 | "results": { 15 | path: [ 16 | { 17 | "name": check.name(), 18 | "results": [ 19 | { 20 | "id": res.id(), 21 | "success": res.success(), 22 | "message": res.message() 23 | } for res in results 24 | ] 25 | } for check, results in checks.items() 26 | ] for path, checks in self._results.items() 27 | } 28 | } 29 | 30 | return obj 31 | 32 | def to_str(self): 33 | return json.dumps(self._make_json(), indent=4) 34 | 35 | def write(self, path: str): 36 | logger = self._get_logger() 37 | 38 | logger.info("Writing JSON report to {}".format(path)) 39 | with open(path, "w") as f: 40 | json.dump(self._make_json(), f, indent=4) 41 | -------------------------------------------------------------------------------- /appimagelint/cache/io.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import time 4 | from typing import Union, Iterable, Mapping 5 | 6 | from . 
import OutOfDateError, _get_logger 7 | from .codebase_hasher import CodebaseHasher 8 | 9 | 10 | # use a method to simulate "const values" 11 | def cache_timeout(): 12 | # update caches every week 13 | return 7 * 24 * 60 * 60 14 | 15 | 16 | def store_json(path: str, data: Union[Mapping, Iterable]): 17 | json_root = { 18 | "codebase_digest": CodebaseHasher().digest_md5(), 19 | "data": data, 20 | } 21 | 22 | with open(path, "w") as f: 23 | json.dump(json_root, f, indent=2) 24 | 25 | 26 | def load_json(path): 27 | try: 28 | with open(path, "r") as f: 29 | json_root = json.load(f) 30 | except FileNotFoundError as e: 31 | raise OutOfDateError("cache file missing, update required") from e 32 | 33 | cached_codebase_digest = json_root["codebase_digest"] 34 | data = json_root["data"] 35 | 36 | mtime = os.path.getmtime(path) 37 | if mtime + cache_timeout() < time.time(): 38 | # should be safe to ignore, forwarding data 39 | raise OutOfDateError("cache file outdated, update required", cached_data=data) 40 | 41 | codebase_digest = CodebaseHasher().digest_md5() 42 | try: 43 | if cached_codebase_digest != codebase_digest: 44 | # should be safe to ignore, forwarding data 45 | raise OutOfDateError("codebase changed since last update, forcing update", cached_data=data) 46 | 47 | return data 48 | 49 | # capture all "invalid data format" kind of errors and force update 50 | except KeyError as e: 51 | raise OutOfDateError("file in invalid format, forcing update") from e 52 | -------------------------------------------------------------------------------- /ci/build-appimage.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | set -x 4 | set -e 5 | 6 | # use RAM disk if possible 7 | if [ "$CI" == "" ] && [ -d /dev/shm ]; then 8 | TEMP_BASE=/dev/shm 9 | else 10 | TEMP_BASE=/tmp 11 | fi 12 | 13 | BUILD_DIR=$(mktemp -d -p "$TEMP_BASE" appimagelint-build-XXXXXX) 14 | 15 | cleanup () { 16 | if [ -d "$BUILD_DIR" ]; then 17 | rm -rf "$BUILD_DIR" 18 | fi 19 | } 20 | 21 | trap cleanup EXIT 22 | 23 | # store repo root as variable 24 | REPO_ROOT=$(readlink -f $(dirname $(dirname "$0"))) 25 | OLD_CWD=$(readlink -f .) 26 | 27 | SETUPPY_VERSION=$(cat "$REPO_ROOT"/setup.py | grep version | cut -d'"' -f2) 28 | 29 | pushd "$BUILD_DIR" 30 | 31 | mkdir -p AppDir 32 | 33 | COMMIT=$(cd "$REPO_ROOT" && git rev-parse --short HEAD) 34 | echo "$COMMIT" > AppDir/commit 35 | 36 | wget https://github.com/TheAssassin/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage 37 | wget https://raw.githubusercontent.com/linuxdeploy/linuxdeploy-plugin-conda/master/linuxdeploy-plugin-conda.sh 38 | 39 | chmod +x linuxdeploy*.AppImage 40 | chmod +x linuxdeploy*.sh 41 | 42 | export CONDA_PACKAGES="Pillow" 43 | export PIP_REQUIREMENTS="." 
44 | export PIP_WORKDIR="$REPO_ROOT" 45 | export OUTPUT=appimagelint-x86_64.AppImage 46 | export VERSION="$SETUPPY_VERSION-git$COMMIT" 47 | 48 | mkdir -p AppDir/usr/share/metainfo/ && cp "$REPO_ROOT"/resources/appimagelint.appdata.xml AppDir/usr/share/metainfo/ 49 | 50 | ./linuxdeploy-x86_64.AppImage --appdir AppDir --plugin conda \ 51 | -e $(which readelf) \ 52 | -i "$REPO_ROOT"/resources/appimagelint.svg -d "$REPO_ROOT"/resources/appimagelint.desktop \ 53 | --output appimage --custom-apprun "$REPO_ROOT"/resources/AppRun.sh 54 | 55 | # test AppImage with itself 56 | ./appimagelint-x86_64.AppImage appimagelint-x86_64.AppImage --json-report appimagelint-report.json 57 | cat appimagelint-report.json 58 | 59 | mv appimagelint*.AppImage "$OLD_CWD" 60 | -------------------------------------------------------------------------------- /appimagelint/cache/runtime_cache.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import time 4 | 5 | from appimagelint.cache import OutOfDateError, _get_logger 6 | from .io import cache_timeout 7 | from .paths import data_cache_path 8 | from . import CacheBase 9 | 10 | 11 | class AppImageRuntimeCache(CacheBase): 12 | @classmethod 13 | def _cached_runtime_path(cls): 14 | return os.path.join(data_cache_path(), "runtime") 15 | 16 | @classmethod 17 | def force_update(cls): 18 | path = cls._cached_runtime_path() 19 | 20 | try: 21 | subprocess.check_call([ 22 | "wget", "-q", "https://github.com/AppImage/AppImageKit/releases/download/continuous/runtime-x86_64", 23 | "-O", path 24 | ]) 25 | 26 | # no need to bother AppImageLauncher etc. 27 | os.chmod(path, 0o755) 28 | with open(path, "rb+") as f: 29 | f.seek(8) 30 | f.write(b"\x00\x00\x00") 31 | 32 | except Exception: 33 | # clean up data after exception if possible to make the tool force an update during the next run 34 | if os.path.exists(path): 35 | os.remove(path) 36 | 37 | raise 38 | 39 | 40 | @classmethod 41 | def get_data(cls, raise_on_error=False) -> str: 42 | logger = _get_logger() 43 | path = cls._cached_runtime_path() 44 | 45 | update_needed = False 46 | 47 | if not os.path.exists(path): 48 | logger.debug("AppImage runtime file not found") 49 | update_needed = True 50 | else: 51 | mtime = os.path.getmtime(cls._cached_runtime_path()) 52 | if mtime + cache_timeout() < time.time(): 53 | logger.debug("AppImage runtime older than cache timeout") 54 | update_needed = True 55 | 56 | if update_needed: 57 | try: 58 | logger.debug("updating AppImage runtime") 59 | cls.force_update() 60 | except Exception: 61 | # can be handled gracefully by the user, if required 62 | if raise_on_error: 63 | raise 64 | else: 65 | logger.warning("AppImage runtime needs update, but update failed, skipping") 66 | 67 | return path 68 | -------------------------------------------------------------------------------- /appimagelint/cache/package_version_maps.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, Union, Mapping, Iterable 2 | 3 | from . 
import CacheBase, JSONCacheImplBase 4 | from .common import get_debian_package_versions_map, get_debian_glibcxx_versions_map, get_ubuntu_glibcxx_versions_map, \ 5 | get_ubuntu_package_versions_map 6 | from .paths import ubuntu_glibcxx_versions_data_path, debian_glibcxx_versions_data_path, \ 7 | debian_glibc_versions_data_path, ubuntu_glibc_versions_data_path 8 | 9 | 10 | def _make_cache_class(distro: str, package: str, get_map_callback: Callable, file_path: str): 11 | class _PackageVersionMap(JSONCacheImplBase): 12 | @classmethod 13 | def _cache_file_path(cls): 14 | return file_path 15 | 16 | @classmethod 17 | def _fetch_data(cls): 18 | cls._get_logger().info("Fetching {} version data for {}".format(package, distro)) 19 | return get_map_callback() 20 | 21 | return _PackageVersionMap 22 | 23 | 24 | DebianGlibcVersionsCache = _make_cache_class( 25 | "debian", "glibc", lambda: get_debian_package_versions_map("glibc"), debian_glibc_versions_data_path() 26 | ) 27 | DebianGlibcxxVersionsCache = _make_cache_class( 28 | "debian", "glibcxx", get_debian_glibcxx_versions_map, debian_glibcxx_versions_data_path() 29 | ) 30 | UbuntuGlibcVersionsCache = _make_cache_class( 31 | "ubuntu", "glibc", lambda: get_ubuntu_package_versions_map("glibc"), ubuntu_glibc_versions_data_path() 32 | ) 33 | UbuntuGlibcxxVersionsCache = _make_cache_class( 34 | "ubuntu", "glibcxx", get_ubuntu_glibcxx_versions_map, ubuntu_glibcxx_versions_data_path() 35 | ) 36 | 37 | 38 | # "aggregator" for the other caches 39 | # can be used for convenient updates 40 | # does not return any data 41 | class PackageVersionMapsCache(CacheBase): 42 | _classes = [ 43 | DebianGlibcVersionsCache, 44 | DebianGlibcxxVersionsCache, 45 | UbuntuGlibcVersionsCache, 46 | UbuntuGlibcxxVersionsCache 47 | ] 48 | 49 | @classmethod 50 | def force_update(cls): 51 | for c in cls._classes: 52 | c.force_update() 53 | 54 | @classmethod 55 | def get_data(cls, raise_on_error=False) -> Union[Mapping, Iterable]: 56 | raise NotImplementedError 57 | 58 | @classmethod 59 | def update_if_necessary(cls): 60 | for c in cls._classes: 61 | c.get_data() 62 | -------------------------------------------------------------------------------- /appimagelint/services/gnu_lib_versions_symbol_finder.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | from typing import List 4 | 5 | from .._logging import make_logger 6 | from ..services import BinaryWalker 7 | 8 | 9 | class GnuLibVersionSymbolsFinder: 10 | @classmethod 11 | def _get_logger(self): 12 | return make_logger("gnu_lib_versions_symbols_finder") 13 | 14 | def __init__(self, query_reqs: bool = True, query_deps: bool = False): 15 | self._query_reqs = query_reqs 16 | self._query_deps = query_deps 17 | 18 | def detect_gnu_lib_versions(self, pattern, path): 19 | env = dict(os.environ) 20 | env["LC_ALL"] = "C" 21 | env["LANGUAGE"] = "C" 22 | 23 | versions = [] 24 | 25 | data: str = subprocess.check_output(["readelf", "-V", path], env=env).decode() 26 | lines: List[str] = data.splitlines() 27 | 28 | elf_sections = [] 29 | if self._query_deps: 30 | elf_sections.append(".gnu.version_d") 31 | if self._query_reqs: 32 | elf_sections.append(".gnu.version_r") 33 | 34 | for elf_section_name in elf_sections: 35 | in_req_section = False 36 | for line in lines: 37 | if elf_section_name in line: 38 | in_req_section = True 39 | elif not in_req_section: 40 | continue 41 | 42 | # end of section 43 | if not line.strip(): 44 | break 45 | 46 | parts = line.split() 47 | 48 | for 
index, part in enumerate(parts): 49 | try: 50 | if part.lower() != "name:": 51 | continue 52 | except IndexError: 53 | continue 54 | 55 | symbol = parts[index+1] 56 | 57 | if pattern in symbol: 58 | version = symbol.split(pattern)[1] 59 | 60 | for c in version: 61 | if c not in "0123456789.": 62 | self._get_logger().debug("ignoring invalid version {} (parsed from {})".format( 63 | repr(version), repr(symbol)) 64 | ) 65 | break 66 | else: 67 | versions.append(version) 68 | 69 | return versions 70 | 71 | def check_all_executables(self, prefix: str, dirpath: str): 72 | if not os.path.isdir(dirpath): 73 | raise FileNotFoundError("could not find directory {}".format(repr(dirpath))) 74 | 75 | versions = set() 76 | 77 | for binary in BinaryWalker(dirpath): 78 | versions.update(self.detect_gnu_lib_versions(prefix, binary)) 79 | 80 | return versions 81 | -------------------------------------------------------------------------------- /appimagelint/services/appimagemounter.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shlex 3 | import subprocess 4 | 5 | from ..models import AppImage 6 | from .._logging import make_logger 7 | 8 | 9 | class AppImageMounter: 10 | _logger = make_logger("appimagemounter") 11 | 12 | def __init__(self, appimage: AppImage, custom_runtime_path: str = None): 13 | self._appimage = appimage 14 | self._custom_runtime = custom_runtime_path 15 | 16 | self._mountpoint: str = None 17 | self._proc: subprocess.Popen = None 18 | 19 | def mountpoint(self): 20 | return self._mountpoint 21 | 22 | def mount(self): 23 | self._logger.debug("mounting AppImage {}".format(self._appimage.path())) 24 | 25 | env = dict(os.environ) 26 | 27 | if self._custom_runtime: 28 | self._logger.debug("using custom runtime to mount AppImage") 29 | env["TARGET_APPIMAGE"] = os.path.abspath(self._appimage.path()) 30 | args = [self._custom_runtime] 31 | else: 32 | args = [self._appimage.path()] 33 | 34 | args.append("--appimage-mount") 35 | 36 | self._logger.debug("calling {}".format(" ".join((shlex.quote(i) for i in args)))) 37 | self._proc = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) 38 | self._logger.debug("process ID: {}".format(self._proc.pid)) 39 | 40 | while True: 41 | # it's an error if we couldn't read the mountpoint from stdout but the process terminated 42 | if self._proc.poll() is not None: 43 | raise OSError("process exited before we could read AppImage mountpoint" 44 | "(exit code {})".format(self._proc.poll())) 45 | 46 | line = self._proc.stdout.readline().decode().strip(" \t\n") 47 | self._logger.debug("read line from stdout: {}".format(line)) 48 | 49 | if os.path.exists(line): 50 | self._mountpoint = line 51 | break 52 | 53 | self._logger.debug("mount path: {}".format(self._mountpoint)) 54 | 55 | def unmount(self): 56 | self._logger.debug("unmounting AppImage") 57 | 58 | try: 59 | self._proc.terminate() 60 | retcode = self._proc.wait(5) 61 | except subprocess.TimeoutExpired: 62 | self._logger.debug("failed to terminate process normally, killing") 63 | 64 | try: 65 | self._proc.kill() 66 | retcode = self._proc.wait(12) 67 | except subprocess.TimeoutExpired: 68 | self._logger.debug("failed to kill process") 69 | raise 70 | 71 | # TODO: check error code 72 | 73 | def __enter__(self) -> str: 74 | self.mount() 75 | return self.mountpoint() 76 | 77 | def __exit__(self, exc_type, exc_val, exc_tb): 78 | self.unmount() 79 | self._mountpoint = None 80 | 
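
The mounter is normally driven through `AppImage.mount()` as a context manager, which is how the checks use it (see `gnu_abi_check_base.py` and `icons.py`). A minimal illustrative sketch, not part of the repository; the AppImage path is a placeholder:

# illustrative usage sketch (assumption: run from an environment where appimagelint is importable)
from appimagelint.models import AppImage

appimage = AppImage("/path/to/SomeApp-x86_64.AppImage")  # placeholder path

# AppImage.mount() returns an AppImageMounter; entering the context mounts the
# AppImage and yields the mountpoint, leaving the context unmounts it again
with appimage.mount() as mountpoint:
    print(mountpoint)
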
-------------------------------------------------------------------------------- /appimagelint/cache/json_cache_impl_base.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Iterable, Mapping, Union 3 | 4 | from . import load_json, OutOfDateError, _get_logger, store_json, CacheBase 5 | 6 | 7 | class JSONCacheImplBase(CacheBase): 8 | """ 9 | Template method kind of class that requires very little configuration by actual instances and implements most 10 | functionality already, based on primitives. 11 | """ 12 | 13 | @classmethod 14 | def _get_logger(cls): 15 | return _get_logger() 16 | 17 | @classmethod 18 | def _cache_file_path(cls): 19 | """ 20 | Get cache file path. Must be overridden by subclasses. 21 | :return: cache file's path 22 | """ 23 | raise NotImplementedError 24 | 25 | @classmethod 26 | def _fetch_data(cls): 27 | """ 28 | Fetch cache. Must be overridden by subclasses. 29 | :return: cache file's path 30 | """ 31 | raise NotImplementedError 32 | 33 | @classmethod 34 | def _load(cls): 35 | return load_json(cls._cache_file_path()) 36 | 37 | @classmethod 38 | def _store(cls, data): 39 | store_json(cls._cache_file_path(), data) 40 | 41 | @classmethod 42 | def force_update(cls): 43 | """ 44 | Force cache update. 45 | :return: 46 | """ 47 | 48 | data = cls._fetch_data() 49 | cls._store(data) 50 | 51 | @classmethod 52 | def get_data(cls, raise_on_error=False) -> Union[Mapping, Iterable]: 53 | """ 54 | Returns cached data. 55 | If the data is out of date, the method will attempt to update them (if possible). 56 | In case fetching the latest data fails, the method will return the cached data anyway unless raise_on_error 57 | is set to True. 58 | If this is impossible, an :class:`OutOfDateError` is thrown even if raise_on_error is set to False. 59 | 60 | :return: data represented by cache class 61 | :raises OutOfDateError: in case data is out of date, see method docstring for details 62 | """ 63 | 64 | logger = cls._get_logger() 65 | 66 | cached_data = None 67 | 68 | try: 69 | cached_data = cls._load() 70 | except OutOfDateError as e: 71 | logger.debug("OutOfDateError: {}".format(" ".join(e.args))) 72 | 73 | # store cached data for use in next block (if possible, i.e., it's valid data) 74 | if e.cached_data is not None: 75 | cached_data = e.cached_data 76 | else: 77 | logger.debug("{} still up to date, no update required".format(os.path.basename(cls._cache_file_path()))) 78 | return cached_data 79 | 80 | logger.debug("data out of date, updating") 81 | 82 | try: 83 | new_data = cls._fetch_data() 84 | except Exception as e: 85 | if not raise_on_error: 86 | if cached_data is not None: 87 | logger.warning("codebase changed since last update, but updating failed, using cached data") 88 | logger.exception(e) 89 | return cached_data 90 | else: 91 | raise 92 | 93 | raise 94 | 95 | cls._store(new_data) 96 | return new_data 97 | -------------------------------------------------------------------------------- /appimagelint/cli.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import os 4 | import sys 5 | 6 | from appimagelint.checks import IconsCheck 7 | from .cache.runtime_cache import AppImageRuntimeCache 8 | from .reports import JSONReport 9 | from .services.result_formatter import ResultFormatter 10 | from .models import AppImage 11 | from . 
import _logging 12 | from .checks import GlibcABICheck, GlibcxxABICheck 13 | 14 | 15 | def get_version(): 16 | try: 17 | import pkg_resources 18 | version = pkg_resources.require("appimagelint")[0].version 19 | except ImportError: 20 | version = "unknown" 21 | 22 | APPDIR = os.environ.get("APPDIR", None) 23 | 24 | git_commit = "unknown" 25 | 26 | if APPDIR is not None: 27 | try: 28 | with open(os.path.join(APPDIR, "commit")) as f: 29 | git_commit = f.read().strip(" \n\r") 30 | except FileNotFoundError: 31 | pass 32 | 33 | version += "-git" + git_commit 34 | 35 | return version 36 | 37 | 38 | def parse_args(): 39 | parser = argparse.ArgumentParser( 40 | prog="appimagelint", 41 | description="Run compatibility and other checks on AppImages automatically, " 42 | "and provide human-understandable feedback" 43 | ) 44 | 45 | parser.add_argument("--version", 46 | dest="display_version", 47 | action="version", version=get_version(), 48 | help="Display version and exit" 49 | ) 50 | 51 | parser.add_argument("--debug", 52 | dest="loglevel", 53 | action="store_const", const=logging.DEBUG, default=logging.INFO, 54 | help="Display debug messages") 55 | 56 | parser.add_argument("--log-source-location", 57 | dest="log_message_locations", 58 | action="store_const", const=True, default=False, 59 | help="Print message locations (might be picked up by IDEs to allow for jumping to the source)") 60 | 61 | parser.add_argument("--log-timestamps", 62 | dest="log_timestamps", 63 | action="store_const", const=True, default=False, 64 | help="Log timestamps (useful for debugging build times etc.)") 65 | 66 | parser.add_argument("--force-colors", 67 | dest="force_colors", 68 | action="store_const", const=True, default=False, 69 | help="Force colored output") 70 | 71 | parser.add_argument("--json-report", 72 | dest="json_report", nargs="?", default=None, 73 | help="Write results to file in machine-readable form (JSON)") 74 | 75 | parser.add_argument("path", 76 | nargs="+", 77 | help="AppImage to review") 78 | 79 | args = parser.parse_args() 80 | 81 | return args 82 | 83 | 84 | def run(): 85 | args = parse_args() 86 | 87 | if getattr(args, "display_version", False): 88 | print(get_version()) 89 | return 90 | 91 | # setup 92 | _logging.setup( 93 | args.loglevel, 94 | with_timestamps=args.log_timestamps, 95 | force_colors=args.force_colors, 96 | log_locations=args.log_message_locations, 97 | ) 98 | 99 | # get logger for CLI 100 | logger = _logging.make_logger("cli") 101 | 102 | # need up to date runtime to be able to read the mountpoint from stdout (was fixed only recently) 103 | # also, it's safer not to rely on the embedded runtime 104 | custom_runtime = AppImageRuntimeCache.get_data() 105 | 106 | # results logs are written immediately, but maybe we want to generate additional reports 107 | # for this purpose, we collect all results 108 | results = {} 109 | 110 | try: 111 | for path in args.path: 112 | results[path] = {} 113 | 114 | logger.info("Checking AppImage {}".format(path)) 115 | 116 | appimage = AppImage(path, custom_runtime=custom_runtime) 117 | 118 | kwargs = dict() 119 | if args.force_colors: 120 | kwargs["use_colors"] = True 121 | 122 | formatter = ResultFormatter(**kwargs) 123 | 124 | for check_cls in [GlibcABICheck, GlibcxxABICheck, IconsCheck]: 125 | logger.info("Running check \"{}\"".format(check_cls.name())) 126 | check = check_cls(appimage) 127 | 128 | results[path][check] = [] 129 | 130 | for testres in check.run(): 131 | results[path][check].append(testres) 132 | 
check.get_logger().info(formatter.format(testres)) 133 | 134 | if args.json_report: 135 | report = JSONReport(results) 136 | report.write(args.json_report) 137 | 138 | except KeyboardInterrupt: 139 | logger.critical("process interrupted by user") 140 | sys.exit(2) 141 | -------------------------------------------------------------------------------- /appimagelint/cache/common.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | import os 3 | 4 | import requests 5 | import subprocess 6 | 7 | from . import DebianCodenameMapCache 8 | from ..services import GnuLibVersionSymbolsFinder 9 | from .._logging import make_logger 10 | from .._util import make_tempdir, max_version 11 | 12 | 13 | def _get_logger(): 14 | return make_logger("setup") 15 | 16 | 17 | def get_debian_package_versions_map(package_name: str): 18 | logger = _get_logger() 19 | 20 | logger.info("Fetching {} package versions from Debian sources API".format(package_name)) 21 | 22 | response = requests.get("https://sources.debian.org/api/src/{}/".format(package_name)) 23 | response.raise_for_status() 24 | 25 | json_data = response.json() 26 | 27 | if "error" in json_data: 28 | raise ValueError("invalid response from Debian sources API: {}".format(json_data["error"])) 29 | 30 | versions_map = {} 31 | 32 | for version in json_data["versions"]: 33 | parsed_version = ".".join(version["version"].split(".")[:2]).split("-")[0] 34 | 35 | for suite in version["suites"]: 36 | # simple search for maximum supported version 37 | if suite not in versions_map or parsed_version > versions_map[suite]: 38 | versions_map[suite] = parsed_version 39 | 40 | return versions_map 41 | 42 | 43 | def get_ubuntu_releases(): 44 | releases = ("trusty", "xenial", "bionic", "cosmic", "disco") 45 | return releases 46 | 47 | 48 | def get_debian_releases(): 49 | releases = ("oldstable", "stable", "testing", "unstable",) 50 | return releases 51 | 52 | 53 | def get_packages_gz_from_ftp_mirror(distro, release): 54 | url = "https://ftp.fau.de/{}/dists/{}/main/binary-amd64/Packages.gz".format(distro, release) 55 | response = requests.get(url) 56 | 57 | response.raise_for_status() 58 | 59 | data = gzip.decompress(response.content).decode() 60 | 61 | return data 62 | 63 | 64 | def get_ubuntu_package_versions_map(package_name: str): 65 | logger = _get_logger() 66 | 67 | logger.info("Fetching {} package versions from Ubuntu FTP mirror".format(package_name)) 68 | 69 | versions_map = {} 70 | 71 | releases = get_ubuntu_releases() 72 | for release in releases: 73 | data = get_packages_gz_from_ftp_mirror("ubuntu", release) 74 | 75 | # TODO: implement as binary search 76 | pkg_off = data.find("Package: {}".format(package_name)) 77 | pkg_ver_off = data.find("Version:", pkg_off) 78 | next_pkg_off = data.find("Package:".format(package_name), pkg_off+1) 79 | 80 | if pkg_ver_off == -1 or pkg_ver_off > next_pkg_off: 81 | raise ValueError() 82 | 83 | version = data[pkg_ver_off:pkg_ver_off+512].splitlines()[0].split("Version: ")[-1] 84 | parsed_version = ".".join(version.split(".")[:3]).split("-")[0] 85 | versions_map[release] = parsed_version 86 | 87 | return versions_map 88 | 89 | 90 | def get_glibcxx_version_from_debian_package(url: str): 91 | logger = _get_logger() 92 | 93 | with make_tempdir() as d: 94 | deb_path = os.path.join(d, "package.deb") 95 | 96 | logger.debug("Downloading {} to {}".format(url, deb_path)) 97 | 98 | out_path = os.path.join(d, "out/") 99 | 100 | subprocess.check_call(["wget", "-q", url, "-O", deb_path], 
stdout=subprocess.DEVNULL) 101 | subprocess.check_call(["dpkg", "-x", deb_path, out_path], stdout=subprocess.DEVNULL) 102 | 103 | finder = GnuLibVersionSymbolsFinder(query_deps=True, query_reqs=False) 104 | return finder.check_all_executables("GLIBCXX_", out_path) 105 | 106 | 107 | def get_debian_glibcxx_versions_map(): 108 | rv = {} 109 | 110 | debian_codenames = DebianCodenameMapCache.get_data() 111 | 112 | releases = [debian_codenames[i] for i in get_debian_releases()] 113 | 114 | for release in releases: 115 | url = get_glibcxx_package_url("debian", release) 116 | versions = get_glibcxx_version_from_debian_package(url) 117 | rv[release] = max_version(versions) 118 | 119 | return rv 120 | 121 | 122 | def get_glibcxx_package_url(distro: str, release: str): 123 | data = get_packages_gz_from_ftp_mirror(distro, release) 124 | 125 | bin_pkg_name = "libstdc++6" 126 | 127 | pkg_off = data.find("Package: {}".format(bin_pkg_name)) 128 | pkg_path_off = data.find("Filename:", pkg_off) 129 | next_pkg_off = data.find("Package:", pkg_off + 1) 130 | 131 | if pkg_path_off > next_pkg_off: 132 | raise ValueError("could not find Filename: entry for package {}".format(bin_pkg_name)) 133 | 134 | pkg_path = data[pkg_path_off:pkg_path_off+2048].splitlines()[0].split("Filename:")[1].strip() 135 | 136 | url_template = "https://ftp.fau.de/{}/{}"\ 137 | 138 | url = url_template.format(distro, pkg_path) 139 | return url 140 | 141 | 142 | def get_ubuntu_glibcxx_versions_map(): 143 | rv = {} 144 | 145 | for release in get_ubuntu_releases(): 146 | url = get_glibcxx_package_url("ubuntu", release) 147 | versions = get_glibcxx_version_from_debian_package(url) 148 | rv[release] = max_version(versions) 149 | 150 | return rv 151 | -------------------------------------------------------------------------------- /appimagelint/checks/gnu_abi_check_base.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import packaging.version 3 | from typing import Iterator 4 | 5 | from .._logging import make_logger 6 | from ..services import GnuLibVersionSymbolsFinder 7 | from ..cache import DebianCodenameMapCache 8 | from ..cache.common import get_debian_releases, get_ubuntu_releases 9 | from ..models import TestResult 10 | from ..services import BinaryWalker 11 | from ..models import AppImage 12 | from .._util import max_version 13 | from . 
import CheckBase 14 | 15 | 16 | class GnuAbiCheckBase(CheckBase): 17 | _gnu_lib_versions_symbol_finder = GnuLibVersionSymbolsFinder(query_reqs=True, query_deps=False) 18 | 19 | def __init__(self, appimage: AppImage): 20 | super().__init__(appimage) 21 | 22 | @classmethod 23 | def get_logger(cls) -> logging.Logger: 24 | return make_logger("{}_abi_check".format(cls._library_id())) 25 | 26 | @classmethod 27 | def _test_result_id_prefix(cls): 28 | return "{}_abi_check".format(cls._library_id()) 29 | 30 | @staticmethod 31 | def name(): 32 | raise NotImplementedError() 33 | 34 | @classmethod 35 | def _detect_versions_in_file(cls, path): 36 | raise NotImplementedError 37 | 38 | @staticmethod 39 | def _library_id(): 40 | raise NotImplementedError 41 | 42 | def run(self) -> Iterator[TestResult]: 43 | versions = set() 44 | 45 | logger = self.get_logger() 46 | 47 | versions.update(self._detect_versions_in_file(self._appimage.path())) 48 | 49 | logger.info("detected required version for runtime: " 50 | "{}".format(max_version(versions) if versions else "")) 51 | 52 | with self._appimage.mount() as mountpoint: 53 | payload_versions = set() 54 | 55 | for executable in BinaryWalker(mountpoint): 56 | # this check takes advantage of libc embedding static symbols into the binary depending on what 57 | # features are used 58 | # even binaries built on newer platforms may be running on older systems unless such features are used 59 | # example: a simple hello world built on bionic can run fine on trusty just fine 60 | executable_versions = self._detect_versions_in_file(executable) 61 | payload_versions.update(executable_versions) 62 | 63 | versions.update(payload_versions) 64 | 65 | if payload_versions: 66 | logger.info("detected required version for payload: " 67 | "{}".format(max_version(payload_versions) if versions else "")) 68 | 69 | if not versions: 70 | logger.warning("could not find any dependencies, skipping check") 71 | return 72 | 73 | required_version = packaging.version.Version(max_version(versions)) 74 | logger.debug("overall required version: {}".format(required_version)) 75 | 76 | for result in self._check_debian_compat(required_version): 77 | yield result 78 | 79 | for result in self._check_ubuntu_compat(required_version): 80 | yield result 81 | 82 | @classmethod 83 | def _get_debian_codename_map(cls): 84 | return DebianCodenameMapCache.get_data() 85 | 86 | @classmethod 87 | def _get_debian_versions_map(cls): 88 | raise NotImplementedError() 89 | 90 | @classmethod 91 | def _get_ubuntu_versions_map(cls): 92 | raise NotImplementedError() 93 | 94 | @classmethod 95 | def _check_debian_compat(cls, required_version: packaging.version.Version) -> Iterator[TestResult]: 96 | codename_map = cls._get_debian_codename_map() 97 | versions_map = cls._get_debian_versions_map() 98 | 99 | for release in get_debian_releases(): 100 | codename = codename_map[release] 101 | 102 | max_supported_version = None 103 | try: 104 | max_supported_version = versions_map[codename] 105 | except KeyError: 106 | cls.get_logger().warning("could not find version for {}, trying backports".format(release)) 107 | 108 | try: 109 | max_supported_version = versions_map["{}-backports".format(codename)] 110 | except KeyError: 111 | cls.get_logger().error( 112 | "could not find version for {} in backports either, aborting check".format(release) 113 | ) 114 | 115 | if max_supported_version is None: 116 | should_run = False 117 | else: 118 | should_run = required_version <= packaging.version.parse(max_supported_version) 119 | 120 | 
test_result_id = "{}_{}_{}".format(cls._test_result_id_prefix(), "debian", release) 121 | test_result_msg = "AppImage can run on Debian {} ({})".format(release, codename) 122 | 123 | cls.get_logger().debug("Debian {} max supported version: {}".format(release, max_supported_version)) 124 | yield TestResult(should_run, test_result_id, test_result_msg) 125 | 126 | @classmethod 127 | def _check_ubuntu_compat(cls, required_version: packaging.version.Version) -> Iterator[TestResult]: 128 | versions_map = cls._get_ubuntu_versions_map() 129 | 130 | for release in get_ubuntu_releases(): 131 | max_supported_version = versions_map[release] 132 | 133 | should_run = required_version <= packaging.version.Version(max_supported_version) 134 | 135 | test_result_id = "{}_{}_{}".format(cls._test_result_id_prefix(), "ubuntu", release) 136 | test_result_msg = "AppImage can run on Ubuntu {}".format(release) 137 | 138 | cls.get_logger().debug("Ubuntu {} max supported version: {}".format(release, max_supported_version)) 139 | yield TestResult(should_run, test_result_id, test_result_msg) 140 | -------------------------------------------------------------------------------- /appimagelint/checks/icons.py: -------------------------------------------------------------------------------- 1 | import glob 2 | import os.path as op 3 | import re 4 | from typing import Tuple, Union 5 | from xml.etree import ElementTree as ET 6 | 7 | from PIL import Image 8 | 9 | from appimagelint._logging import make_logger 10 | from appimagelint.models import TestResult 11 | from ..models import AppImage 12 | from . import CheckBase 13 | 14 | 15 | class IconsCheck(CheckBase): 16 | _VALID_RESOLUTIONS = (8, 16, 32, 48, 56, 64, 128, 192, 256, 384, 512) 17 | 18 | def __init__(self, appimage: AppImage): 19 | super().__init__(appimage) 20 | 21 | @staticmethod 22 | def name(): 23 | return "Icons validity and location check" 24 | 25 | def run(self): 26 | logger = self.get_logger() 27 | 28 | with self._appimage.mount() as mountpoint: 29 | # find desktop file, get name of icon and look for it in AppDir root 30 | desktop_files = glob.glob(op.join(mountpoint, "*.desktop")) 31 | 32 | # we can of course check the validity of all icon files we find, but there's always one main icon that is 33 | # referenced from the desktop file 34 | main_icon_name = None 35 | 36 | if not desktop_files: 37 | logger.error("Could not find desktop file in root directory") 38 | 39 | else: 40 | logger.debug("Found desktop files: %s", desktop_files) 41 | 42 | desktop_file = desktop_files[0] 43 | logger.info("Extracting icon name from desktop file: %s", desktop_file) 44 | 45 | with open(desktop_file) as f: 46 | # find Icon= entry and get the name of the icon file to look for 47 | # we don't need to check things like "is there just one Icon entry" etc., that's the job of another 48 | # test 49 | desktop_file_contents = f.read() 50 | 51 | # note for self: Python's re doesn't do multiline unless explicitly asked for with re.MULTILINE 52 | match = re.search(r"Icon=(.+)", desktop_file_contents) 53 | 54 | if not match: 55 | logger.error("Could not find Icon= entry in desktop file") 56 | else: 57 | main_icon_name = match.group(1) 58 | 59 | # to be able to filter out non-icon files with the same prefix in the AppDir root 60 | known_image_exts = ("png", "xpm", "svg", "jpg") 61 | 62 | # assuming test broke 63 | # now prove me wrong! 
            root_icon_valid = False

            if main_icon_name is not None:
                if "/" in main_icon_name:
                    logger.error("main icon name is a path, not a filename (contains /)")
                else:
                    # properly escape some "magic" characters in the original filename so they won't be interpreted by glob
                    fixed_main_icon_name = glob.escape(main_icon_name)

                    # build glob pattern
                    pattern = "{}.*".format(fixed_main_icon_name)

                    logger.debug("Trying to find main icon in AppDir root, pattern: {}".format(repr(pattern)))

                    appdir_root_icons = glob.glob(op.join(mountpoint, pattern))

                    if not appdir_root_icons:
                        logger.error("Could not find suitable icon for desktop file's Icon= entry")

                    else:
                        # filter out all files with a not-well-known extension
                        appdir_root_icons = [i for i in appdir_root_icons if
                                             op.splitext(i)[-1].lstrip(".") in known_image_exts]

                        if len(appdir_root_icons) > 1:
                            logger.warning("Multiple matching icons found in AppDir root, checking all")

                        main_icon_check_results = []
                        for icon in appdir_root_icons:
                            valid = self._check_icon_for_valid_resolution(icon)
                            main_icon_check_results.append(valid)

                        # if only one of the checks failed, we can't guarantee a working root icon
                        root_icon_valid = all(main_icon_check_results)

            yield TestResult(root_icon_valid, "icons.valid_appdir_root_icon", "Valid icon in AppDir root")

            # next, check that .DirIcon is available and valid
            dotdiricon_valid = self._check_icon_for_valid_resolution(op.join(mountpoint, ".DirIcon"))
            yield TestResult(dotdiricon_valid, "icons.valid_dotdiricon", "Valid icon file in .DirIcon")

            # now check all remaining icons in usr/share/icons/...
            other_icons_root_path = op.join(mountpoint, "usr/share/icons/**/*.*")
            other_icons = glob.glob(other_icons_root_path, recursive=True)

            # assume everything works
            # prove me wrong!
            other_icons_checks_success = True

            for abs_path in other_icons:
                # check if this icon even belongs here
                rel_path = op.relpath(abs_path, op.join(mountpoint, "usr/share/icons"))
                filename = op.basename(abs_path)

                split_fname = op.splitext(filename)

                # not an error, but means we don't have to process that file any further
                if split_fname[0] != main_icon_name:
                    logger.warning("Icon found whose file name doesn't match the Icon= entry in desktop file: %s",
                                   rel_path)

                else:
                    # also just a warning
                    if split_fname[1].lstrip(".") not in known_image_exts:
                        logger.warning("Icon has invalid extension: %s", split_fname[1])

                    logger.debug("checking whether icon has good resolution in general")
                    if not self._check_icon_for_valid_resolution(abs_path):
                        logger.warning("icon %s has invalid resolution", abs_path)
                        other_icons_checks_success = False

                    logger.debug("checking whether icon is in correct location")

                    # split path into the interesting components: icon theme, resolution and actual filename
                    split_path = rel_path.split("/")

                    # find resolution component in split path
                    path_res = None

                    def extract_res_from_path_component(s):
                        if s == "scalable":
                            return s
                        return tuple([int(i) for i in s.split("x")])

                    if len(split_path) != 4 or split_path[2] != "apps":
                        logger.warning("Icon %s is in non-standard location", rel_path)
                    else:
                        try:
                            path_res = extract_res_from_path_component(split_path[1])
                        except ValueError:
                            pass

                    if not path_res:
                        # something's definitely broken
                        other_icons_checks_success = False

                        logger.warning("Could not find icon resolution at expected position in path, "
                                       "trying to guess from entire path")
                        for comp in split_path:
                            try:
                                path_res = extract_res_from_path_component(comp)
                            except ValueError:
                                pass
                            else:
                                break

                    if not path_res:
                        other_icons_checks_success = False
                        logger.error("Could not extract resolution from icon path, "
                                     "should be usr/share/icons/<theme>/<resolution>/apps/<name>.<ext>")

                    else:
                        # make sure extracted resolution corresponds to the file's resolution
                        actual_res = self._get_icon_res(abs_path)
                        if actual_res != path_res:
                            other_icons_checks_success = False
                            logger.error("Icon resolution doesn't match resolution in path: %s (file resolution is %s)",
                                         path_res, actual_res)

            if not other_icons_checks_success:
                logger.warning("at least one icon below usr/share/icons failed the checks above")

            yield TestResult(other_icons_checks_success, "icons.valid_other_icons", "Other integration icons valid")

    @staticmethod
    def get_logger():
        return make_logger("icon_check")

    def _get_svg_icon_res(self, icon_path: str) -> Union[Tuple[float, float], None]:
        with open(icon_path) as f:
            # own crappy SVG parsing just to get the height and width, if possible
            # only needed for the warning about non-square-ish icons
            et = ET.parse(f)
            root: ET.Element = et.getroot()

            height: str = root.attrib.get("height", None)
            width: str = root.attrib.get("width", None)

            if not height or not width:
                self.get_logger().error("Could not detect resolution of SVG icon: %s", icon_path)
                return

            def repl(s: str, to_remove: str):
                return s.replace(to_remove, "")

            # remove "px" suffixes, if available
            height = repl(height, "px")
            width = repl(width, "px")

            return float(height), float(width)

    def _is_svg(self, icon_path: str) -> bool:
        # "obvious" SVG files can be parsed directly as such
        if op.splitext(icon_path)[-1] == ".svg":
            return True

        # for .DirIcon we actually have to look into the file to check if it's an SVG by guessing based on file
        # contents
        with open(icon_path) as f:
            try:
                data = f.read()

                if "svg" not in data:
                    return False

                root: ET.Element = ET.fromstring(data)

                # Inkscape makes this yield some tags like '{http://www.w3.org/2000/svg}svg'
                # therefore we guess that anything ending in "svg" is good enough
                return root.tag.endswith("svg")

            except Exception:
                # not readable as text or not valid XML, so it can't be an SVG
                pass

        return False

    def _get_icon_res(self, icon_path: str) -> Union[Tuple[int, int], str, None]:
        logger = self.get_logger()

        logger.debug("Opening image: %s", icon_path)

        is_svg = self._is_svg(icon_path)

        if is_svg:
            try:
                resolution = self._get_svg_icon_res(icon_path)
            except ET.ParseError as e:
                logger.debug("Failed to parse SVG file: %s", e)
                return None

            if resolution is None:
                logger.warning("Note: SVG resolution detection is still incomplete")
                return None

            # width and height only need to be equal when rounded to integers
            if round(resolution[0]) != round(resolution[1]):
                self.get_logger().warning("Non-square scalable icon found: %s", icon_path)

            return "scalable"

        else:
            try:
                im = Image.open(icon_path)

                logger.debug("format: %s -- resolution: %s, mode: %s", im.format, im.size, im.mode)
                return im.size

            except Exception:
                logger.exception("Failed to identify icon %s", icon_path)

    def _check_icon_for_valid_resolution(self, icon_path: str) -> bool:
        res = self._get_icon_res(icon_path)

        if not res:
            return False

        if op.splitext(icon_path)[-1] == ".svg":
            return res == "scalable"

        # .DirIcon exception
        if op.basename(icon_path) == ".DirIcon" and res == "scalable":
            return True

        return res[0] == res[1] and res[0] in self._VALID_RESOLUTIONS and res[1] in self._VALID_RESOLUTIONS

--------------------------------------------------------------------------------
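For reference, here is a worked example of the directory layout that the icon location check in icons.py above expects below usr/share/icons. The file name "myapp" is made up, and this helper is only an illustration of the rule, not part of appimagelint:

def parse_icon_path(rel_path: str):
    # rel_path is relative to usr/share/icons and must look like <theme>/<resolution>/apps/<name>.<ext>
    parts = rel_path.split("/")
    if len(parts) != 4 or parts[2] != "apps":
        raise ValueError("unexpected icon location: {}".format(rel_path))
    resolution = parts[1]
    if resolution == "scalable":
        return resolution
    # e.g. "256x256" -> (256, 256)
    return tuple(int(i) for i in resolution.split("x"))


print(parse_icon_path("hicolor/256x256/apps/myapp.png"))   # (256, 256)
print(parse_icon_path("hicolor/scalable/apps/myapp.svg"))  # scalable

The resolution extracted from the path is then compared against the file's actual pixel size (or "scalable" for SVG icons), which is exactly what the "Icon resolution doesn't match resolution in path" error above reports.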
--------------------------------------------------------------------------------
/resources/appimagelint.svg:
--------------------------------------------------------------------------------
[SVG markup not preserved in this extraction; only the file's line numbering (1-321) and an "image/svg+xml" metadata fragment survived.]
--------------------------------------------------------------------------------

Check AppImages for compatibility, best practices, and more. Powerful functionality combined with simple usage and human-friendly feedback.