├── .gitignore ├── MANIFEST.in ├── xbstrap ├── __main__.py ├── mirror │ ├── __main__.py │ └── __init__.py ├── pipeline │ ├── __main__.py │ └── __init__.py ├── cli_utils.py ├── exceptions.py ├── subpkgs.py ├── xbps_utils.py ├── util.py ├── schema.yml ├── vcs_utils.py └── __init__.py ├── .githooks └── pre-commit ├── setup.cfg ├── pyproject.toml ├── .github └── workflows │ ├── black.yml │ └── ci.yml ├── extrafiles ├── completion.fish └── completion.sh ├── LICENSE ├── README.md └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | __pycache__ 3 | *.egg-info 4 | site-local 5 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include extrafiles/completion.sh 2 | include extrafiles/completion.fish 3 | -------------------------------------------------------------------------------- /xbstrap/__main__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | import sys 3 | 4 | from . import main 5 | 6 | sys.exit(main()) 7 | -------------------------------------------------------------------------------- /xbstrap/mirror/__main__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | import sys 3 | 4 | from . import main 5 | 6 | sys.exit(main()) 7 | -------------------------------------------------------------------------------- /xbstrap/pipeline/__main__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | import sys 3 | 4 | from . 
import main 5 | 6 | sys.exit(main()) 7 | -------------------------------------------------------------------------------- /.githooks/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | black --check --diff xbstrap/ setup.py 4 | flake8 xbstrap/ setup.py 5 | isort xbstrap/ setup.py -c --diff 6 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | 4 | [flake8] 5 | ignore = E203,W503,C,E731 6 | max-line-length = 99 7 | select = E,F,W,N,I,E504 8 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # unfortunately, black chooses not to support setup.cfg 2 | [tool.black] 3 | line-length = 99 4 | target-version = ["py37"] 5 | 6 | [tool.isort] 7 | py_version = 37 8 | profile = "black" 9 | -------------------------------------------------------------------------------- /xbstrap/cli_utils.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | 3 | import re 4 | 5 | 6 | def open_file_from_cli(spec, *args, **kwargs): 7 | m = re.match(r"fd:(\d+)", spec) 8 | if m is not None: 9 | return open(int(m.group(1)), *args, **kwargs) 10 | m = re.match(r"path:(.+)", spec) 11 | if m is not None: 12 | return open(m.group(1), *args, **kwargs) 13 | raise ValueError("Illegal file specification on CLI") 14 | -------------------------------------------------------------------------------- /.github/workflows/black.yml: -------------------------------------------------------------------------------- 1 | name: Lint and check formatting 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | lint: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 
| - uses: actions/setup-python@v2 11 | with: 12 | python-version: '3.11' 13 | - name: Install the test requirements 14 | run: "pip install '.[test]'" 15 | - uses: psf/black@stable 16 | - uses: suo/flake8-github-action@releases/v1 17 | with: 18 | checkName: lint # needs to be the same as job name 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | -------------------------------------------------------------------------------- /xbstrap/exceptions.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | 3 | # This module exists to solve circular dependencies between xbstrap.vcs_util 4 | # and xbstrap.base; however, moving all exceptions here casues a new circular 5 | # dependency: ExecutionFailureError needs Action.strings, defined in 6 | # xbstrap.base, but xbstrap.base needs xbstrap.exceptions (this module). 7 | # For this reason, further extraction has been halted, and the minimum (plus 8 | # some more) was done to break the circular dependency. 
9 | # TODO(arsen): further disentangle exceptions from xbstrap.base 10 | 11 | 12 | class GenericError(Exception): 13 | pass 14 | 15 | 16 | class RollingIdUnavailableError(Exception): 17 | def __init__(self, name): 18 | super().__init__("No rolling_id specified for source {}".format(name)) 19 | -------------------------------------------------------------------------------- /extrafiles/completion.fish: -------------------------------------------------------------------------------- 1 | # Disable file completions for all subcommands 2 | complete -c xbstrap -f 3 | 4 | # Packages 5 | set -l package_commands configure build install pull-pack download archive 6 | complete -c xbstrap -n "__fish_seen_subcommand_from $package_commands" \ 7 | -f -a "(xbstrap list-pkgs)" 8 | 9 | # Tools 10 | set -l tool_commands configure-tool compile-tool install-tool download-tool-archive runtool 11 | complete -c xbstrap -n "__fish_seen_subcommand_from $tool_commands" \ 12 | -f -a "(xbstrap list-tools)" 13 | 14 | # Source management 15 | complete -c xbstrap -n "__fish_seen_subcommand_from fetch checkout patch regenerate" \ 16 | -f -a "(xbstrap list-tools)" -a "(xbstrap list-pkgs)" 17 | 18 | # Init 19 | complete -c xbstrap -n "__fish_seen_subcommand_from init" \ 20 | -a "(__fish_complete_directories)" 21 | 22 | # Misc options 23 | complete -c xbstrap -s h -l help -d "Print a short help text and exit" 24 | complete -c xbstrap -s v -d "Enable verbose output" 25 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | SPDX-License-Identifier: MIT 2 | 3 | Copyright 2018 Alexander van der Grinten 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell copies 9 | of the Software, and to permit persons to whom the Software is furnished to do 10 | so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | name: Build source package 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout 11 | uses: actions/checkout@v4 12 | - name: Building using setuptools 13 | run: | 14 | ./setup.py sdist 15 | - name: Upload artifact 16 | uses: actions/upload-artifact@v4 17 | with: 18 | name: packages 19 | path: dist/xbstrap-*.tar.gz 20 | 21 | deploy: 22 | name: Publish release 23 | runs-on: ubuntu-latest 24 | if: "startsWith(github.ref, 'refs/tags/v')" 25 | needs: build 26 | steps: 27 | - name: Fetch artifact 28 | uses: actions/download-artifact@v4 29 | with: 30 | name: packages 31 | path: artifact 32 | - name: Prepare dist/ directory 33 | run: | 34 | mkdir dist/ 35 | # Get exactly the version that we want to publish. 
36 | version="$(grep -Po '(?<=^refs/tags/v).+$' <<< "$ref")" 37 | mv "artifact/xbstrap-$version.tar.gz" dist/ 38 | env: 39 | ref: ${{ github.ref }} 40 | - name: Publish to PyPI 41 | uses: pypa/gh-action-pypi-publish@release/v1 42 | with: 43 | user: __token__ 44 | password: ${{ secrets.PYPI_API_TOKEN }} 45 | -------------------------------------------------------------------------------- /xbstrap/mirror/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # SPDX-License-Identifier: MIT 3 | 4 | import argparse 5 | 6 | import xbstrap.base 7 | 8 | main_parser = argparse.ArgumentParser() 9 | main_parser.add_argument( 10 | "-S", type=str, dest="source_dir", help="source dir (in place of bootstrap.link)" 11 | ) 12 | main_parser.add_argument( 13 | "-C", type=str, dest="build_dir", help="build dir (in place of cwd)", default="" 14 | ) 15 | main_subcmds = main_parser.add_subparsers(metavar="") 16 | main_subcmds.required = True 17 | 18 | 19 | def do_update(args): 20 | cfg = xbstrap.base.Config(args.build_dir, changed_source_root=args.source_dir) 21 | plan = xbstrap.base.Plan(cfg) 22 | 23 | if args.dry_run: 24 | plan.dry_run = True 25 | if args.paranoid: 26 | plan.paranoid = True 27 | if args.keep_going: 28 | plan.keep_going = True 29 | 30 | # We always want to update mirrors. 
31 | if not args.no_check: 32 | plan.check = True 33 | if not args.no_update: 34 | plan.update = True 35 | 36 | for src in cfg.all_sources(): 37 | plan.wanted.add((xbstrap.base.Action.MIRROR_SRC, src)) 38 | 39 | plan.run_plan() 40 | 41 | 42 | update_parser = main_subcmds.add_parser("update") 43 | update_parser.set_defaults(cmd=do_update) 44 | update_parser.add_argument( 45 | "-n", "--dry-run", action="store_true", help="compute a plan but do not execute it" 46 | ) 47 | update_parser.add_argument( 48 | "-C", 49 | "--no-check", 50 | action="store_true", 51 | help="do not skip packages that are already built/installed/etc.", 52 | ) 53 | update_parser.add_argument( 54 | "-U", "--no-update", action="store_true", help="do not check for package updates" 55 | ) 56 | update_parser.add_argument( 57 | "--paranoid", 58 | action="store_true", 59 | help="also consider unlikely updates (e.g., changes of git tags)", 60 | ) 61 | update_parser.add_argument( 62 | "--keep-going", 63 | action="store_true", 64 | help="continue running even if some build steps fail", 65 | ) 66 | 67 | 68 | def main(): 69 | main_args = main_parser.parse_args() 70 | main_args.cmd(main_args) 71 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # xbstrap: Build system for OS distributions 2 | 3 | xbstrap is a build system designed to build "distributions" consisting of multiple (usually many) packages. 4 | It does not replace neither `make` and `ninja` nor `autoconf`, `automake`, `meson` or `cmake` and similar utilities. 5 | Instead, xbstrap is intended to invoke those build systems in the correct order, while respecting inter-package dependencies. 6 | 7 | **Official Discord server:** https://discord.gg/7WB6Ur3 8 | 9 | ## Installation 10 | 11 | xbstrap is available from PyPI. 
To install it using pip, use: 12 | ``` 13 | pip3 install xbstrap 14 | ``` 15 | 16 | ## Basic usage 17 | 18 | See the [boostrap-managarm repository](https://github.com/managarm/bootstrap-managarm) for an example `bootstrap.yml` file. 19 | 20 | Installing all tools (that run on the build system) is done using: 21 | ``` 22 | xbstrap install-tool --all 23 | ``` 24 | Installing all packages to a sysroot (of the host system): 25 | ``` 26 | xbstrap install --all 27 | ``` 28 | It is often useful to rebuild specific packages. Rebuilding package `foobar` can be done by: 29 | ``` 30 | xbstrap install --rebuild foobar 31 | ``` 32 | If the `configure` script shall be run again, use instead: 33 | ``` 34 | xbstrap install --reconfigure foobar 35 | ``` 36 | 37 | ## Local development 38 | 39 | When developing `xbstrap`, you must install your local copy instead of the one provided by the `pip` repositories. To do this, run: 40 | ``` 41 | pip install --user -e . 42 | ``` 43 | 44 | ### Development with Docker 45 | 46 | For containerized builds, most `xbstrap` commands will run in two stages: once on the host, then again on the container to 47 | actually execute the build steps. Therefore, installing `xbstrap` locally (as shown above) is not sufficient in this case. 48 | 49 | In addition, you must change your `Dockerfile` so that instead of grabbing `xbstrap` from the `pip` repositories, it installs from the host: 50 | 1. Add the following lines (replace `/local-xbstrap` at your convenience): 51 | ```docker 52 | ADD xbstrap /local-xbstrap 53 | RUN pip3 install -e /local-xbstrap 54 | ``` 55 | 1. Copy or symlink your local `xbstrap` into the same folder that contains the `Dockerfile`, so that it can be accessed by the previous step. 56 | 1. Rebuild the docker container as usual. 57 | 58 | ### Enabling the pre-commit hook for linting (optional) 59 | 60 | To avoid running into the CI complaining about formatting, linting can be done in a pre-commit hook. 
To enable this, run: 61 | ``` 62 | git config core.hooksPath .githooks 63 | ``` 64 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import os 4 | import shutil 5 | 6 | from setuptools import find_packages, setup 7 | from setuptools.command.develop import develop 8 | from setuptools.command.install import install 9 | 10 | with open("README.md", "r") as f: 11 | readme = f.read() 12 | 13 | 14 | class CompletionDevelop(develop): 15 | def run(self): 16 | if os.access("/etc/bash_completion.d", os.W_OK): 17 | shutil.copyfile("extrafiles/completion.sh", "/etc/bash_completion.d/xbstrap") 18 | else: 19 | print( 20 | "Insufficient permissions to install the bash completion script to" 21 | " /etc/bash_completion.d" 22 | ) 23 | if os.access("/usr/share/fish/vendor_completions.d/", os.W_OK): 24 | shutil.copyfile( 25 | "extrafiles/completion.fish", "/usr/share/fish/vendor_completions.d/xbstrap.fish" 26 | ) 27 | else: 28 | print( 29 | "Insufficient permissions to install the fish completion script to" 30 | " /usr/share/fish/vendor_completions.d" 31 | ) 32 | develop.run(self) 33 | 34 | 35 | class CompletionInstall(install): 36 | def run(self): 37 | if os.access("/etc/bash_completion.d", os.W_OK): 38 | shutil.copyfile("extrafiles/completion.sh", "/etc/bash_completion.d/xbstrap") 39 | else: 40 | print( 41 | "Insufficient permissions to install the bash completion script to" 42 | " /etc/bash_completion.d" 43 | ) 44 | if os.access("/usr/share/fish/vendor_completions.d/", os.W_OK): 45 | shutil.copyfile( 46 | "extrafiles/completion.fish", "/usr/share/fish/vendor_completions.d/xbstrap.fish" 47 | ) 48 | else: 49 | print( 50 | "Insufficient permissions to install the fish completion script to" 51 | " /usr/share/fish/vendor_completions.d" 52 | ) 53 | install.run(self) 54 | 55 | 56 | setup( 57 | name="xbstrap", 58 | version="0.34.2", 59 | 
packages=find_packages(), 60 | package_data={"xbstrap": ["schema.yml"]}, 61 | install_requires=[ 62 | "colorama", 63 | "jsonschema", 64 | "pyyaml", 65 | "zstandard", # For xbps support. 66 | ], 67 | extras_require={ 68 | "test": [ 69 | "black", 70 | "flake8", 71 | "pep8-naming", 72 | "flake8-isort", 73 | ] 74 | }, 75 | cmdclass={ 76 | "develop": CompletionDevelop, 77 | "install": CompletionInstall, 78 | }, 79 | entry_points={ 80 | "console_scripts": [ 81 | "xbstrap = xbstrap:main", 82 | "xbstrap-pipeline = xbstrap.pipeline:main", 83 | "xbstrap-mirror = xbstrap.mirror:main", 84 | ] 85 | }, 86 | # Package metadata. 87 | author="Alexander van der Grinten", 88 | author_email="alexander.vandergrinten@gmail.com", 89 | license="MIT", 90 | url="https://github.com/managarm/xbstrap", 91 | long_description=readme, 92 | long_description_content_type="text/markdown", 93 | ) 94 | -------------------------------------------------------------------------------- /extrafiles/completion.sh: -------------------------------------------------------------------------------- 1 | #/usr/bin/env bash 2 | _xbstrap_completions() { 3 | if [ "${#COMP_WORDS[@]}" == "2" ]; then 4 | COMPREPLY+=($(compgen -W "init runtool fetch checkout patch regenerate configure-tool compile-tool install-tool configure build archive download install list-tools list-pkgs" -- "${COMP_WORDS[1]}")) 5 | return 6 | elif [ "${#COMP_WORDS[@]}" -ge "3" ]; then 7 | # check whether --all has been specified 8 | local all_specified=false 9 | 10 | for i in "${COMP_WORDS[@]:2}"; do 11 | if [ "$i" == "--all" ]; then 12 | all_specified=true 13 | fi 14 | done 15 | 16 | case "${COMP_WORDS[1]}" in 17 | init) 18 | COMPREPLY+=($(compgen -d -S / -- "${COMP_WORDS[${COMP_CWORD}]}")) 19 | COMPREPLY+=($(compgen -W "./ ../" -- "${COMP_WORDS[${COMP_CWORD}]}")) 20 | compopt -o nospace 21 | ;; 22 | configure-tool) 23 | if [[ ${COMP_WORDS[${COMP_CWORD}]:0:1} =~ "-" ]]; then 24 | COMPREPLY+=($(compgen -W "--all" -- "${COMP_WORDS[${COMP_CWORD}]}")) 
25 | elif [ $all_specified = false ]; then 26 | local tools="$(xbstrap list-tools)" 27 | COMPREPLY+=($(compgen -W "${tools}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 28 | fi 29 | ;; 30 | compile-tool) 31 | if [[ ${COMP_WORDS[${COMP_CWORD}]:0:1} =~ "-" ]]; then 32 | COMPREPLY+=($(compgen -W "--all --reconfigure" -- "${COMP_WORDS[${COMP_CWORD}]}")) 33 | elif [ $all_specified = false ]; then 34 | local tools="$(xbstrap list-tools)" 35 | COMPREPLY+=($(compgen -W "${tools}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 36 | fi 37 | 38 | ;; 39 | install-tool) 40 | if [[ ${COMP_WORDS[${COMP_CWORD}]:0:1} =~ "-" ]]; then 41 | COMPREPLY+=($(compgen -W "--all --reconfigure --recompile" -- "${COMP_WORDS[${COMP_CWORD}]}")) 42 | elif [ $all_specified = false ]; then 43 | local tools="$(xbstrap list-tools)" 44 | COMPREPLY+=($(compgen -W "${tools}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 45 | fi 46 | ;; 47 | fetch|checkout|patch|regenerate) 48 | local tools="$(xbstrap list-tools)" 49 | local pkgs="$(xbstrap list-pkgs)" 50 | COMPREPLY+=($(compgen -W "${pkgs} ${tools}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 51 | ;; 52 | configure) 53 | if [[ ${COMP_WORDS[${COMP_CWORD}]:0:1} =~ "-" ]]; then 54 | COMPREPLY+=($(compgen -W "--update --overwrite --all" -- "${COMP_WORDS[${COMP_CWORD}]}")) 55 | elif [ $all_specified = false ]; then 56 | local pkgs="$(xbstrap list-pkgs)" 57 | COMPREPLY+=($(compgen -W "${pkgs}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 58 | fi 59 | ;; 60 | build) 61 | if [[ ${COMP_WORDS[${COMP_CWORD}]:0:1} =~ "-" ]]; then 62 | COMPREPLY+=($(compgen -W "--update --overwrite --all --reconfigure" -- "${COMP_WORDS[${COMP_CWORD}]}")) 63 | elif [ $all_specified = false ]; then 64 | local pkgs="$(xbstrap list-pkgs)" 65 | COMPREPLY+=($(compgen -W "${pkgs}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 66 | fi 67 | ;; 68 | install) 69 | if [[ ${COMP_WORDS[${COMP_CWORD}]:0:1} =~ "-" ]]; then 70 | COMPREPLY+=($(compgen -W "--all --reconfigure --rebuild --update --overwrite" -- "${COMP_WORDS[${COMP_CWORD}]}")) 71 | elif [ 
$all_specified = false ]; then 72 | local pkgs="$(xbstrap list-pkgs)" 73 | COMPREPLY+=($(compgen -W "${pkgs}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 74 | fi 75 | ;; 76 | runtool) 77 | if [[ ${COMP_WORDS[${COMP_CWORD}]:0:1} =~ "-" ]]; then 78 | COMPREPLY+=($(compgen -W "--build" -- "${COMP_WORDS[${COMP_CWORD}]}")) 79 | else 80 | local tools="$(xbstrap list-tools)" 81 | COMPREPLY+=($(compgen -W "${tools}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 82 | fi 83 | ;; 84 | download|archive) 85 | if [[ ${COMP_WORDS[${COMP_CWORD}]:0:1} =~ "-" ]]; then 86 | COMPREPLY+=($(compgen -W "--all" -- "${COMP_WORDS[${COMP_CWORD}]}")) 87 | elif [ $all_specified = false ]; then 88 | local pkgs="$(xbstrap list-pkgs)" 89 | COMPREPLY+=($(compgen -W "${pkgs}" -- "${COMP_WORDS[${COMP_CWORD}]}")) 90 | fi 91 | ;; 92 | esac 93 | return 94 | fi 95 | } 96 | 97 | complete -F _xbstrap_completions xbstrap -------------------------------------------------------------------------------- /xbstrap/subpkgs.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import shutil 4 | 5 | import xbstrap.util as _util 6 | 7 | 8 | class Mapping: 9 | __slots__ = ("dirent", "children", "claims") 10 | 11 | @staticmethod 12 | def discover(path): 13 | mapping = Mapping(None) 14 | Mapping._discover_children(path, "", mapping) 15 | return mapping 16 | 17 | @staticmethod 18 | def _discover_children(path, subdir, mapping): 19 | fullpath = os.path.join(path, subdir) 20 | for dirent in os.scandir(fullpath): 21 | child = Mapping(dirent) 22 | mapping.children[dirent.name] = child 23 | if child.directory: 24 | Mapping._discover_children(path, os.path.join(subdir, dirent.name), child) 25 | 26 | def __init__(self, dirent): 27 | if not dirent: 28 | directory = True 29 | else: 30 | directory = dirent.is_dir(follow_symlinks=False) 31 | self.dirent = dirent 32 | self.children = {} if directory else None 33 | self.claims = set() 34 | 35 | @property 36 | def directory(self): 37 | 
return self.children is not None 38 | 39 | def __repr__(self): 40 | def visit(mapping, path): 41 | line = f"{path}: {mapping.claims}" 42 | if mapping.directory: 43 | return ", ".join( 44 | [line] 45 | + [ 46 | visit(child, os.path.join(path, name)) 47 | for name, child in mapping.children.items() 48 | ] 49 | ) 50 | else: 51 | return line 52 | 53 | return visit(self, "/") 54 | 55 | 56 | def determine_mapping(build): 57 | root = Mapping.discover(build.staging_dir) 58 | 59 | subpkg_to_patterns = {} 60 | for subpkg_name in build.all_subpkgs(): 61 | subpkg = build.cfg.get_target_pkg(subpkg_name) 62 | if subpkg.is_main_pkg: 63 | continue 64 | subpkg_to_patterns[subpkg_name] = [ 65 | re.compile(_util.translate_glob(incl, recursive=True, include_hidden=True)) 66 | for incl in subpkg.subpkg_include 67 | ] 68 | 69 | def visit(mapping, path): 70 | if mapping.directory: 71 | for name, child in mapping.children.items(): 72 | child_path = os.path.join(path, name) 73 | visit(child, child_path) 74 | mapping.claims.update(child.claims) 75 | else: 76 | for subpkg_name, patterns in subpkg_to_patterns.items(): 77 | if any(pattern.match(path) for pattern in patterns): 78 | mapping.claims.add(subpkg_name) 79 | if len(mapping.claims) > 1: 80 | raise RuntimeError( 81 | f"File {path} is claimed by multiple subpackages: {mapping.claims}" 82 | ) 83 | if not mapping.claims: 84 | mapping.claims.add(build.name) 85 | 86 | visit(root, "/") 87 | return root 88 | 89 | 90 | def install_mapping(pkg, root, outdir): 91 | def visit(mapping, src_path, dest_path): 92 | if pkg.name not in mapping.claims: 93 | return 94 | 95 | if mapping.directory: 96 | # The root directory was already created by the caller. 
97 | if mapping != root: 98 | os.mkdir(dest_path) 99 | shutil.copystat(src_path, dest_path) 100 | 101 | for name, child in mapping.children.items(): 102 | visit( 103 | child, 104 | os.path.join(src_path, name), 105 | os.path.join(dest_path, name), 106 | ) 107 | elif mapping.dirent.is_symlink(): 108 | # Do not preserve attributes 109 | os.symlink(os.readlink(src_path), dest_path) 110 | else: 111 | shutil.copy2(src_path, dest_path) 112 | 113 | visit(root, pkg.build.staging_dir, outdir) 114 | -------------------------------------------------------------------------------- /xbstrap/xbps_utils.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import plistlib 3 | import shlex 4 | import tarfile 5 | 6 | import zstandard 7 | 8 | 9 | def read_repodata(path): 10 | with open(path, "rb") as zidx: 11 | dctx = zstandard.ZstdDecompressor() 12 | with dctx.stream_reader(zidx) as reader: 13 | with tarfile.open(fileobj=reader, mode="r|") as tar: 14 | for ent in tar: 15 | if ent.name != "index.plist": 16 | continue 17 | with tar.extractfile(ent) as idxpl: 18 | pkg_idx = plistlib.load(idxpl, fmt=plistlib.FMT_XML) 19 | return pkg_idx 20 | 21 | 22 | class XbpsVersion: 23 | def __init__(self, comps, revision=1): 24 | self.comps = comps 25 | self.revision = revision 26 | 27 | 28 | # This matches the version parsing algorithm in xbps. 29 | def parse_components(v): 30 | modifiers = { 31 | "alpha": -3, 32 | "beta": -2, 33 | "pre": -1, 34 | "rc": -1, 35 | "pl": 0, 36 | ".": 0, 37 | } 38 | alphas = "abcdefghijklmnopqrstuvwxyz" 39 | 40 | n = 0 41 | out = [] 42 | 43 | def consume_next_token(): 44 | nonlocal n 45 | 46 | # Integers correspond to a single component. 47 | if v[n].isdigit(): 48 | d = 0 49 | while n < len(v) and v[n].isdigit(): 50 | d = (d * 10) + int(v[n]) 51 | n += 1 52 | out.append(d) 53 | return 54 | 55 | # Modifiers correspond to a single component with a fixed value. 
56 | for modifier, prio in modifiers.items(): 57 | if v[n:].startswith(modifier): 58 | out.append(prio) 59 | n += len(modifier) 60 | return 61 | 62 | # Letters correspond to two components: (0, idx + 1). 63 | # For example: versions "0.2" and "0b" are identical. 64 | idx = alphas.find(v[n].lower()) 65 | if idx >= 0: 66 | out.append(0) 67 | out.append(idx + 1) 68 | 69 | # Consume the character. It does _not_ contribute to the version. 70 | n += 1 71 | 72 | while n < len(v): 73 | consume_next_token() 74 | 75 | return out 76 | 77 | 78 | def parse_version(v, *, strip_pkgname=True): 79 | if strip_pkgname: 80 | v = v.split("-")[-1] 81 | 82 | # Split into components + revision. 83 | # Technically, xbps's parsing code allows the revision to appear in the middle of a version. 84 | # We only accept revision at the end of a version string. 85 | parts = v.split("_") 86 | comps = parse_components(parts[0]) 87 | if len(parts) > 1: 88 | if len(parts) != 2: 89 | raise RuntimeError("Expected at most one revision in xbps version") 90 | revision = int(parts[1]) 91 | else: 92 | revision = 1 93 | 94 | return XbpsVersion(comps, revision) 95 | 96 | 97 | def compare_version(v1, v2): 98 | for c1, c2 in itertools.zip_longest(v1.comps, v2.comps, fillvalue=0): 99 | if c1 != c2: 100 | return c1 - c2 101 | if v1.revision != v2.revision: 102 | return v1.revision - v2.revision 103 | return 0 104 | 105 | 106 | # XBPS INSTALL/REMOVE scripts are called with arguments: 107 | # "no" 108 | 109 | 110 | def compose_xbps_install(cfg, pkg): 111 | yml = pkg._this_yml.get("scripts", dict()).get("post_install") 112 | if not yml: 113 | return None 114 | 115 | step_sh = [] 116 | for step_yml in yml: 117 | args_yml = step_yml["args"] 118 | if isinstance(args_yml, str): 119 | step_sh.append("(eval " + shlex.quote(args_yml) + ")") 120 | else: 121 | step_sh.append( 122 | "(exec `which " 123 | + shlex.quote(args_yml[0]) 124 | + "`" 125 | + "".join(" " + shlex.quote(q) for q in args_yml[1:]) 126 | + ")" 127 | ) 128 | 
129 | return ( 130 | '#!/bin/sh\ncase "$1" in\npost)\n' 131 | + "".join(f" {s}\n" for s in step_sh) 132 | + " ;;\nesac\n" 133 | ) 134 | -------------------------------------------------------------------------------- /xbstrap/util.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | 3 | import contextlib 4 | import errno 5 | import fcntl 6 | import os 7 | import os.path as path 8 | import re 9 | import sys 10 | import urllib.parse 11 | import urllib.request 12 | 13 | import colorama 14 | 15 | 16 | def eprint(*args, **kwargs): 17 | return print(*args, **kwargs, file=sys.stderr) 18 | 19 | 20 | def log_info(msg): 21 | eprint("{}xbstrap{}: {}".format(colorama.Style.BRIGHT, colorama.Style.RESET_ALL, msg)) 22 | 23 | 24 | def log_warn(msg): 25 | eprint( 26 | "{}xbstrap{}: {}{}{}".format( 27 | colorama.Style.BRIGHT, 28 | colorama.Style.NORMAL, 29 | colorama.Fore.YELLOW, 30 | msg, 31 | colorama.Style.RESET_ALL, 32 | ) 33 | ) 34 | 35 | 36 | def log_err(msg): 37 | eprint( 38 | "{}xbstrap{}: {}{}{}".format( 39 | colorama.Style.BRIGHT, 40 | colorama.Style.NORMAL, 41 | colorama.Fore.RED, 42 | msg, 43 | colorama.Style.RESET_ALL, 44 | ), 45 | ) 46 | 47 | 48 | def find_home(): 49 | if "XBSTRAP_HOME" in os.environ: 50 | return os.environ["XBSTRAP_HOME"] 51 | return os.path.expanduser("~/.xbstrap") 52 | 53 | 54 | def try_mkdir(path, recursive=False): 55 | try: 56 | if not recursive: 57 | os.mkdir(path) 58 | else: 59 | os.makedirs(path) 60 | except OSError as e: 61 | if e.errno != errno.EEXIST: 62 | raise 63 | 64 | 65 | def build_environ_paths(environ, varname, prepend): 66 | if not prepend: 67 | return 68 | joined = ":".join(prepend) 69 | if varname in environ and environ[varname]: 70 | environ[varname] = joined + ":" + environ[varname] 71 | else: 72 | environ[varname] = joined 73 | 74 | 75 | def interactive_download(url, path): 76 | istty = os.isatty(1) # This is stdout. 
77 | if istty: 78 | eprint("...", end="") # This will become the status line. 79 | 80 | def show_progress(num_blocks, block_size, file_size): 81 | progress = min(num_blocks * block_size, file_size) 82 | rewind = "" 83 | newline = "" 84 | if istty: 85 | rewind = "\r" 86 | else: 87 | 88 | def discrete(n): 89 | return int(10 * n * block_size / file_size) 90 | 91 | if num_blocks > 0 and discrete(num_blocks - 1) == discrete(num_blocks): 92 | return 93 | newline = "\n" 94 | frac = progress / file_size 95 | eprint( 96 | "{}[{}{}]\x1b[K{:8.0f} KiB / {:8.0f} KiB, {:7.2f}%".format( 97 | rewind, 98 | "#" * int(20 * frac), 99 | " " * (20 - int(20 * frac)), 100 | progress / 1024, 101 | file_size / 1024, 102 | progress / file_size * 100, 103 | ), 104 | end=newline, 105 | ) 106 | 107 | temp_path = path + ".download" 108 | urllib.request.urlretrieve(url, temp_path, show_progress) 109 | os.rename(temp_path, path) 110 | if istty: 111 | eprint() 112 | 113 | 114 | @contextlib.contextmanager 115 | def lock_directory(directory, mode=fcntl.LOCK_EX): 116 | try_mkdir(directory) 117 | fname = path.join(directory, ".xbstrap_lock") 118 | with open(fname, "w") as f: 119 | fcntl.flock(f.fileno(), mode) 120 | yield 121 | 122 | 123 | # This is the glob.translate() function from Python 3.13+. 124 | # This is the version from Python 3.14. 125 | # Copyright (c) 2001-2025 Python Software Foundation 126 | # TODO: Get rid of this copy when we can rely on Python 3.13+ 127 | def translate_glob(pat, *, recursive=False, include_hidden=False, seps=None): 128 | if not seps: 129 | if os.path.altsep: 130 | seps = (os.path.sep, os.path.altsep) 131 | else: 132 | seps = os.path.sep 133 | escaped_seps = "".join(map(re.escape, seps)) 134 | any_sep = f"[{escaped_seps}]" if len(seps) > 1 else escaped_seps 135 | not_sep = f"[^{escaped_seps}]" 136 | if include_hidden: 137 | one_last_segment = f"{not_sep}+" 138 | one_segment = f"{one_last_segment}{any_sep}" 139 | any_segments = f"(?:.+{any_sep})?" 
140 | any_last_segments = ".*" 141 | else: 142 | one_last_segment = f"[^{escaped_seps}.]{not_sep}*" 143 | one_segment = f"{one_last_segment}{any_sep}" 144 | any_segments = f"(?:{one_segment})*" 145 | any_last_segments = f"{any_segments}(?:{one_last_segment})?" 146 | 147 | results = [] 148 | parts = re.split(any_sep, pat) 149 | last_part_idx = len(parts) - 1 150 | for idx, part in enumerate(parts): 151 | if part == "*": 152 | results.append(one_segment if idx < last_part_idx else one_last_segment) 153 | elif recursive and part == "**": 154 | if idx < last_part_idx: 155 | if parts[idx + 1] != "**": 156 | results.append(any_segments) 157 | else: 158 | results.append(any_last_segments) 159 | else: 160 | if part: 161 | if not include_hidden and part[0] in "*?": 162 | results.append(r"(?!\.)") 163 | results.extend(fnmatch_underscore_translate(part, f"{not_sep}*", not_sep)[0]) 164 | if idx < last_part_idx: 165 | results.append(any_sep) 166 | res = "".join(results) 167 | return rf"(?s:{res})\Z" 168 | 169 | 170 | _re_setops_sub = re.compile(r"([&~|])").sub 171 | 172 | 173 | # This is fnmatch._translate() from Python 3.14. 
174 | # Copyright (c) 2001-2025 Python Software Foundation 175 | def fnmatch_underscore_translate(pat, star, question_mark): 176 | res = [] 177 | add = res.append 178 | star_indices = [] 179 | 180 | i, n = 0, len(pat) 181 | while i < n: 182 | c = pat[i] 183 | i = i + 1 184 | if c == "*": 185 | # store the position of the wildcard 186 | star_indices.append(len(res)) 187 | add(star) 188 | # compress consecutive `*` into one 189 | while i < n and pat[i] == "*": 190 | i += 1 191 | elif c == "?": 192 | add(question_mark) 193 | elif c == "[": 194 | j = i 195 | if j < n and pat[j] == "!": 196 | j = j + 1 197 | if j < n and pat[j] == "]": 198 | j = j + 1 199 | while j < n and pat[j] != "]": 200 | j = j + 1 201 | if j >= n: 202 | add("\\[") 203 | else: 204 | stuff = pat[i:j] 205 | if "-" not in stuff: 206 | stuff = stuff.replace("\\", r"\\") 207 | else: 208 | chunks = [] 209 | k = i + 2 if pat[i] == "!" else i + 1 210 | while True: 211 | k = pat.find("-", k, j) 212 | if k < 0: 213 | break 214 | chunks.append(pat[i:k]) 215 | i = k + 1 216 | k = k + 3 217 | chunk = pat[i:j] 218 | if chunk: 219 | chunks.append(chunk) 220 | else: 221 | chunks[-1] += "-" 222 | # Remove empty ranges -- invalid in RE. 223 | for k in range(len(chunks) - 1, 0, -1): 224 | if chunks[k - 1][-1] > chunks[k][0]: 225 | chunks[k - 1] = chunks[k - 1][:-1] + chunks[k][1:] 226 | del chunks[k] 227 | # Escape backslashes and hyphens for set difference (--). 228 | # Hyphens that create ranges shouldn't be escaped. 229 | stuff = "-".join(s.replace("\\", r"\\").replace("-", r"\-") for s in chunks) 230 | i = j + 1 231 | if not stuff: 232 | # Empty range: never match. 233 | add("(?!)") 234 | elif stuff == "!": 235 | # Negated empty range: match any character. 236 | add(".") 237 | else: 238 | # Escape set operations (&&, ~~ and ||). 
239 | stuff = _re_setops_sub(r"\\\1", stuff) 240 | if stuff[0] == "!": 241 | stuff = "^" + stuff[1:] 242 | elif stuff[0] in ("^", "["): 243 | stuff = "\\" + stuff 244 | add(f"[{stuff}]") 245 | else: 246 | add(re.escape(c)) 247 | assert i == n 248 | return res, star_indices 249 | -------------------------------------------------------------------------------- /xbstrap/schema.yml: -------------------------------------------------------------------------------- 1 | definitions: 2 | 'source_deps': 3 | type: array 4 | items: 5 | oneOf: 6 | - type: string 7 | - type: object 8 | additionalProperties: false 9 | properties: 10 | 'name': 11 | type: string 12 | 'recursive': 13 | type: boolean 14 | 15 | 'tool_deps': 16 | type: array 17 | items: 18 | oneOf: 19 | - type: string 20 | - type: object 21 | additionalProperties: false 22 | properties: 23 | 'tool': 24 | type: string 25 | 'recursive': 26 | type: boolean 27 | 'stage_dependencies': 28 | type: array 29 | items: 30 | type: string 31 | 'expose': 32 | type: boolean 33 | - type: object 34 | additionalProperties: false 35 | properties: 36 | 'virtual': 37 | type: string 38 | 'program_name': 39 | type: string 40 | 'triple': 41 | type: string 42 | 43 | 'pkg_deps': 44 | type: array 45 | items: 46 | type: string 47 | 48 | 'task_deps': 49 | type: array 50 | items: 51 | oneOf: 52 | - type: object 53 | additionalProperties: false 54 | properties: 55 | 'task': 56 | type: string 57 | 'order_only': 58 | type: boolean 59 | - type: string 60 | 61 | 'nested_source': 62 | type: object 63 | additionalProperties: false 64 | properties: 65 | 'name': 66 | type: string 67 | 'version': 68 | type: string 69 | 'subdir': 70 | type: string 71 | 'rolling_version': 72 | type: boolean 73 | # URL sources. 74 | 'url': 75 | type: string 76 | 'filename': { type: string } 77 | 'format': 78 | type: string 79 | 'extract_path': 80 | type: string 81 | 'patch-path-strip': 82 | type: integer 83 | 'checksum': 84 | type: string 85 | # VCS sources. 
86 | 'git': 87 | oneOf: 88 | - type: array 89 | items: 90 | type: string 91 | minItems: 1 92 | - type: string 93 | 'hg': 94 | type: string 95 | 'svn': 96 | type: string 97 | 'tag': 98 | type: string 99 | 'branch': 100 | type: string 101 | 'commit': 102 | type: string 103 | 'rev': 104 | type: string 105 | 'disable_shallow_fetch': 106 | type: boolean 107 | 'submodules': 108 | type: boolean 109 | 'patch_keep_crlf': 110 | type: boolean 111 | 'regenerate': { $ref: '#/definitions/build_steps' } 112 | 'sources_required': { $ref: '#/definitions/source_deps' } 113 | 'tools_required': { $ref: '#/definitions/tool_deps' } 114 | 115 | 'nested_task': 116 | type: object 117 | additionalProperties: false 118 | required: ['name'] 119 | properties: 120 | 'name': 121 | type: string 122 | 'workdir': 123 | type: string 124 | 'environ': 125 | type: object 126 | additionalProperties: 127 | type: string 128 | 'args': { $ref: '#/definitions/args' } 129 | 'containerless': { type: boolean } 130 | 'quiet': { type: boolean } 131 | 'sources_required': { $ref: '#/definitions/source_deps' } 132 | 'tools_required': { $ref: '#/definitions/tool_deps' } 133 | 'pkgs_required': { $ref: '#/definitions/pkg_deps' } 134 | 'tasks_required': { $ref: '#/definitions/task_deps' } 135 | 136 | 'build_steps': 137 | type: array 138 | items: 139 | - type: object 140 | additionalProperties: false 141 | properties: 142 | 'args': { $ref: '#/definitions/args' } 143 | 'workdir': 144 | type: string 145 | 'environ': 146 | type: object 147 | additionalProperties: 148 | type: string 149 | 'containerless': { type: boolean } 150 | 'isolate_network': { type: boolean } 151 | 'quiet': 152 | type: boolean 153 | 'cargo_home': 154 | type: boolean 155 | 156 | 'args': 157 | oneOf: 158 | - type: array 159 | items: 160 | type: string 161 | - type: string 162 | 163 | type: object 164 | additionalProperties: false 165 | properties: 166 | 'general': 167 | type: object 168 | additionalProperties: false 169 | properties: 170 | 
'patch_author': 171 | type: string 172 | 'patch_email': 173 | type: string 174 | 'everything_by_default': 175 | type: boolean 176 | 'enable_network_isolation': 177 | type: boolean 178 | 'mandate_hashes_for_archives': 179 | type: boolean 180 | 'cargo': 181 | type: object 182 | additionalProperties: false 183 | properties: 184 | 'config_toml': 185 | type: string 186 | 'repositories': 187 | type: object 188 | additionalProperties: false 189 | properties: 190 | 'pkg_archives': 191 | type: string 192 | 'tool_archives': 193 | oneOf: 194 | - type: string 195 | - type: object 196 | additionalProperties: 197 | type: string 198 | 'xbps': 199 | oneOf: 200 | - type: string 201 | - type: object 202 | additionalProperties: 203 | type: string 204 | 'directories': 205 | type: object 206 | additionalProperties: false 207 | properties: 208 | 'pkg_builds': { type: string } 209 | 'tool_builds': { type: string } 210 | 'packages': { type: string } 211 | 'tools': { type: string } 212 | 'system_root': { type: string } 213 | 'imports': 214 | type: array 215 | items: 216 | type: object 217 | additionalProperties: false 218 | properties: 219 | 'file': 220 | type: string 221 | 'declare_options': 222 | type: array 223 | items: 224 | type: object 225 | additionalProperties: false 226 | required: ['name'] 227 | properties: 228 | 'name': 229 | type: string 230 | 'default': { } 231 | 'sources': 232 | type: array 233 | items: 234 | allOf: 235 | - type: object 236 | required: ['name'] 237 | - { $ref: '#/definitions/nested_source' } 238 | 'tools': 239 | type: array 240 | items: 241 | type: object 242 | additionalProperties: false 243 | required: ['name'] 244 | properties: 245 | 'name': 246 | type: string 247 | 'revision': 248 | type: integer 249 | 'labels': 250 | type: array 251 | items: 252 | type: string 253 | 'stability_level': 254 | type: string 255 | enum: ['stable', 'unstable', 'broken'] 256 | 'source': { $ref: '#/definitions/nested_source' } 257 | 'from_source': 258 | type: string 259 | 
'exports_aclocal': 260 | type: boolean 261 | 'exports_shared_libs': 262 | type: boolean 263 | 'containerless': 264 | type: boolean 265 | 'architecture': { type: string } 266 | 'configure': { $ref: '#/definitions/build_steps' } 267 | 'compile': { $ref: '#/definitions/build_steps' } 268 | 'install': { $ref: '#/definitions/build_steps' } 269 | 'stages': 270 | type: array 271 | items: 272 | type: object 273 | additionalProperties: false 274 | required: ['name'] 275 | properties: 276 | 'name': 277 | type: string 278 | 'compile': { $ref: '#/definitions/build_steps' } 279 | 'install': { $ref: '#/definitions/build_steps' } 280 | 'tools_required': { $ref: '#/definitions/tool_deps' } 281 | 'pkgs_required': { $ref: '#/definitions/pkg_deps' } 282 | 'sources_required': { $ref: '#/definitions/source_deps' } 283 | 'tools_required': { $ref: '#/definitions/tool_deps' } 284 | 'tasks': 285 | type: array 286 | items: { $ref: '#/definitions/nested_task' } 287 | 'packages': 288 | type: array 289 | items: 290 | type: object 291 | additionalProperties: false 292 | required: ['name'] 293 | properties: 294 | 'name': 295 | type: string 296 | 'metadata': 297 | type: object 298 | additionalProperties: false 299 | properties: 300 | 'summary': { type: string } 301 | 'description': { type: string } 302 | 'spdx': { type: string } 303 | 'website': { type: string } 304 | 'maintainer': 305 | type: string 306 | pattern: '^.+ <[^>]+>$' 307 | 'categories': 308 | type: array 309 | items: { type: string } 310 | 'replaces': 311 | type: array 312 | items: { type: string } 313 | 'revision': 314 | type: integer 315 | 'labels': 316 | type: array 317 | items: 318 | type: string 319 | 'default': 320 | type: boolean 321 | 'stability_level': 322 | type: string 323 | enum: ['stable', 'unstable', 'broken'] 324 | 'implict_package': 325 | type: boolean 326 | 'source': { $ref: '#/definitions/nested_source' } 327 | 'from_source': 328 | type: string 329 | 'architecture': { type: string } 330 | 'configure': { $ref: 
'#/definitions/build_steps' } 331 | 'build': { $ref: '#/definitions/build_steps' } 332 | 'install': { $ref: '#/definitions/build_steps' } 333 | 'sources_required': { $ref: '#/definitions/source_deps' } 334 | 'tools_required': { $ref: '#/definitions/tool_deps' } 335 | 'pkgs_required': { $ref: '#/definitions/pkg_deps' } 336 | 'tasks': 337 | type: array 338 | items: { $ref: '#/definitions/nested_task' } 339 | 'scripts': 340 | type: object 341 | additionalProperties: false 342 | properties: 343 | 'post_install': 344 | type: array 345 | items: 346 | type: object 347 | additionalProperties: false 348 | properties: 349 | 'args': 350 | oneOf: 351 | - type: string 352 | - type: array 353 | items: 354 | type: string 355 | 'subpackages': 356 | type: array 357 | items: 358 | type: object 359 | additionalProperties: false 360 | required: ['name'] 361 | properties: 362 | 'name': { type: string } 363 | 'include': 364 | type: array 365 | items: { type: string } 366 | 'tasks': 367 | type: array 368 | items: 369 | type: object 370 | additionalProperties: false 371 | required: ['name'] 372 | properties: 373 | 'name': 374 | type: string 375 | 'workdir': 376 | type: string 377 | 'environ': 378 | type: object 379 | additionalProperties: 380 | type: string 381 | 'artifact_files': 382 | type: array 383 | items: 384 | type: object 385 | additionalProperties: false 386 | required: ['name', 'path'] 387 | properties: 388 | 'name': { type: string } 389 | 'path': { type: string } 390 | 'architecture': { type: string } 391 | 'args': { $ref: '#/definitions/args' } 392 | 'containerless': { type: boolean } 393 | 'quiet': { type: boolean } 394 | 'sources_required': { $ref: '#/definitions/source_deps' } 395 | 'tools_required': { $ref: '#/definitions/tool_deps' } 396 | 'pkgs_required': { $ref: '#/definitions/pkg_deps' } 397 | 'tasks_required': { $ref: '#/definitions/task_deps' } 398 | -------------------------------------------------------------------------------- /xbstrap/vcs_utils.py: 
-------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | 3 | import hashlib 4 | import os 5 | import re 6 | import shutil 7 | import subprocess 8 | import urllib.request 9 | from enum import Enum 10 | 11 | import xbstrap.util as _util 12 | from xbstrap.exceptions import GenericError 13 | 14 | 15 | class RepoStatus(Enum): 16 | GOOD = 0 17 | MISSING = 1 18 | OUTDATED = 2 19 | 20 | 21 | CHECKSUM_BLOCK_SIZE = 1048576 22 | DEFAULT_CHECKSUM_TYPE = "blake2b" 23 | HASHLIB_MAP = { 24 | "sha256": hashlib.sha256, 25 | "sha512": hashlib.sha512, 26 | "blake2b": hashlib.blake2b, 27 | } 28 | 29 | assert DEFAULT_CHECKSUM_TYPE in HASHLIB_MAP 30 | 31 | 32 | def vcs_name(src): 33 | if "git" in src._this_yml: 34 | return "git" 35 | elif "hg" in src._this_yml: 36 | return "hg" 37 | elif "svn" in src._this_yml: 38 | return "svn" 39 | elif "url" in src._this_yml: 40 | return "url" 41 | else: 42 | return None 43 | 44 | 45 | def determine_git_version(git): 46 | output = subprocess.check_output([git, "version"], encoding="ascii") 47 | matches = re.match(r"^git version (\d+).(\d+).(\d+)", output) 48 | if matches is None: 49 | raise RuntimeError(f"Could not parse git version string: '{output}'") 50 | return tuple(int(matches.group(i)) for i in range(1, 4)) 51 | 52 | 53 | def checksum_calculate(csum_type, file): 54 | if csum_type in HASHLIB_MAP: 55 | csum = HASHLIB_MAP.get(csum_type)() 56 | else: 57 | raise GenericError(f"Checksum type '{csum_type}' is unsupported") 58 | for block in iter(lambda: file.read(CHECKSUM_BLOCK_SIZE), b""): 59 | csum.update(block) 60 | return csum.hexdigest() 61 | 62 | 63 | def checksum_validate(source, source_archive_file, source_name, mandate_hashes): 64 | if "checksum" not in source: 65 | if mandate_hashes: 66 | with open(source_archive_file, "rb") as f: 67 | csum = checksum_calculate(DEFAULT_CHECKSUM_TYPE, f) 68 | _util.log_warn(f"Missing checksum for '{source_archive_file}':") 69 | 
# NOTE(review): the first lines below are the continuation of checksum_validate(),
# whose first half (the missing-checksum branch) lies above this chunk.
                _util.log_warn(f"{csum} ({DEFAULT_CHECKSUM_TYPE})")
            _util.log_err("Hashes are mandated, but some checksums are missing")
            raise GenericError(f"No checksum provided for source '{source_name}'")
        return
    with open(source_archive_file, "rb") as f:
        # "checksum" has the form "<type>:<hexdigest>".
        csum_type, _, csum_value = source["checksum"].partition(":")
        if not csum_value:
            raise GenericError(f"No checksum provided for source '{source_name}'")
        csum = checksum_calculate(csum_type, f)
        if not csum == csum_value:
            _util.log_warn(f"Mismatch for '{source_archive_file}'")
            _util.log_warn(f"Expected {csum_value} ({csum_type})")
            _util.log_warn(f"Got {csum} ({csum_type})")
            raise GenericError(f"Checksum for source '{source_name}' did not match")


def check_repo(src, subdir, *, check_remotes=0):
    """Check the state of the checkout/archive of *src* below *subdir*.

    Returns a RepoStatus.  *check_remotes* controls how eagerly git remotes
    are queried for updates: 0 = never, 1 = branches only (tags are skipped),
    2 = always.
    """
    if "git" in src._this_yml:
        source_dir = os.path.join(subdir, src.name)

        # If a mirror is configured, compare against the mirror URL instead
        # of the upstream one.
        xbstrap_mirror = src.cfg.xbstrap_mirror
        if xbstrap_mirror is None:
            git_url = src._this_yml["git"]
        else:
            # NOTE(review): urllib.parse is only imported transitively via
            # urllib.request at the top of this file — confirm this import chain.
            git_url = urllib.parse.urljoin(xbstrap_mirror + "/git/", src.name)

        def check_commit(ref, branch):
            # True if *branch* of the local repo contains commit *ref*.
            try:
                subprocess.check_call(
                    ["git", "branch", "--contains", ref, branch],
                    cwd=source_dir,
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL,
                )
            except subprocess.CalledProcessError:
                return False
            return True

        def get_local_commit(ref):
            # Commit hash that local *ref* points to, or None if it does not exist.
            try:
                out = (
                    subprocess.check_output(
                        ["git", "show-ref", "--verify", ref],
                        cwd=source_dir,
                        stderr=subprocess.DEVNULL,
                    )
                    .decode()
                    .splitlines()
                )
            except subprocess.CalledProcessError:
                return None
            assert len(out) == 1
            (commit, outref) = out[0].split(" ")
            return commit

        def get_remote_commit(ref):
            # Commit hash that *ref* points to on the remote, or None if missing.
            try:
                out = (
                    subprocess.check_output(["git", "ls-remote", "--exit-code", git_url, ref])
                    .decode()
                    .splitlines()
                )
            except subprocess.CalledProcessError:
                return None
            assert len(out) == 1
            (commit, outref) = out[0].split("\t")
            return commit

        known_commit = None
        known_branch = None
        if "branch" in src._this_yml and "commit" in src._this_yml:
            known_commit = src._this_yml["commit"]
            known_branch = src._this_yml["branch"]

        # There is a TOCTOU here; we assume that users do not concurrently delete directories.
        if not os.path.isdir(source_dir):
            return RepoStatus.MISSING

        # If we know the commit hash, we do not need to check the remote.
        # Instead, we simply check if the commit exists locally.
        if known_commit and known_branch:
            if not check_commit(known_commit, known_branch):
                return RepoStatus.MISSING
        else:
            if "tag" in src._this_yml:
                ref = "refs/tags/" + src._this_yml["tag"]
                tracking_ref = "refs/tags/" + src._this_yml["tag"]
            else:
                ref = "refs/heads/" + src._this_yml["branch"]
                tracking_ref = "refs/remotes/origin/" + src._this_yml["branch"]
            local_commit = get_local_commit(tracking_ref)
            if local_commit is None:
                return RepoStatus.MISSING

            # Only check remote commits when requested via check_remotes;
            # level 1 skips tags (they are assumed not to move).
            do_check_remote = False
            if check_remotes >= 2:
                do_check_remote = True
            if check_remotes >= 1 and "tag" not in src._this_yml:
                do_check_remote = True

            if do_check_remote:
                _util.log_info("Checking for remote updates of {}".format(src.name))
                remote_commit = get_remote_commit(ref)
                if local_commit != remote_commit:
                    return RepoStatus.OUTDATED
    elif "hg" in src._this_yml:
        source_dir = os.path.join(subdir, src.name)

        if not os.path.isdir(source_dir):
            return RepoStatus.MISSING
        # 'hg manifest -r <rev>' fails if the revision is not present locally.
        args = [
            "hg",
            "manifest",
            "--pager",
            "never",
            "-r",
        ]
        if "tag" in src._this_yml:
            args.append(src._this_yml["tag"])
        else:
            args.append(src._this_yml["branch"])
        if subprocess.call(args, cwd=source_dir, stdout=subprocess.DEVNULL) != 0:
            return RepoStatus.MISSING
    elif "svn" in src._this_yml:
        source_dir = os.path.join(subdir, src.name)

        if not os.path.isdir(source_dir):
            return RepoStatus.MISSING
    elif "url" in src._this_yml:
        source_archive_file = os.path.join(subdir, src.name + "." + src.source_archive_format)

        if not os.access(source_archive_file, os.F_OK):
            return RepoStatus.MISSING
    else:
        # VCS-less source.
        source_dir = os.path.join(subdir, src.name)

        if not os.path.isdir(source_dir):
            return RepoStatus.MISSING

    return RepoStatus.GOOD


def fetch_repo(cfg, src, subdir, *, ignore_mirror=False, bare_repo=False):
    """Fetch (clone/update/download) the source *src* into *subdir*.

    *bare_repo* mirrors the upstream repository (used for mirror maintenance);
    *ignore_mirror* forces fetching from the true upstream even if a mirror is
    configured.  (Function continues below this chunk.)
    """
    source = src._this_yml

    if "git" in source:
        source_dir = os.path.join(subdir, src.name)

        if ignore_mirror:
            xbstrap_mirror = None
        else:
            xbstrap_mirror = src.cfg.xbstrap_mirror

        if xbstrap_mirror is None:
            # "git" may be a single URL or a list of URLs (first = primary,
            # rest = fallbacks); an empty list is rejected.
            if isinstance(source["git"], list):
                if len(source["git"]) == 0:
                    raise GenericError("at least one URL is required for git sources")
                git_sources = source["git"]
            else:
                git_sources = [source["git"]]
        else:
            git_sources = [urllib.parse.urljoin(xbstrap_mirror + "/git/", src.name)]

        git = shutil.which("git")
        if git is None:
            raise GenericError("git not found; please install it and retry")
        # A commit can be pinned globally via the bootstrap commit file.
        commit_yml = cfg._commit_yml.get("commits", dict()).get(src.name, dict())
        fixed_commit = commit_yml.get("fixed_commit", None)

        git_version = determine_git_version(git)

        # Newer versions of git emit a warning if -b is not passed.
        # (We do not care about the name of the master branch, but we need to
        # get rid of the warning.)
# NOTE(review): continuation of fetch_repo(); the function's first half (remote
# URL selection and 'git' discovery) lies above this chunk.
        b_args = []
        if git_version >= (2, 28, 0):
            b_args = ["-b", "master"]

        init = not os.path.isdir(source_dir)
        if init:
            _util.try_mkdir(source_dir)
            if bare_repo:
                subprocess.check_call([git, "init", "--bare"], cwd=source_dir)
            else:
                subprocess.check_call([git, "init"] + b_args, cwd=source_dir)
            # We always set the remote to the true remote, not a mirror.
            subprocess.check_call([git, "remote", "add", "origin", git_sources[0]], cwd=source_dir)

            # Additional URLs become numbered fallback remotes.
            for num, fallback in enumerate(git_sources[1:]):
                subprocess.check_call(
                    [git, "remote", "add", f"fallback-{num}", fallback], cwd=source_dir
                )

        shallow = not source.get("disable_shallow_fetch", False)
        # We have to disable shallow fetches to get rolling versions right.
        if src.is_rolling_version:
            shallow = False

        # We cannot shallow clone mirrors
        if bare_repo:
            shallow = False

        fetch_succeeded = False
        fetch_failed_before = False

        # Try each URL in order until one fetch succeeds.
        for git_url in git_sources:
            args = [git, "fetch"]
            if "tag" in source:
                if shallow:
                    args.append("--depth=1")
                args.extend([git_url, "tag", source["tag"]])
            else:
                # If a commit is specified, we need the branch's full history.
                # TODO: it's unclear whether this is the best strategy:
                # - for simplicity, it might be easier to always pull the full history
                # - some remotes support fetching individual SHA1s.
                if "commit" in source or fixed_commit is not None:
                    shallow = False

                # When initializing the repository, we fetch only one commit.
                # For updates, we fetch all *new* commits (= default behavior of 'git fetch').
                # We do not unshallow the repository.
                if init and shallow:
                    args.append("--depth=1")

                # For bare repos, we mirror the original repo
                # (in particular, we do not distinguish local and remote branches).
                if bare_repo:
                    args.extend(
                        [
                            git_url,
                            "refs/heads/"
                            + source["branch"]
                            + ":"
                            + "refs/heads/"
                            + source["branch"],
                        ]
                    )
                else:
                    args.extend(
                        [
                            git_url,
                            "refs/heads/"
                            + source["branch"]
                            + ":"
                            + "refs/remotes/origin/"
                            + source["branch"],
                        ]
                    )

            try:
                subprocess.check_call(args, cwd=source_dir)
            except subprocess.SubprocessError:
                _util.log_warn(f'Fetching from git remote "{git_url}" failed')
                fetch_failed_before = True
            else:
                if fetch_failed_before:
                    _util.log_warn(f'Fetching from fallback git remote "{git_url}" succeeded')
                fetch_succeeded = True
                break

        if not fetch_succeeded:
            raise GenericError("Fetching {} failed".format(src.name))
    elif "hg" in source:
        source_dir = os.path.join(subdir, src.name)

        hg = shutil.which("hg")
        if hg is None:
            raise GenericError("mercurial (hg) not found; please install it and retry")
        _util.try_mkdir(source_dir)
        args = [hg, "clone", source["hg"], source_dir]
        subprocess.check_call(args)
    elif "svn" in source:
        source_dir = os.path.join(subdir, src.name)

        svn = shutil.which("svn")
        if svn is None:
            raise GenericError("subversion (svn) not found; please install it and retry")
        _util.try_mkdir(source_dir)
        args = [svn, "co", source["svn"], source_dir]
        subprocess.check_call(args)
    elif "url" in source:
        source_dir = os.path.join(subdir, src.name)
        source_archive_file = os.path.join(subdir, src.name + "." + src.source_archive_format)

        # Download the archive and validate its checksum (if any / if mandated).
        _util.try_mkdir(source_dir)
        with urllib.request.urlopen(source["url"]) as req:
            with open(source_archive_file, "wb") as f:
                shutil.copyfileobj(req, f)
        checksum_validate(source, source_archive_file, src.name, cfg.mandate_hashes_for_archives)
    else:
        # VCS-less source.
        source_dir = os.path.join(subdir, src.name)
        _util.try_mkdir(source_dir)


def determine_source_date_epoch(src):
    """Return a SOURCE_DATE_EPOCH value (Unix timestamp) for *src*.

    For git sources this is the committer date of HEAD; otherwise 0.
    (Final 'return 0' follows below this chunk.)
    """
    yml = src._this_yml

    if "git" in yml:
        # HEAD is usually what we want if the repository is fully patched.
        # TODO: Ensure that the current work tree matches the fully patched version.
        output = subprocess.check_output(
            ["git", "show", "-s", "--format=%ct", "HEAD"], encoding="ascii", cwd=src.source_dir
        )
        return int(output)
    else:
        # If we do not know how to find the last modification time,
        # it is preferable to simply return 0 (= Jan 1, 1970) over not setting SOURCE_DATE_EPOCH.
380 | return 0 381 | -------------------------------------------------------------------------------- /xbstrap/pipeline/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # SPDX-License-Identifier: MIT 3 | 4 | import argparse 5 | import json 6 | import sys 7 | 8 | import colorama 9 | import yaml 10 | 11 | import xbstrap.base 12 | import xbstrap.cli_utils 13 | 14 | main_parser = argparse.ArgumentParser() 15 | main_parser.add_argument("-v", dest="verbose", action="store_true", help="verbose") 16 | main_parser.add_argument( 17 | "-S", type=str, dest="source_dir", help="source dir (in place of bootstrap.link)" 18 | ) 19 | main_parser.add_argument( 20 | "-C", type=str, dest="build_dir", help="build dir (in place of cwd)", default="" 21 | ) 22 | main_subparsers = main_parser.add_subparsers(dest="command") 23 | 24 | 25 | class Pipeline: 26 | def __init__(self, cfg, pipe_yml): 27 | self.cfg = cfg 28 | self.jobs = dict() 29 | 30 | default_caps = pipe_yml.get("default_capabilities", []) 31 | 32 | # Determine the set of jobs. 
# NOTE(review): continuation of Pipeline.__init__(); the class header and the
# first statements lie above this chunk.
        mentioned_tools = set()
        mentioned_pkgs = set()
        # Explicit batch jobs from pipeline.yml ("batch:<name>").
        for job_yml in pipe_yml["jobs"]:
            tools = []
            pkgs = []
            for name in job_yml.get("tools", []):
                tool = cfg.get_tool_pkg(name)
                tools.append(tool)
                mentioned_tools.add(tool)
            for name in job_yml.get("packages", []):
                build = cfg.get_build(name)
                # A build contributes all of its subpackages to the job.
                for subpkg_name in build.all_subpkgs():
                    subpkg = cfg.get_target_pkg(subpkg_name)
                    pkgs.append(subpkg)
                mentioned_pkgs.add(build)

            name = "batch:" + job_yml["name"]
            assert name not in self.jobs
            job = Job(
                name,
                tools,
                pkgs,
                default_caps=default_caps,
                explicit_caps=job_yml.get("capabilities"),
            )
            self.jobs[name] = job

            for name in job_yml.get("tasks", []):
                job.tasks.add(cfg.get_task(name))

        # Tools/builds not mentioned by any batch job get one job each
        # ("tool:<name>" / "package:<name>"); broken ones are skipped entirely.
        for tool in cfg.all_tools():
            if tool in mentioned_tools:
                continue
            if tool.stability_level == "broken":
                continue
            name = "tool:" + tool.name
            assert name not in self.jobs
            job = Job(name, [tool], [], default_caps=default_caps)
            if tool.stability_level == "unstable":
                job.unstable = True
            self.jobs[name] = job
        for build in cfg.all_builds():
            if build in mentioned_pkgs:
                continue
            if build.stability_level == "broken":
                continue
            name = "package:" + build.name
            assert name not in self.jobs
            pkgs = [cfg.get_target_pkg(subpkg_name) for subpkg_name in build.all_subpkgs()]
            job = Job(name, [], list(pkgs), default_caps=default_caps)
            if build.stability_level == "unstable":
                job.unstable = True
            self.jobs[name] = job

    def all_jobs(self):
        return self.jobs.values()

    def get_job(self, name):
        return self.jobs[name]


class Job:
    """One CI job: a set of tools, packages and tasks that are built together."""

    def __init__(self, name, tools, pkgs, *, default_caps, explicit_caps=None):
        for tool in tools:
            assert isinstance(tool, xbstrap.base.HostPackage)
        for pkg in pkgs:
            assert isinstance(pkg, xbstrap.base.TargetPackage)

        self.name = name
        self.tools = set(tools)
        self.pkgs = set(pkgs)
        self.tasks = set()
        self.unstable = False

        # Capabilities start from the defaults; explicit_caps entries add a
        # capability ("cap") or remove one ("!cap"); listing both forms of the
        # same capability is an error.
        caps = set(default_caps)
        if explicit_caps is not None:
            with_caps = set(k for k in explicit_caps if not k.startswith("!"))
            without_caps = set(k[1:] for k in explicit_caps if k.startswith("!"))
            contradictory_caps = with_caps.intersection(without_caps)
            if contradictory_caps:
                raise RuntimeError(
                    f"Job {self.name} has contradictory capabilities {contradictory_caps}"
                )
            caps.update(with_caps)
            caps.difference_update(without_caps)
        self.capabilities = caps


def pipeline_for_dir(cfg):
    """Load pipeline.yml from the current directory and build a Pipeline for it."""
    with open("pipeline.yml", "r") as f:
        pipe_yml = yaml.load(f, yaml.SafeLoader)
    return Pipeline(cfg, pipe_yml)


class PipelineItem:
    """Node of the job dependency graph used by the topological sort below."""

    def __init__(self, job):
        self.job = job
        self.edge_set = set()  # names of jobs that this job depends on
        self.edge_list = []
        self.plan_state = xbstrap.base.PlanState.NULL
        self.resolved_n = 0  # number of edges already resolved during the sort


def do_compute_graph(args):
    """'compute-graph' subcommand: emit the job list (and with --artifacts,
    per-job products/needs and up-to-dateness against --version-file).
    (Function continues below this chunk.)
    """
    cfg = xbstrap.base.Config(args.build_dir, changed_source_root=args.source_dir)
    pipe = pipeline_for_dir(cfg)

    if args.version_file:
        with xbstrap.cli_utils.open_file_from_cli(args.version_file, "rt") as f:
            version_yml = yaml.load(f, yaml.SafeLoader)
    if args.artifacts:
        out_root = dict()
        for job in pipe.all_jobs():
            # A job is up-to-date if all of its tools/packages already exist
            # with the current versions according to the version file.
            up2date = False
            if args.version_file:
                up2date = True
                if len(job.tasks):  # For now, tasks are also always rebuilt.
                    up2date = False
                for tool in job.tools:
                    if tool.name not in version_yml["tools"]:
                        up2date = False
                        break
                    if tool.version != version_yml["tools"][tool.name]:
                        up2date = False
                        break
                for pkg in job.pkgs:
                    if pkg.name not in version_yml["pkgs"]:
                        up2date = False
                        break
                    if pkg.version != version_yml["pkgs"][pkg.name]:
                        up2date = False
                        break

            # Compute the (unordered) plan to find out what this job needs.
            plan = xbstrap.base.Plan(cfg)
            plan.build_scope = set().union(job.tools, [pkg.build for pkg in job.pkgs])
            for tool in job.tools:
                plan.wanted.update([(xbstrap.base.Action.ARCHIVE_TOOL, tool)])
            for pkg in job.pkgs:
                if cfg.use_xbps:
                    plan.wanted.update([(xbstrap.base.Action.PACK_PKG, pkg)])
                else:
                    plan.wanted.update([(xbstrap.base.Action.BUILD_PKG, pkg.build)])
            for task in job.tasks:
                plan.wanted.update([(xbstrap.base.Action.RUN, task)])
            plan.compute_plan(no_ordering=True)

            out_job = {
                "unstable": job.unstable,
                "up2date": up2date,
                "capabilities": list(job.capabilities),
            }
            out_job["products"] = {"tools": [], "pkgs": [], "files": []}
            out_job["needed"] = {"tools": [], "pkgs": []}
            for tool in job.tools:
                out_job["products"]["tools"].append(
                    {
                        "name": tool.name,
                        "version": tool.version,
                        "architecture": tool.architecture,
                    }
                )
            for pkg in job.pkgs:
                out_job["products"]["pkgs"].append(
                    {
                        "name": pkg.name,
                        "version": pkg.version,
                        "architecture": pkg.architecture,
                    }
                )
            for task in job.tasks:
                for af in task.artifact_files:
                    out_job["products"]["files"].append(
                        {
                            "name": af.name,
                            "filepath": af.filepath,
                            "architecture": af.architecture,
                        }
                    )
            # WANT_* steps that the job does not produce itself are its inputs.
            for key in plan.materialized_steps():
                (action, subject) = (key.action, key.subject)
                if action == xbstrap.base.Action.WANT_TOOL:
                    if subject in job.tools:
                        continue
                    out_job["needed"]["tools"].append(
                        {
                            "name": subject.name,
                            "version": subject.version,
                            "architecture": subject.architecture,
                        }
                    )
                if action == xbstrap.base.Action.WANT_PKG:
                    if subject in job.pkgs:
                        continue
                    out_job["needed"]["pkgs"].append(
                        {
                            "name": subject.name,
                            "version": subject.version,
                            "architecture": subject.architecture,
                        }
                    )
            out_root[job.name] = out_job

        if args.json:
            print(json.dumps(out_root))
        else:
            print(yaml.dump(out_root), end="")
    else:
        # No --artifacts: only compute the inter-job dependency graph.
        items = dict()
        for job in pipe.all_jobs():
            item = PipelineItem(job)
            items[item.job.name] = item

        # Map each tool/package back to the job that produces it.
        tool_mapping = dict()
        pkg_mapping = dict()
        for item in items.values():
            for tool in item.job.tools:
                tool_mapping[tool] = item.job.name
            for pkg in item.job.pkgs:
                pkg_mapping[pkg] = item.job.name

        for item in items.values():
            plan = xbstrap.base.Plan(cfg)
            plan.build_scope = set().union(item.job.tools, [pkg.build for pkg in item.job.pkgs])
            for tool in item.job.tools:
                plan.wanted.update([(xbstrap.base.Action.ARCHIVE_TOOL, tool)])
            for pkg in item.job.pkgs:
                if cfg.use_xbps:
                    plan.wanted.update([(xbstrap.base.Action.PACK_PKG, pkg)])
                else:
                    plan.wanted.update([(xbstrap.base.Action.BUILD_PKG, pkg.build)])
            for task in item.job.tasks:
                plan.wanted.update([(xbstrap.base.Action.RUN, task)])
            plan.compute_plan(no_ordering=True)

            # Every WANT_* on a foreign tool/package becomes an edge to the
            # job that produces it.
            for key in plan.materialized_steps():
                (action, subject) = (key.action, key.subject)
                if action == xbstrap.base.Action.WANT_TOOL:
                    if subject in item.job.tools:
                        continue
                    item.edge_set.add(tool_mapping[subject])
                if action == xbstrap.base.Action.WANT_PKG:
                    if subject in item.job.pkgs:
                        continue
                    item.edge_set.add(pkg_mapping[subject])

for item in items.values(): 279 | item.edge_list = list(item.edge_set) 280 | 281 | order = [] 282 | 283 | # TODO: this is copied from the planning code. Unify these code paths! 284 | # The following code does a topologic sort of the desired items. 285 | stack = [] 286 | 287 | def visit(item): 288 | if item.plan_state == xbstrap.base.PlanState.NULL: 289 | item.plan_state = xbstrap.base.PlanState.EXPANDING 290 | stack.append(item) 291 | elif item.plan_state == xbstrap.base.PlanState.EXPANDING: 292 | reverse_chain = [item] 293 | for circ_item in reversed(stack): 294 | reverse_chain.append(circ_item) 295 | if circ_item == item: 296 | break 297 | chain = reversed(reverse_chain) 298 | raise RuntimeError( 299 | "Job has circular dependencies {}".format( 300 | [chain_item.job.name for chain_item in chain] 301 | ) 302 | ) 303 | else: 304 | # Packages that are already ordered do not need to be considered again. 305 | assert item.plan_state == xbstrap.base.PlanState.ORDERED 306 | 307 | for root_item in items.values(): 308 | visit(root_item) 309 | 310 | while stack: 311 | item = stack[-1] 312 | if item.resolved_n == len(item.edge_list): 313 | assert item.plan_state == xbstrap.base.PlanState.EXPANDING 314 | item.plan_state = xbstrap.base.PlanState.ORDERED 315 | stack.pop() 316 | order.append(item) 317 | else: 318 | edge_item = items[item.edge_list[item.resolved_n]] 319 | item.resolved_n += 1 320 | visit(edge_item) 321 | 322 | if args.gv: 323 | # For visualization purposes. 
324 | print("digraph {") 325 | for item in order: 326 | for edge in item.edge_list: 327 | print(' "{}" -> "{}";'.format(edge, item.job.name)) 328 | print("}") 329 | elif args.linear: 330 | for item in order: 331 | print("{}".format(item.job.name)) 332 | else: 333 | for item in order: 334 | print("{} {}".format(item.job.name, " ".join(item.edge_list))) 335 | 336 | 337 | do_compute_graph.parser = main_subparsers.add_parser("compute-graph") 338 | do_compute_graph.parser.add_argument("--artifacts", action="store_true") 339 | do_compute_graph.parser.add_argument("--linear", action="store_true") 340 | do_compute_graph.parser.add_argument("--gv", action="store_true") 341 | do_compute_graph.parser.add_argument("--json", action="store_true") 342 | do_compute_graph.parser.add_argument( 343 | "--version-file", type=str, help="file that reports existing file versions" 344 | ) 345 | 346 | 347 | def do_run_job(args): 348 | cfg = xbstrap.base.Config(args.build_dir, changed_source_root=args.source_dir) 349 | pipe = pipeline_for_dir(cfg) 350 | job = pipe.get_job(args.job) 351 | 352 | plan = xbstrap.base.Plan(cfg) 353 | if args.dry_run: 354 | plan.dry_run = True 355 | if args.check: 356 | plan.check = True 357 | if args.keep_going: 358 | plan.keep_going = True 359 | plan.build_scope = set().union(job.tools, [pkg.build for pkg in job.pkgs]) 360 | 361 | if args.progress_file is not None: 362 | plan.progress_file = xbstrap.cli_utils.open_file_from_cli(args.progress_file, "wt") 363 | 364 | for tool in job.tools: 365 | plan.wanted.update([(xbstrap.base.Action.ARCHIVE_TOOL, tool)]) 366 | for pkg in job.pkgs: 367 | if cfg.use_xbps: 368 | plan.wanted.update([(xbstrap.base.Action.PACK_PKG, pkg)]) 369 | else: 370 | plan.wanted.update([(xbstrap.base.Action.BUILD_PKG, pkg.build)]) 371 | for task in job.tasks: 372 | plan.wanted.update([(xbstrap.base.Action.RUN, task)]) 373 | plan.run_plan() 374 | 375 | 376 | do_run_job.parser = main_subparsers.add_parser("run-job") 377 | 
do_run_job.parser.add_argument("job", type=str) 378 | do_run_job.parser.add_argument( 379 | "-n", "--dry-run", action="store_true", help="compute a plan but do not execute it" 380 | ) 381 | do_run_job.parser.add_argument( 382 | "-c", 383 | "--check", 384 | action="store_true", 385 | help="skip packages that are already built/installed/etc.", 386 | ) 387 | do_run_job.parser.add_argument( 388 | "--keep-going", 389 | action="store_true", 390 | help="continue running even if some build steps fail", 391 | ) 392 | do_run_job.parser.add_argument( 393 | "--progress-file", 394 | type=str, 395 | help="file that receives machine-ready progress notifications", 396 | ) 397 | 398 | 399 | def main(): 400 | args = main_parser.parse_args() 401 | 402 | colorama.init() 403 | 404 | if args.verbose: 405 | xbstrap.base.verbosity = True 406 | 407 | try: 408 | if args.command == "compute-graph": 409 | do_compute_graph(args) 410 | elif args.command == "run-job": 411 | do_run_job(args) 412 | else: 413 | assert not "Unexpected command" 414 | except ( 415 | xbstrap.base.ExecutionFailureError, 416 | xbstrap.base.PlanFailureError, 417 | ) as e: 418 | xbstrap.util.log_err(e) 419 | sys.exit(1) 420 | except KeyboardInterrupt: 421 | sys.exit(1) 422 | -------------------------------------------------------------------------------- /xbstrap/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | 3 | import argparse 4 | import importlib.metadata 5 | import json 6 | import os 7 | import random 8 | import shutil 9 | import subprocess 10 | import sys 11 | import tarfile 12 | import urllib.parse 13 | 14 | import colorama 15 | import yaml 16 | 17 | import xbstrap.base 18 | import xbstrap.cli_utils 19 | import xbstrap.exceptions 20 | import xbstrap.util as _util 21 | from xbstrap.util import eprint 22 | 23 | # --------------------------------------------------------------------------------------- 24 | # Command line parsing. 
# ---------------------------------------------------------------------------------------

main_parser = argparse.ArgumentParser()
main_parser.add_argument("-v", dest="verbose", action="store_true", help="verbose")
main_parser.add_argument(
    "--version", action="version", version=importlib.metadata.version("xbstrap")
)
main_parser.add_argument(
    "--debug-cfg-files",
    action="store_true",
    default=False,
    help="write .out.yml files (to debug YAML file processing)",
)
main_parser.add_argument(
    "--ignore-cfg-cache",
    action="store_true",
    default=False,
    help="do not read cache YAML configuration",
)
main_parser.add_argument(
    "-S", type=str, dest="source_dir", help="source dir (in place of bootstrap.link)"
)
main_parser.add_argument(
    "-C", type=str, dest="build_dir", help="build dir (in place of cwd)", default=""
)
main_parser.add_argument(
    "--quiet",
    action="store_true",
    default=False,
    help="redirect stdout of packages to /dev/null",
)
main_subparsers = main_parser.add_subparsers(dest="command")


def config_for_args(args):
    """Construct an xbstrap Config from the global CLI arguments."""
    return xbstrap.base.Config(
        args.build_dir,
        changed_source_root=args.source_dir,
        debug_cfg_files=args.debug_cfg_files,
        ignore_cfg_cache=args.ignore_cfg_cache,
        quiet=args.quiet,
    )


def do_runtool(args):
    """Run a program in the build environment with selected tools available.

    With --build PKG, the command runs in the package's build directory and the
    package's tool dependencies are made available. Otherwise, tool names are
    listed on the command line and separated from the command by `--`.
    """
    cfg = config_for_args(args)

    tool_pkgs = []
    workdir = None
    context = None
    subject = None
    for_package = False

    if args.build is not None:
        pkg = cfg.get_target_pkg(args.build)

        context = "build"
        workdir = "@THIS_BUILD_DIR@"
        subject = pkg.build

        tool_pkgs.extend(cfg.get_tool_pkg(name) for name in pkg.tool_dependencies)
        args = args.opts
        for_package = True
    else:
        if "--" not in args.opts:
            main_parser.error("tools and arguments must be separated by --")

        d = args.opts.index("--")
        tools = args.opts[:d]
        args = args.opts[(d + 1) :]

        if not args:
            main_parser.error("no command given")

        for name in tools:
            tool_pkgs.append(cfg.get_tool_pkg(name))

    # Run outside the container if any selected tool demands it.
    has_containerless = any(x.containerless for x in tool_pkgs)

    xbstrap.base.run_program(
        cfg,
        context,
        subject,
        args,
        tool_pkgs=tool_pkgs,
        workdir=workdir,
        for_package=for_package,
        containerless=has_containerless,
        virtual_tools=subject.virtual_tools if subject else [],
    )


do_runtool.parser = main_subparsers.add_parser("runtool")
do_runtool.parser.add_argument("--build", type=str)
do_runtool.parser.add_argument("opts", nargs=argparse.REMAINDER)


def do_init(args):
    """Initialize a build directory: symlink bootstrap.link and, if configured,
    create cargo-home/config.toml with @SOURCE_ROOT@/@BUILD_ROOT@ substituted.

    Raises RuntimeError if src_root does not contain a bootstrap.yml.
    """
    if not os.access(os.path.join(args.src_root, "bootstrap.yml"), os.F_OK):
        raise RuntimeError("Given src_root does not contain a bootstrap.yml")
    elif os.path.exists("bootstrap.link"):
        _util.log_warn("bootstrap.link already exists, skipping...")
    else:
        os.symlink(os.path.join(args.src_root, "bootstrap.yml"), "bootstrap.link")

    cfg = config_for_args(args)
    if cfg.cargo_config_toml is None:
        # Fix: the substitution below used to run unconditionally and opened
        # cargo-home/config.toml even when no cargo config is declared,
        # raising FileNotFoundError.
        return

    eprint("Creating cargo-home/config.toml")
    os.makedirs("cargo-home", exist_ok=True)
    shutil.copy(os.path.join(args.src_root, cfg.cargo_config_toml), "cargo-home/config.toml")

    container = cfg._site_yml.get("container", dict())
    if "build_mount" in container:
        build_root = container["build_mount"]
        source_root = container["src_mount"]
    else:
        eprint("Using non-Docker build")
        build_root = os.getcwd()
        source_root = os.path.abspath(args.src_root)

    with open("cargo-home/config.toml", "r") as f:

        def substitute(varname):
            if varname == "SOURCE_ROOT":
                return source_root
            elif varname == "BUILD_ROOT":
                return build_root

        content = xbstrap.base.replace_at_vars(f.read(), substitute)

    with open("cargo-home/config.toml", "w") as f:
        f.write(content)


do_init.parser = main_subparsers.add_parser("init")
do_init.parser.add_argument("src_root", type=str)


def handle_plan_args(cfg, plan, args):
    """Transfer the shared planning flags (see handle_plan_args.parser) onto plan."""
    if args.randomize_plan is not None:
        if args.randomize_plan == 0:
            # const=0 means --randomize-plan without a SEED: pick one randomly.
            seed = random.getrandbits(64)
            _util.log_info(f"Using seed {seed} for plan randomization")
            plan.ordering_prng = random.Random(seed)
        else:
            _util.log_info(f"Using seed {args.randomize_plan} for plan randomization")
            plan.ordering_prng = random.Random(args.randomize_plan)

    if args.dry_run:
        plan.dry_run = True
    if args.explain:
        plan.explain = True
    if args.check:
        plan.check = True
    if args.update:
        plan.update = True
    if args.recursive:
        plan.recursive = True
    if args.paranoid:
        plan.paranoid = True
    if args.reset:
        plan.reset = xbstrap.base.ResetMode.RESET
    if args.hard_reset:
        plan.reset = xbstrap.base.ResetMode.HARD_RESET
    if args.restrict_updates:
        plan.restrict_updates = True
    if args.only_wanted:
        plan.only_wanted = True
    if args.keep_going:
        plan.keep_going = True

    if args.progress_file is not None:
        plan.progress_file = xbstrap.cli_utils.open_file_from_cli(args.progress_file, "wt")

    if args.sysroot_isolation is not None:
        plan.isolate_sysroots = args.sysroot_isolation


handle_plan_args.parser = argparse.ArgumentParser(add_help=False)
handle_plan_args.parser.add_argument(
    "--randomize-plan",
    nargs="?",
    type=int,
    const=0,
    metavar="SEED",
    help="randomize the order of steps",
)
handle_plan_args.parser.add_argument(
    "-n", "--dry-run", action="store_true", help="compute a plan but do not execute it"
)
handle_plan_args.parser.add_argument(
    "--explain", action="store_true", help="explain the plan in more detail"
)
handle_plan_args.parser.add_argument(
    "-c",
    "--check",
    action="store_true",
    help="skip packages that are already built/installed/etc.",
)
handle_plan_args.parser.add_argument(
    "-u", "--update", action="store_true", help="check for package updates"
)
handle_plan_args.parser.add_argument(
    "--recursive", action="store_true", help="recursively run build steps that are out of date"
)
handle_plan_args.parser.add_argument(
    "--paranoid",
    action="store_true",
    help="also consider unlikely updates (e.g., changes of git tags)",
)
handle_plan_args.parser.add_argument(
    "--reset",
    action="store_true",
    help="reset repository state; risks loss of local commits!",
)
handle_plan_args.parser.add_argument(
    "--hard-reset",
    action="store_true",
    help="clean and reset repository state; risks loss of local changes and commits!",
)
handle_plan_args.parser.add_argument(
    "--restrict-updates",
    action="store_true",
    help="restrict updates to packages that are explicitly wanted on the command line",
)
handle_plan_args.parser.add_argument(
    "--only-wanted",
    action="store_true",
    help="fail steps that are not explicitly wanted",
)
handle_plan_args.parser.add_argument(
    "--keep-going",
    action="store_true",
    help="continue running even if some build steps fail",
)
handle_plan_args.parser.add_argument(
    "--progress-file",
    type=str,
    help="file that receives machine-ready progress notifications",
)
handle_plan_args.parser.add_argument(
    "--sysroot-isolation",
    action=argparse.BooleanOptionalAction,
    help="force-enable or force-disable sysroot isolation",
)
def do_list_srcs(args):
    """Print the name of every source in the configuration (to stderr)."""
    cfg = config_for_args(args)
    for src in cfg.all_sources():
        eprint("Source: {}".format(src.name))


do_list_srcs.parser = main_subparsers.add_parser("list-srcs")


def _run_source_command(args, action, verb):
    """Shared implementation of fetch/checkout/patch/regenerate.

    Wants `action` for all sources (with --all) or for the sources named on
    the command line, then runs the plan. `verb` is the progress message
    prefix (e.g. "Fetching").
    """
    cfg = config_for_args(args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)

    if args.all:
        for src in cfg.all_sources():
            eprint("{} {}".format(verb, src.name))
            plan.wanted.add((action, src))
    else:
        for src_name in args.source:
            src = cfg.get_source(src_name)
            plan.wanted.add((action, src))

    plan.run_plan()


def do_fetch(args):
    """Fetch (download/clone) sources."""
    _run_source_command(args, xbstrap.base.Action.FETCH_SRC, "Fetching")


do_fetch.parser = main_subparsers.add_parser("fetch", parents=[handle_plan_args.parser])
do_fetch.parser.add_argument("--all", action="store_true")
do_fetch.parser.add_argument("source", nargs="*", type=str)


def do_checkout(args):
    """Check out sources to their configured revisions."""
    _run_source_command(args, xbstrap.base.Action.CHECKOUT_SRC, "Checking Out")


do_checkout.parser = main_subparsers.add_parser("checkout", parents=[handle_plan_args.parser])
do_checkout.parser.add_argument("--all", action="store_true")
do_checkout.parser.add_argument("source", nargs="*", type=str)


def do_patch(args):
    """Apply patches to sources."""
    _run_source_command(args, xbstrap.base.Action.PATCH_SRC, "Patching")


do_patch.parser = main_subparsers.add_parser("patch", parents=[handle_plan_args.parser])
do_patch.parser.add_argument("--all", action="store_true")
do_patch.parser.add_argument("source", nargs="*", type=str)


def do_regenerate(args):
    """Run the regeneration steps of sources."""
    _run_source_command(args, xbstrap.base.Action.REGENERATE_SRC, "Regenerating")


do_regenerate.parser = main_subparsers.add_parser("regenerate", parents=[handle_plan_args.parser])
do_regenerate.parser.add_argument("--all", action="store_true")
do_regenerate.parser.add_argument("source", nargs="*", type=str)


def select_tools(cfg, args):
    """Resolve the list of tools selected by the CLI arguments.

    With --all, returns every default tool; otherwise the tools named
    positionally plus (with --build-deps-of) the tool dependencies of the
    given packages, deduplicated in order.
    """
    if args.all:
        return [tool for tool in cfg.all_tools() if tool.is_default]
    else:
        sel = [cfg.get_tool_pkg(name) for name in args.tools]

        # Fix: use getattr since some callers (e.g. `run --tool`) set
        # args.tools manually and their parsers never define --build-deps-of.
        if getattr(args, "build_deps_of", None) is not None:
            for pkg_name in args.build_deps_of:
                pkg = cfg.get_target_pkg(pkg_name)
                for tool in pkg.tool_dependencies:
                    sel.append(cfg.get_tool_pkg(tool))

        # Deduplicate sel while preserving order.
        sel = list(dict.fromkeys(sel))

        return sel


select_tools.parser = argparse.ArgumentParser(add_help=False)
select_tools.parser.add_argument("--all", action="store_true")
select_tools.parser.add_argument("--build-deps-of", type=str, action="append")
select_tools.parser.add_argument("tools", nargs="*", type=str)


def reconfigure_and_recompile_tools(plan, args, sel):
    """Additionally want configure/compile steps when --reconfigure/--recompile is set."""
    if args.reconfigure:
        for tool in sel:
            plan.wanted.add((xbstrap.base.Action.CONFIGURE_TOOL, tool))
            for stage in tool.all_stages():
                plan.wanted.add((xbstrap.base.Action.COMPILE_TOOL_STAGE, stage))
    elif args.recompile:
        for tool in sel:
            for stage in tool.all_stages():
                plan.wanted.add((xbstrap.base.Action.COMPILE_TOOL_STAGE, stage))


reconfigure_tools_parser = argparse.ArgumentParser(add_help=False)
reconfigure_tools_parser.add_argument("--reconfigure", action="store_true")
reconfigure_tools_parser.set_defaults(reconfigure=False, recompile=False)

recompile_tools_parser = argparse.ArgumentParser(add_help=False)
recompile_tools_parser.add_argument("--recompile", action="store_true")
recompile_tools_parser.set_defaults(reconfigure=False, recompile=False)


def do_configure_tool(args):
    """Configure the selected tools."""
    cfg = config_for_args(args)
    sel = select_tools(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    plan.wanted.update([(xbstrap.base.Action.CONFIGURE_TOOL, pkg) for pkg in sel])
    plan.run_plan()


do_configure_tool.parser = main_subparsers.add_parser(
    "configure-tool", parents=[handle_plan_args.parser, select_tools.parser]
)


def do_compile_tool(args):
    """Compile all stages of the selected tools."""
    cfg = config_for_args(args)
    sel = select_tools(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    reconfigure_and_recompile_tools(plan, args, sel)
    plan.wanted.update(
        [
            (xbstrap.base.Action.COMPILE_TOOL_STAGE, stage)
            for pkg in sel
            for stage in pkg.all_stages()
        ]
    )
    plan.run_plan()


do_compile_tool.parser = main_subparsers.add_parser(
    "compile-tool",
    parents=[handle_plan_args.parser, select_tools.parser, reconfigure_tools_parser],
)


def do_install_tool(args):
    """Install all stages of the selected tools."""
    cfg = config_for_args(args)
    sel = select_tools(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    reconfigure_and_recompile_tools(plan, args, sel)
    plan.wanted.update(
        [
            (xbstrap.base.Action.INSTALL_TOOL_STAGE, stage)
            for pkg in sel
            for stage in pkg.all_stages()
        ]
    )
    plan.run_plan()


do_install_tool.parser = main_subparsers.add_parser(
    "install-tool",
    parents=[
        handle_plan_args.parser,
        select_tools.parser,
        reconfigure_tools_parser,
        recompile_tools_parser,
    ],
)


def select_pkgs(cfg, args):
    """Resolve the list of target packages selected by the CLI arguments."""
    if args.all:
        return [pkg for pkg in cfg.all_pkgs() if pkg.is_default]
    else:
        # The `run` subcommand passes packages via --pkg (and defines none of
        # the other selection options), so return early in that case.
        if args.command == "run":
            return [cfg.get_target_pkg(name) for name in args.pkg]
        else:
            sel = [cfg.get_target_pkg(name) for name in args.packages]

        if args.installed:
            sel.extend(cfg.get_installed_pkgs())

        if args.deps_of is not None:
            for pkg_name in args.deps_of:
                pkg = cfg.get_target_pkg(pkg_name)
                sel.append(pkg)
                for dep_name in pkg.discover_recursive_pkg_dependencies():
                    dep = cfg.get_target_pkg(dep_name)
                    sel.append(dep)

        return sel


select_pkgs.parser = argparse.ArgumentParser(add_help=False)
select_pkgs.parser.add_argument("--all", action="store_true")
select_pkgs.parser.add_argument("--installed", action="store_true")
select_pkgs.parser.add_argument("--deps-of", type=str, action="append")
select_pkgs.parser.add_argument("packages", nargs="*", type=str)


def reconfigure_and_rebuild_pkgs(plan, args, sel, no_pack=False):
    """Additionally want configure/build (and pack, unless no_pack) steps
    for sel when --reconfigure/--rebuild is set."""
    if args.reconfigure:
        for pkg in sel:
            plan.wanted.add((xbstrap.base.Action.CONFIGURE_PKG, pkg.build))
            plan.wanted.add((xbstrap.base.Action.BUILD_PKG, pkg.build))
            if no_pack:
                # Fix: this used to `return`, which aborted the loop and
                # silently dropped the steps of all remaining packages.
                continue
            if plan.cfg.use_xbps:
                plan.wanted.add((xbstrap.base.Action.PACK_PKG, pkg))
    elif args.rebuild:
        for pkg in sel:
            plan.wanted.add((xbstrap.base.Action.BUILD_PKG, pkg.build))
            if no_pack:
                # Fix: see above; was `return`.
                continue
            if plan.cfg.use_xbps:
                plan.wanted.add((xbstrap.base.Action.PACK_PKG, pkg))


reconfigure_pkgs_parser = argparse.ArgumentParser(add_help=False)
reconfigure_pkgs_parser.add_argument("--reconfigure", action="store_true")
reconfigure_pkgs_parser.set_defaults(reconfigure=False, rebuild=False)

rebuild_pkgs_parser = argparse.ArgumentParser(add_help=False)
rebuild_pkgs_parser.add_argument("--rebuild", action="store_true")
rebuild_pkgs_parser.set_defaults(reconfigure=False, rebuild=False)


def do_configure(args):
    """Configure the selected packages."""
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    plan.wanted.update([(xbstrap.base.Action.CONFIGURE_PKG, pkg.build) for pkg in sel])
    plan.run_plan()


do_configure.parser = main_subparsers.add_parser(
    "configure", parents=[handle_plan_args.parser, select_pkgs.parser]
)


def do_build(args):
    """Build the selected packages."""
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    reconfigure_and_rebuild_pkgs(plan, args, sel, no_pack=True)
    plan.wanted.update([(xbstrap.base.Action.BUILD_PKG, pkg.build) for pkg in sel])
    plan.run_plan()


do_build.parser = main_subparsers.add_parser(
    "build",
    parents=[handle_plan_args.parser, reconfigure_pkgs_parser, select_pkgs.parser],
)


def do_reproduce_build(args):
    """Rebuild the selected packages and compare against the previous build."""
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    reconfigure_and_rebuild_pkgs(plan, args, sel, no_pack=True)
    plan.wanted.update([(xbstrap.base.Action.REPRODUCE_BUILD_PKG, pkg.build) for pkg in sel])
    plan.run_plan()


do_reproduce_build.parser = main_subparsers.add_parser(
    "reproduce-build",
    parents=[handle_plan_args.parser, reconfigure_pkgs_parser, select_pkgs.parser],
)


def do_pack(args):
    """Pack the selected packages."""
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    reconfigure_and_rebuild_pkgs(plan, args, sel, no_pack=True)
    plan.wanted.update([(xbstrap.base.Action.PACK_PKG, pkg) for pkg in sel])
    plan.run_plan()


do_pack.parser = main_subparsers.add_parser(
    "pack",
    parents=[handle_plan_args.parser, reconfigure_pkgs_parser, select_pkgs.parser],
)


def do_reproduce_pack(args):
    """Repack the selected packages and compare against the previous pack."""
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    reconfigure_and_rebuild_pkgs(plan, args, sel, no_pack=True)
    plan.wanted.update([(xbstrap.base.Action.REPRODUCE_PACK_PKG, pkg) for pkg in sel])
    plan.run_plan()


do_reproduce_pack.parser = main_subparsers.add_parser(
    "reproduce-pack",
    parents=[handle_plan_args.parser, reconfigure_pkgs_parser, select_pkgs.parser],
)


def do_download(args):
    """Download prebuilt package archives and unpack them into the staging dirs.

    Raises RuntimeError if bootstrap.yml declares no repository URL.
    """
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)

    if cfg.pkg_archives_url is None:
        raise RuntimeError("No repository URL in bootstrap.yml")

    _util.try_mkdir(cfg.package_out_dir)

    for pkg in sel:
        url = urllib.parse.urljoin(cfg.pkg_archives_url + "/", pkg.name + ".tar.gz")
        _util.log_info("Downloading package {} from {}".format(pkg.name, url))
        _util.interactive_download(url, pkg.archive_file)

        xbstrap.base.try_rmtree(pkg.staging_dir)
        os.mkdir(pkg.staging_dir)
        with tarfile.open(pkg.archive_file, "r:gz") as tar:
            for info in tar:
                # NOTE(review): members are extracted without path
                # sanitization; a malicious archive could escape staging_dir.
                tar.extract(info, pkg.staging_dir)


do_download.parser = main_subparsers.add_parser("download-archive", parents=[select_pkgs.parser])


def do_download_tool(args):
    """Download prebuilt archives for the selected tools."""
    cfg = config_for_args(args)
    sel = select_tools(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    plan.wanted.update([(xbstrap.base.Action.PULL_ARCHIVE, tool) for tool in sel])
    plan.run_plan()


do_download_tool.parser = main_subparsers.add_parser(
    "download-tool-archive",
    parents=[
        handle_plan_args.parser,
        select_tools.parser,
    ],
)
do_download_tool.parser.set_defaults(_impl=do_download_tool)


def do_install(args):
    """Install the selected packages into the sysroot."""
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    reconfigure_and_rebuild_pkgs(plan, args, sel)
    plan.wanted.update([(xbstrap.base.Action.INSTALL_PKG, pkg) for pkg in sel])
    plan.run_plan()


do_install.parser = main_subparsers.add_parser(
    "install",
    parents=[
        handle_plan_args.parser,
        reconfigure_pkgs_parser,
        rebuild_pkgs_parser,
        select_pkgs.parser,
    ],
)


def do_archive_tool(args):
    """Create archives of the selected tools."""
    cfg = config_for_args(args)
    sel = select_tools(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    plan.wanted.update([(xbstrap.base.Action.ARCHIVE_TOOL, tool) for tool in sel])
    plan.run_plan()


do_archive_tool.parser = main_subparsers.add_parser(
    "archive-tool", parents=[handle_plan_args.parser, select_tools.parser]
)


def do_archive(args):
    """Create archives of the selected packages."""
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    plan.wanted.update([(xbstrap.base.Action.ARCHIVE_PKG, pkg.build) for pkg in sel])
    plan.run_plan()


do_archive.parser = main_subparsers.add_parser(
    "archive", parents=[handle_plan_args.parser, select_pkgs.parser]
)

# ----------------------------------------------------------------------------------------


def do_pull_pack(args):
    """Pull prebuilt package packs for the selected packages."""
    cfg = config_for_args(args)
    sel = select_pkgs(cfg, args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)
    plan.wanted.update([(xbstrap.base.Action.PULL_PKG_PACK, pkg) for pkg in sel])
    plan.run_plan()


pull_pack_parser = main_subparsers.add_parser(
    "pull-pack", parents=[handle_plan_args.parser, select_pkgs.parser]
)
pull_pack_parser.set_defaults(_impl=do_pull_pack)

# ----------------------------------------------------------------------------------------


def do_list_tools(args):
    """Print the name of every tool in the configuration."""
    cfg = config_for_args(args)
    for tool in cfg.all_tools():
        print(tool.name)


do_list_tools.parser = main_subparsers.add_parser("list-tools")


def do_list_pkgs(args):
    """Print the name of every target package in the configuration."""
    cfg = config_for_args(args)
    for pkg in cfg.all_pkgs():
        print(pkg.name)


do_list_pkgs.parser = main_subparsers.add_parser("list-pkgs")


def do_run_task(args):
    """Run tasks, either globally, of a package (--pkg) or of a tool (--tool).

    Raises RuntimeError if a named task does not exist.
    """
    args.all = False

    cfg = config_for_args(args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)

    if args.pkg:
        sel = select_pkgs(cfg, args)
        for task_name in args.task:
            task = sel[0].build.get_task(task_name)
            if not task:
                # Fix: the format arguments used to be swapped/wrong
                # (task list instead of task name, task name instead of pkg).
                raise RuntimeError(
                    "task {} of package {} not found".format(task_name, args.pkg[0])
                )
            plan.wanted.add((xbstrap.base.Action.RUN_PKG, task))
    elif args.tool:
        args.tools = args.tool
        sel = select_tools(cfg, args)
        for task_name in args.task:
            task = sel[0].get_task(task_name)
            if not task:
                # Fix: same argument swap as above.
                raise RuntimeError("task {} of tool {} not found".format(task_name, args.tool[0]))
            plan.wanted.add((xbstrap.base.Action.RUN_TOOL, task))
    else:
        for task_name in args.task:
            task = cfg.get_task(task_name)
            if not task:
                raise RuntimeError("task {} not found".format(task_name))
            plan.wanted.add((xbstrap.base.Action.RUN, task))

    plan.run_plan()


do_run_task.parser = main_subparsers.add_parser("run", parents=[handle_plan_args.parser])
group = do_run_task.parser.add_mutually_exclusive_group(required=False)
group.add_argument("--pkg", nargs=1, required=False, type=str)
group.add_argument("--tool", nargs=1, required=False, type=str)
do_run_task.parser.add_argument("task", nargs="+", type=str)

# ----------------------------------------------------------------------------------------

var_commits_parser = main_subparsers.add_parser("variable-commits")
var_commits_subparsers = var_commits_parser.add_subparsers(dest="command")


def do_var_commits_fetch(args):
    """Fetch all sources that use a variable checkout commit."""
    cfg = config_for_args(args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)

    for src in cfg.all_sources():
        if not src.has_variable_checkout_commit:
            continue
        plan.wanted.add((xbstrap.base.Action.FETCH_SRC, src))

    plan.run_plan()


do_var_commits_fetch.parser = var_commits_subparsers.add_parser(
    "fetch", parents=[handle_plan_args.parser]
)
do_var_commits_fetch.parser.set_defaults(_impl=do_var_commits_fetch)


def do_var_commits_determine(args):
    """Print the resolved commit of each variable-checkout source (YAML or JSON)."""
    cfg = config_for_args(args)

    out_yml = dict()
    for src in cfg.all_sources():
        if not src.has_variable_checkout_commit:
            continue
        out_yml[src.name] = src.determine_variable_checkout_commit()

    if args.json:
        json.dump(out_yml, sys.stdout)
    else:
        print(yaml.dump(out_yml), end="")


var_commits_determine_parser = var_commits_subparsers.add_parser("determine")
var_commits_determine_parser.set_defaults(_impl=do_var_commits_determine)
var_commits_determine_parser.add_argument("--json", action="store_true")

# ----------------------------------------------------------------------------------------

rolling_parser = main_subparsers.add_parser("rolling-versions")
rolling_subparsers = rolling_parser.add_subparsers(dest="command")


def do_rolling_fetch(args):
    """Fetch all sources that use rolling versions."""
    cfg = config_for_args(args)
    plan = xbstrap.base.Plan(cfg)
    handle_plan_args(cfg, plan, args)

    for src in cfg.all_sources():
        if not src.is_rolling_version:
            continue
        plan.wanted.add((xbstrap.base.Action.FETCH_SRC, src))

    plan.run_plan()


do_rolling_fetch.parser = rolling_subparsers.add_parser("fetch", parents=[handle_plan_args.parser])
do_rolling_fetch.parser.set_defaults(_impl=do_rolling_fetch)


def do_rolling_determine(args):
    """Print the rolling-version id of each rolling source (YAML or JSON)."""
    cfg = config_for_args(args)
    out_yml = dict()
    for src in cfg.all_sources():
        if not src.is_rolling_version:
            continue
        out_yml[src.name] = src.determine_rolling_id()

    if args.json:
        json.dump(out_yml, sys.stdout)
    else:
        print(yaml.dump(out_yml), end="")


do_rolling_determine.parser = rolling_subparsers.add_parser("determine")
do_rolling_determine.parser.add_argument("--json", action="store_true")
do_rolling_determine.parser.set_defaults(_impl=do_rolling_determine)
# ----------------------------------------------------------------------------------------


def do_prereqs(args):
    """Download and install prerequisite helper tools into the xbstrap home.

    Valid components are cbuildrt, xbps and xmu; each requested component is
    downloaded and unpacked into <xbstrap home>/bin.
    Raises RuntimeError when an unknown component is requested.
    """
    comps = set(args.components)
    valid_comps = ["cbuildrt", "xbps", "xmu"]
    if not comps.issubset(valid_comps):
        raise RuntimeError(f"Unknown component given; choose from: {valid_comps}")

    home = _util.find_home()
    bin_dir = os.path.join(home, "bin")
    _util.try_mkdir(home)
    _util.try_mkdir(bin_dir)

    if "cbuildrt" in comps:
        url = "https://github.com/managarm/cbuildrt"
        url += "/releases/latest/download/cbuildrt-linux-x86_64-static.tar"
        tar_path = os.path.join(home, "cbuildrt.tar")

        _util.log_info(f"Downloading cbuildrt from {url}")
        _util.interactive_download(url, tar_path)
        # NOTE(review): extracting a downloaded archive without a tarfile
        # extraction filter is vulnerable to path traversal; consider
        # tar.extract(..., filter="data") once Python >= 3.12 can be required.
        with tarfile.open(tar_path, "r") as tar:
            for info in tar:
                if info.name == "cbuildrt":
                    tar.extract(info, bin_dir)
                    # The release tarball does not preserve the execute bit.
                    os.chmod(os.path.join(bin_dir, "cbuildrt"), 0o755)
    if "xbps" in comps:
        url = "https://repo-default.voidlinux.org/static"
        url += "/xbps-static-static-0.59.2_1.x86_64-musl.tar.xz"
        tar_path = os.path.join(home, "xbps.tar.xz")

        _util.log_info(f"Downloading xbps from {url}")
        _util.interactive_download(url, tar_path)
        with tarfile.open(tar_path, "r:xz") as tar:
            for info in tar:
                # Flatten ./usr/bin/* into bin_dir.
                if os.path.dirname(info.name) == "./usr/bin":
                    info.name = os.path.basename(info.name)
                    tar.extract(info, bin_dir)
    if "xmu" in comps:
        info_url = (
            "https://api.github.com/repos/managarm/xbstrap-maintainer-utilities/releases/latest"
        )
        releases = json.load(urllib.request.urlopen(info_url))
        url = releases["tarball_url"]
        tar_path = os.path.join(home, f"xmu-{releases['name']}.tar.gz")

        _util.log_info(f"Downloading xmu {releases['name']} from {url} to {tar_path}")
        _util.interactive_download(url, tar_path)
        # BUG FIX: 'commit' was previously unbound (NameError) if the tarball
        # contained no top-level entry; initialize it and fail gracefully.
        commit = None
        with tarfile.open(tar_path, "r") as tar:
            for info in tar:
                if "/" not in info.name:
                    # GitHub tarballs have a single top-level directory ending
                    # in the abbreviated (7-character) commit hash.
                    commit = info.name[-7:]
                tar.extract(info, bin_dir)
        if commit is None:
            _util.log_err("xmu tarball has an unexpected layout (no top-level directory)")
            return

        extract_dir = os.path.join(bin_dir, f"managarm-xbstrap-maintainer-utilities-{commit}")
        dest_dir = os.path.join(bin_dir, "xmu")

        xbstrap.base.try_rmtree(dest_dir)
        shutil.move(extract_dir, dest_dir)
        if shutil.which("npm") is None:
            _util.log_err("npm not found")
            return
        _util.log_info("Installing xmu with npm")
        # subprocess.run replaces the Popen + wait pair.
        proc = subprocess.run(["npm", "install"], cwd=dest_dir)
        if proc.returncode != 0:
            _util.log_err(f"Installation of xmu failed with status code {proc.returncode}")
        xbstrap.base.try_unlink(tar_path)


do_prereqs.parser = main_subparsers.add_parser("prereqs")
do_prereqs.parser.add_argument("components", type=str, nargs="*")
do_prereqs.parser.set_defaults(_impl=do_prereqs)

# ----------------------------------------------------------------------------------------


def do_lsp(args):
    """Invoke an LSP server inside the build environment of a package.

    The package's tool dependencies (plus any --extra-tools) are made
    available, and @HOST_SOURCE_ROOT@/@HOST_BUILD_ROOT@ variables in the
    LSP command line are substituted with absolute host paths.
    """
    cfg = config_for_args(args)
    pkg = cfg.get_target_pkg(args.package)

    tool_pkgs = {cfg.get_tool_pkg(name) for name in pkg.tool_dependencies}
    tool_pkgs.update(cfg.get_tool_pkg(name) for name in args.extra_tools)

    def resolve_host_paths(x):
        # Substitute the HOST_* variables; any other @VAR@ is passed through
        # unchanged so that run_program can substitute it in-container.
        return {
            "HOST_SOURCE_ROOT": os.path.abspath(cfg.source_root),
            "HOST_BUILD_ROOT": os.path.abspath(cfg.build_root),
        }.get(x, "@{}@".format(x))

    xbstrap.base.run_program(
        cfg,
        "build",
        pkg.build,
        [xbstrap.base.replace_at_vars(x, resolve_host_paths) for x in args.lsp_program],
        tool_pkgs=tool_pkgs,
        workdir="@THIS_SOURCE_DIR@",
        for_package=True,
    )


do_lsp.parser = main_subparsers.add_parser(
    "lsp",
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description="""
Invokes an LSP server inside the build environment for a given package.

Example:
    # generate a compile_commands.json
    xbstrap -C ../../build lsp managarm-system -- \\
        ln -s @THIS_BUILD_DIR@/compile_commands.json

    # tell your editor to invoke this
    xbstrap -C ../../build lsp managarm-system -- \\
        clangd -background-index \\
            --path-mappings \\
            @HOST_BUILD_ROOT@=@BUILD_ROOT@,@HOST_SOURCE_ROOT@=@SOURCE_ROOT@
""".strip(),
)
do_lsp.parser.add_argument(
    "--extra-tools",
    type=str,
    nargs="+",
    default=[],
    help="extra tools to add to the lsp environment",
)
do_lsp.parser.add_argument("package", type=str, help="xbstrap package to run lsp for")
do_lsp.parser.add_argument("lsp_program", type=str, help="LSP server and arguments", nargs="+")
do_lsp.parser.set_defaults(_impl=do_lsp)

# ----------------------------------------------------------------------------------------


def do_maintainer(args):
    """Run the xbstrap maintainer utilities (xmu), forwarding extra arguments.

    xmu must previously have been installed via `xbstrap prereqs xmu`.
    """
    cfg = config_for_args(args)
    xmu_dir = os.path.join(_util.find_home(), "bin", "xmu")
    if not os.access(os.path.join(xmu_dir, "xmu.js"), os.F_OK):
        _util.log_err("The maintainer utilities are not installed.")
        _util.log_info("Try running `xbstrap prereqs xmu` to install them.")
        return
    # Run from the source root so xmu sees the bootstrap site.
    proc = subprocess.run(
        [os.path.join(xmu_dir, "xmu.js")] + args.args, cwd=os.path.abspath(cfg.source_root)
    )
    if proc.returncode != 0:
        _util.log_err(f"xmu returned with status {proc.returncode}")


do_maintainer.parser = main_subparsers.add_parser("maintainer")
do_maintainer.parser.add_argument("args", type=str, nargs="*")
do_maintainer.parser.set_defaults(_impl=do_maintainer)

# ----------------------------------------------------------------------------------------


def do_execute_manifest(args):
    """Execute a build manifest, taken from -c or read from stdin.

    NOTE(review): the manifest is parsed with the project's YAML loader; it is
    assumed to come from a trusted orchestrator, not arbitrary user input.
    """
    if args.c is not None:
        manifest = yaml.load(args.c, Loader=xbstrap.base.global_yaml_loader)
    else:
        manifest = yaml.load(sys.stdin, Loader=xbstrap.base.global_yaml_loader)
    xbstrap.base.execute_manifest(manifest)


execute_manifest_parser = main_subparsers.add_parser("execute-manifest")
execute_manifest_parser.add_argument("-c", type=str)
execute_manifest_parser.set_defaults(_impl=do_execute_manifest)


def main():
    """CLI entry point: parse arguments and dispatch to the chosen subcommand.

    Newer subcommands register themselves via set_defaults(_impl=...); older
    ones are dispatched through the legacy command table below. Known xbstrap
    errors are reported and exit with status 1 instead of a traceback.
    """
    args = main_parser.parse_args()

    colorama.init()

    if args.verbose:
        xbstrap.base.verbosity = True

    if not xbstrap.base.native_yaml_available:
        _util.log_warn(
            "Using pure Python YAML parser\n : Install libyaml for improved performance"
        )

    # Legacy subcommands that do not use set_defaults(_impl=...).
    legacy_commands = {
        "init": do_init,
        "runtool": do_runtool,
        "fetch": do_fetch,
        "checkout": do_checkout,
        "patch": do_patch,
        "regenerate": do_regenerate,
        "configure-tool": do_configure_tool,
        "compile-tool": do_compile_tool,
        "install-tool": do_install_tool,
        "configure": do_configure,
        "build": do_build,
        "reproduce-build": do_reproduce_build,
        "pack": do_pack,
        "reproduce-pack": do_reproduce_pack,
        "archive-tool": do_archive_tool,
        "archive": do_archive,
        "download": do_download,
        "install": do_install,
        "list-tools": do_list_tools,
        "list-pkgs": do_list_pkgs,
        "list-srcs": do_list_srcs,
        "run": do_run_task,
        "lsp": do_lsp,
        "maintainer": do_maintainer,
    }

    try:
        if hasattr(args, "_impl"):
            args._impl(args)
        elif args.command in legacy_commands:
            legacy_commands[args.command](args)
        else:
            main_parser.print_help()
    except (
        xbstrap.base.ExecutionFailureError,
        xbstrap.base.PlanFailureError,
        xbstrap.exceptions.GenericError,
    ) as e:
        _util.log_err(e)
        sys.exit(1)
    except KeyboardInterrupt:
        sys.exit(1)