├── .cargo └── config.toml ├── .devcontainer ├── Dockerfile ├── devcontainer.json ├── library-scripts │ ├── common-redhat.sh │ └── docker-redhat.sh └── scripts │ └── postinstall.sh ├── .dockerignore ├── .env.example ├── .env.vault ├── .github ├── dependabot.yml └── workflows │ ├── package-git.yml │ ├── release-crate.yml │ └── rust.yml ├── .gitignore ├── .idea ├── .gitignore ├── anda.iml ├── codeStyles │ └── codeStyleConfig.xml ├── compiler.xml ├── discord.xml ├── inspectionProfiles │ └── Project_Default.xml ├── modules.xml ├── sqldialects.xml └── vcs.xml ├── .vscode ├── launch.json └── settings.json ├── Cargo.lock ├── Cargo.toml ├── LICENSE.md ├── README.md ├── anda-config ├── Cargo.toml ├── config.rs ├── context.rs ├── error.rs ├── lib.rs └── template.rs ├── anda.hcl ├── andax ├── Cargo.toml ├── error.rs ├── fns │ ├── build.rs │ ├── cfg.rs │ ├── io.rs │ ├── kokoro.rs │ ├── mod.rs │ ├── rpm.rs │ ├── tenshi.rs │ └── tsunagu.rs ├── hints │ └── gh_token_not_present.txt ├── lib.rs └── run.rs ├── assets ├── anda-compressed.png ├── anda-medium.png ├── anda.ico ├── anda.png └── anda.svg ├── bump.rhai ├── clippy.toml ├── rust-anda-git.spec ├── rust-anda.spec ├── rustfmt.toml ├── src ├── artifacts.rs ├── builder.rs ├── cli.rs ├── flatpak.rs ├── lib.rs ├── main.rs ├── oci.rs ├── rpm_spec.rs ├── update.rs └── util.rs ├── tests ├── anda.hcl ├── hello.sh ├── org.flatpak.Hello.yml ├── test.rhai ├── umpkg.spec └── umpkg.spec.in └── xtask ├── Cargo.toml └── src └── main.rs /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [profile.release] 2 | opt-level = "s" 3 | 4 | [alias] 5 | anda = "run --bin anda" 6 | xtask = "run -p xtask --" -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # See here for image contents: 
https://github.com/microsoft/vscode-dev-containers/tree/v0.238.1/containers/rust/.devcontainer/base.Dockerfile 2 | 3 | FROM registry.fedoraproject.org/fedora:37 4 | 5 | COPY library-scripts/ /tmp/library-scripts/ 6 | RUN bash /tmp/library-scripts/common-redhat.sh 7 | RUN bash /tmp/library-scripts/docker-redhat.sh 8 | COPY scripts/postinstall.sh /usr/share/scripts/ 9 | 10 | RUN dnf groupinstall -y "Development Tools" "RPM Development Tools" 11 | RUN dnf install -y openssl-devel flatpak flatpak-builder dnf-plugins-core 12 | RUN flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo 13 | 14 | # Add Andaman repo 15 | RUN dnf config-manager --add-repo https://raw.githubusercontent.com/terrapkg/subatomic-repos/main/terra.repo 16 | 17 | RUN dnf install -y anda-mock-configs anda-srpm-macros 18 | 19 | ENTRYPOINT ["/usr/local/share/docker-init.sh"] 20 | VOLUME [ "/var/lib/docker" ] 21 | CMD ["sleep", "infinity"] 22 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.238.1/containers/rust 3 | { 4 | "name": "Rust", 5 | "build": { 6 | "dockerfile": "Dockerfile" 7 | }, 8 | "runArgs": [ 9 | "--cap-add=SYS_PTRACE", 10 | "--security-opt", 11 | "seccomp=unconfined", 12 | "--init", 13 | "--privileged" 14 | ], 15 | "overrideCommand": false, 16 | "mounts": ["source=dind-var-lib-docker,target=/var/lib/docker,type=volume"], 17 | 18 | // Configure tool-specific properties. 19 | "customizations": { 20 | // Configure properties specific to VS Code. 21 | "vscode": { 22 | // Set *default* container specific settings.json values on container create. 
23 | "settings": { 24 | "lldb.executable": "/usr/bin/lldb", 25 | // VS Code don't watch files under ./target 26 | "files.watcherExclude": { 27 | "**/target/**": true 28 | }, 29 | "rust-analyzer.checkOnSave.command": "clippy" 30 | }, 31 | 32 | // Add the IDs of extensions you want installed when the container is created. 33 | "extensions": [ 34 | "vadimcn.vscode-lldb", 35 | "mutantdino.resourcemonitor", 36 | "rust-lang.rust-analyzer", 37 | "tamasfe.even-better-toml", 38 | "serayuzgur.crates", 39 | "GitHub.copilot-nightly", 40 | "GitHub.copilot-labs", 41 | "rangav.vscode-thunder-client", 42 | "eamodio.gitlens", 43 | "1dot75cm.RPMSpec", 44 | "ms-kubernetes-tools.vscode-kubernetes-tools", 45 | "esbenp.prettier-vscode", 46 | "skellock.just", 47 | "bradlc.vscode-tailwindcss", 48 | "HashiCorp.HCL" 49 | ] 50 | } 51 | }, 52 | 53 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 54 | // "forwardPorts": [], 55 | 56 | // Use 'postCreateCommand' to run commands after the container is created. 57 | // "postCreateCommand": "rustc --version", 58 | 59 | // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 60 | "remoteUser": "vscode", 61 | 62 | "postCreateCommand": "bash /usr/share/scripts/postinstall.sh", 63 | "postStartCommand": "/usr/local/share/docker-init.sh && sudo tailscaled &" 64 | } 65 | -------------------------------------------------------------------------------- /.devcontainer/library-scripts/common-redhat.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #------------------------------------------------------------------------------------------------------------- 3 | # Copyright (c) Microsoft Corporation. All rights reserved. 4 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
5 | #------------------------------------------------------------------------------------------------------------- 6 | # 7 | # ** This script is community supported ** 8 | # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md 9 | # Maintainer: The VS Code and Codespaces Teams 10 | # 11 | # Syntax: ./common-redhat.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] 12 | 13 | set -e 14 | 15 | INSTALL_ZSH=${1:-"true"} 16 | USERNAME=${2:-"automatic"} 17 | USER_UID=${3:-"automatic"} 18 | USER_GID=${4:-"automatic"} 19 | UPGRADE_PACKAGES=${5:-"true"} 20 | INSTALL_OH_MYS=${6:-"true"} 21 | SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)" 22 | MARKER_FILE="/usr/local/etc/vscode-dev-containers/common" 23 | 24 | if [ "$(id -u)" -ne 0 ]; then 25 | echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' 26 | exit 1 27 | fi 28 | 29 | # Ensure that login shells get the correct path if the user updated the PATH using ENV. 
30 | rm -f /etc/profile.d/00-restore-env.sh 31 | echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh 32 | chmod +x /etc/profile.d/00-restore-env.sh 33 | 34 | # If in automatic mode, determine if a user already exists, if not use vscode 35 | if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then 36 | USERNAME="" 37 | POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") 38 | for CURRENT_USER in ${POSSIBLE_USERS[@]}; do 39 | if id -u ${CURRENT_USER} > /dev/null 2>&1; then 40 | USERNAME=${CURRENT_USER} 41 | break 42 | fi 43 | done 44 | if [ "${USERNAME}" = "" ]; then 45 | USERNAME=vscode 46 | fi 47 | elif [ "${USERNAME}" = "none" ]; then 48 | USERNAME=root 49 | USER_UID=0 50 | USER_GID=0 51 | fi 52 | 53 | # Load markers to see which steps have already run 54 | if [ -f "${MARKER_FILE}" ]; then 55 | echo "Marker file found:" 56 | cat "${MARKER_FILE}" 57 | source "${MARKER_FILE}" 58 | fi 59 | 60 | # Install common dependencies 61 | if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then 62 | 63 | package_list="\ 64 | openssh-clients \ 65 | gnupg2 \ 66 | iproute \ 67 | procps \ 68 | lsof \ 69 | net-tools \ 70 | psmisc \ 71 | curl \ 72 | wget \ 73 | ca-certificates \ 74 | rsync \ 75 | unzip \ 76 | zip \ 77 | nano \ 78 | vim-minimal \ 79 | less \ 80 | jq \ 81 | redhat-lsb-core \ 82 | openssl-libs \ 83 | krb5-libs \ 84 | libicu \ 85 | zlib \ 86 | sudo \ 87 | sed \ 88 | grep \ 89 | which \ 90 | man-db \ 91 | strace" 92 | 93 | # Install OpenSSL 1.0 compat if needed 94 | if yum -q list compat-openssl10 >/dev/null 2>&1; then 95 | package_list="${package_list} compat-openssl10" 96 | fi 97 | 98 | yum -y install ${package_list} 99 | 100 | if ! 
type git > /dev/null 2>&1; then 101 | yum -y install git 102 | fi 103 | 104 | PACKAGES_ALREADY_INSTALLED="true" 105 | fi 106 | 107 | # Update to latest versions of packages 108 | if [ "${UPGRADE_PACKAGES}" = "true" ]; then 109 | yum upgrade -y 110 | fi 111 | 112 | # Create or update a non-root user to match UID/GID. 113 | group_name="${USERNAME}" 114 | if id -u ${USERNAME} > /dev/null 2>&1; then 115 | # User exists, update if needed 116 | if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then 117 | group_name="$(id -gn $USERNAME)" 118 | groupmod --gid $USER_GID ${group_name} 119 | usermod --gid $USER_GID $USERNAME 120 | fi 121 | if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then 122 | usermod --uid $USER_UID $USERNAME 123 | fi 124 | else 125 | # Create user 126 | if [ "${USER_GID}" = "automatic" ]; then 127 | groupadd $USERNAME 128 | else 129 | groupadd --gid $USER_GID $USERNAME 130 | fi 131 | if [ "${USER_UID}" = "automatic" ]; then 132 | useradd -s /bin/bash --gid $USERNAME -m $USERNAME 133 | else 134 | useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME 135 | fi 136 | fi 137 | 138 | # Add add sudo support for non-root user 139 | if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then 140 | echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME 141 | chmod 0440 /etc/sudoers.d/$USERNAME 142 | EXISTING_NON_ROOT_USER="${USERNAME}" 143 | fi 144 | 145 | # ** Shell customization section ** 146 | if [ "${USERNAME}" = "root" ]; then 147 | user_rc_path="/root" 148 | else 149 | user_rc_path="/home/${USERNAME}" 150 | fi 151 | 152 | # .bashrc/.zshrc snippet 153 | rc_snippet="$(cat << 'EOF' 154 | 155 | if [ -z "${USER}" ]; then export USER=$(whoami); fi 156 | if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi 157 | 158 | # Display optional first run image specific notice if configured and terminal is interactive 
159 | if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then 160 | if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then 161 | cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" 162 | elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then 163 | cat "/workspaces/.codespaces/shared/first-run-notice.txt" 164 | fi 165 | mkdir -p $HOME/.config/vscode-dev-containers 166 | # Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it 167 | ((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &) 168 | fi 169 | 170 | # Set the default git editor if not already set 171 | if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then 172 | if [ "${TERM_PROGRAM}" = "vscode" ]; then 173 | if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then 174 | export GIT_EDITOR="code-insiders --wait" 175 | else 176 | export GIT_EDITOR="code --wait" 177 | fi 178 | fi 179 | fi 180 | 181 | EOF 182 | )" 183 | 184 | # code shim, it fallbacks to code-insiders if code is not available 185 | cat << 'EOF' > /usr/local/bin/code 186 | #!/bin/sh 187 | 188 | get_in_path_except_current() { 189 | which -a "$1" | grep -A1 "$0" | grep -v "$0" 190 | } 191 | 192 | code="$(get_in_path_except_current code)" 193 | 194 | if [ -n "$code" ]; then 195 | exec "$code" "$@" 196 | elif [ "$(command -v code-insiders)" ]; then 197 | exec code-insiders "$@" 198 | else 199 | echo "code or code-insiders is not installed" >&2 200 | exit 127 201 | fi 202 | EOF 203 | chmod +x /usr/local/bin/code 204 | 205 | # Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme 206 | codespaces_bash="$(cat \ 207 | <<'EOF' 208 | 209 | # Codespaces bash prompt theme 210 | __bash_prompt() { 
211 | local userpart='`export XIT=$? \ 212 | && [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \ 213 | && [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`' 214 | local gitbranch='`\ 215 | if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \ 216 | export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \ 217 | if [ "${BRANCH}" != "" ]; then \ 218 | echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \ 219 | && if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \ 220 | echo -n " \[\033[1;33m\]✗"; \ 221 | fi \ 222 | && echo -n "\[\033[0;36m\]) "; \ 223 | fi; \ 224 | fi`' 225 | local lightblue='\[\033[1;34m\]' 226 | local removecolor='\[\033[0m\]' 227 | PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ " 228 | unset -f __bash_prompt 229 | } 230 | __bash_prompt 231 | 232 | EOF 233 | )" 234 | 235 | codespaces_zsh="$(cat \ 236 | <<'EOF' 237 | # Codespaces zsh prompt theme 238 | __zsh_prompt() { 239 | local prompt_username 240 | if [ ! 
-z "${GITHUB_USER}" ]; then 241 | prompt_username="@${GITHUB_USER}" 242 | else 243 | prompt_username="%n" 244 | fi 245 | PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow 246 | PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd 247 | PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status 248 | PROMPT+='%{$fg[white]%}$ %{$reset_color%}' 249 | unset -f __zsh_prompt 250 | } 251 | ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}" 252 | ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} " 253 | ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})" 254 | ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})" 255 | __zsh_prompt 256 | 257 | EOF 258 | )" 259 | 260 | # Add RC snippet and custom bash prompt 261 | if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then 262 | echo "${rc_snippet}" >> /etc/bashrc 263 | echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc" 264 | if [ "${USERNAME}" != "root" ]; then 265 | echo "${codespaces_bash}" >> "/root/.bashrc" 266 | fi 267 | chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc" 268 | RC_SNIPPET_ALREADY_ADDED="true" 269 | fi 270 | 271 | # Optionally install and configure zsh and Oh My Zsh! 272 | if [ "${INSTALL_ZSH}" = "true" ]; then 273 | if ! type zsh > /dev/null 2>&1; then 274 | yum install -y zsh 275 | fi 276 | if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then 277 | echo "${rc_snippet}" >> /etc/zshrc 278 | ZSH_ALREADY_INSTALLED="true" 279 | fi 280 | 281 | # Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme. 282 | # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script. 283 | oh_my_install_dir="${user_rc_path}/.oh-my-zsh" 284 | if [ ! 
-d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then 285 | template_path="${oh_my_install_dir}/templates/zshrc.zsh-template" 286 | user_rc_file="${user_rc_path}/.zshrc" 287 | umask g-w,o-w 288 | mkdir -p ${oh_my_install_dir} 289 | git clone --depth=1 \ 290 | -c core.eol=lf \ 291 | -c core.autocrlf=false \ 292 | -c fsck.zeroPaddedFilemode=ignore \ 293 | -c fetch.fsck.zeroPaddedFilemode=ignore \ 294 | -c receive.fsck.zeroPaddedFilemode=ignore \ 295 | "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1 296 | echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file} 297 | sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file} 298 | mkdir -p ${oh_my_install_dir}/custom/themes 299 | echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme" 300 | # Shrink git while still enabling updates 301 | cd "${oh_my_install_dir}" 302 | git repack -a -d -f --depth=1 --window=1 303 | # Copy to non-root user if one is specified 304 | if [ "${USERNAME}" != "root" ]; then 305 | cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root 306 | chown -R ${USERNAME}:${group_name} "${user_rc_path}" 307 | fi 308 | 309 | # set shell 310 | sudo chsh ${USERNAME} -s $(which zsh) 311 | fi 312 | fi 313 | 314 | # Persist image metadata info, script if meta.env found in same directory 315 | meta_info_script="$(cat << 'EOF' 316 | #!/bin/sh 317 | . /usr/local/etc/vscode-dev-containers/meta.env 318 | 319 | # Minimal output 320 | if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then 321 | echo "${VERSION}" 322 | exit 0 323 | elif [ "$1" = "release" ]; then 324 | echo "${GIT_REPOSITORY_RELEASE}" 325 | exit 0 326 | elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then 327 | echo "${CONTENTS_URL}" 328 | exit 0 329 | fi 330 | 331 | #Full output 332 | echo 333 | echo "Development container image information" 334 | echo 335 | if [ ! 
-z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi 336 | if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi 337 | if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi 338 | if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi 339 | if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi 340 | if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi 341 | if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi 342 | echo 343 | EOF 344 | )" 345 | if [ -f "${SCRIPT_DIR}/meta.env" ]; then 346 | mkdir -p /usr/local/etc/vscode-dev-containers/ 347 | cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env 348 | echo "${meta_info_script}" > /usr/local/bin/devcontainer-info 349 | chmod +x /usr/local/bin/devcontainer-info 350 | fi 351 | 352 | # Write marker file 353 | mkdir -p "$(dirname "${MARKER_FILE}")" 354 | echo -e "\ 355 | PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\ 356 | EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\ 357 | RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\ 358 | ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}" 359 | 360 | echo "Done!" -------------------------------------------------------------------------------- /.devcontainer/library-scripts/docker-redhat.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #------------------------------------------------------------------------------------------------------------- 3 | # Copyright (c) Microsoft Corporation. All rights reserved. 4 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
5 | #------------------------------------------------------------------------------------------------------------- 6 | # 7 | # ** This script is community supported ** 8 | # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker.md 9 | # Maintainer: @smankoo 10 | # 11 | # Syntax: ./docker-redhat.sh [enable non-root docker socket access flag] [source socket] [target socket] [non-root user] 12 | 13 | ENABLE_NONROOT_DOCKER=${1:-"true"} 14 | SOURCE_SOCKET=${2:-"/var/run/docker-host.sock"} 15 | TARGET_SOCKET=${3:-"/var/run/docker.sock"} 16 | USERNAME=${4:-"automatic"} 17 | DOCKER_DASH_COMPOSE_VERSION="1" 18 | 19 | set -e 20 | 21 | if [ "$(id -u)" -ne 0 ]; then 22 | echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' 23 | exit 1 24 | fi 25 | 26 | # Figure out correct version of a three part version number is not passed 27 | find_version_from_git_tags() { 28 | local variable_name=$1 29 | local requested_version=${!variable_name} 30 | if [ "${requested_version}" = "none" ]; then return; fi 31 | local repository=$2 32 | local prefix=${3:-"tags/v"} 33 | local separator=${4:-"."} 34 | local last_part_optional=${5:-"false"} 35 | if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then 36 | local escaped_separator=${separator//./\\.} 37 | local last_part 38 | if [ "${last_part_optional}" = "true" ]; then 39 | last_part="(${escaped_separator}[0-9]+)?" 40 | else 41 | last_part="${escaped_separator}[0-9]+" 42 | fi 43 | local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$" 44 | local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." 
| sort -rV)" 45 | if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then 46 | declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)" 47 | else 48 | set +e 49 | declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")" 50 | set -e 51 | fi 52 | fi 53 | if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then 54 | echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2 55 | exit 1 56 | fi 57 | echo "${variable_name}=${!variable_name}" 58 | } 59 | 60 | # Determine the appropriate non-root user 61 | if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then 62 | USERNAME="" 63 | POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") 64 | for CURRENT_USER in ${POSSIBLE_USERS[@]}; do 65 | if id -u ${CURRENT_USER} > /dev/null 2>&1; then 66 | USERNAME=${CURRENT_USER} 67 | break 68 | fi 69 | done 70 | if [ "${USERNAME}" = "" ]; then 71 | USERNAME=root 72 | fi 73 | elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then 74 | USERNAME=root 75 | fi 76 | 77 | # Install Prerequisites 78 | if yum list deltarpm > /dev/null 2>&1; then 79 | yum -y install deltarpm 80 | fi 81 | yum -y install ca-certificates curl gnupg2 dirmngr dnf net-tools dialog git openssh-clients curl less procps 82 | 83 | 84 | # Try to load os-release 85 | . /etc/os-release 2>/dev/null 86 | 87 | # If unable to load OS Name and Verstion from os-release, install lsb_release 88 | if [ $? -ne 0 ] || [ "${NAME}" = "" ] || [ "${VERSION_ID}" = "" ]; then 89 | 90 | yum -y install redhat-lsb-core 91 | 92 | OSNAME=$(lsb_release -is | tr '[:upper:]' '[:lower:]') 93 | RHEL_COMPAT_VER=${VERSION_ID:-`lsb_release -rs | cut -d. 
-f1`} 94 | 95 | else 96 | OSNAME=`echo $NAME | cut -d" " -f1 | tr '[:upper:]' '[:lower:]'` 97 | if [ "${OSNAME}" = "amazon" ]; then 98 | if [ "${VERSION_ID}" = "2" ]; then 99 | RHEL_COMPAT_VER=7 100 | else 101 | echo "Incompatible Operative System. Exiting..." 102 | exit 103 | fi 104 | else 105 | RHEL_COMPAT_VER=${VERSION_ID%%.*} 106 | fi 107 | fi 108 | 109 | #yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-${RHEL_COMPAT_VER}.noarch.rpm 110 | yum install -y yum-utils device-mapper-persistent-data lvm2 111 | #yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo 112 | 113 | yum -y update 114 | # Install moby engine 115 | dnf install -y moby-engine 116 | 117 | # Install Docker Compose 118 | dnf install docker-compose -y 119 | 120 | # If init file already exists, exit 121 | if [ -f "/usr/local/share/docker-init.sh" ]; then 122 | echo "/usr/local/share/docker-init.sh already exists, so exiting." 123 | exit 0 124 | fi 125 | echo "docker-init doesnt exist, adding..." 126 | 127 | # Add user to the docker group 128 | if [ "${ENABLE_NONROOT_DOCKER}" = "true" ]; then 129 | if ! getent group docker > /dev/null 2>&1; then 130 | groupadd docker 131 | fi 132 | 133 | usermod -aG docker ${USERNAME} 134 | fi 135 | 136 | tee /usr/local/share/docker-init.sh > /dev/null \ 137 | << 'EOF' 138 | #!/bin/sh 139 | #------------------------------------------------------------------------------------------------------------- 140 | # Copyright (c) Microsoft Corporation. All rights reserved. 141 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
142 | #------------------------------------------------------------------------------------------------------------- 143 | set -e 144 | dockerd_start="$(cat << 'INNEREOF' 145 | # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly 146 | # ie: docker kill 147 | find /run /var/run -iname 'docker*.pid' -delete || : 148 | find /run /var/run -iname 'container*.pid' -delete || : 149 | ## Dind wrapper script from docker team, adapted to a function 150 | # Maintained: https://github.com/moby/moby/blob/master/hack/dind 151 | export container=docker 152 | if [ -d /sys/kernel/security ] && ! mountpoint -q /sys/kernel/security; then 153 | mount -t securityfs none /sys/kernel/security || { 154 | echo >&2 'Could not mount /sys/kernel/security.' 155 | echo >&2 'AppArmor detection and --privileged mode might break.' 156 | } 157 | fi 158 | # Mount /tmp (conditionally) 159 | if ! mountpoint -q /tmp; then 160 | mount -t tmpfs none /tmp 161 | fi 162 | # cgroup v2: enable nesting 163 | if [ -f /sys/fs/cgroup/cgroup.controllers ]; then 164 | # move the processes from the root group to the /init group, 165 | # otherwise writing subtree_control fails with EBUSY. 166 | # An error during moving non-existent process (i.e., "cat") is ignored. 167 | mkdir -p /sys/fs/cgroup/init 168 | xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || : 169 | # enable controllers 170 | sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \ 171 | > /sys/fs/cgroup/cgroup.subtree_control 172 | fi 173 | ## Dind wrapper over. 174 | # Handle DNS 175 | set +e 176 | cat /etc/resolv.conf | grep -i 'internal.cloudapp.net' 177 | if [ $? -eq 0 ] 178 | then 179 | echo "Setting dockerd Azure DNS." 180 | CUSTOMDNS="--dns 1.1.1.1" 181 | else 182 | echo "Not setting dockerd DNS manually." 
183 | CUSTOMDNS="" 184 | fi 185 | set -e 186 | # Start docker/moby engine 187 | ( dockerd $CUSTOMDNS > /tmp/dockerd.log 2>&1 ) & 188 | INNEREOF 189 | )" 190 | # Start using sudo if not invoked as root 191 | if [ "$(id -u)" -ne 0 ]; then 192 | sudo /bin/sh -c "${dockerd_start}" 193 | else 194 | eval "${dockerd_start}" 195 | fi 196 | set +e 197 | # Execute whatever commands were passed in (if any). This allows us 198 | # to set this script to ENTRYPOINT while still executing the default CMD. 199 | exec "$@" 200 | EOF 201 | 202 | chmod +x /usr/local/share/docker-init.sh 203 | chown ${USERNAME}:root /usr/local/share/docker-init.sh 204 | 205 | echo 'docker-in-docker-fedora script has completed!' -------------------------------------------------------------------------------- /.devcontainer/scripts/postinstall.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Install rust using rustup 4 | curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable --profile complete -y 5 | 6 | source "$HOME/.cargo/env" 7 | 8 | # cargo install sea-orm-cli 9 | 10 | # curl -fsSL https://get.pnpm.io/install.sh | sh - 11 | # curl -s https://raw.githubusercontent.com/k3d-io/k3d/main/install.sh | bash 12 | 13 | echo "export PNPM_HOME=\"/home/vscode/.local/share/pnpm\"" >> "$HOME/.zshrc" 14 | echo "export PATH=\"\$PNPM_HOME:$PATH\"" >> "$HOME/.zshrc" 15 | 16 | sudo dnf module enable nodejs:16 -y 17 | 18 | sudo dnf install -y just nodejs dotnet-runtime-3.1 htop 19 | 20 | #sudo /usr/local/share/docker-init.sh 21 | sudo ln -sf /usr/libexec/docker/docker-proxy /usr/bin/docker-proxy 22 | 23 | sudo usermod -aG docker vscode 24 | sudo usermod -aG mock vscode 25 | 26 | # cat < 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /.idea/codeStyles/codeStyleConfig.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | -------------------------------------------------------------------------------- /.idea/compiler.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | -------------------------------------------------------------------------------- /.idea/discord.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 7 | -------------------------------------------------------------------------------- /.idea/inspectionProfiles/Project_Default.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 18 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /.idea/sqldialects.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "lldb", 9 | "request": "launch", 10 | "name": "Debug executable 'anda'", 11 | "cargo": { 12 | "args": [ 13 | "build", 14 | "--bin=anda", 15 | "--package=anda" 16 | ], 17 | "filter": { 18 | "name": "anda", 19 | "kind": "bin" 20 | } 21 | }, 22 | "args": [], 23 | "cwd": "${workspaceFolder}" 24 | }, 25 | { 26 | "type": "lldb", 27 | "request": "launch", 28 | "name": "Debug unit tests in executable 'anda'", 29 | "cargo": { 30 | "args": [ 31 | "test", 32 | "--no-run", 33 | "--bin=anda", 34 | "--package=anda" 35 | ], 36 | "filter": { 37 | "name": "anda", 38 | "kind": "bin" 39 | } 40 | }, 41 | "args": [], 42 | "cwd": "${workspaceFolder}" 43 | }, 44 | { 45 | "type": "lldb", 46 | "request": "launch", 47 | "name": "Debug executable 'anda-server'", 48 | "cargo": { 49 | "args": [ 50 | "build", 51 | "--bin=anda-server", 52 | "--package=anda-server" 53 | ], 54 | "filter": { 55 | "name": "anda-server", 56 | "kind": "bin" 57 | } 58 | }, 59 | "args": [], 60 | "cwd": "${workspaceFolder}" 61 | }, 62 | { 63 | "type": "lldb", 64 | "request": "launch", 65 | "name": "Debug unit tests in executable 'anda-server'", 66 | "cargo": { 67 | "args": [ 68 | "test", 69 | "--no-run", 70 | "--bin=anda-server", 71 | "--package=anda-server" 72 | ], 73 | "filter": { 74 | "name": "anda-server", 75 | "kind": "bin" 76 | } 77 | }, 78 | "args": [], 79 | "cwd": "${workspaceFolder}" 80 | } 81 | ] 82 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "sqltools.connections": [ 3 | { 4 | "previewLimit": 50, 5 | "server": "localhost", 6 | "port": 5432, 7 | "driver": "PostgreSQL", 8 | "name": "anda", 9 | "database": "anda", 10 | "username": "postgres", 11 | "password": "example" 12 | } 13 | ], 14 | 
"rust-analyzer.checkOnSave.command": "clippy", 15 | "search.useGlobalIgnoreFiles": true, 16 | "files.exclude": { 17 | "**/.flatpak-builder": true 18 | }, 19 | "files.watcherExclude": { 20 | "**/.flatpak-builder/**": true 21 | }, 22 | "dotenv.enableAutocloaking": false, 23 | "rust-analyzer.files.excludeDirs": [".flatpak-builder"] 24 | } 25 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | # Cargo workspace 2 | [package] 3 | name = "anda" 4 | version = "0.4.12" 5 | edition = "2021" 6 | description = "Andaman Build toolchain" 7 | license = "MIT" 8 | repository = "https://github.com/FyraLabs/anda" 9 | readme = "README.md" 10 | keywords = ["build", "toolchain", "rpm", "flatpak", "oci"] 11 | exclude = [ 12 | "anda-build", 13 | "anda-config", 14 | "andax", 15 | ".devcontainer", 16 | ".github", 17 | ".flatpak-builder/*", 18 | ] 19 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 20 | 21 | [dependencies] 22 | clap = { workspace = true } 23 | clap_complete = { workspace = true } 24 | tracing = { workspace = true } 25 | tracing-subscriber = "0.3" 26 | tracing-log = "0.2.0" 27 | color-eyre = { workspace = true } 28 | walkdir = "2.5.0" 29 | tempfile = "3.20.0" 30 | anda-config = { workspace = true } 31 | andax = { path = "./andax", version = "0.4.12" } 32 | flatpak = "0.18.1" 33 | clap-verbosity-flag = "3.0.2" 34 | tokio = { version = "1.45.1", features = [ 35 | "process", 36 | "io-util", 37 | "macros", 38 | "signal", 39 | "rt-multi-thread", 40 | ] } 41 | async-trait = "0.1.87" 42 | nix = { version = "0.30.1", features = ["signal"], default-features = false } 43 | git2 = "0.20.2" 44 | chrono = { workspace = true } 45 | ignore = { workspace = true } 46 | promptly = "0.3.1" 47 | console = "0.15.11" 48 | serde = { workspace = true } 49 | serde_json = { workspace = true } 50 | regex = { workspace = true 
} 51 | itertools = "0.14.0" 52 | lazy_format = "2.0.3" 53 | lazy_static = { workspace = true } 54 | shell-quote = "0.7.2" 55 | 56 | 57 | [lints] 58 | workspace = true 59 | 60 | 61 | [workspace] 62 | members = ["anda-config", "xtask", "andax"] 63 | 64 | [workspace.dependencies] 65 | anda-config = { path = "./anda-config", version = "0.4.12" } 66 | serde = { version = "1.0", default-features = false, features = ["derive"] } 67 | lazy_static = "1.5" 68 | regex = "1.11.1" 69 | serde_json = "1.0" 70 | color-eyre = "0.6.5" 71 | tracing = "0.1" 72 | chrono = { version = "0.4", default-features = false, features = ["now"] } 73 | ignore = "0.4.23" 74 | clap_complete = "4" 75 | clap = { version = "4", features = ["derive", "env"] } 76 | 77 | [workspace.lints.clippy] 78 | cargo = { level = "warn", priority = -1 } 79 | complexity = { level = "warn", priority = -1 } 80 | nursery = { level = "warn", priority = -1 } 81 | pedantic = { level = "warn", priority = -1 } 82 | suspicious = { level = "warn", priority = -1 } 83 | 84 | disallowed-macros = "deny" 85 | excessive-nesting = "warn" 86 | 87 | arithmetic_side_effects = "warn" 88 | assertions_on_result_states = "warn" 89 | clone_on_ref_ptr = "warn" 90 | create_dir = "warn" 91 | empty_enum_variants_with_brackets = "warn" 92 | empty_structs_with_brackets = "warn" 93 | field_scoped_visibility_modifiers = "deny" 94 | format_push_string = "warn" 95 | get_unwrap = "warn" 96 | if_then_some_else_none = "warn" 97 | impl_trait_in_params = "warn" 98 | indexing_slicing = "warn" 99 | infinite_loop = "deny" 100 | let_underscore_must_use = "deny" 101 | let_underscore_untyped = "warn" 102 | multiple_inherent_impl = "warn" 103 | needless_raw_strings = "warn" 104 | rc_buffer = "warn" 105 | rc_mutex = "deny" 106 | redundant_type_annotations = "warn" 107 | renamed_function_params = "warn" 108 | rest_pat_in_fully_bound_structs = "warn" 109 | semicolon_outside_block = "warn" 110 | str_to_string = "warn" 111 | string_lit_chars_any = "warn" 112 | 
string_to_string = "warn" 113 | tests_outside_test_module = "warn" 114 | todo = "warn" 115 | try_err = "warn" 116 | undocumented_unsafe_blocks = "deny" 117 | unimplemented = "warn" 118 | unneeded_field_pattern = "warn" 119 | unwrap_in_result = "warn" 120 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022-2024 Fyra Labs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Andaman Project 2 | 3 | # Andaman 4 | 5 | Andaman is a package build toolchain written in Rust. It is designed to simplify building various types of packages. 
6 | 7 | It is inspired by `umpkg` and aims to be a more modern and flexible alternative. It is also designed for monorepos in mind, so you can define multiple projects with multiple package formats in a single repository. 8 | 9 | You can look into [the Fyra Wiki page](https://wiki.fyralabs.com/Andaman) and [the Terra Devdocs](https://developer.fyralabs.com/terra/autoupdate) for more technical details. 10 | 11 | # Installation 12 | On systems with [Terra](https://terra.fyralabs.com), you can install it with `sudo dnf5 in anda` (or dnf). 13 | 14 | On systems with Cargo, you can install it with `cargo install anda`. 15 | 16 | Alternatively you can also manually download the sources from GitHub: 17 | ```sh 18 | git clone https://github.com/FyraLabs/anda 19 | cd anda 20 | git checkout 0.1.18 21 | cargo install --path . 22 | ``` 23 | 24 | ## How it works 25 | Andaman is simply a meta-build system that calls upon other build systems to build and distribute packages. It works by reading a project manifest in `anda.hcl` and then calling the appropriate build system to build the package. 26 | 27 | ## Features 28 | - Building RPMs 29 | - Building Docker/Podman/OCI images 30 | - Building Flatpak packages 31 | 32 | ## How we use it at Fyra Labs 33 | 34 | We use Andaman to quickly build packages for our distributions, [Ultramarine Linux] and [TauOS]. 35 | Another notable usage is the [Terra] repository, which provides more than 1000 packages for Fedora-based distributions. 36 | The artifacts generated by Andaman are then sent to [Subatomic], our compose and repository manager. 37 | 38 | ## History 39 | 40 | The Andaman project has gone through many forms. We initially wrote Andaman as a custom RPM frontend as an alternative to DNF that is similar to that of an AUR helper. However, there were issues with rust and RPM support and we decided to move to a more general CI server based on BuildKit. 
This however turned into a massive feature creep of projects and so we decided to simply write a build system that can build artifacts for various package formats. 41 | 42 | [Subatomic]: https://github.com/FyraLabs/subatomic 43 | [Terra]: https://terra.fyralabs.com 44 | [Ultramarine Linux]: https://ultramarine-linux.org 45 | [TauOS]: https://github.com/tau-OS 46 | -------------------------------------------------------------------------------- /anda-config/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "anda-config" 3 | description = "Configuration module for Andaman" 4 | version = "0.4.12" 5 | edition = "2021" 6 | license = "MIT" 7 | repository = "https://github.com/FyraLabs/anda" 8 | readme = "../README.md" 9 | 10 | [lib] 11 | path = "./lib.rs" 12 | 13 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 14 | 15 | [dependencies] 16 | hcl-rs = { version = "0.15.0" } 17 | dotenv = "0.15.0" 18 | serde = { workspace = true } 19 | tracing = "0.1" 20 | ignore = { workspace = true } 21 | once_cell = { version = "1.20", default-features = false, features = ["std"] } 22 | parking_lot = "0.12.4" 23 | serde_with = "3.12.0" 24 | 25 | [dev-dependencies] 26 | env_logger = ">= 0.10, < 0.12" 27 | 28 | 29 | [lints] 30 | workspace = true 31 | -------------------------------------------------------------------------------- /anda-config/config.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use std::collections::{BTreeMap, HashMap}; 3 | use std::fs; 4 | use std::io::ErrorKind; 5 | use std::path::PathBuf; 6 | use tracing::{debug, instrument, trace}; 7 | 8 | use crate::error::ProjectError; 9 | 10 | #[derive(Deserialize, Serialize, Debug, Clone)] 11 | #[serde_with::skip_serializing_none] 12 | pub struct ProjectData { 13 | pub manifest: HashMap, 14 | } 15 | 16 | #[derive(Deserialize, 
Serialize, Debug, Clone)] 17 | #[serde_with::skip_serializing_none] 18 | pub struct Manifest { 19 | pub project: BTreeMap, 20 | #[serde(default)] 21 | pub config: Config, 22 | } 23 | 24 | #[derive(Deserialize, Serialize, Debug, Clone, Default)] 25 | #[serde_with::skip_serializing_none] 26 | pub struct Config { 27 | pub mock_config: Option, 28 | pub strip_prefix: Option, 29 | pub strip_suffix: Option, 30 | pub project_regex: Option, 31 | } 32 | 33 | impl Manifest { 34 | #[must_use] 35 | pub fn find_key_for_value(&self, value: &Project) -> Option<&String> { 36 | self.project.iter().find_map(|(key, val)| (val == value).then_some(key)) 37 | } 38 | 39 | #[must_use] 40 | pub fn get_project(&self, key: &str) -> Option<&Project> { 41 | self.project.get(key).map_or_else( 42 | || { 43 | self.project.iter().find_map(|(_k, v)| { 44 | let alias = v.alias.as_ref()?; 45 | alias.contains(&key.to_owned()).then_some(v) 46 | }) 47 | }, 48 | Some, 49 | ) 50 | } 51 | } 52 | 53 | #[derive(Deserialize, PartialEq, Eq, Serialize, Debug, Clone, Default)] 54 | #[serde_with::skip_serializing_none] 55 | pub struct Project { 56 | pub rpm: Option, 57 | pub podman: Option, 58 | pub docker: Option, 59 | pub flatpak: Option, 60 | pub pre_script: Option, 61 | pub post_script: Option, 62 | pub env: Option>, 63 | pub alias: Option>, 64 | pub scripts: Option>, 65 | #[serde(default)] 66 | #[serde(deserialize_with = "btree_wild_string")] 67 | pub labels: BTreeMap, 68 | pub update: Option, 69 | pub arches: Option>, 70 | } 71 | 72 | /// Deserialize the value of the BTreeMap into a String even if they are some other types. 73 | /// 74 | /// # Errors 75 | /// This function itself does not raise any errors unless the given value has the wrong type. 76 | /// However, it inherits errors from `serde::Deserializer`. 
77 | fn btree_wild_string<'de, D>(deserializer: D) -> Result, D::Error> 78 | where 79 | D: serde::Deserializer<'de>, 80 | { 81 | struct WildString; 82 | 83 | impl serde::de::Visitor<'_> for WildString { 84 | type Value = String; 85 | 86 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 87 | formatter.write_str("string, integer, bool or unit") 88 | } 89 | 90 | fn visit_string(self, v: String) -> Result 91 | where 92 | E: serde::de::Error, 93 | { 94 | Ok(v) 95 | } 96 | 97 | fn visit_str(self, v: &str) -> Result 98 | where 99 | E: serde::de::Error, 100 | { 101 | Ok(v.to_owned()) 102 | } 103 | 104 | fn visit_i64(self, v: i64) -> Result 105 | where 106 | E: serde::de::Error, 107 | { 108 | Ok(format!("{v}")) 109 | } 110 | 111 | fn visit_i128(self, v: i128) -> Result 112 | where 113 | E: serde::de::Error, 114 | { 115 | Ok(format!("{v}")) 116 | } 117 | 118 | fn visit_u64(self, v: u64) -> Result 119 | where 120 | E: serde::de::Error, 121 | { 122 | Ok(format!("{v}")) 123 | } 124 | 125 | fn visit_u128(self, v: u128) -> Result 126 | where 127 | E: serde::de::Error, 128 | { 129 | Ok(format!("{v}")) 130 | } 131 | 132 | fn visit_unit(self) -> Result 133 | where 134 | E: serde::de::Error, 135 | { 136 | Ok(String::new()) 137 | } 138 | 139 | fn visit_bool(self, v: bool) -> Result 140 | where 141 | E: serde::de::Error, 142 | { 143 | Ok(format!("{v}")) 144 | } 145 | } 146 | 147 | struct RealWildString(String); 148 | 149 | impl<'de> Deserialize<'de> for RealWildString { 150 | fn deserialize(deserializer: D) -> Result 151 | where 152 | D: serde::Deserializer<'de>, 153 | { 154 | deserializer.deserialize_any(WildString).map(Self) 155 | } 156 | } 157 | 158 | struct BTreeWildStringVisitor; 159 | 160 | impl<'de> serde::de::Visitor<'de> for BTreeWildStringVisitor { 161 | type Value = BTreeMap; 162 | 163 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 164 | formatter.write_str("map (key: string, value: wild string)") 165 | } 166 | 
/// Split a comma-separated `key=value` list into pairs.
///
/// All-whitespace segments are skipped. Each remaining segment yields
/// `Some((key, value))` when it contains an `=` (split on the first one),
/// and `None` otherwise, so callers decide how to treat malformed entries.
pub fn parse_kv(input: &str) -> impl Iterator<Item = Option<(String, String)>> + '_ {
    input.split(',').filter(|seg| !seg.trim().is_empty()).map(|seg| {
        let (key, value) = seg.split_once('=')?;
        Some((key.to_owned(), value.to_owned()))
    })
}

/// Parse each filter string (`k=v,k2=v2`) into its own map.
///
/// Returns `None` as soon as any entry in any filter lacks an `=`.
pub fn parse_filters(filters: &[String]) -> Option<Vec<BTreeMap<String, String>>> {
    let mut maps = Vec::with_capacity(filters.len());
    for filter in filters {
        maps.push(parse_kv(filter).collect::<Option<BTreeMap<_, _>>>()?);
    }
    Some(maps)
}

/// Collect `key=value` entries from several strings into one map.
///
/// Returns `None` if any entry lacks an `=`; a later duplicate of a key
/// overwrites the earlier value.
pub fn parse_labels<'a, I: Iterator<Item = &'a str>>(labels: I) -> Option<BTreeMap<String, String>> {
    let mut out = BTreeMap::new();
    for label in labels {
        for pair in parse_kv(label) {
            let (key, value) = pair?;
            out.insert(key, value);
        }
    }
    Some(out)
}
#[derive(Deserialize, PartialEq, Eq, Serialize, Debug, Clone)] 234 | #[serde_with::skip_serializing_none] 235 | pub struct Flatpak { 236 | pub manifest: PathBuf, 237 | pub pre_script: Option, 238 | pub post_script: Option, 239 | } 240 | 241 | /// Converts a [`Manifest`] to `String` (.hcl). 242 | /// 243 | /// # Errors 244 | /// - [`hcl::Error`] : Cannot convert to HCL. 245 | pub fn to_string(config: &Manifest) -> Result { 246 | let config = hcl::to_string(&config)?; 247 | Ok(config) 248 | } 249 | 250 | #[instrument] 251 | pub fn load_from_file(path: &PathBuf) -> Result { 252 | debug!("Reading hcl file: {path:?}"); 253 | let file = fs::read_to_string(path).map_err(|e| match e.kind() { 254 | ErrorKind::NotFound => ProjectError::NoManifest, 255 | _ => ProjectError::InvalidManifest(e.to_string()), 256 | })?; 257 | 258 | debug!("Loading config from {path:?}"); 259 | let mut config = load_from_string(&file)?; 260 | 261 | // recursively merge configs 262 | 263 | // get parent path of config file 264 | let parent = if path.parent().unwrap().as_os_str().is_empty() { 265 | PathBuf::from(".") 266 | } else { 267 | path.parent().unwrap().to_path_buf() 268 | }; 269 | 270 | let walk = ignore::Walk::new(parent); 271 | 272 | let path = path.canonicalize().expect("Invalid path"); 273 | 274 | for entry in walk { 275 | trace!("Found {entry:?}"); 276 | let entry = entry.unwrap(); 277 | 278 | // assume entry.path() is canonicalised 279 | if entry.path() == path { 280 | continue; 281 | } 282 | 283 | if entry.file_type().unwrap().is_file() && entry.path().file_name().unwrap() == "anda.hcl" { 284 | debug!("Loading: {entry:?}"); 285 | let readfile = fs::read_to_string(entry.path()) 286 | .map_err(|e| ProjectError::InvalidManifest(e.to_string()))?; 287 | 288 | let en = entry.path().parent().unwrap(); 289 | 290 | let nested_config = prefix_config( 291 | load_from_string(&readfile)?, 292 | &en.strip_prefix("./").unwrap_or(en).display().to_string(), 293 | ); 294 | // merge the btreemap 295 | 
config.project.extend(nested_config.project); 296 | } 297 | } 298 | 299 | trace!("Loaded config: {config:#?}"); 300 | generate_alias(&mut config); 301 | 302 | check_config(config) 303 | } 304 | 305 | #[must_use] 306 | pub fn prefix_config(mut config: Manifest, prefix: &str) -> Manifest { 307 | let mut new_config = config.clone(); 308 | 309 | for (project_name, project) in &mut config.project { 310 | // set project name to prefix 311 | let new_project_name = format!("{prefix}/{project_name}"); 312 | // modify project data 313 | let mut new_project = std::mem::take(project); 314 | 315 | macro_rules! default { 316 | ($o:expr, $attr:ident, $d:expr) => { 317 | if let Some($attr) = &mut $o.$attr { 318 | if $attr.as_os_str().is_empty() { 319 | *$attr = $d.into(); 320 | } 321 | *$attr = PathBuf::from(format!("{prefix}/{}", $attr.display())); 322 | } else { 323 | let p = PathBuf::from(format!("{prefix}/{}", $d)); 324 | if p.exists() { 325 | $o.$attr = Some(p); 326 | } 327 | } 328 | }; 329 | } // default!(obj, attr, default_value); 330 | if let Some(rpm) = &mut new_project.rpm { 331 | rpm.spec = PathBuf::from(format!("{prefix}/{}", rpm.spec.display())); 332 | default!(rpm, pre_script, "rpm_pre.rhai"); 333 | default!(rpm, post_script, "rpm_post.rhai"); 334 | default!(rpm, sources, "."); 335 | } 336 | default!(new_project, update, "update.rhai"); 337 | default!(new_project, pre_script, "pre.rhai"); 338 | default!(new_project, post_script, "post.rhai"); 339 | 340 | if let Some(scripts) = &mut new_project.scripts { 341 | for scr in scripts { 342 | *scr = PathBuf::from(format!("{prefix}/{}", scr.display())); 343 | } 344 | } 345 | 346 | new_config.project.remove(project_name); 347 | new_config.project.insert(new_project_name, new_project); 348 | } 349 | generate_alias(&mut new_config); 350 | new_config 351 | } 352 | 353 | pub fn generate_alias(config: &mut Manifest) { 354 | fn append_vec(vec: &mut Option>, value: String) { 355 | if let Some(vec) = vec { 356 | if 
vec.contains(&value) { 357 | return; 358 | } 359 | 360 | vec.push(value); 361 | } else { 362 | *vec = Some(vec![value]); 363 | } 364 | } 365 | 366 | for (name, project) in &mut config.project { 367 | #[allow(clippy::assigning_clones)] 368 | if config.config.strip_prefix.is_some() || config.config.strip_suffix.is_some() { 369 | let mut new_name = name.clone(); 370 | if let Some(strip_prefix) = &config.config.strip_prefix { 371 | new_name = new_name.strip_prefix(strip_prefix).unwrap_or(&new_name).to_owned(); 372 | } 373 | if let Some(strip_suffix) = &config.config.strip_suffix { 374 | new_name = new_name.strip_suffix(strip_suffix).unwrap_or(&new_name).to_owned(); 375 | } 376 | 377 | if name != &new_name { 378 | append_vec(&mut project.alias, new_name); 379 | } 380 | } 381 | } 382 | } 383 | 384 | #[instrument] 385 | pub fn load_from_string(config: &str) -> Result { 386 | trace!(config, "Dump config"); 387 | let mut config: Manifest = hcl::eval::from_str(config, &crate::context::hcl_context())?; 388 | 389 | generate_alias(&mut config); 390 | 391 | check_config(config) 392 | } 393 | 394 | /// Lints and checks the config for errors. 395 | /// 396 | /// # Errors 397 | /// - nothing. This function literally does nothing. For now. 
398 | pub const fn check_config(config: Manifest) -> Result { 399 | // do nothing for now 400 | Ok(config) 401 | } 402 | 403 | #[allow(clippy::indexing_slicing)] 404 | #[cfg(test)] 405 | mod test_parser { 406 | use super::*; 407 | 408 | #[test] 409 | fn test_parse() { 410 | // set env var 411 | std::env::set_var("RUST_LOG", "trace"); 412 | env_logger::init(); 413 | let config = r#" 414 | hello = "world" 415 | project "anda" { 416 | pre_script { 417 | commands = [ 418 | "echo '${env.RUST_LOG}'", 419 | ] 420 | } 421 | labels { 422 | nightly = 1 423 | } 424 | } 425 | "#; 426 | 427 | let body = hcl::parse(config).unwrap(); 428 | 429 | print!("{body:#?}"); 430 | 431 | let config = load_from_string(config).unwrap(); 432 | 433 | println!("{config:#?}"); 434 | 435 | assert_eq!(config.project["anda"].labels.get("nightly"), Some(&"1".to_owned())); 436 | } 437 | 438 | #[test] 439 | fn test_map() { 440 | let m = [("foo".to_owned(), "bar".to_owned())].into(); 441 | 442 | assert_eq!(parse_labels(std::iter::once("foo=bar")), Some(m)); 443 | 444 | let multieq = [("foo".to_owned(), "bar=baz".to_owned())].into(); 445 | 446 | assert_eq!(parse_labels(std::iter::once("foo=bar=baz")), Some(multieq)); 447 | 448 | let multi = 449 | [("foo".to_owned(), "bar".to_owned()), ("baz".to_owned(), "qux".to_owned())].into(); 450 | 451 | assert_eq!(parse_labels(std::iter::once("foo=bar,baz=qux")), Some(multi)); 452 | } 453 | } 454 | -------------------------------------------------------------------------------- /anda-config/context.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use hcl::eval::{Context, FuncArgs, FuncDef}; 4 | use hcl::Value; 5 | 6 | // once_cell for global context 7 | use once_cell::sync::OnceCell; 8 | use parking_lot::Mutex; 9 | 10 | // todo: let this be mutable 11 | static GLOBAL_CONTEXT: OnceCell> = OnceCell::new(); 12 | 13 | /// Generate Context for HCL evaluation 14 | /// 15 | /// # Panics 16 | 
/// - cannot lock mutex (poison?) 17 | /// - cannot convert FuncArgs to str 18 | /// - cannot find FuncArgs as key in environment variables 19 | pub fn hcl_context() -> Context<'static> { 20 | let env_func = |args: FuncArgs| { 21 | let env = std::env::vars().collect::>(); 22 | let key = args.first().and_then(|v| v.as_str()).ok_or("Invalid argument")?; 23 | let value = env.get(key).ok_or("Key not found in environment variables")?; 24 | Ok(Value::String(value.to_string())) 25 | }; 26 | let c = GLOBAL_CONTEXT.get_or_init(|| { 27 | dotenv::dotenv().ok(); 28 | let mut ctx = Context::new(); 29 | let env_func = FuncDef::builder().param(hcl::eval::ParamType::String).build(env_func); 30 | ctx.declare_func("env", env_func); 31 | 32 | let env = std::env::vars().collect::>(); 33 | let mut map = hcl::Map::new(); 34 | 35 | map.extend(env.into_iter().map(|(k, v)| (k, Value::String(v)))); 36 | 37 | ctx.declare_var("env", Value::Object(map)); 38 | 39 | Mutex::new(ctx) 40 | }); 41 | c.lock().clone() 42 | } 43 | -------------------------------------------------------------------------------- /anda-config/error.rs: -------------------------------------------------------------------------------- 1 | //! 
Andaman client error handler 2 | 3 | // derive macro that implements the From trait 4 | 5 | #[derive(Debug)] 6 | pub enum ProjectError { 7 | NoManifest, 8 | InvalidManifest(String), 9 | Multiple(Vec), 10 | HclError(hcl::error::Error), 11 | Other(String), 12 | } 13 | 14 | impl From for ProjectError { 15 | fn from(e: hcl::error::Error) -> Self { 16 | Self::HclError(e) 17 | } 18 | } 19 | 20 | impl std::fmt::Display for ProjectError { 21 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 22 | match self { 23 | Self::NoManifest => write!(f, "No manifest found"), 24 | Self::InvalidManifest(e) => write!(f, "Invalid manifest: {e}"), 25 | Self::Other(msg) => write!(f, "{msg}"), 26 | Self::HclError(e) => write!( 27 | f, 28 | "Error parsing HCL: {e}{}", 29 | e.location().map(|l| format!(" at {}:{}", l.line, l.col)).unwrap_or_default() 30 | ), 31 | Self::Multiple(errors) => { 32 | write!(f, "Multiple errors:")?; 33 | for error in errors { 34 | write!(f, "\n - {error}")?; 35 | } 36 | Ok(()) 37 | } 38 | } 39 | } 40 | } 41 | 42 | impl std::error::Error for ProjectError {} 43 | -------------------------------------------------------------------------------- /anda-config/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::doc_markdown)] 2 | #![allow(clippy::module_name_repetitions)] 3 | pub mod config; 4 | pub mod context; 5 | pub mod error; 6 | pub mod template; 7 | pub use config::*; 8 | -------------------------------------------------------------------------------- /anda-config/template.rs: -------------------------------------------------------------------------------- 1 | use crate::context::hcl_context; 2 | use hcl::eval::Evaluate; 3 | use hcl::template::Template; 4 | use std::str::FromStr; 5 | 6 | /// Parse an HCL template. 
7 | /// 8 | /// # Errors 9 | /// - cannot parse template 10 | /// - cannot evaluate template 11 | pub fn parse_template(template: &str) -> Result { 12 | let template = Template::from_str(template).map_err(|e| e.to_string())?; 13 | let ctx = hcl_context(); 14 | let value = template.evaluate(&ctx).map_err(|e| e.to_string())?; 15 | Ok(value) 16 | } 17 | 18 | #[allow(clippy::missing_panics_doc)] 19 | #[cfg(test)] 20 | mod tests { 21 | #[test] 22 | fn test_templ() { 23 | let template = "hello ${env.USER}"; 24 | let result = crate::template::parse_template(template).unwrap(); 25 | println!("{result}"); 26 | // get current username 27 | let username = std::env::var("USER").unwrap(); 28 | assert_eq!(result, format!("hello {username}")); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /anda.hcl: -------------------------------------------------------------------------------- 1 | 2 | // Build macros are built using AndaX, a Rhai runtime for Andaman. 
3 | 4 | config { 5 | strip_prefix = "tests/" 6 | } 7 | 8 | project "test" { 9 | // pre_script = "tests/hello.sh" 10 | rpm { 11 | spec = "tests/umpkg.spec" 12 | // post_script = "tests/hello.sh" 13 | 14 | sources = "tests/" 15 | } 16 | flatpak { 17 | manifest = "tests/org.flatpak.Hello.yml" 18 | } 19 | } 20 | 21 | project "anda" { 22 | rpm { 23 | spec = "rust-anda.spec" 24 | } 25 | } 26 | 27 | project "anda-git" { 28 | rpm { 29 | spec = "rust-anda-git.spec" 30 | } 31 | } -------------------------------------------------------------------------------- /andax/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "andax" 3 | version = "0.4.12" 4 | edition = "2021" 5 | license = "MIT" 6 | description = "Andaman scripting runtime" 7 | repository = "https://github.com/FyraLabs/anda" 8 | readme = "../README.md" 9 | 10 | [lib] 11 | path = "./lib.rs" 12 | 13 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 14 | 15 | [dependencies] 16 | serde_json = { workspace = true } 17 | regex = { workspace = true } 18 | lazy_static = { workspace = true } 19 | rhai = { version = "1.22.0", features = ["serde", "internals"] } 20 | ureq = { version = "3", features = ["json"] } 21 | tracing = { workspace = true } 22 | color-eyre = { workspace = true } 23 | smartstring = "1.0.1" 24 | tempfile = "3.20.0" 25 | anda-config = { workspace = true } 26 | rhai-fs = "0.1.2" 27 | rhai-url = "0.0.5" 28 | hcl-rs = "0.15.0" 29 | directories = "6.0.0" 30 | chrono = { workspace = true } 31 | 32 | [lints] 33 | workspace = true 34 | -------------------------------------------------------------------------------- /andax/error.rs: -------------------------------------------------------------------------------- 1 | use rhai::EvalAltResult; 2 | use smartstring::{LazyCompact, SmartString}; 3 | use std::fmt::Display; 4 | use std::rc::Rc; 5 | use tracing::instrument; 6 | use tracing::trace; 7 | 8 | type 
SStr = SmartString; 9 | 10 | #[derive(Clone, Debug)] 11 | pub enum AndaxError { 12 | // rhai_fn, fn_src, E 13 | RustReport(SStr, SStr, Rc), 14 | RustError(SStr, SStr, Rc), 15 | Exit(bool), 16 | } 17 | 18 | #[derive(Debug)] 19 | pub enum TbErr { 20 | Report(Rc), 21 | Arb(Rc), 22 | Rhai(EvalAltResult), 23 | } 24 | 25 | impl Display for TbErr { 26 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 27 | match self { 28 | Self::Report(o) => f.write_fmt(format_args!("From: {o:#}")), 29 | Self::Arb(o) => f.write_fmt(format_args!("Caused by: {o}")), 30 | Self::Rhai(o) => f.write_fmt(format_args!("Rhai: {o}")), 31 | } 32 | } 33 | } 34 | 35 | pub trait AndaxRes { 36 | /// Error handling function for Rhai functions written in Rust. 37 | /// 38 | /// # Errors 39 | /// This turns any `Result` type into the Rhai result type. 40 | fn ehdl(self, ctx: &rhai::NativeCallContext) -> Result>; 41 | } 42 | 43 | impl AndaxRes for Result 44 | where 45 | E: std::error::Error + 'static, 46 | { 47 | #[instrument(skip(self, ctx))] 48 | fn ehdl(self, ctx: &rhai::NativeCallContext<'_>) -> Result> 49 | where 50 | Self: Sized, 51 | { 52 | self.map_err(|err| { 53 | trace!(func = ctx.fn_name(), source = ctx.source(), "Oops!"); 54 | Box::new(EvalAltResult::ErrorRuntime( 55 | rhai::Dynamic::from(AndaxError::RustError( 56 | ctx.fn_name().into(), 57 | ctx.source().unwrap_or("").into(), 58 | std::rc::Rc::from(err), 59 | )), 60 | ctx.position(), 61 | )) 62 | }) 63 | } 64 | } 65 | 66 | pub const EARTH: &str = r" 67 | . . * . . . . * . . . . . . * . . . . 68 | * . . * . . . * . . * . . . * . . . 69 | . * . . . . . * . . . .-o--. . * . 70 | . . . . . . . * * . :O o O : . . 71 | ____ * . . . . . . . . : O. Oo; . . 72 | `. ````.---...___ . * . . . * . `-.O-' . * . . 73 | \_ ; \`.-'```--..__. . . * . . . . . 74 | ,'_,-' _,-' ``--._ . * . . . . * . . . 75 | -' ,-' `-._ * . . * . . . 76 | ,-' _,-._ ,`-. . . . . . * . . 77 | '--. _ _.._`-. `-._ | `_ . * . . . . . . 78 | ; ,' ' _ `._`._ `. 
`,-'' `-. . . . . . . 79 | ,-' \ `;. `. ;` `._ _/\___ `. . * . . * 80 | \ \ , `-' ) `':_ ; \ `. . * . . . * 81 | \ _; ` ,; __; `. . . . . . 82 | '-.; __, ` _,-'-.--''' \-: `. * . . . * . 83 | )`-..---' `---'' \ `. . . . . . . . 84 | .' `. `. ` . * . . . 85 | / `. `. ` * . . 86 | / `. `. ' . . * 87 | / `. `. _'. . . . . 88 | | `._\-' ' . . . 89 | | `.__, \ * . . *. . 90 | | \ \. . . 91 | | \ \ . * jrei *"; 92 | -------------------------------------------------------------------------------- /andax/fns/build.rs: -------------------------------------------------------------------------------- 1 | //! This file contains functions for andax 2 | //! which implements procedures from building RPMs 3 | //! see `anda::rpm_spec.rs` 4 | use rhai::plugin::{ 5 | export_module, mem, Dynamic, FuncRegistration, ImmutableString, Module, NativeCallContext, 6 | PluginFunc, RhaiResult, TypeId, 7 | }; 8 | 9 | // 正にこうです。 :3 10 | macro_rules! rpmargs { 11 | ($a:expr, $spec:expr, $sources:expr) => { 12 | [ 13 | "mock", 14 | $a, 15 | "--spec", 16 | $spec, 17 | "--sources", 18 | $sources.unwrap_or(""), 19 | "--resultdir", 20 | &format!( 21 | "{:?}", 22 | tempfile::Builder::new() 23 | .prefix("anda-srpm") 24 | .tempdir() 25 | .expect("Cannot make dir?") 26 | .path() 27 | ), 28 | "--enable-network", 29 | "--verbose", 30 | ] 31 | .into_iter() 32 | .map(|s| s.to_owned()) 33 | .collect() 34 | }; 35 | } 36 | 37 | #[export_module] 38 | pub mod ar { 39 | pub fn cmd_srpm(spec: &str, sources: Option<&str>) -> Vec { 40 | rpmargs!("--buildsrpm", spec, sources) 41 | } 42 | pub fn cmd_rpm(spec: &str, sources: Option<&str>) -> Vec { 43 | rpmargs!("--rebuild", spec, sources) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /andax/fns/cfg.rs: -------------------------------------------------------------------------------- 1 | use crate::error::AndaxRes; 2 | use anda_config::load_from_file; 3 | use rhai::{ 4 | plugin::{ 5 | export_module, mem, Dynamic, 
ImmutableString, Module, NativeCallContext, PluginFunc, 6 | RhaiResult, TypeId, 7 | }, 8 | EvalAltResult, FuncRegistration, 9 | }; 10 | use std::path::PathBuf; 11 | 12 | type Res = Result>; 13 | 14 | #[export_module] 15 | pub mod ar { 16 | #[rhai_fn(return_raw)] 17 | pub fn load_file(ctx: NativeCallContext, path: &str) -> Res { 18 | let m = load_from_file(&PathBuf::from(path)).ehdl(&ctx)?; 19 | ctx.engine().parse_json(serde_json::to_string(&m).ehdl(&ctx)?, true) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /andax/fns/io.rs: -------------------------------------------------------------------------------- 1 | use crate::error::AndaxRes; 2 | use rhai::{ 3 | plugin::{ 4 | export_module, mem, Dynamic, FnNamespace, ImmutableString, Module, NativeCallContext, 5 | PluginFunc, RhaiResult, TypeId, 6 | }, 7 | EvalAltResult, FuncRegistration, 8 | }; 9 | use std::io::Write; 10 | use std::process::Command; 11 | use tracing::{debug, instrument}; 12 | 13 | macro_rules! _sh_out { 14 | ($ctx:expr, $o:expr) => { 15 | Ok(( 16 | _sh_out!($o)?, 17 | String::from_utf8($o.stdout).ehdl($ctx)?, 18 | String::from_utf8($o.stderr).ehdl($ctx)?, 19 | )) 20 | }; 21 | ($o:expr) => {{ 22 | $o.status.code().ok_or::>("No exit code".into()) 23 | }}; 24 | } 25 | macro_rules! _cmd { 26 | ($cmd:expr) => {{ 27 | let mut x; 28 | if cfg!(target_os = "windows") { 29 | x = Command::new("cmd"); 30 | x.args(["/C", $cmd]); 31 | } else { 32 | x = Command::new("sh"); 33 | x.args(["-c", $cmd]); 34 | } 35 | x 36 | }}; 37 | } 38 | 39 | macro_rules! 
_stream_cmd { 40 | ($cmd:expr) => {{ 41 | _cmd!($cmd).stdout(Stdio::inherit()).stderr(Stdio::inherit()) 42 | }}; 43 | } 44 | 45 | type T = Result<(i32, String, String), Box>; 46 | 47 | /// for andax, shell(): 48 | /// ``` 49 | /// sh("echo hai"); 50 | /// sh(["echo", "hai"]); 51 | /// sh(["rm", "-rf", "/path/with/some space"]); 52 | /// sh("ls -al", "/current/working/directory"); 53 | /// sh(["grep", "andaman", "file"], "/working/dir"); 54 | /// ``` 55 | /// Returns (rc, stdout, stderr) 56 | /// We will let rhai handle all the nasty things. 57 | #[export_module] 58 | pub mod ar { 59 | use core::str::FromStr; 60 | use std::process::Stdio; 61 | 62 | macro_rules! die { 63 | ($id:literal, $expect:expr, $found:expr) => {{ 64 | let mut e = rhai::Map::new(); 65 | let mut inner = std::collections::BTreeMap::new(); 66 | e.insert("outcome".into(), rhai::Dynamic::from_str("fatal").unwrap()); 67 | inner.insert("kind".into(), rhai::Dynamic::from_str($id).unwrap()); 68 | inner.insert("expect".into(), rhai::Dynamic::from_str($expect).unwrap()); 69 | inner.insert("found".into(), rhai::Dynamic::from_str($found).unwrap()); 70 | e.insert("ctx".into(), rhai::Dynamic::from_map(inner)); 71 | e 72 | }}; 73 | } 74 | 75 | /// get the return code from the return value of `sh()` 76 | #[rhai_fn(global)] 77 | pub fn sh_rc(o: (i32, String, String)) -> i32 { 78 | o.0 79 | } 80 | /// get stdout from the return value of `sh()` 81 | #[rhai_fn(global)] 82 | pub fn sh_stdout(o: (i32, String, String)) -> String { 83 | o.1 84 | } 85 | /// get stderr from the return value of `sh()` 86 | #[rhai_fn(global)] 87 | pub fn sh_stderr(o: (i32, String, String)) -> String { 88 | o.2 89 | } 90 | 91 | fn parse_io_opt(opt: Option<&mut rhai::Dynamic>) -> Result, rhai::Map> { 92 | let Some(s) = opt else { return Ok(Stdio::inherit()) }; 93 | let s = match std::mem::take(s).into_string() { 94 | Ok(s) => s, 95 | Err(e) => return Err(die!("bad_stdio_type", r#""inherit" | "null" | "piped""#, e)), 96 | }; 97 | Ok(match &*s 
{ 98 | "inherit" => Stdio::inherit(), 99 | "null" => Stdio::null(), 100 | "piped" => Stdio::piped(), 101 | _ => return Err(die!("bad_stdio_opt", r#""inherit" | "null" | "piped""#, &s)), 102 | }) 103 | } 104 | 105 | /// Run a command 106 | #[instrument] 107 | #[rhai_fn(global, name = "sh")] 108 | pub fn exec_cmd(command: Dynamic, mut opts: rhai::Map) -> rhai::Map { 109 | let mut cmd: Command; 110 | if command.is_string() { 111 | cmd = Command::new("sh"); 112 | cmd.arg("-c").arg(command.into_string().unwrap()) 113 | } else { 114 | let res = command.into_typed_array(); 115 | let Ok(arr) = res else { 116 | return die!("bad_param_type", "String | Vec", res.unwrap_err()); 117 | }; 118 | let [exec, args @ ..]: &[&str] = &arr[..] else { 119 | return die!("empty_cmd_arr", "cmd.len() >= 1", "cmd.len() == 0"); 120 | }; 121 | cmd = Command::new(exec); 122 | cmd.args(args) 123 | }; 124 | 125 | cmd.stdout(match parse_io_opt(opts.get_mut("stdout")) { 126 | Ok(io) => io, 127 | Err(e) => return e, 128 | }); 129 | cmd.stderr(match parse_io_opt(opts.get_mut("stderr")) { 130 | Ok(io) => io, 131 | Err(e) => return e, 132 | }); 133 | 134 | if let Some(cwd) = opts.get_mut("cwd") { 135 | match std::mem::take(cwd).into_string() { 136 | Ok(cwd) => _ = cmd.current_dir(cwd), 137 | Err(e) => return die!("bad_cwd_type", "String", e), 138 | } 139 | } 140 | 141 | let out = match cmd.output() { 142 | Ok(x) => x, 143 | Err(err) => { 144 | let mut e = rhai::Map::new(); 145 | let mut inner = rhai::Map::new(); 146 | e.insert("outcome".into(), rhai::Dynamic::from_str("failure").unwrap()); 147 | inner.insert("error".into(), rhai::Dynamic::from_str(&err.to_string()).unwrap()); 148 | e.insert("ctx".into(), rhai::Dynamic::from_map(inner)); 149 | return e; 150 | } 151 | }; 152 | 153 | let mut ret = rhai::Map::new(); 154 | let mut inner = rhai::Map::new(); 155 | ret.insert("outcome".into(), rhai::Dynamic::from_str("success").unwrap()); 156 | inner.insert( 157 | "stdout".into(), 158 | 
rhai::Dynamic::from_str(&String::from_utf8_lossy(&out.stdout)).unwrap(), 159 | ); 160 | inner.insert( 161 | "stderr".into(), 162 | rhai::Dynamic::from_str(&String::from_utf8_lossy(&out.stderr)).unwrap(), 163 | ); 164 | inner.insert( 165 | "rc".into(), 166 | rhai::Dynamic::from_int(i64::from(out.status.code().unwrap_or(0))), 167 | ); 168 | ret.insert("ctx".into(), rhai::Dynamic::from_map(inner)); 169 | ret 170 | } 171 | 172 | /// run a command using `cmd` on Windows and `sh` on other systems 173 | #[instrument(skip(ctx))] 174 | #[rhai_fn(return_raw, name = "sh", global)] 175 | pub fn shell(ctx: NativeCallContext, cmd: &str) -> T { 176 | debug!("Running in shell"); 177 | _sh_out!(&ctx, _cmd!(cmd).output().ehdl(&ctx)?) 178 | } 179 | /// run a command using `cmd` on Windows and `sh` on other systems in working dir 180 | #[instrument(skip(ctx))] 181 | #[rhai_fn(return_raw, name = "sh", global)] 182 | pub fn shell_cwd(ctx: NativeCallContext, cmd: &str, cwd: &str) -> T { 183 | debug!("Running in shell"); 184 | _sh_out!(&ctx, _cmd!(cmd).current_dir(cwd).output().ehdl(&ctx)?) 185 | } 186 | /// run an executable 187 | #[instrument(skip(ctx))] 188 | #[rhai_fn(return_raw, name = "sh", global)] 189 | pub fn sh(ctx: NativeCallContext, cmd: Vec<&str>) -> T { 190 | debug!("Running executable"); 191 | _sh_out!(&ctx, Command::new(cmd[0]).args(&cmd[1..]).output().ehdl(&ctx)?) 192 | } 193 | /// run an executable in working directory 194 | #[instrument(skip(ctx))] 195 | #[rhai_fn(return_raw, name = "sh", global)] 196 | pub fn sh_cwd(ctx: NativeCallContext, cmd: Vec<&str>, cwd: &str) -> T { 197 | debug!("Running executable"); 198 | _sh_out!(&ctx, Command::new(cmd[0]).args(&cmd[1..]).current_dir(cwd).output().ehdl(&ctx)?) 
199 | } 200 | /// list files and folders in directory 201 | /// ## Example 202 | /// ```rhai 203 | /// for x in ls("/") { 204 | /// if x == "/bin" { 205 | /// print("I found the `/bin` folder!"); 206 | /// } 207 | /// } 208 | /// ``` 209 | #[rhai_fn(return_raw, global)] 210 | pub fn ls(ctx: NativeCallContext, dir: &str) -> Result> { 211 | (std::fs::read_dir(dir).ehdl(&ctx)?) 212 | .map(|dir| Ok(dir.ehdl(&ctx)?.path().to_string_lossy().to_string().into())) 213 | .collect() 214 | } 215 | #[rhai_fn(return_raw, name = "ls", global)] 216 | pub fn ls_cwd(ctx: NativeCallContext) -> Result> { 217 | ls(ctx, ".") 218 | } 219 | /// write data to file 220 | /// 221 | /// ## Example 222 | /// ```rhai 223 | /// let foo = "bar"; 224 | /// foo.write("bar.txt") 225 | /// ``` 226 | #[rhai_fn(name = "write", return_raw, global)] 227 | pub fn write( 228 | ctx: NativeCallContext, 229 | data: Dynamic, 230 | file: &str, 231 | ) -> Result<(), Box> { 232 | let mut f = std::fs::File::create(file).ehdl(&ctx)?; 233 | let data = { 234 | if data.is_map() { 235 | // turn into JSON 236 | serde_json::to_string(&data).ehdl(&ctx)? 
237 | } else { 238 | data.to_string() 239 | } 240 | }; 241 | f.write_all(data.as_bytes()).ehdl(&ctx)?; 242 | Ok(()) 243 | } 244 | } 245 | 246 | #[cfg(test)] 247 | mod test { 248 | use super::*; 249 | #[test] 250 | fn shells() -> Result<(), Box> { 251 | let (en, _) = crate::run::gen_en(); 252 | en.run( 253 | r#" 254 | let a = sh("echo hai > test"); 255 | let b = sh(["echo", "hai"]); 256 | let c = sh(["rm", "-rf", "test"]); 257 | let d = sh("ls -al", "/"); 258 | let pwd = sh("pwd").sh_stdout(); 259 | let e = sh(["grep", "hai", "test"], pwd); 260 | if a.sh_stderr() != "" { 261 | throw "error!?"; 262 | } 263 | if b.sh_stdout() != "hai\n" { 264 | throw "bad echo?"; 265 | } 266 | if c.sh_rc() != 0 { 267 | throw "cannot rm?"; 268 | } 269 | if d.sh_stdout().is_empty() { 270 | throw "why is out empty?"; 271 | } 272 | let f = sh("pwd", #{"stdout": "piped", "cwd": "/"}); 273 | if f.outcome != "succes" { 274 | throw "not success??"; 275 | } 276 | if f.ctx.rc != 0 { 277 | throw `pwd returned ${f.ctx.rc}`; 278 | } 279 | if f.ctx.stdout != "/" { 280 | throw "cwd doesn't work?"; 281 | } 282 | "#, 283 | )?; 284 | Ok(()) 285 | } 286 | } 287 | -------------------------------------------------------------------------------- /andax/fns/kokoro.rs: -------------------------------------------------------------------------------- 1 | use crate::error::{AndaxError as AErr, AndaxRes}; 2 | use regex::Regex; 3 | use rhai::{ 4 | plugin::{ 5 | export_module, mem, FnNamespace, ImmutableString, Module, NativeCallContext, PluginFunc, 6 | RhaiResult, TypeId, 7 | }, 8 | Dynamic, EvalAltResult as RhaiE, FuncRegistration, 9 | }; 10 | type Res = Result>; 11 | 12 | #[export_module] 13 | pub mod ar { 14 | use std::str::FromStr; 15 | 16 | #[rhai_fn(return_raw, global)] 17 | pub fn terminate(ctx: NativeCallContext) -> Res { 18 | Err(Box::new(RhaiE::ErrorRuntime(Dynamic::from(AErr::Exit(false)), ctx.position()))) 19 | } 20 | #[rhai_fn(return_raw, global)] 21 | pub fn defenestrate(ctx: NativeCallContext) -> 
Res { 22 | Err(Box::new(RhaiE::ErrorRuntime(Dynamic::from(AErr::Exit(true)), ctx.position()))) 23 | } 24 | #[rhai_fn(return_raw, global)] 25 | pub fn json(ctx: NativeCallContext, a: String) -> Res { 26 | ctx.engine().parse_json(a, true) 27 | } 28 | #[rhai_fn(return_raw, global)] 29 | pub fn json_arr(ctx: NativeCallContext, a: String) -> Res { 30 | serde_json::from_str(&a).ehdl(&ctx) 31 | } 32 | #[rhai_fn(return_raw, global)] 33 | pub fn find(ctx: NativeCallContext, r: &str, text: &str, group: i64) -> Res { 34 | let captures = Regex::new(r).ehdl(&ctx)?.captures(text); 35 | let cap = captures.ok_or_else(|| format!("Can't match regex: {r}\nText: {text}"))?; 36 | Ok((cap 37 | .get(group.try_into().unwrap()) 38 | .ok_or_else(|| format!("Can't get group: {r}\nText: {text}"))?) 39 | .as_str() 40 | .into()) 41 | } 42 | #[rhai_fn(return_raw, global)] 43 | pub fn find_all(ctx: NativeCallContext, r: &str, text: &str) -> Res { 44 | Ok((Regex::new(r).ehdl(&ctx)?.captures_iter(text)) 45 | .map(|cap| { 46 | // NOTE: Dynamic::from_str() is always Ok() 47 | cap.iter() 48 | .map(|m| m.map_or(Dynamic::UNIT, |s| Dynamic::from_str(s.as_str()).unwrap())) 49 | .collect() 50 | }) 51 | .collect()) 52 | } 53 | #[rhai_fn(return_raw, global)] 54 | pub fn sub(ctx: NativeCallContext, r: &str, rep: &str, text: &str) -> Res { 55 | Ok(Regex::new(r).ehdl(&ctx)?.replace_all(text, rep).into()) 56 | } 57 | #[rhai_fn(global)] 58 | pub fn date() -> String { 59 | chrono::offset::Utc::now().format("%Y%m%d").to_string() 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /andax/fns/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::indexing_slicing)] 2 | #![allow(clippy::missing_errors_doc)] 3 | pub mod build; 4 | pub mod cfg; 5 | pub mod io; 6 | pub mod kokoro; 7 | pub mod rpm; 8 | pub mod tenshi; 9 | pub mod tsunagu; 10 | 
-------------------------------------------------------------------------------- /andax/fns/rpm.rs: -------------------------------------------------------------------------------- 1 | use rhai::CustomType; 2 | use std::{ 3 | fs, 4 | path::{Path, PathBuf}, 5 | }; 6 | use tracing::{error, info}; 7 | 8 | lazy_static::lazy_static! { 9 | static ref RE_RELEASE: regex::Regex = regex::Regex::new(r"Release:(\s+)(.+?)\n").unwrap(); 10 | static ref RE_VERSION: regex::Regex = regex::Regex::new(r"Version:(\s+)(\S+)\n").unwrap(); 11 | static ref RE_DEFINE: regex::Regex = regex::Regex::new(r"(?m)%define(\s+)(\S+)(\s+)([^\n]+?)$").unwrap(); 12 | static ref RE_GLOBAL: regex::Regex = regex::Regex::new(r"(?m)%global(\s+)(\S+)(\s+)([^\n]+?)$").unwrap(); 13 | static ref RE_SOURCE: regex::Regex = regex::Regex::new(r"Source(\d+):(\s+)([^\n]+)\n").unwrap(); 14 | } 15 | 16 | /// Update RPM spec files 17 | #[derive(Debug, Clone, PartialEq, Eq)] 18 | pub struct RPMSpec { 19 | /// Original spec file content 20 | original: String, 21 | /// Name of project 22 | pub name: String, 23 | /// AndaX chkupdate script of project 24 | pub chkupdate: PathBuf, 25 | /// Path to spec file 26 | pub spec: PathBuf, 27 | /// RPM spec file content 28 | pub f: String, 29 | } 30 | 31 | impl RPMSpec { 32 | /// Creates a new RPMSpec file representation 33 | /// 34 | /// # Panics 35 | /// - spec file does not exist / cannot read spec to string 36 | pub fn new(name: String, chkupdate: T, spec: U) -> Self 37 | where 38 | T: Into + AsRef, 39 | U: Into + AsRef, 40 | { 41 | let f = fs::read_to_string(&spec).expect("Cannot read spec to string"); 42 | Self { name, chkupdate: chkupdate.into(), original: f.clone(), f, spec: spec.into() } 43 | } 44 | /// Resets the release number to 1 45 | pub fn reset_release(&mut self) { 46 | self.release("1"); 47 | } 48 | /// Sets the release number in the spec file 49 | pub fn release(&mut self, rel: &str) { 50 | let m = RE_RELEASE.captures(self.f.as_str()); 51 | let Some(m) = m else { 
return error!("No `Release:` preamble for {}", self.name) }; 52 | self.f = RE_RELEASE.replace(&self.f, format!("Release:{}{rel}%?dist\n", &m[1])).to_string(); 53 | } 54 | /// Sets the version in the spec file 55 | pub fn version(&mut self, ver: &str) { 56 | let Some(m) = RE_VERSION.captures(self.f.as_str()) else { 57 | return error!("No `Version:` preamble for {}", self.name); 58 | }; 59 | let ver = ver.strip_prefix('v').unwrap_or(ver).replace('-', "."); 60 | if ver != m[2] { 61 | info!("{}: {} —→ {ver}", self.name, &m[2]); 62 | self.f = RE_VERSION.replace(&self.f, format!("Version:{}{ver}\n", &m[1])).to_string(); 63 | self.reset_release(); 64 | } 65 | } 66 | /// Change the value of a `%define` macro by the name 67 | pub fn define(&mut self, name: &str, val: &str) { 68 | let Some(cap) = RE_DEFINE.captures_iter(self.f.as_str()).find(|cap| &cap[2] == name) else { 69 | return error!("Cannot find `%define` for {}", self.name); 70 | }; 71 | self.f = self.f.replace(&cap[0], &format!("%define{}{name}{}{val}", &cap[1], &cap[3])); 72 | } 73 | /// Change the value of a `%global` macro by the name 74 | pub fn global(&mut self, name: &str, val: &str) { 75 | let Some(cap) = RE_GLOBAL.captures_iter(self.f.as_str()).find(|cap| &cap[2] == name) else { 76 | return error!("Cannot find `%global` for {}", self.name); 77 | }; 78 | self.f = self.f.replace(&cap[0], &format!("%global{}{name}{}{val}", &cap[1], &cap[3])); 79 | } 80 | /// Change the `SourceN:` preamble value by `N` 81 | pub fn source(&mut self, i: i64, p: &str) { 82 | let si = i.to_string(); 83 | let Some(cap) = RE_SOURCE.captures_iter(self.f.as_str()).find(|cap| cap[1] == si) else { 84 | return error!("No `Source{i}:` preamble for {}", self.name); 85 | }; 86 | info!("{}: Source{i}: {p}", self.name); 87 | self.f = self.f.replace(&cap[0], &format!("Source{i}:{}{p}\n", &cap[2])); 88 | } 89 | /// Write the updated spec file content 90 | /// 91 | /// # Errors 92 | /// - happens only if the writing part failed :3 93 | pub fn 
write(mut self) -> std::io::Result<()> { 94 | if self.changed() { 95 | fs::write(self.spec, self.f)?; 96 | } 97 | Ok(()) 98 | } 99 | /// Get the spec file content 100 | pub fn get(&mut self) -> String { 101 | self.f.clone() 102 | } 103 | /// Override the spec file content manually 104 | pub fn set(&mut self, ff: String) { 105 | self.f = ff; 106 | } 107 | /// Check if file has been changed 108 | #[must_use] 109 | pub fn changed(&mut self) -> bool { 110 | self.f != self.original 111 | } 112 | } 113 | 114 | impl CustomType for RPMSpec { 115 | fn build(mut builder: rhai::TypeBuilder<'_, Self>) { 116 | builder 117 | .with_name("Rpm") 118 | .with_fn("version", Self::version) 119 | .with_fn("source", Self::source) 120 | .with_fn("define", Self::define) 121 | .with_fn("global", Self::global) 122 | .with_fn("release", Self::reset_release) 123 | .with_fn("release", Self::release) 124 | .with_fn("changed", Self::changed) 125 | .with_get_set("f", Self::get, Self::set); 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /andax/fns/tenshi.rs: -------------------------------------------------------------------------------- 1 | /// Tenshi module for `AndaX` 2 | /// Various utility functions for Andaman Scripting 3 | use crate::error::AndaxRes; 4 | 5 | use rhai::{ 6 | plugin::{ 7 | export_module, mem, FnNamespace, ImmutableString, Module, NativeCallContext, PluginFunc, 8 | RhaiResult, TypeId, 9 | }, 10 | Dynamic, EvalAltResult as RhaiE, FuncRegistration, 11 | }; 12 | type Res = Result>; 13 | 14 | #[export_module] 15 | pub mod ar { 16 | use hcl::eval::Evaluate; 17 | 18 | use std::io::Read; 19 | use tracing::{debug, trace}; 20 | #[rhai_fn(return_raw, global)] 21 | pub fn template(ctx: NativeCallContext, tmpl: rhai::Map, input: String) -> Res { 22 | let mut hcl = anda_config::context::hcl_context(); 23 | for (k, v) in tmpl { 24 | let key = k.to_string(); 25 | // turn value into a hcl::Value::Object 26 | let value = 
hcl::value::to_value(v).ehdl(&ctx)?; 27 | 28 | let span = tracing::debug_span!("hcl.declare_var", ?key, ?value); 29 | span.in_scope(|| { 30 | hcl.declare_var(key, value); 31 | }); 32 | // let value = hcl::value::Value::try_from(_val); 33 | } 34 | println!("{:?}", ctx.source()); 35 | 36 | let template = 37 | ::from_str(&input).ehdl(&ctx)?; 38 | 39 | let res = template.evaluate(&hcl).ehdl(&ctx)?; 40 | 41 | // ok, so we usually build from RPM spec files. 42 | // the issue here is that: rpm macros are defined using %{} 43 | // which coincidentally, is also the syntax for hcl template interpolation. 44 | // 45 | // We will be doing a stopgap solution for now, which is requiring the user to use 46 | // @{} instead of %{} for rpm macros, then replace them after evaluation 47 | // FIXME 48 | let res = res.replace("@{", "%{"); 49 | 50 | trace!(?res, "Template Result"); 51 | // write the result to out 52 | 53 | Ok(res) 54 | } 55 | 56 | /// Function that takes in an object map and a file path 57 | #[rhai_fn(return_raw, global)] 58 | pub fn template_file(ctx: NativeCallContext, map: rhai::Map, path: String) -> Res { 59 | let mut file = std::fs::File::open(&path).ehdl(&ctx)?; 60 | let mut buf = String::new(); 61 | file.read_to_string(&mut buf).ehdl(&ctx)?; 62 | // template is a HCL templated file 63 | debug!("Templating file: {:#?}", path); 64 | debug!(?map, "Loading Template"); 65 | template(ctx, map, buf) 66 | } 67 | 68 | /// turns a map into json 69 | #[rhai_fn(return_raw, global)] 70 | pub fn to_json(ctx: NativeCallContext, map: rhai::Map) -> Res { 71 | let json = serde_json::to_string(&map).ehdl(&ctx)?; 72 | Ok(json) 73 | } 74 | 75 | /// turns a json string into a map 76 | #[rhai_fn(return_raw, global)] 77 | pub fn from_json(ctx: NativeCallContext, json: String) -> Res { 78 | let map = serde_json::from_str(&json).ehdl(&ctx)?; 79 | Ok(map) 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /andax/fns/tsunagu.rs: 
-------------------------------------------------------------------------------- 1 | use crate::{error::AndaxRes, run::rf}; 2 | use rhai::{ 3 | plugin::{ 4 | export_module, mem, Dynamic, EvalAltResult, FnNamespace, ImmutableString, Module, 5 | NativeCallContext, PluginFunc, RhaiResult, TypeId, 6 | }, 7 | CustomType, FuncRegistration, 8 | }; 9 | use serde_json::Value; 10 | use std::env::VarError; 11 | use tracing::trace; 12 | 13 | type Res = Result>; 14 | 15 | pub const USER_AGENT: &str = "AndaX"; 16 | #[export_module] 17 | pub mod ar { 18 | type E = Box; 19 | 20 | static AGENT: std::sync::LazyLock = std::sync::LazyLock::new(|| { 21 | ureq::Agent::new_with_config(ureq::Agent::config_builder().build()) 22 | }); 23 | 24 | #[rhai_fn(return_raw, global)] 25 | pub fn get_json(ctx: NativeCallContext, url: &str) -> Res { 26 | let resp = AGENT.get(url).header("User-Agent", USER_AGENT).call().ehdl(&ctx)?; 27 | resp.into_body().read_json().ehdl(&ctx) 28 | } 29 | 30 | fn get_json_value(ctx: NativeCallContext, url: &str) -> Res { 31 | let resp = AGENT.get(url).header("User-Agent", USER_AGENT).call().ehdl(&ctx)?; 32 | resp.into_body().read_json().ehdl(&ctx) 33 | } 34 | 35 | #[rhai_fn(return_raw, global)] 36 | pub fn get(ctx: NativeCallContext, url: &str) -> Res { 37 | let resp = AGENT.get(url).header("User-Agent", USER_AGENT).call().ehdl(&ctx)?; 38 | resp.into_body().read_to_string().ehdl(&ctx) 39 | } 40 | 41 | #[rhai_fn(return_raw, global)] 42 | pub fn gh(ctx: NativeCallContext, repo: &str) -> Res { 43 | let req = (AGENT.get(&format!("https://api.github.com/repos/{repo}/releases/latest"))) 44 | .header("Authorization", &format!("Bearer {}", internal_env("GITHUB_TOKEN")?)) 45 | .header("User-Agent", USER_AGENT); 46 | let v: Value = req.call().ehdl(&ctx)?.into_body().read_json().ehdl(&ctx)?; 47 | trace!("Got json from {repo}:\n{v}"); 48 | Ok(v["tag_name"].as_str().unwrap_or("").to_owned()) 49 | } 50 | #[rhai_fn(return_raw, global)] 51 | pub fn gh_tag(ctx: NativeCallContext, repo: 
&str) -> Res { 52 | let req = (AGENT.get(&format!("https://api.github.com/repos/{repo}/tags"))) 53 | .header("Authorization", &format!("Bearer {}", internal_env("GITHUB_TOKEN")?)) 54 | .header("User-Agent", USER_AGENT); 55 | let v: Value = req.call().ehdl(&ctx)?.into_body().read_json().ehdl(&ctx)?; 56 | trace!("Got json from {repo}:\n{v}"); 57 | let v = (v.as_array()) 58 | .ok_or_else(|| E::from("gh_tag received not array")) 59 | .map(|a| a.first().ok_or_else(|| E::from("gh_tag no tags")))??; 60 | Ok(v["name"].as_str().unwrap_or("").to_owned()) 61 | } 62 | #[rhai_fn(return_raw, global)] 63 | pub fn gh_commit(ctx: NativeCallContext, repo: &str) -> Res { 64 | let req = (AGENT.get(&format!("https://api.github.com/repos/{repo}/commits/HEAD"))) 65 | .header("Authorization", &format!("Bearer {}", internal_env("GITHUB_TOKEN")?)) 66 | .header("User-Agent", USER_AGENT); 67 | let v: Value = req.call().ehdl(&ctx)?.into_body().read_json().ehdl(&ctx)?; 68 | trace!("Got json from {repo}:\n{v}"); 69 | Ok(v["sha"].as_str().unwrap_or("").to_owned()) 70 | } 71 | #[rhai_fn(return_raw, global)] 72 | pub fn gh_rawfile(ctx: NativeCallContext, repo: &str, branch: &str, file: &str) -> Res { 73 | get(ctx, &format!("https://raw.githubusercontent.com/{repo}/{branch}/{file}")) 74 | } 75 | 76 | #[rhai_fn(return_raw, name = "gitlab", global)] 77 | pub fn gitlab_domain(ctx: NativeCallContext, domain: &str, id: &str) -> Res { 78 | let v = get_json_value(ctx, &format!("https://{domain}/api/v4/projects/{id}/releases/"))?; 79 | trace!("Got json from {id}:\n{v}"); 80 | Ok(v[0]["tag_name"].as_str().unwrap_or("").to_owned()) 81 | } 82 | #[rhai_fn(return_raw, global)] 83 | pub fn gitlab(ctx: NativeCallContext, id: &str) -> Res { 84 | gitlab_domain(ctx, "gitlab.com", id) 85 | } 86 | #[rhai_fn(return_raw, name = "gitlab_tag", global)] 87 | pub fn gitlab_tag_domain(ctx: NativeCallContext, domain: &str, id: &str) -> Res { 88 | let v = 89 | get_json_value(ctx, 
&format!("https://{domain}/api/v4/projects/{id}/repository/tags"))?; 90 | trace!("Got json from {id}:\n{v}"); 91 | Ok(v[0]["name"].as_str().unwrap_or("").to_owned()) 92 | } 93 | #[rhai_fn(return_raw, global)] 94 | pub fn gitlab_tag(ctx: NativeCallContext, id: &str) -> Res { 95 | gitlab_tag_domain(ctx, "gitlab.com", id) 96 | } 97 | #[rhai_fn(return_raw, name = "gitlab_commit", global)] 98 | pub fn gitlab_commit_domain( 99 | ctx: NativeCallContext, 100 | domain: &str, 101 | id: &str, 102 | branch: &str, 103 | ) -> Res { 104 | let v = get_json_value( 105 | ctx, 106 | &format!("https://{domain}/api/v4/projects/{id}/repository/branches/{branch}"), 107 | )?; 108 | trace!("Got json from {id}:\n{v}"); 109 | Ok(v["commit"]["id"].as_str().unwrap_or("").to_owned()) 110 | } 111 | #[rhai_fn(return_raw, global)] 112 | pub fn gitlab_commit(ctx: NativeCallContext, id: &str, branch: &str) -> Res { 113 | gitlab_commit_domain(ctx, "gitlab.com", id, branch) 114 | } 115 | 116 | #[rhai_fn(return_raw, global)] 117 | pub fn pypi(ctx: NativeCallContext, name: &str) -> Res { 118 | let obj = get_json_value(ctx, &format!("https://pypi.org/pypi/{name}/json"))?; 119 | let obj = obj.get("info").ok_or_else(|| E::from("No json[`info`]?"))?; 120 | let obj = obj.get("version").ok_or_else(|| E::from("No json[`info`][`version`]?"))?; 121 | obj.as_str().map(std::string::ToString::to_string).ok_or_else(|| "json not string?".into()) 122 | } 123 | 124 | #[rhai_fn(return_raw, global)] 125 | pub fn crates(ctx: NativeCallContext, name: &str) -> Res { 126 | let obj = get_json_value(ctx, &format!("https://crates.io/api/v1/crates/{name}"))?; 127 | let obj = obj.get("crate").ok_or_else(|| E::from("No json[`crate`]?"))?; 128 | let obj = obj.get("max_stable_version"); 129 | let obj = obj.ok_or_else(|| E::from("No json[`crate`][`max_stable_version`]?"))?; 130 | obj.as_str().map(std::string::ToString::to_string).ok_or_else(|| "json not string?".into()) 131 | } 132 | 133 | #[rhai_fn(return_raw, global)] 134 | pub fn 
crates_max(ctx: NativeCallContext, name: &str) -> Res { 135 | let obj = get_json_value(ctx, &format!("https://crates.io/api/v1/crates/{name}"))?; 136 | let obj = obj.get("crate").ok_or_else(|| E::from("No json[`crate`]?"))?; 137 | let obj = obj.get("max_version"); 138 | let obj = obj.ok_or_else(|| E::from("No json[`crate`][`max_version`]?"))?; 139 | obj.as_str().map(std::string::ToString::to_string).ok_or_else(|| "json not string?".into()) 140 | } 141 | 142 | #[rhai_fn(return_raw, global)] 143 | pub fn crates_newest(ctx: NativeCallContext, name: &str) -> Res { 144 | let obj = get_json_value(ctx, &format!("https://crates.io/api/v1/crates/{name}"))?; 145 | let obj = obj.get("crate").ok_or_else(|| E::from("No json[`crate`]?"))?; 146 | let obj = obj.get("newest_version"); 147 | let obj = obj.ok_or_else(|| E::from("No json[`crate`][`newest_version`]?"))?; 148 | obj.as_str().map(std::string::ToString::to_string).ok_or_else(|| "json not string?".into()) 149 | } 150 | #[rhai_fn(return_raw, global)] 151 | pub fn npm(ctx: NativeCallContext, name: &str) -> Res { 152 | let obj = get_json_value(ctx, &format!("https://registry.npmjs.org/{name}/latest"))?; 153 | let obj = obj.get("version").ok_or_else(|| E::from("No json[`version`]?"))?; 154 | obj.as_str().map(std::string::ToString::to_string).ok_or_else(|| "json not string?".into()) 155 | } 156 | 157 | #[rhai_fn(skip)] 158 | pub fn internal_env(key: &str) -> Res { 159 | trace!("env(`{key}`) = {:?}", std::env::var(key)); 160 | match std::env::var(key) { 161 | Ok(s) => Ok(s), 162 | Err(VarError::NotPresent) => Err(format!("env(`{key}`) not present").into()), 163 | Err(VarError::NotUnicode(o)) => Err(format!("env(`{key}`): invalid UTF: {o:?}").into()), 164 | } 165 | } 166 | 167 | #[rhai_fn(global)] 168 | pub fn env(key: &str) -> String { 169 | trace!("env(`{key}`) = {:?}", std::env::var_os(key)); 170 | std::env::var_os(key).map(|s| s.to_string_lossy().to_string()).unwrap_or_default() 171 | } 172 | } 173 | 174 | #[derive(Clone)] 175 
| pub struct Req { 176 | pub url: String, 177 | pub headers: Vec<(String, String)>, 178 | pub redirects: i64, 179 | } 180 | 181 | impl CustomType for Req { 182 | fn build(mut builder: rhai::TypeBuilder<'_, Self>) { 183 | builder 184 | .with_name("Req") 185 | .with_fn("new_req", Self::new) 186 | .with_fn("get", |ctx: NativeCallContext, x: Self| rf(&ctx, x.get())) 187 | .with_fn("redirects", Self::redirects) 188 | .with_fn("head", Self::head); 189 | } 190 | } 191 | 192 | impl Req { 193 | pub const fn new(url: String) -> Self { 194 | Self { url, headers: vec![], redirects: 0 } 195 | } 196 | pub fn get(self) -> color_eyre::Result { 197 | let cfg = ureq::Agent::config_builder().max_redirects(self.redirects.try_into()?).build(); 198 | let r = ureq::Agent::new_with_config(cfg).get(&self.url); 199 | let mut r = r.header("User-Agent", USER_AGENT); 200 | for (k, v) in self.headers { 201 | r = r.header(k.as_str(), v.as_str()); 202 | } 203 | Ok(r.call()?.into_body().read_to_string()?) 204 | } 205 | pub fn head(&mut self, key: String, val: String) { 206 | self.headers.push((key, val)); 207 | } 208 | pub fn redirects(&mut self, i: i64) { 209 | self.redirects = i; 210 | } 211 | } 212 | -------------------------------------------------------------------------------- /andax/hints/gh_token_not_present.txt: -------------------------------------------------------------------------------- 1 | gh() requires the environment variable `GITHUB_TOKEN` to be set as a Github token so as to avoid rate-limits: 2 | https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api 3 | To create a Github token, see: 4 | https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token 5 | -------------------------------------------------------------------------------- /andax/lib.rs: -------------------------------------------------------------------------------- 1 | //! AndaX, an embedded scripting system powered by Rhai. 2 | //! 3 | //! 
To start running a script, use `run()`. 4 | #![allow(clippy::doc_markdown)] 5 | #![allow(clippy::module_name_repetitions)] 6 | // Since Rhai relies on implicit lifetimes a lot, we are not going to deny rust_2018_idioms. 7 | mod error; 8 | mod fns; 9 | mod run; 10 | 11 | pub use fns::rpm::RPMSpec; 12 | pub use rhai::{self, Map}; 13 | pub use run::{errhdl, run}; 14 | 15 | /// The usual Error type returned by the Rhai engine. 16 | /// Alias for `Box`. 17 | pub type RhaiErr = Box; 18 | -------------------------------------------------------------------------------- /andax/run.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | error::{ 3 | AndaxError as AErr, 4 | TbErr::{self, Arb, Report, Rhai}, 5 | }, 6 | fns as f, 7 | }; 8 | use directories::BaseDirs; 9 | use lazy_static::lazy_static; 10 | use regex::Regex; 11 | use rhai::{ 12 | module_resolvers::ModuleResolversCollection, 13 | packages::Package, 14 | plugin::{exported_module, Dynamic, EvalAltResult, Position}, 15 | Engine, EvalAltResult as RhaiE, NativeCallContext as Ctx, Scope, 16 | }; 17 | use std::fmt::Write; 18 | use std::{io::BufRead, path::Path}; 19 | use tracing::{debug, error, instrument, trace, warn}; 20 | 21 | /// # Errors 22 | /// Turns a color_eyre Result into the Rhai Result 23 | pub fn rf(ctx: &Ctx, res: color_eyre::Result) -> Result> 24 | where 25 | T: rhai::Variant + Clone, 26 | { 27 | res.map_err(|err| { 28 | Box::new(RhaiE::ErrorRuntime( 29 | Dynamic::from(AErr::RustReport( 30 | ctx.fn_name().into(), 31 | ctx.source().unwrap_or("").into(), 32 | std::rc::Rc::from(err), 33 | )), 34 | ctx.position(), 35 | )) 36 | }) 37 | } 38 | 39 | fn module_resolver() -> ModuleResolversCollection { 40 | let mut resolv = ModuleResolversCollection::default(); 41 | 42 | let mut base_modules = rhai::module_resolvers::StaticModuleResolver::new(); 43 | 44 | // todo: rewrite all these stuff to make use of the new resolver 45 | 46 | base_modules.insert("io", 
exported_module!(f::io::ar)); 47 | base_modules.insert("tsunagu", exported_module!(f::tsunagu::ar)); 48 | base_modules.insert("kokoro", exported_module!(f::kokoro::ar)); 49 | base_modules.insert("tenshi", exported_module!(f::tenshi::ar)); 50 | base_modules.insert("anda::rpmbuild", exported_module!(f::build::ar)); 51 | base_modules.insert("anda::cfg", exported_module!(f::cfg::ar)); 52 | 53 | resolv.push(base_modules); 54 | 55 | let sys_modules = vec![ 56 | "/usr/lib/anda", 57 | "/usr/local/lib/anda", 58 | // "/lib/anda", 59 | "/usr/lib64/anda", 60 | "/usr/local/lib64/anda", 61 | // "/lib64/anda", 62 | ]; 63 | 64 | for path in sys_modules { 65 | let mut sys_resolv = rhai::module_resolvers::FileModuleResolver::new_with_path(path); 66 | sys_resolv.enable_cache(true); 67 | resolv.push(sys_resolv); 68 | } 69 | 70 | if let Some(base_dirs) = BaseDirs::new() { 71 | let user_libs = base_dirs.home_dir().join(".local/lib/anda"); 72 | if user_libs.is_dir() { 73 | let mut local_resolv = 74 | rhai::module_resolvers::FileModuleResolver::new_with_path(user_libs); 75 | local_resolv.enable_cache(true); 76 | resolv.push(local_resolv); 77 | } 78 | } 79 | 80 | let std_resolv = rhai::module_resolvers::FileModuleResolver::new(); 81 | resolv.push(std_resolv); 82 | 83 | resolv 84 | } 85 | pub fn gen_en() -> (Engine, Scope<'static>) { 86 | let mut sc = Scope::new(); 87 | sc.push("USER_AGENT", f::tsunagu::USER_AGENT); 88 | sc.push("IS_WIN32", cfg!(windows)); 89 | sc.push("ANDAX_VER", env!("CARGO_PKG_VERSION")); 90 | let mut en = Engine::new(); 91 | 92 | let resolv = module_resolver(); 93 | en.set_module_resolver(resolv) 94 | .register_global_module(exported_module!(f::io::ar).into()) 95 | .register_global_module(exported_module!(f::tsunagu::ar).into()) 96 | .register_global_module(exported_module!(f::kokoro::ar).into()) 97 | .register_global_module(exported_module!(f::tenshi::ar).into()) 98 | .register_static_module("anda::rpmbuild", exported_module!(f::build::ar).into()) 99 | 
.register_static_module("anda::cfg", exported_module!(f::cfg::ar).into()) 100 | .build_type::() 101 | .build_type::(); 102 | rhai_fs::FilesystemPackage::new().register_into_engine(&mut en); 103 | rhai_url::UrlPackage::new().register_into_engine(&mut en); 104 | trace!(?en, "Engine created"); 105 | (en, sc) 106 | } 107 | 108 | #[inline] 109 | fn _gpos(p: Position) -> Option<(usize, usize)> { 110 | p.line().map(|l| (l, p.position().unwrap_or(0))) 111 | } 112 | lazy_static! { 113 | static ref WORD_REGEX: Regex = Regex::new("[A-Za-z_][A-Za-z0-9_]*").unwrap(); 114 | } 115 | 116 | // proj: project name, scr: script path, nntz (nanitozo): just give me the error 117 | // pos: error position, rhai_fn: function that caused the issue, fn_src: idk… 118 | #[allow(clippy::arithmetic_side_effects)] 119 | #[instrument] 120 | pub fn traceback(proj: &str, scr: &Path, nntz: TbErr, pos: Position, rhai_fn: &str, fn_src: &str) { 121 | let Some((line, col)) = _gpos(pos) else { 122 | return error!("{proj}: {scr:?} (no position data)\n{nntz}"); 123 | }; 124 | let f = std::fs::File::open(scr); 125 | let scr = scr.display(); 126 | macro_rules! 
// proj: project name, scr: script path, nntz (nanitozo): just give me the error
// pos: error position, rhai_fn: function that caused the issue, fn_src: idk…
//
// Renders a compiler-style traceback box for a script error: opens the
// script, extracts the offending line, underlines the word at the error
// column, and appends invocation / source / hint annotations.
#[allow(clippy::arithmetic_side_effects)]
#[instrument]
pub fn traceback(proj: &str, scr: &Path, nntz: TbErr, pos: Position, rhai_fn: &str, fn_src: &str) {
    // No position data → fall back to a plain one-line report.
    let Some((line, col)) = _gpos(pos) else {
        return error!("{proj}: {scr:?} (no position data)\n{nntz}");
    };
    let f = std::fs::File::open(scr);
    let scr = scr.display();
    // die!: unwrap `$var`, or log `$msg` with the error and bail with the
    // plain fallback report.
    macro_rules! die {
        ($var:expr, $msg:expr) => {{
            if let Err(e) = $var {
                error!($msg, e);
                return error!("{proj}: {scr} (no position data)\n{nntz}");
            }
            $var.unwrap()
        }};
    }
    let f = die!(f, "{proj}: Cannot open `{scr}`: {}");
    // Line numbers are 1-indexed, so the target is `nth(line - 1)`.
    let Some(sl) = std::io::BufReader::new(f).lines().nth(line - 1) else {
        error!("{proj}: Non-existence exception at {scr}:{line}:{col}");
        return error!("{proj}: {scr} (no position data)\n{nntz}");
    };
    // replace tabs to avoid wrong position when print
    let sl = die!(sl, "{proj}: Cannot read line: {}").replace('\t', " ");
    // Underline width: the length of the identifier starting exactly at the
    // error column, or 1 if the match starts elsewhere.
    let m = WORD_REGEX.find_at(sl.as_str(), col - 1).map_or(1, |x| {
        let r = x.range();
        if r.start == col - 1 {
            r.len()
        } else {
            1
        }
    }); // number of underline chars
    let ln = line.to_string().len(); // length of the string of the line number
    let lns = " ".repeat(ln); // spaces for padding the left hand side line number place
    let l = "─".repeat(ln); // padding for the top of line number display
    let r = "─".repeat(sl.len() + 2); // right hand side padding
    let mut code = format!(
        "─{l}─┬{r}\n {lns} │ {scr}:{line}:{col}\n─{l}─┼{r}\n {line} │ {sl}\n {lns} │ {}{}",
        " ".repeat(col - 1), // padding at left of underline
        "🭶".repeat(m) // underline the word
    );
    if !rhai_fn.is_empty() {
        _ = write!(code, "\n {lns} └─═ When invoking: {rhai_fn}()");
    }
    if !fn_src.is_empty() {
        _ = write!(code, "\n {lns} └─═ Function source: {fn_src}");
    }
    _ = write!(code, "\n {lns} └─═ {nntz}");
    code += &hint(&sl, &lns, &nntz, rhai_fn).unwrap_or_default();
    // slow but works!
    // Turn every `└` except the final one into `├` so the annotations render
    // as a connected tree.
    let c = code.matches('└').count();
    if c > 0 {
        code = code.replacen('└', "├", c - 1);
    }
    return error!("Script Exception —— {proj}\n{code}");
}
176 | pub fn errhdl(name: &str, scr: &Path, err: EvalAltResult) { 177 | trace!("{name}: Generating traceback"); 178 | if let EvalAltResult::ErrorRuntime(ref run_err, pos) = err { 179 | match run_err.clone().try_cast::() { 180 | Some(AErr::RustReport(rhai_fn, fn_src, others)) => { 181 | return traceback( 182 | name, 183 | scr, 184 | Report(others), 185 | pos, 186 | rhai_fn.as_str(), 187 | fn_src.as_str(), 188 | ); 189 | } 190 | Some(AErr::RustError(rhai_fn, fn_src, others)) => { 191 | return traceback(name, scr, Arb(others), pos, rhai_fn.as_str(), fn_src.as_str()); 192 | } 193 | Some(AErr::Exit(b)) => { 194 | if b { 195 | warn!("世界を壊している。\n{}", crate::error::EARTH); 196 | error!("生存係為咗喵?打程式幾好呀。仲喵要咁憤世嫉俗喎。還掂おこちゃま戦争係政治家嘅事……"); 197 | trace!("あなたは世界の終わりにずんだを食べるのだ"); 198 | } 199 | return debug!("Exit from rhai at: {pos}"); 200 | } 201 | None => {} 202 | } 203 | } 204 | trace!("Rhai moment: {err:#?}"); 205 | let pos = err.position(); 206 | traceback(name, scr, Rhai(err), pos, "", ""); 207 | } 208 | 209 | /// Executes an AndaX script. 210 | pub fn run< 211 | 'a, 212 | F: FnOnce(&mut Scope<'a>), 213 | K: Into, 214 | V: Into, 215 | L: Iterator, 216 | >( 217 | name: &'a str, 218 | scr: &'a Path, 219 | labels: L, 220 | f: F, 221 | ) -> Option> { 222 | let (en, mut sc) = gen_en(); 223 | f(&mut sc); 224 | let lbls: rhai::Map = labels.map(|(k, v)| (k.into(), v.into())).collect(); 225 | sc.push("labels", lbls); 226 | sc.push("__script_path", format!("{}", scr.display())); 227 | exec(name, scr, sc, en) 228 | } 229 | 230 | #[instrument(skip(sc, en))] 231 | fn exec<'a>(name: &'a str, scr: &'a Path, mut sc: Scope<'a>, en: Engine) -> Option> { 232 | debug!("Running {name}"); 233 | match en.run_file_with_scope(&mut sc, scr.to_path_buf()) { 234 | Ok(()) => Some(sc), 235 | Err(err) => { 236 | errhdl(name, scr, *err); 237 | None 238 | } 239 | } 240 | } 241 | 242 | macro_rules! gen_h { 243 | // nyeshu 244 | ($lns:ident) => { 245 | macro_rules! 
h { 246 | ($s:expr) => {{ 247 | #[allow(clippy::arithmetic_side_effects)] 248 | let left = " ".repeat(7 + $lns.len()); 249 | let mut s = String::new(); 250 | let mut first = true; 251 | for l in $s.lines() { 252 | if first { 253 | s = format!("\n {} └─═ Hint: {l}", $lns); 254 | first = false; 255 | continue; 256 | } 257 | _ = write!(s, "\n{left}...: {l}"); 258 | } 259 | return Some(s); 260 | }}; 261 | } 262 | }; 263 | } 264 | 265 | #[instrument(skip(sl, lns, nanitozo, rhai_fn))] 266 | fn hint(sl: &str, lns: &str, nanitozo: &TbErr, rhai_fn: &str) -> Option { 267 | trace!("Matching hints"); 268 | gen_h!(lns); 269 | match nanitozo { 270 | Arb(err) => { 271 | if let Some(err) = (**err).downcast_ref::() { 272 | return hint_ear(sl, lns, err, rhai_fn); 273 | } 274 | let s = format!("{err}"); 275 | if rhai_fn == "gh" 276 | && s.starts_with("https://api.github.com/repos/") 277 | && s.ends_with("/releases/latest: status code 404") 278 | { 279 | h!("Check if the repo is valid. Only releases are supported; use gh_tag() for tags."); 280 | } 281 | if rhai_fn.starts_with("gh") && s.ends_with(": status code 403") { 282 | h!("Maybe you have reached the ratelimit: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api"); 283 | } 284 | None 285 | } 286 | Report(report) => { 287 | if let Some(err) = report.source() { 288 | if let Some(err) = err.downcast_ref::() { 289 | return hint_ear(sl, lns, err, rhai_fn); 290 | } 291 | } 292 | None 293 | } 294 | Rhai(err) => hint_ear(sl, lns, err, rhai_fn), 295 | } 296 | } 297 | /// # Panics 298 | /// This function will never panic. 
299 | fn hint_ear(sl: &str, lns: &str, ear: &EvalAltResult, rhai_fn: &str) -> Option { 300 | use rhai::ParseErrorType::MissingToken; 301 | use EvalAltResult::{ErrorMismatchOutputType, ErrorParsing, ErrorRuntime}; 302 | trace!(?rhai_fn, "Hinting for EvalAltResult"); 303 | gen_h!(lns); 304 | match ear { 305 | ErrorRuntime(d, _) => { 306 | if let Some(s) = d.read_lock::() { 307 | if s.as_str() == "env(`GITHUB_TOKEN`) not present" { 308 | h!(include_str!("hints/gh_token_not_present.txt")) 309 | } 310 | } 311 | } 312 | ErrorMismatchOutputType(req, actual, _) => { 313 | if sl.contains("json(") && req == "map" && actual == "array" { 314 | h!("If the json root is an array `[]`, use json_arr() instead."); 315 | } 316 | } 317 | ErrorParsing(MissingToken(token, _), _) if token == ";" => { 318 | h!("You most likely forgot to add a semicolon to the end of the last line."); 319 | } 320 | _ => {} 321 | } 322 | trace!("No hints"); 323 | None 324 | } 325 | -------------------------------------------------------------------------------- /assets/anda-compressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FyraLabs/anda/7ec8571beccb6b2761672133a96e03b041e2418f/assets/anda-compressed.png -------------------------------------------------------------------------------- /assets/anda-medium.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FyraLabs/anda/7ec8571beccb6b2761672133a96e03b041e2418f/assets/anda-medium.png -------------------------------------------------------------------------------- /assets/anda.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FyraLabs/anda/7ec8571beccb6b2761672133a96e03b041e2418f/assets/anda.ico -------------------------------------------------------------------------------- /assets/anda.png: 
#!anda run
// Version-bump helper, run via `anda run bump.rhai --labels newver=X.Y.Z[,oldver=A.B.C]`.
// Rewrites the workspace Cargo.toml versions and the two RPM spec files.

// `newver` is mandatory; print usage and stop if it is missing.
if labels.newver == () {
    print("please supply --labels newver=...");
    terminate();
}

// When `oldver` was not supplied, scrape it out of anda-config/Cargo.toml.
let oldver = labels.get("oldver");
if oldver == () {
    oldver = find(`(?m)^version = "([\d.]+)"$`, open_file("anda-config/Cargo.toml", "r").read_string(), 1);
    print(`Assuming old version: ${oldver}`);
}
// Escape the dots so the version string can be embedded in a regex.
oldver.replace(".", "\\.");
labels.oldver = oldver;


// Rewrites `version = "<oldver>"` to the new version in one Cargo.toml.
fn edit_cargo_toml(path, labels) {
    print(`Editing file: ${path}`);
    let file = open_file(path);
    let s = sub(`version = "${labels.oldver}"`, `version = "${labels.newver}"`, file.read_string());
    file.seek(0);
    file.write(s);
}

edit_cargo_toml("anda-config/Cargo.toml", labels);
edit_cargo_toml("andax/Cargo.toml", labels);
edit_cargo_toml("Cargo.toml", labels);


// Bump the `%global _version` macro in the git spec.
print(`Editing file: rust-anda-git.spec`);
let spec1 = open_file("rust-anda-git.spec");
let s1 = sub(`(?m)^%global _version ${labels.oldver}$`, `%global _version ${labels.newver}`, spec1.read_string());
spec1.seek(0);
spec1.write(s1);


// Bump the `Version:` tag in the release spec.
print(`Editing file: rust-anda.spec`);
let spec2 = open_file("rust-anda.spec");
let s2 = sub(`(?m)^(Version:\s+) ${labels.oldver}$`, `$1 ${labels.newver}`, spec2.read_string());
spec2.seek(0);
spec2.write(s2);
-------------------------------------------------------------------------------- 1 | 2 | 3 | disallowed-types = ["std::sync::Mutex"] 4 | disallowed-names = ["binding", ".."] 5 | disallowed-macros = ["cmd_lib::run_cmd", "cmd_lib::run_fun"] 6 | # check-private-items = true 7 | enforce-iter-loop-reborrow = true 8 | excessive-nesting-threshold = 5 9 | -------------------------------------------------------------------------------- /rust-anda-git.spec: -------------------------------------------------------------------------------- 1 | # Generated by rust2rpm 22 2 | %bcond_without check 3 | %define debug_package %{nil} 4 | 5 | %global crate anda 6 | 7 | %global _version 0.4.12 8 | 9 | Name: rust-anda 10 | Version: %{_version}.%{autogitversion} 11 | Release: 1%{?dist} 12 | Summary: Andaman Build toolchain 13 | 14 | License: MIT 15 | URL: https://crates.io/crates/anda 16 | 17 | %global version %{_version} 18 | Source: https://github.com/FyraLabs/anda/archive/%{autogitcommit}.tar.gz 19 | 20 | ExclusiveArch: %{rust_arches} 21 | 22 | BuildRequires: rust-packaging >= 21 23 | BuildRequires: anda-srpm-macros 24 | BuildRequires: openssl-devel 25 | BuildRequires: git-core 26 | BuildRequires: mold 27 | 28 | Requires: mock 29 | Requires: rpm-build 30 | Requires: createrepo_c 31 | Requires: git-core 32 | %global _description %{expand: 33 | Andaman Build toolchain.} 34 | 35 | %description %{_description} 36 | 37 | %package -n %{crate} 38 | Summary: %{summary} 39 | 40 | %description -n %{crate} %{_description} 41 | 42 | %files -n %{crate} 43 | %{_bindir}/anda 44 | %{_mandir}/man1/anda*.1* 45 | %{_sysconfdir}/bash_completion.d/anda.bash 46 | %{_datadir}/zsh/site-functions/_anda 47 | %{_datadir}/fish/completions/anda.fish 48 | %prep 49 | %autosetup -n %{crate}-%{autogitcommit} -p1 50 | %cargo_prep_online 51 | 52 | %build 53 | %cargo_build 54 | cargo run --release -p xtask -- manpage 55 | cargo run --release -p xtask -- completion 56 | 57 | 58 | %install 59 | %cargo_install 60 | 61 | 
mkdir -p %{buildroot}%{_mandir}/man1/ 62 | 63 | # Install shell completions 64 | 65 | COMPDIR="target/assets/completion" 66 | 67 | mkdir -p %{buildroot}%{_sysconfdir}/bash_completion.d/ 68 | cp -v $COMPDIR/bash/anda.bash %{buildroot}%{_sysconfdir}/bash_completion.d/anda.bash 69 | mkdir -p %{buildroot}%{_datadir}/zsh/site-functions/ 70 | cp -v $COMPDIR/zsh/_anda %{buildroot}%{_datadir}/zsh/site-functions/_anda 71 | mkdir -p %{buildroot}%{_datadir}/fish/completions/ 72 | cp -v $COMPDIR/fish/anda.fish %{buildroot}%{_datadir}/fish/completions/anda.fish 73 | 74 | # install man pages 75 | cp -v target/assets/man_pages/* %{buildroot}%{_mandir}/man1/ 76 | 77 | rm -rf %{buildroot}%{cargo_registry} 78 | 79 | %changelog 80 | %autochangelog 81 | -------------------------------------------------------------------------------- /rust-anda.spec: -------------------------------------------------------------------------------- 1 | # Generated by rust2rpm 22 2 | %bcond_without check 3 | %define debug_package %{nil} 4 | 5 | %global crate anda 6 | 7 | Name: rust-anda 8 | Version: 0.4.12 9 | Release: 1%{?dist} 10 | Summary: Andaman Build toolchain 11 | 12 | License: MIT 13 | URL: https://crates.io/crates/anda 14 | Source: https://github.com/FyraLabs/anda/archive/refs/tags/%{version}.tar.gz 15 | 16 | ExclusiveArch: %{rust_arches} 17 | 18 | BuildRequires: rust-packaging >= 21 19 | BuildRequires: anda-srpm-macros 20 | BuildRequires: openssl-devel 21 | BuildRequires: git-core 22 | BuildRequires: mold 23 | 24 | Requires: mock 25 | Requires: rpm-build 26 | Requires: createrepo_c 27 | Requires: git-core 28 | %global _description %{expand: 29 | Andaman Build toolchain.} 30 | 31 | %description %{_description} 32 | 33 | %package -n %{crate} 34 | Summary: %{summary} 35 | 36 | %description -n %{crate} %{_description} 37 | 38 | %files -n %{crate} 39 | %{_bindir}/anda 40 | %{_mandir}/man1/anda*.1* 41 | %{_sysconfdir}/bash_completion.d/anda.bash 42 | %{_datadir}/zsh/site-functions/_anda 43 | 
%{_datadir}/fish/completions/anda.fish 44 | %prep 45 | %autosetup -n %{crate}-%{version_no_tilde} -p1 46 | %cargo_prep_online 47 | 48 | %build 49 | %cargo_build 50 | cargo run --release -p xtask -- manpage 51 | cargo run --release -p xtask -- completion 52 | 53 | %install 54 | %cargo_install 55 | 56 | mkdir -p %{buildroot}%{_mandir}/man1/ 57 | 58 | # Install shell completions 59 | 60 | COMPDIR="target/assets/completion" 61 | 62 | mkdir -p %{buildroot}%{_sysconfdir}/bash_completion.d/ 63 | cp -v $COMPDIR/bash/anda.bash %{buildroot}%{_sysconfdir}/bash_completion.d/anda.bash 64 | mkdir -p %{buildroot}%{_datadir}/zsh/site-functions/ 65 | cp -v $COMPDIR/zsh/_anda %{buildroot}%{_datadir}/zsh/site-functions/_anda 66 | mkdir -p %{buildroot}%{_datadir}/fish/completions/ 67 | cp -v $COMPDIR/fish/anda.fish %{buildroot}%{_datadir}/fish/completions/anda.fish 68 | 69 | # install man pages 70 | cp -v target/assets/man_pages/* %{buildroot}%{_mandir}/man1/ 71 | 72 | 73 | rm -rf %{buildroot}%{cargo_registry} 74 | 75 | %changelog 76 | %autochangelog 77 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | use_small_heuristics = "Max" 2 | edition = "2021" 3 | merge_derives = true 4 | match_block_trailing_comma = false 5 | use_field_init_shorthand = true 6 | use_try_shorthand = true 7 | -------------------------------------------------------------------------------- /src/artifacts.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use crate::cli::PackageType; 4 | 5 | #[derive(Clone)] 6 | pub struct Artifacts { 7 | pub packages: BTreeMap, 8 | } 9 | 10 | impl Artifacts { 11 | pub const fn new() -> Self { 12 | Self { packages: BTreeMap::new() } 13 | } 14 | pub fn add(&mut self, name: String, package_type: PackageType) { 15 | self.packages.insert(name, package_type); 16 | } 17 | } 18 | 
-------------------------------------------------------------------------------- /src/builder.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | artifacts::Artifacts, 3 | cli::{Cli, FlatpakOpts, OciOpts, PackageType, RpmOpts}, 4 | cmd, 5 | flatpak::{FlatpakArtifact, FlatpakBuilder}, 6 | oci::{build_oci, OCIBackend}, 7 | rpm_spec::{RPMBuilder, RPMExtraOptions, RPMOptions}, 8 | }; 9 | use anda_config::{Docker, Flatpak, Project}; 10 | use color_eyre::{eyre::eyre, eyre::Context, Result}; 11 | use itertools::Itertools; 12 | use std::path::{Path, PathBuf}; 13 | use tracing::{debug, error, info, trace}; 14 | 15 | pub async fn build_rpm( 16 | opts: &mut RPMOptions, 17 | spec: &Path, 18 | builder: RPMBuilder, 19 | output_dir: &Path, 20 | rpmb_opts: &RpmOpts, 21 | ) -> Result> { 22 | let repo_path = output_dir.join("rpm"); 23 | println!("Building RPMs in {}", repo_path.display()); 24 | let repodata_path = repo_path.join("repodata"); 25 | 26 | if repodata_path.exists() { 27 | let repo_path = repo_path.canonicalize()?; 28 | 29 | let repo_path = format!("file://{}", repo_path.display()); 30 | if opts.extra_repos.is_none() { 31 | opts.extra_repos = Some(vec![repo_path]); 32 | } else { 33 | opts.extra_repos.as_mut().unwrap().push(repo_path); 34 | } 35 | } else { 36 | debug!("No repodata found, skipping"); 37 | } 38 | 39 | opts.set_target(rpmb_opts.rpm_target.clone()); 40 | 41 | for repo in &rpmb_opts.extra_repos { 42 | if opts.extra_repos.is_none() { 43 | opts.extra_repos = Some(vec![repo.clone()]); 44 | } else { 45 | opts.extra_repos.as_mut().unwrap().push(repo.clone()); 46 | } 47 | } 48 | 49 | for rpmmacro in &rpmb_opts.rpm_macro { 50 | let split = rpmmacro.split_once(' '); 51 | if let Some((key, value)) = split { 52 | opts.def_macro(key, value); 53 | } else { 54 | return Err(eyre!("Invalid rpm macro: {rpmmacro}")); 55 | } 56 | } 57 | { 58 | // HACK: Define macro for autogitversion 59 | // get git version 60 | let commit_id 
= crate::util::get_commit_id_cwd(); 61 | 62 | let date = crate::util::get_date(); 63 | let mut tmp = String::new(); 64 | 65 | let autogitversion = commit_id.as_ref().map_or(&date, |commit| { 66 | tmp = format!("{date}.{}", commit.chars().take(8).collect::()); 67 | &tmp 68 | }); 69 | 70 | // limit to 16 chars 71 | 72 | opts.def_macro("autogitversion", autogitversion); 73 | 74 | opts.def_macro("autogitcommit", &commit_id.unwrap_or_else(|| "unknown".into())); 75 | 76 | opts.def_macro("autogitdate", &date); 77 | }; 78 | 79 | trace!("Building RPMs with {opts:?}"); 80 | 81 | let builder = builder.build(spec, opts).await?; 82 | 83 | cmd!(? "createrepo_c" "--quiet" "--update" {{repo_path.display()}})?; 84 | 85 | Ok(builder) 86 | } 87 | 88 | /// Build a flatpak package. 89 | /// 90 | /// # Errors 91 | /// - cannot create bundle 92 | pub async fn build_flatpak( 93 | output_dir: &Path, 94 | manifest: &Path, 95 | flatpak_opts: &mut FlatpakOpts, 96 | ) -> Result> { 97 | let mut artifacts = Vec::new(); 98 | 99 | let out = output_dir.join("flatpak"); 100 | 101 | let flat_out = out.join("build"); 102 | let flat_repo = out.join("repo"); 103 | let flat_bundles = out.join("bundles"); 104 | 105 | let mut builder = FlatpakBuilder::new(flat_out, flat_repo, flat_bundles); 106 | 107 | for extra_source in &mut flatpak_opts.extra_sources { 108 | builder.add_extra_source(PathBuf::from(std::mem::take(extra_source))); 109 | } 110 | 111 | for extra_source_url in &mut flatpak_opts.extra_sources_url { 112 | builder.add_extra_source_url(std::mem::take(extra_source_url)); 113 | } 114 | 115 | if !flatpak_opts.dont_delete_build_dir { 116 | builder.add_extra_args("--delete-build-dirs".to_owned()); 117 | } 118 | 119 | let flatpak = builder.build(manifest).await?; 120 | artifacts.push(FlatpakArtifact::Ref(flatpak.clone())); 121 | artifacts.push(FlatpakArtifact::Bundle(builder.bundle(&flatpak).await?)); 122 | 123 | Ok(artifacts) 124 | } 125 | 126 | macro_rules! 
// Runs an AndaX (Rhai) hook script and round-trips the listed variables
// through the script scope.
//
// `$name` – label used in logs (usually the spec/manifest path)
// `$scr`  – path to the .rhai script
// `$var…` – mutable locals pushed into the scope before the run and read
//           back afterwards, so scripts can mutate e.g. build options.
//
// On script failure, logs the script path and makes the ENCLOSING function
// return an `eyre!` error (the macro expands a `return Err(...)`).
macro_rules! script {
    ($name:expr, $scr:expr, $( $var:ident ),*) => {
        let sc = andax::run(
            $name,
            &$scr,
            std::iter::once(("script_path".to_string(), $scr.to_string_lossy().to_string())),
            |_sc| {
                $( _sc.push(stringify!($var), $var); )*
            },
        );
        #[allow(unused_assignments)]
        if let Some(_sc) = sc {
            // Pull the (possibly mutated) values back out of the scope.
            $( $var = _sc.get_value(stringify!($var)).expect(concat!("No `{}` in scope", stringify!($var))); )*
        } else {
            error!(
                scr = $scr.display().to_string(),
                concat!(stringify!($scr), " —— failed with aforementioned exception.")
            );
            return Err(eyre!(concat!(stringify!($scr), " failed")));
        }
    };
}
"sh" "-c" {{ post_script.display() }})?; 187 | } 188 | } 189 | 190 | for artifact in art { 191 | artifact_store.add(artifact.to_string_lossy().to_string(), PackageType::Rpm); 192 | } 193 | 194 | Ok(()) 195 | } 196 | 197 | pub async fn build_flatpak_call( 198 | cli: &Cli, 199 | flatpak: &Flatpak, 200 | artifact_store: &mut Artifacts, 201 | mut flatpak_opts: FlatpakOpts, 202 | ) -> Result<()> { 203 | if let Some(pre_script) = &flatpak.pre_script { 204 | script!( 205 | flatpak.manifest.as_path().to_str().unwrap_or(""), 206 | pre_script, 207 | flatpak_opts 208 | ); 209 | } 210 | 211 | let art = build_flatpak(&cli.target_dir, &flatpak.manifest, &mut flatpak_opts).await.unwrap(); 212 | 213 | for artifact in art { 214 | artifact_store.add(artifact.to_string(), PackageType::Flatpak); 215 | } 216 | 217 | if let Some(post_script) = &flatpak.post_script { 218 | script!(flatpak.manifest.as_path().to_str().unwrap_or(""), post_script,); 219 | } 220 | 221 | Ok(()) 222 | } 223 | 224 | pub fn build_oci_call( 225 | backend: OCIBackend, 226 | _cli: &Cli, 227 | manifest: &mut Docker, 228 | artifact_store: &mut Artifacts, 229 | ) { 230 | let art_type = match backend { 231 | OCIBackend::Docker => PackageType::Docker, 232 | OCIBackend::Podman => PackageType::Podman, 233 | }; 234 | 235 | for (tag, image) in std::mem::take(&mut manifest.image) { 236 | let art = build_oci( 237 | backend, 238 | &image.dockerfile.unwrap(), 239 | image.tag_latest.unwrap_or(false), 240 | &tag, 241 | &image.version.unwrap_or_else(|| "latest".into()), 242 | &image.context, 243 | ); 244 | 245 | for artifact in art { 246 | artifact_store.add(artifact.clone(), art_type); 247 | } 248 | } 249 | } 250 | 251 | // project parser 252 | 253 | pub async fn build_project( 254 | cli: &Cli, 255 | mut proj: Project, 256 | package: PackageType, 257 | rbopts: &RpmOpts, 258 | fpopts: &FlatpakOpts, 259 | _oci_opts: &OciOpts, 260 | ) -> Result<()> { 261 | let cwd = std::env::current_dir().unwrap(); 262 | 263 | let mut rpm_opts = 
RPMOptions::new(rbopts.mock_config.clone(), cwd, cli.target_dir.clone()); 264 | 265 | // export environment variables 266 | if let Some(env) = proj.env.as_ref() { 267 | env.iter().for_each(|(k, v)| std::env::set_var(k, v)); 268 | } 269 | 270 | if let Some(pre_script) = &proj.pre_script { 271 | if pre_script.extension().unwrap_or_default() == "rhai" { 272 | script!("pre_script", pre_script,); 273 | } else { 274 | cmd!(? "sh" "-c" {{ pre_script.display() }})?; 275 | } 276 | } 277 | 278 | if let Some(rpmbuild) = &proj.rpm { 279 | rpm_opts.extra_repos = Some(rpmbuild.extra_repos.clone()); 280 | if let Some(srcdir) = &rpmbuild.sources { 281 | rpm_opts.sources.clone_from(srcdir); 282 | } 283 | rpm_opts.no_mirror = rbopts.no_mirrors; 284 | rpm_opts.def_macro("_disable_source_fetch", "0"); 285 | rpm_opts.config_opts.push("external_buildrequires=True".to_owned()); 286 | 287 | if let Some(bool) = rpmbuild.enable_scm { 288 | rpm_opts.scm_enable = bool; 289 | } 290 | 291 | if let Some(scm_opt) = &rpmbuild.scm_opts { 292 | rpm_opts.scm_opts = scm_opt.iter().map(|(k, v)| format!("{k}={v}")).collect(); 293 | } 294 | 295 | if let Some(cfg) = &rpmbuild.config { 296 | rpm_opts.config_opts.extend(cfg.iter().map(|(k, v)| format!("{k}={v}")).collect_vec()); 297 | } 298 | 299 | if let Some(plugin_opt) = &rpmbuild.plugin_opts { 300 | rpm_opts.plugin_opts = plugin_opt.iter().map(|(k, v)| format!("{k}={v}")).collect(); 301 | } 302 | 303 | if rbopts.mock_config.is_none() { 304 | if let Some(mockcfg) = &rbopts.mock_config { 305 | rpm_opts.mock_config = Some(mockcfg.to_owned()); 306 | } 307 | // TODO: Implement global settings 308 | } 309 | } 310 | let mut arts = Artifacts::new(); 311 | 312 | _build_pkg(package, &mut proj, cli, rpm_opts, rbopts, &mut arts, fpopts).await?; 313 | 314 | for (path, arttype) in arts.packages { 315 | let type_string = match arttype { 316 | PackageType::Rpm => "RPM", 317 | PackageType::Docker => "Docker image", 318 | PackageType::Podman => "Podman image", 319 | 
PackageType::Flatpak => "flatpak", 320 | // PackageType::RpmOstree => "rpm-ostree compose", 321 | PackageType::All => unreachable!(), 322 | }; 323 | println!("Built {type_string}: {path}"); 324 | } 325 | 326 | if let Some(post_script) = &proj.post_script { 327 | if post_script.extension().unwrap_or_default() == "rhai" { 328 | script!("post_script", post_script,); 329 | } else { 330 | cmd!(? "sh" "-c" {{ post_script.display() }})?; 331 | } 332 | } 333 | 334 | Ok(()) 335 | } 336 | 337 | async fn _build_pkg( 338 | package: PackageType, 339 | proj: &mut Project, 340 | cli: &Cli, 341 | rpm_opts: RPMOptions, 342 | rbopts: &RpmOpts, 343 | arts: &mut Artifacts, 344 | fpopts: &FlatpakOpts, 345 | ) -> Result<(), color_eyre::Report> { 346 | match package { 347 | PackageType::All => build_all(proj, cli, rpm_opts, rbopts, arts, fpopts).await?, 348 | PackageType::Rpm => { 349 | if let Some(rpmbuild) = &proj.rpm { 350 | build_rpm_call(cli, rpm_opts, rpmbuild, rbopts.rpm_builder.into(), arts, rbopts) 351 | .await 352 | .with_context(|| "Failed to build RPMs".to_owned())?; 353 | } else { 354 | println!("No RPM build defined for project"); 355 | } 356 | } 357 | PackageType::Docker => { 358 | proj.docker.as_mut().map_or_else( 359 | || println!("No Docker build defined for project"), 360 | |docker| build_oci_call(OCIBackend::Docker, cli, docker, arts), 361 | ); 362 | } 363 | PackageType::Podman => { 364 | proj.podman.as_mut().map_or_else( 365 | || println!("No Podman build defined for project"), 366 | |podman| build_oci_call(OCIBackend::Podman, cli, podman, arts), 367 | ); 368 | } 369 | PackageType::Flatpak => { 370 | if let Some(flatpak) = &proj.flatpak { 371 | build_flatpak_call(cli, flatpak, arts, fpopts.clone()) 372 | .await 373 | .with_context(|| "Failed to build Flatpaks".to_owned())?; 374 | } else { 375 | println!("No Flatpak build defined for project"); 376 | } 377 | } // PackageType::RpmOstree => todo!(), 378 | }; 379 | Ok(()) 380 | } 381 | 382 | async fn build_all( 383 | 
project: &mut Project, 384 | cli: &Cli, 385 | rpm_opts: RPMOptions, 386 | rbopts: &RpmOpts, 387 | artifacts: &mut Artifacts, 388 | flatpak_opts: &FlatpakOpts, 389 | ) -> Result<(), color_eyre::Report> { 390 | if let Some(rpmbuild) = &project.rpm { 391 | build_rpm_call(cli, rpm_opts, rpmbuild, rbopts.rpm_builder.into(), artifacts, rbopts) 392 | .await 393 | .with_context(|| "Failed to build RPMs".to_owned())?; 394 | } 395 | if let Some(flatpak) = &project.flatpak { 396 | build_flatpak_call(cli, flatpak, artifacts, flatpak_opts.clone()) 397 | .await 398 | .with_context(|| "Failed to build Flatpaks".to_owned())?; 399 | } 400 | if let Some(podman) = project.podman.as_mut() { 401 | build_oci_call(OCIBackend::Podman, cli, podman, artifacts); 402 | } 403 | if let Some(docker) = project.docker.as_mut() { 404 | build_oci_call(OCIBackend::Docker, cli, docker, artifacts); 405 | } 406 | if let Some(scripts) = &project.scripts { 407 | info!("Running build scripts"); 408 | crate::update::run_scripts( 409 | scripts 410 | .iter() 411 | .map(|p| p.to_string_lossy().to_string()) 412 | .collect::>() 413 | .as_slice(), 414 | project.labels.iter().map(|(a, b)| (a.clone(), b.clone())).collect(), 415 | )?; 416 | }; 417 | Ok(()) 418 | } 419 | 420 | pub async fn builder( 421 | cli: &Cli, 422 | rpm_opts: RpmOpts, 423 | all: bool, 424 | project: Option, 425 | package: PackageType, 426 | flatpak_opts: FlatpakOpts, 427 | oci_opts: OciOpts, 428 | ) -> Result<()> { 429 | // Parse the project manifest 430 | // todo 431 | // ? can we assume cli.config won't be modified? 
432 | let config = anda_config::load_from_file(&cli.config.clone())?; 433 | trace!("all: {all}"); 434 | trace!("project: {project:?}"); 435 | trace!("package: {package:?}"); 436 | // export envars for CLI environment 437 | std::env::set_var("ANDA_TARGET_DIR", &cli.target_dir); 438 | std::env::set_var("ANDA_CONFIG_PATH", &cli.config); 439 | 440 | if all { 441 | for (name, project) in config.project { 442 | println!("Building project: {name}"); 443 | build_project(cli, project, package, &rpm_opts, &flatpak_opts, &oci_opts).await?; 444 | } 445 | } else { 446 | // find project named project 447 | if let Some(name) = project { 448 | if let Some(project) = config.get_project(&name) { 449 | // cannot take: get_project() returns immut ref 450 | build_project(cli, project.clone(), package, &rpm_opts, &flatpak_opts, &oci_opts) 451 | .await?; 452 | } else { 453 | return Err(eyre!("Project not found: {name}")); 454 | } 455 | } else { 456 | return Err(eyre!("No project specified")); 457 | } 458 | } 459 | Ok(()) 460 | } 461 | -------------------------------------------------------------------------------- /src/cli.rs: -------------------------------------------------------------------------------- 1 | // This module is included in the build.rs file so we can generate some CLI completions/man pages 2 | // If you want to add a crate in here, also add it to build-dependencies 3 | 4 | use clap::{Args, Parser, Subcommand, ValueEnum}; 5 | use clap_complete::Shell; 6 | use clap_verbosity_flag::InfoLevel; 7 | use std::{path::PathBuf, str::FromStr}; 8 | 9 | #[derive(ValueEnum, Debug, Clone, Copy, Default)] 10 | pub enum RPMBuilder { 11 | #[default] 12 | Mock, 13 | Rpmbuild, 14 | } 15 | 16 | #[derive(Copy, Clone, ValueEnum, Debug)] 17 | pub enum PackageType { 18 | Rpm, 19 | Docker, 20 | Podman, 21 | Flatpak, 22 | // RpmOstree, 23 | All, 24 | } 25 | 26 | impl FromStr for PackageType { 27 | type Err = String; 28 | 29 | fn from_str(s: &str) -> Result { 30 | match s { 31 | "rpm" => 
Ok(Self::Rpm), 32 | "docker" => Ok(Self::Docker), 33 | "podman" => Ok(Self::Podman), 34 | "flatpak" => Ok(Self::Flatpak), 35 | // "rpm-ostree" => Ok(Self::RpmOstree), 36 | "all" => Ok(Self::All), 37 | _ => Err(format!("Invalid package type: {s}")), 38 | } 39 | } 40 | } 41 | 42 | /// Andaman is a package building toolchain that can automate building packages in various formats, 43 | /// such as RPM, Flatpak, Docker, etc. 44 | /// 45 | /// It is designed to be used in a CI/CD pipeline, but can also be used locally. 46 | /// To use Andaman, you need a project manifest file in the root of your repository. 47 | /// The file in question is a HCL (Hashicorp Configuration Language) file, and it is called `anda.hcl`. 48 | /// The file is used to configure the build process, and it is used to define the build steps. 49 | /// 50 | #[derive(Parser, Debug)] 51 | #[clap(about, version)] 52 | // #[clap(global_setting = AppSettings::DeriveDisplayOrder)] 53 | pub struct Cli { 54 | #[clap(subcommand)] 55 | pub command: Command, 56 | 57 | /// Path to Andaman configuration file 58 | #[clap(default_value = "anda.hcl", short, long, env = "ANDA_CONFIG")] 59 | pub config: PathBuf, 60 | 61 | #[clap(flatten)] 62 | // #[clap(default_value = "info")] 63 | pub verbose: clap_verbosity_flag::Verbosity, 64 | 65 | /// Output directory for built packages 66 | #[clap(short, long, env = "TARGET_DIR", default_value = "anda-build")] 67 | pub target_dir: PathBuf, 68 | } 69 | 70 | #[derive(Args, Debug, Clone, Default)] 71 | pub struct FlatpakOpts { 72 | /// Flatpak: Extra source directory 73 | /// can be defined multiple times 74 | #[clap(long, group = "extra-source")] 75 | pub extra_sources: Vec, 76 | 77 | /// Flatpak: Extra source URL 78 | /// can be defined multiple times 79 | #[clap(long)] 80 | pub extra_sources_url: Vec, 81 | 82 | /// Flatpak: Do not delete the build directory 83 | #[clap(long, action)] 84 | pub dont_delete_build_dir: bool, 85 | } 86 | 87 | #[derive(Args, Debug, Clone, Default)] 88 
| pub struct OciOpts { 89 | /// OCI: Labels to add to the image 90 | #[clap(long)] 91 | pub label: Vec, 92 | 93 | /// OCI: Build Arguments to pass to the build 94 | #[clap(long)] 95 | pub build_arg: Vec, 96 | 97 | /// OCI: compress the context with gzip 98 | #[clap(long, action)] 99 | pub compress: bool, 100 | } 101 | 102 | #[derive(Args, Debug, Clone, Default)] 103 | pub struct RpmOpts { 104 | /// RPM: Do not mirror repositories. 105 | /// 106 | /// This flag sets the `mirror` config opt in your mock config to `false`, which most mock configs use to enable 107 | /// usage of the test repo in Fedora. 108 | /// The test repo is usually an internal Koji artifact repository used in its build tags. 109 | /// This is useful for quickly building from test repositories 110 | /// without having to wait for the compose to finish. 111 | /// 112 | /// This argument is ignored if the build is not RPM Mock. 113 | #[clap(long, action)] 114 | pub no_mirrors: bool, 115 | 116 | /// RPM: Builder backend 117 | #[clap(long, short, value_enum, default_value = "mock")] 118 | pub rpm_builder: RPMBuilder, 119 | 120 | /// RPM: Define a custom macro 121 | /// can be defined multiple times 122 | #[clap(short = 'D', long)] 123 | pub rpm_macro: Vec, 124 | 125 | /// RPM: A target to pass to rpmbuild/mock, useful for cross compilation 126 | #[clap(long)] 127 | pub rpm_target: Option, 128 | 129 | /// RPM: Mock configuration 130 | #[clap(long, short = 'c')] 131 | pub mock_config: Option, 132 | 133 | /// RPM: Extra repositories to pass to mock 134 | #[clap(long, short = 'R')] 135 | pub extra_repos: Vec, 136 | } 137 | 138 | #[derive(Subcommand, Debug, Clone)] 139 | #[allow(clippy::large_enum_variant)] 140 | pub enum Command { 141 | /// Build a project 142 | /// 143 | /// This is the main entrypoint of Andaman. 144 | /// This command optionally accepts a project name to build, or an `--all` flag to build all projects in the manifest. 
145 | /// If no project name is specified, and the `--all` flag is not specified, the program will exit with an error. 146 | Build { 147 | /// Builds all projects in the current directory 148 | #[clap(short, long, action)] 149 | all: bool, 150 | 151 | /// Project to build 152 | #[clap()] 153 | project: Option, 154 | 155 | /// Builds a specific artifact format 156 | #[clap(short, long, value_enum, default_value = "all")] 157 | package: PackageType, 158 | 159 | /// Options for RPM builds 160 | #[clap(flatten)] 161 | rpm_opts: RpmOpts, 162 | 163 | /// Options for Flatpak builds 164 | #[clap(flatten)] 165 | flatpak_opts: FlatpakOpts, 166 | 167 | /// Options for OCI builds 168 | #[clap(flatten)] 169 | oci_opts: OciOpts, 170 | }, 171 | /// Cleans up the build directory 172 | Clean, 173 | 174 | /// Lists all projects in the manifest 175 | List, 176 | 177 | /// Initializes a new project manifest 178 | Init { 179 | /// Path to the project manifest 180 | #[clap(default_value = ".")] 181 | path: PathBuf, 182 | 183 | /// Assume yes to all questions 184 | #[clap(short, long, action)] 185 | yes: bool, 186 | }, 187 | /// Generate shell completions 188 | Completion { 189 | /// Shell to generate completions for 190 | #[clap(value_enum)] 191 | shell: Shell, 192 | }, 193 | /// Get CI output for Github Actions 194 | CI, 195 | 196 | /// Update all projects 197 | Update { 198 | /// Labels to pass to the scripts 199 | #[clap(short, long)] 200 | labels: Vec, 201 | /// Only run update scripts in project with the specified labels 202 | /// 203 | /// This should be a comma-separated list of filters. 204 | /// Each time `--filters=...` is specified, the comma-separated list of key-values will be 205 | /// checked against a project. If missing or different, the project will be ignored. 206 | /// However, specifying `--filters` multiple times will create an "or" effect --- the 207 | /// project will not be ignored if it satisfies one of the list of `--filters`. 
For 208 | /// example, `-f a=1,b=2 -f c=3` means the project needs to satisfy either "a=1" and "b=2", 209 | /// or only "c=3". 210 | #[clap(short, long)] 211 | filters: Vec, 212 | /// Exclude update scripts in project with the specified labels 213 | /// 214 | /// This should be a comma-separated list of excludes. 215 | /// Each time `--exclude=...` is specified, the comma-separated list of key-values will be 216 | /// checked against the labels of a project, and it will be ignored if all the key-values 217 | /// are present. In addition, specifying `--exclude` multiple times will create an "or" 218 | /// effect --- a project will be excluded if it satisfies one of the list of `--filters`. 219 | /// For example, `-e a=1,b=2 -e c=3` means projects with "a=1" and "b=2" at the same time, 220 | /// or "c=3", are excluded. Projects with only "a=1" or "b=2" are not excluded. 221 | /// 222 | /// This will always override `--filters`. 223 | #[clap(short, long)] 224 | excludes: Vec, 225 | }, 226 | 227 | /// Run .rhai scripts 228 | Run { 229 | scripts: Vec, 230 | #[clap(short, long)] 231 | labels: Option, 232 | }, 233 | } 234 | -------------------------------------------------------------------------------- /src/flatpak.rs: -------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | use crate::util::CommandLog; 3 | use color_eyre::Report; 4 | use flatpak::application::FlatpakApplication; 5 | use std::{ 6 | env, 7 | fmt::Display, 8 | path::{Path, PathBuf}, 9 | }; 10 | use tokio::process::Command; 11 | type Result = std::result::Result; 12 | 13 | pub enum FlatpakArtifact { 14 | Ref(String), 15 | Bundle(PathBuf), 16 | } 17 | 18 | impl Display for FlatpakArtifact { 19 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 20 | match self { 21 | Self::Ref(r) => write!(f, "ref {r}"), 22 | Self::Bundle(b) => write!(f, "bundle {}", b.display()), 23 | } 24 | } 25 | } 26 | 27 | pub struct FlatpakBuilder { 28 | // The 
output directory for the flatpak build 29 | output_dir: PathBuf, 30 | // The output flatpak repository 31 | output_repo: PathBuf, 32 | 33 | // The bundles directory 34 | bundles_dir: PathBuf, 35 | // Extra sources as paths 36 | extra_sources: Vec, 37 | // Extra sources as URLs 38 | extra_sources_urls: Vec, 39 | // extra arguments to pass to flatpak-builder 40 | extra_args: Vec, 41 | } 42 | 43 | impl FlatpakBuilder { 44 | pub const fn new(output_dir: PathBuf, output_repo: PathBuf, bundles_dir: PathBuf) -> Self { 45 | Self { 46 | output_dir, 47 | output_repo, 48 | bundles_dir, 49 | extra_sources: Vec::new(), 50 | extra_sources_urls: Vec::new(), 51 | extra_args: Vec::new(), 52 | } 53 | } 54 | 55 | pub fn add_extra_source(&mut self, source: PathBuf) { 56 | self.extra_sources.push(source); 57 | } 58 | // Add extra sources from an iterator 59 | pub fn extra_sources_iter>(&mut self, iter: I) { 60 | self.extra_sources.extend(iter); 61 | } 62 | 63 | pub fn extra_args_iter>(&mut self, iter: I) { 64 | self.extra_args.extend(iter); 65 | } 66 | 67 | pub fn add_extra_args(&mut self, arg: String) { 68 | self.extra_args.push(arg); 69 | } 70 | 71 | pub fn add_extra_source_url(&mut self, source: String) { 72 | self.extra_sources_urls.push(source); 73 | } 74 | 75 | // Add extra sources from an iterator 76 | pub fn extra_sources_urls_iter>(&mut self, iter: I) { 77 | self.extra_sources_urls.extend(iter); 78 | } 79 | 80 | pub async fn build(&self, manifest: &Path) -> Result { 81 | // we parse the flatpak metadata file 82 | let flatpak_meta = FlatpakApplication::load_from_file(manifest.display().to_string()) 83 | .map_err(color_eyre::Report::msg)?; 84 | 85 | // create the flatpak output folders 86 | let output_dir = 87 | env::current_dir()?.join(".flatpak-builder/build").join(&flatpak_meta.app_id); 88 | std::fs::create_dir_all(&output_dir)?; 89 | std::fs::create_dir_all(&self.output_repo)?; 90 | 91 | // build the flatpak 92 | let mut flatpak = Command::new("flatpak-builder"); 93 | 
flatpak 94 | .arg(output_dir) 95 | .arg(manifest) 96 | .arg("--force-clean") 97 | .arg("--repo") 98 | .arg(self.output_repo.canonicalize().unwrap()); 99 | 100 | // add extra sources 101 | 102 | for source in &self.extra_sources { 103 | flatpak.arg("--extra-sources").arg(source); 104 | } 105 | 106 | for source in &self.extra_sources_urls { 107 | flatpak.arg("--extra-sources-url").arg(source); 108 | } 109 | 110 | flatpak.args(&self.extra_args); 111 | 112 | // run the command 113 | flatpak.log().await?; 114 | Ok(flatpak_meta.app_id) 115 | } 116 | 117 | pub async fn bundle(&self, app_id: &str) -> Result { 118 | std::fs::create_dir_all(&self.bundles_dir)?; 119 | let bundle_path = self.bundles_dir.join(format!("{app_id}.flatpak")); 120 | 121 | let mut flatpak = Command::new("flatpak"); 122 | 123 | flatpak 124 | .arg("build-bundle") 125 | .arg(self.output_repo.canonicalize().unwrap()) 126 | .arg(&bundle_path) 127 | .arg(app_id); 128 | 129 | flatpak.log().await?; 130 | 131 | Ok(bundle_path) 132 | } 133 | } 134 | 135 | #[cfg(test)] 136 | mod test_super {} 137 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod cli; 2 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | //! Andaman, a package build toolchain for RPM, OCI and Flatpak. 
2 | #![allow(clippy::module_name_repetitions)] 3 | #![deny(rust_2018_idioms)] 4 | 5 | mod artifacts; 6 | mod builder; 7 | mod cli; 8 | mod flatpak; 9 | mod oci; 10 | mod rpm_spec; 11 | mod update; 12 | mod util; 13 | use anda_config::parse_labels; 14 | use clap::{CommandFactory, Parser}; 15 | use clap_complete::generate; 16 | use cli::{Cli, Command}; 17 | use color_eyre::{eyre::eyre, Result}; 18 | use std::{io, mem::take}; 19 | use tracing::{debug, trace}; 20 | 21 | #[allow(clippy::unwrap_in_result)] 22 | #[allow(clippy::missing_errors_doc)] 23 | #[allow(clippy::missing_panics_doc)] 24 | #[tokio::main] 25 | async fn main() -> Result<()> { 26 | color_eyre::install()?; 27 | let mut cli = Cli::parse(); 28 | let mut app = Cli::command(); 29 | app.build(); 30 | 31 | tracing_log::LogTracer::init()?; 32 | let subscriber = tracing_subscriber::FmtSubscriber::builder() 33 | .with_max_level(util::convert_filter(cli.verbose.log_level_filter())) 34 | .event_format(tracing_subscriber::fmt::format().pretty()) 35 | .finish(); 36 | tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed"); 37 | 38 | trace!("Matching subcommand"); 39 | match cli.command { 40 | Command::Build { 41 | all, 42 | ref mut project, 43 | ref mut package, 44 | ref mut rpm_opts, 45 | ref mut flatpak_opts, 46 | ref mut oci_opts, 47 | } => { 48 | if project.is_none() && !all { 49 | // print help 50 | let mut app = Cli::command(); 51 | let a = app.find_subcommand_mut("build").unwrap(); 52 | let mut a = take(a).display_name("anda-build").name("anda-build"); 53 | a.print_help()?; 54 | return Err(eyre!("No project specified, and --all not specified.")); 55 | } 56 | 57 | let project = take(project); 58 | let package = std::mem::replace(package, cli::PackageType::Rpm); 59 | let flatpak_opts = take(flatpak_opts); 60 | let oci_opts = take(oci_opts); 61 | let rpm_opts = take(rpm_opts); 62 | debug!("{all:?}"); 63 | builder::builder(&cli, rpm_opts, all, project, package, flatpak_opts, 
oci_opts).await?; 64 | } 65 | Command::Clean => { 66 | println!("Cleaning up build directory"); 67 | let clean = std::fs::remove_dir_all(&cli.target_dir); 68 | if let Err(e) = clean { 69 | // match the errors 70 | match e.kind() { 71 | std::io::ErrorKind::NotFound => {} 72 | e => println!("Error cleaning up build directory: {e:?}"), 73 | } 74 | } 75 | } 76 | 77 | Command::List => { 78 | let config = anda_config::load_from_file(&cli.config)?; 79 | 80 | for (project_name, project) in &config.project { 81 | let project_alias = project 82 | .alias 83 | .as_ref() 84 | .map_or_else(String::new, |alias| format!(" ({})", alias.join(", "))); 85 | 86 | println!("{project_name}{project_alias}"); 87 | } 88 | 89 | trace!("{config:#?}"); 90 | } 91 | Command::Init { path, yes } => { 92 | // create a new project 93 | debug!("Creating new project in {}", path.display()); 94 | util::init(path.as_path(), yes)?; 95 | } 96 | Command::Completion { shell } => { 97 | generate(shell, &mut cli::Cli::command(), "anda", &mut io::stdout()); 98 | } 99 | Command::CI => { 100 | let config = anda_config::load_from_file(&cli.config)?; 101 | let entries = util::fetch_build_entries(config); 102 | 103 | println!("build_matrix={}", serde_json::to_string(&entries)?); 104 | } 105 | Command::Update { labels, filters, excludes } => { 106 | let labels = parse_labels(labels.iter().map(std::ops::Deref::deref)) 107 | .ok_or_else(|| eyre!("Cannot parse --labels"))?; 108 | let filters = anda_config::parse_filters(&filters) 109 | .ok_or_else(|| eyre!("Cannot parse --filters"))?; 110 | let excludes = anda_config::parse_filters(&excludes) 111 | .ok_or_else(|| eyre!("Cannot parse --excludes"))?; 112 | update::update(anda_config::load_from_file(&cli.config)?, labels, filters, excludes)?; 113 | } 114 | Command::Run { scripts, labels } => { 115 | if scripts.is_empty() { 116 | return Err(eyre!("No scripts to run")); 117 | } 118 | let labels = parse_labels(labels.iter().map(std::ops::Deref::deref)) 119 | .ok_or_else(|| 
eyre!("Cannot parse --labels"))?; 120 | update::run_scripts(&scripts, labels)?; 121 | } 122 | } 123 | Ok(()) 124 | } 125 | -------------------------------------------------------------------------------- /src/oci.rs: -------------------------------------------------------------------------------- 1 | //! OCI Builder backend 2 | //! Supports Docker and Podman 3 | use std::process::Command; 4 | 5 | #[derive(Clone, Copy)] 6 | pub enum OCIBackend { 7 | Docker, 8 | Podman, 9 | } 10 | 11 | impl OCIBackend { 12 | pub fn command(self) -> Command { 13 | let cmd = match self { 14 | Self::Docker => "docker", 15 | Self::Podman => "podman", 16 | }; 17 | 18 | Command::new(cmd) 19 | } 20 | } 21 | 22 | pub struct OCIBuilder { 23 | context: String, 24 | tag: String, 25 | version: String, 26 | label: Vec, 27 | } 28 | 29 | impl OCIBuilder { 30 | pub const fn new(context: String, tag: String, version: String) -> Self { 31 | Self { context, tag, version, label: Vec::new() } 32 | } 33 | 34 | pub fn add_label(&mut self, label: String) { 35 | self.label.push(label); 36 | } 37 | 38 | // We use string here because we want to let people use stuff like git contexts 39 | pub fn build(&self, dockerfile: &str, backend: OCIBackend, latest: bool) { 40 | let mut cmd = backend.command(); 41 | 42 | let real_tag = &format!("{}:{}", &self.tag, self.version); 43 | 44 | cmd.arg("build") 45 | .arg(&self.context) 46 | .arg("-f") 47 | .arg(dockerfile) 48 | .arg("-t") 49 | .env("DOCKER_BUILDKIT", "1") 50 | .arg(real_tag); 51 | 52 | if latest { 53 | cmd.arg("-t").arg(format!("{}:latest", &self.tag)); 54 | } 55 | 56 | for label in &self.label { 57 | cmd.arg("--label").arg(label); 58 | } 59 | } 60 | } 61 | 62 | pub fn build_oci( 63 | backend: OCIBackend, 64 | dockerfile: &str, 65 | latest: bool, 66 | tag: &str, 67 | version: &str, 68 | context: &str, 69 | ) -> Vec { 70 | let mut builder = OCIBuilder::new(context.to_owned(), tag.to_owned(), version.to_owned()); 71 | 
    // Stamp every image with the Andaman version that produced it.
    builder.add_label(format!("com.fyralabs.anda.version={}", env!("CARGO_PKG_VERSION")));

    builder.build(dockerfile, backend, latest);

    // Tags this build is expected to produce; `latest` adds a floating tag.
    let mut tags = vec![format!("{tag}:{version}")];

    if latest {
        tags.push(format!("{tag}:latest"));
    }
    tags
}
--------------------------------------------------------------------------------
/src/rpm_spec.rs:
--------------------------------------------------------------------------------
//! RPM spec building backend for Andaman
//! This modules provides the RPM spec builder backend, which builds RPMs
//! from a spec file.

#![allow(dead_code)]

use clap::clap_derive::ValueEnum;
use tempfile::TempDir;

use crate::util::CommandLog;
use async_trait::async_trait;
use color_eyre::{eyre::eyre, Report, Result};
use std::mem::take;
use std::path::{Path, PathBuf};
use std::{collections::BTreeMap, str::FromStr};
use tokio::process::Command;
use tracing::{debug, info};

/// Options shared by both RPM build backends (mock and rpmbuild).
/// Mock-only fields are ignored by the rpmbuild backend.
#[derive(Clone, Debug)]
pub struct RPMOptions {
    /// Mock config, only used if backend is mock
    pub mock_config: Option<String>,
    /// With flags
    pub with: Vec<String>,
    /// Without flags
    pub without: Vec<String>,
    /// Build target, used for cross-compile
    pub target: Option<String>,
    /// Path to sources
    pub sources: PathBuf,
    /// Output directory
    pub resultdir: PathBuf,
    /// Extra repos
    /// Only used if backend is mock
    pub extra_repos: Option<Vec<String>>,
    /// Do not use mirrors
    /// Only used if backend is mock
    pub no_mirror: bool,
    /// Custom RPM macros to define
    pub macros: BTreeMap<String, String>,
    /// Config options for Mock
    pub config_opts: Vec<String>,
    /// Enable SCM support
    pub scm_enable: bool,
    /// SCM Options (mock)
    pub scm_opts: Vec<String>,
    /// Plugin Options (mock)
    pub plugin_opts: Vec<String>,
}

impl RPMOptions {
    /// Create options with the mandatory paths; all other fields start empty.
    pub const fn new(mock_config: Option<String>, sources: PathBuf, resultdir: PathBuf) -> Self {
        Self {
            mock_config,
            with: Vec::new(),
            without: Vec::new(),
            target: None,
            sources,
            resultdir,
            extra_repos: None,
            no_mirror: false,
            macros: BTreeMap::new(),
            config_opts: Vec::new(),
            scm_enable: false,
            scm_opts: Vec::new(),
            plugin_opts: Vec::new(),
        }
    }
    /// Append an extra repo, initializing the `Option` on first use.
    pub fn add_extra_repo(&mut self, repo: String) {
        if let Some(ref mut repos) = self.extra_repos {
            repos.push(repo);
        } else {
            self.extra_repos = Some(vec![repo]);
        }
    }

    /// Setter for the mock-only `no_mirror` flag.
    pub fn no_mirror(&mut self, no_mirror: bool) {
        self.no_mirror = no_mirror;
    }
}

impl RPMExtraOptions for RPMOptions {
    fn with_flags(&self) -> Vec<String> {
        self.with.clone()
    }
    fn with_flags_mut(&mut self) -> &mut Vec<String> {
        &mut self.with
    }
    fn without_flags(&self) -> Vec<String> {
        self.without.clone()
    }
    fn without_flags_mut(&mut self) -> &mut Vec<String> {
        &mut self.without
    }
    fn macros(&self) -> BTreeMap<String, String> {
        self.macros.clone()
    }
    fn macros_mut(&mut self) -> &mut BTreeMap<String, String> {
        &mut self.macros
    }
    fn set_target(&mut self, target: Option<String>) {
        self.target = target;
    }
}

/// Backend selector, parsed from the CLI (`--rpm-builder`).
#[derive(ValueEnum, Debug, Clone, Copy)]
pub enum RPMBuilder {
    Mock,
    Rpmbuild,
}

impl FromStr for RPMBuilder {
    type Err = Report;
    fn from_str(s: &str) -> Result<Self> {
        match s {
            "mock" => Ok(Self::Mock),
            "rpmbuild" => Ok(Self::Rpmbuild),
            _ => Err(eyre!("Invalid RPM builder: {s}")),
        }
    }
}

// Bridge from the clap-level enum in `src/cli.rs` to this backend enum.
impl From<crate::cli::RPMBuilder> for RPMBuilder {
    fn from(builder: crate::cli::RPMBuilder) -> Self {
        match builder {
            crate::cli::RPMBuilder::Mock => Self::Mock,
            crate::cli::RPMBuilder::Rpmbuild => Self::Rpmbuild,
        }
    }
}

impl RPMBuilder {
    /// Build the RPMs.
134 | /// 135 | /// # Errors 136 | /// This inherits errors from `RPMSpecBackend::build()`. 137 | pub async fn build(&self, spec: &Path, options: &mut RPMOptions) -> Result> { 138 | // TODO: take ownership of `options` 139 | if matches!(self, Self::Mock) { 140 | let mut mock = MockBackend::new( 141 | take(&mut options.mock_config), 142 | take(&mut options.sources), 143 | take(&mut options.resultdir), 144 | ); 145 | if let Some(extra_repos) = options.extra_repos.take() { 146 | for extra_repo in extra_repos { 147 | mock.add_extra_repo(extra_repo); 148 | } 149 | } 150 | options.macros.iter().for_each(|(k, v)| { 151 | mock.def_macro(k, v); 152 | }); 153 | mock.target(take(&mut options.target)); 154 | mock.with_flags_mut().extend(take(&mut options.with)); 155 | mock.without_flags_mut().extend(take(&mut options.without)); 156 | mock.extend_config_opts(take(&mut options.config_opts)); 157 | mock.no_mirror(options.no_mirror); 158 | mock.enable_scm(options.scm_enable); 159 | mock.extend_scm_opts(take(&mut options.scm_opts)); 160 | mock.plugin_opts(take(&mut options.plugin_opts)); 161 | 162 | mock.build(spec).await 163 | } else { 164 | let mut rpmbuild = 165 | RPMBuildBackend::new(take(&mut options.sources), take(&mut options.resultdir)); 166 | 167 | options.macros.iter().for_each(|(k, v)| { 168 | rpmbuild.def_macro(k, v); 169 | }); 170 | 171 | rpmbuild.set_target(take(&mut options.target)); 172 | rpmbuild.with_flags_mut().extend(take(&mut options.with)); 173 | rpmbuild.without_flags_mut().extend(take(&mut options.without)); 174 | 175 | rpmbuild.build(spec).await 176 | } 177 | } 178 | } 179 | 180 | #[async_trait::async_trait] 181 | pub trait RPMSpecBackend { 182 | async fn build_srpm(&self, spec: &Path) -> Result; 183 | async fn build_rpm(&self, spec: &Path) -> Result>; 184 | 185 | async fn build(&self, spec: &Path) -> Result> { 186 | self.build_rpm(&self.build_srpm(spec).await?).await 187 | } 188 | } 189 | 190 | pub trait RPMExtraOptions { 191 | /// Lists all macros 192 | 
fn macros(&self) -> BTreeMap; 193 | /// Returns macros as a mutable reference 194 | /// This is useful for advanced macro manipulation 195 | fn macros_mut(&mut self) -> &mut BTreeMap; 196 | 197 | /// Set target, used for cross-compile 198 | fn set_target(&mut self, target: Option); 199 | 200 | /// Adds a list of macros from an iterator 201 | fn macros_iter(&mut self, iter: I) 202 | where 203 | I: IntoIterator, 204 | { 205 | self.macros_mut().extend(iter); 206 | } 207 | 208 | /// Defines a macro 209 | fn def_macro(&mut self, name: &str, value: &str) { 210 | self.macros_mut().insert(name.to_owned(), value.to_owned()); 211 | } 212 | /// Undefines a macro 213 | fn undef_macro(&mut self, name: &str) { 214 | self.macros_mut().remove(name); 215 | } 216 | 217 | // Configuration flags 218 | // === with flags === 219 | /// Returns a list of `with` flags 220 | fn with_flags(&self) -> Vec; 221 | 222 | /// Returns a mutable reference to the `with` flags 223 | fn with_flags_mut(&mut self) -> &mut Vec; 224 | 225 | /// Sets a `with` flag for the build from an iterator 226 | fn with_flags_iter(&mut self, iter: I) 227 | where 228 | I: IntoIterator, 229 | { 230 | self.with_flags_mut().extend(iter); 231 | } 232 | 233 | // === without flags === 234 | /// Returns a list of `without` flags 235 | fn without_flags(&self) -> Vec; 236 | 237 | /// Returns a mutable reference to the `without` flags 238 | fn without_flags_mut(&mut self) -> &mut Vec; 239 | 240 | /// Sets a `without` flag for the build from an iterator 241 | fn without_flags_iter(&mut self, iter: I) 242 | where 243 | I: IntoIterator, 244 | { 245 | self.without_flags_mut().extend(iter); 246 | } 247 | } 248 | 249 | /// An RPM spec backend that uses Mock to build RPMs 250 | pub struct MockBackend { 251 | mock_config: Option, 252 | with: Vec, 253 | without: Vec, 254 | sources: PathBuf, 255 | resultdir: PathBuf, 256 | extra_repos: Vec, 257 | no_mirror: bool, 258 | macros: BTreeMap, 259 | config_opts: Vec, 260 | scm_enable: bool, 261 | 
scm_opts: Vec, 262 | plugin_opts: Vec, 263 | target: Option, 264 | } 265 | 266 | impl RPMExtraOptions for MockBackend { 267 | fn with_flags(&self) -> Vec { 268 | self.with.clone() 269 | } 270 | fn with_flags_mut(&mut self) -> &mut Vec { 271 | &mut self.with 272 | } 273 | fn without_flags(&self) -> Vec { 274 | self.without.clone() 275 | } 276 | fn without_flags_mut(&mut self) -> &mut Vec { 277 | &mut self.without 278 | } 279 | fn macros(&self) -> BTreeMap { 280 | self.macros.clone() 281 | } 282 | fn macros_mut(&mut self) -> &mut BTreeMap { 283 | &mut self.macros 284 | } 285 | fn set_target(&mut self, target: Option) { 286 | self.target = target; 287 | } 288 | } 289 | 290 | impl MockBackend { 291 | pub const fn new(mock_config: Option, sources: PathBuf, resultdir: PathBuf) -> Self { 292 | Self { 293 | mock_config, 294 | with: Vec::new(), 295 | without: Vec::new(), 296 | sources, 297 | resultdir, 298 | extra_repos: Vec::new(), 299 | no_mirror: false, 300 | macros: BTreeMap::new(), 301 | config_opts: Vec::new(), 302 | scm_enable: false, 303 | scm_opts: Vec::new(), 304 | plugin_opts: Vec::new(), 305 | target: None, 306 | } 307 | } 308 | 309 | pub fn extend_config_opts(&mut self, opts: Vec) { 310 | self.config_opts.extend(opts); 311 | } 312 | 313 | pub fn add_config_opt(&mut self, opt: String) { 314 | self.config_opts.push(opt); 315 | } 316 | 317 | pub fn add_extra_repo(&mut self, repo: String) { 318 | self.extra_repos.push(repo); 319 | } 320 | pub fn no_mirror(&mut self, no_mirror: bool) { 321 | self.no_mirror = no_mirror; 322 | } 323 | 324 | pub fn enable_scm(&mut self, enable: bool) { 325 | self.scm_enable = enable; 326 | } 327 | 328 | pub fn extend_scm_opts(&mut self, opts: Vec) { 329 | self.scm_opts.extend(opts); 330 | } 331 | 332 | pub fn add_scm_opt(&mut self, opt: String) { 333 | self.scm_opts.push(opt); 334 | } 335 | 336 | pub fn plugin_opts(&mut self, opts: Vec) { 337 | self.plugin_opts.extend(opts); 338 | } 339 | 340 | pub fn target(&mut self, target: Option) 
{ 341 | self.target = target; 342 | } 343 | 344 | pub fn mock(&self) -> Command { 345 | let mut cmd = Command::new("mock"); 346 | 347 | if let Some(config) = &self.mock_config { 348 | cmd.arg("-r").arg(config); 349 | } 350 | 351 | // cmd.arg("--verbose"); 352 | 353 | if let Some(target) = &self.target { 354 | cmd.arg("--target").arg(target); 355 | } 356 | 357 | self.extra_repos.iter().for_each(|repo| { 358 | cmd.arg("-a").arg(repo); 359 | }); 360 | 361 | self.with.iter().for_each(|with| { 362 | cmd.arg("--with").arg(with); 363 | }); 364 | 365 | self.without.iter().for_each(|without| { 366 | cmd.arg("--without").arg(without); 367 | }); 368 | 369 | self.macros.iter().for_each(|(name, value)| { 370 | cmd.arg("-D").arg(format!("{name} {value}")); 371 | }); 372 | 373 | if self.no_mirror { 374 | cmd.arg("--config-opts").arg("mirrored=False"); 375 | } 376 | 377 | self.config_opts.iter().for_each(|opt| { 378 | cmd.arg("--config-opts").arg(opt); 379 | }); 380 | 381 | if self.scm_enable { 382 | cmd.arg("--scm-enable"); 383 | } 384 | 385 | self.scm_opts.iter().for_each(|scm| { 386 | cmd.arg("--scm-option").arg(scm); 387 | }); 388 | 389 | cmd 390 | } 391 | } 392 | 393 | #[async_trait] 394 | impl RPMSpecBackend for MockBackend { 395 | async fn build_srpm(&self, spec: &Path) -> Result { 396 | let mut cmd = self.mock(); 397 | let tmp = tempfile::Builder::new().prefix("anda-srpm").tempdir()?; 398 | 399 | // todo: Probably copy the spec file and the sources to rpmbuild/SOURCES or some kind of temp dir instead 400 | // of building everything in the specfile's directory. 
401 | 402 | cmd.arg("--buildsrpm") 403 | .arg("--spec") 404 | .arg(spec) 405 | .arg("--sources") 406 | .arg(&self.sources) 407 | .arg("--resultdir") 408 | .arg(tmp.path()) 409 | .arg("--enable-network"); 410 | 411 | // cmd.status()?; 412 | 413 | cmd.log().await?; 414 | 415 | // find srpm in resultdir using walkdir 416 | 417 | // let mut srpm = None; 418 | 419 | for entry in walkdir::WalkDir::new(tmp.path()) { 420 | let entry = entry?; 421 | debug!("entry: {:?}", entry.file_name()); 422 | if entry.file_name().to_string_lossy().ends_with(".src.rpm") { 423 | // srpm = Some(entry.path().to_path_buf()); 424 | // eprintln!("found srpm: {:?}", srpm); 425 | 426 | info!("Moving srpm to resultdir..."); 427 | // create srpm dir if it doesnt exist 428 | let srpm_dir = self.resultdir.join("rpm/srpm"); 429 | std::fs::create_dir_all(&srpm_dir)?; 430 | let dest = srpm_dir.join(entry.file_name()); 431 | std::fs::copy(entry.path(), &dest)?; 432 | return Ok(dest); 433 | } 434 | } 435 | 436 | Err(eyre!("Failed to find srpm")) 437 | } 438 | async fn build_rpm(&self, spec: &Path) -> Result> { 439 | let mut cmd = self.mock(); 440 | let tmp = tempfile::Builder::new().prefix("anda-rpm").tempdir()?; 441 | cmd.arg("--rebuild").arg(spec).arg("--enable-network").arg("--resultdir").arg(tmp.path()); 442 | 443 | cmd.log().await?; 444 | 445 | // find rpms in resultdir using walkdir 446 | 447 | let mut rpms = Vec::new(); 448 | 449 | for entry in walkdir::WalkDir::new(tmp.path()) { 450 | let entry = entry?; 451 | //eprintln!("entry: {:?}", entry.file_name()); 452 | 453 | if entry.file_name().to_string_lossy().ends_with(".src.rpm") { 454 | } else if entry.file_name().to_string_lossy().ends_with(".rpm") { 455 | //rpms.push(entry.path().to_path_buf()); 456 | //eprintln!("found rpm: {:?}", rpms); 457 | 458 | let rpms_dir = self.resultdir.join("rpm/rpms"); 459 | std::fs::create_dir_all(&rpms_dir)?; 460 | let dest = rpms_dir.join(entry.file_name()); 461 | std::fs::copy(entry.path(), &dest)?; 462 | 
rpms.push(dest); 463 | } 464 | } 465 | //println!("rpms: {:?}", rpms); 466 | Ok(rpms) 467 | } 468 | } 469 | 470 | /// Pure rpmbuild backend for building inside host 471 | /// 472 | /// This is faster than mock due to not having to spin up a chroot, but 473 | /// it requires the host to have all the dependencies instead. 474 | /// It is also useful when building in unprivileged containers, as mock requires some 475 | /// privileges to run a chroot. 476 | /// 477 | /// This backend is not recommended when building distros, as all changes will not 478 | /// be reflected for every package. 479 | pub struct RPMBuildBackend { 480 | sources: PathBuf, 481 | resultdir: PathBuf, 482 | with: Vec, 483 | without: Vec, 484 | target: Option, 485 | macros: BTreeMap, 486 | } 487 | 488 | impl RPMExtraOptions for RPMBuildBackend { 489 | fn with_flags(&self) -> Vec { 490 | self.with.clone() 491 | } 492 | fn with_flags_mut(&mut self) -> &mut Vec { 493 | &mut self.with 494 | } 495 | fn without_flags(&self) -> Vec { 496 | self.without.clone() 497 | } 498 | fn without_flags_mut(&mut self) -> &mut Vec { 499 | &mut self.without 500 | } 501 | fn macros(&self) -> BTreeMap { 502 | self.macros.clone() 503 | } 504 | fn macros_mut(&mut self) -> &mut BTreeMap { 505 | &mut self.macros 506 | } 507 | fn set_target(&mut self, target: Option) { 508 | self.target = target; 509 | } 510 | } 511 | 512 | impl RPMBuildBackend { 513 | pub const fn new(sources: PathBuf, resultdir: PathBuf) -> Self { 514 | Self { 515 | sources, 516 | resultdir, 517 | with: Vec::new(), 518 | without: Vec::new(), 519 | macros: BTreeMap::new(), 520 | target: None, 521 | } 522 | } 523 | 524 | pub fn rpmbuild(&self) -> Command { 525 | let mut cmd = Command::new("rpmbuild"); 526 | 527 | for with in &self.with { 528 | cmd.arg("--with").arg(with); 529 | } 530 | 531 | for without in &self.without { 532 | cmd.arg("--without").arg(without); 533 | } 534 | 535 | for (name, value) in &self.macros { 536 | cmd.arg("-D").arg(format!("{name} 
{value}")); 537 | } 538 | 539 | cmd 540 | } 541 | } 542 | 543 | #[async_trait] 544 | impl RPMSpecBackend for RPMBuildBackend { 545 | async fn build_srpm(&self, spec: &Path) -> Result { 546 | let mut cmd = self.rpmbuild(); 547 | let tmp = tempfile::Builder::new().prefix("anda-srpm").tempdir()?; 548 | 549 | cmd.arg("-br") 550 | .arg(spec) 551 | .arg("--define") 552 | .arg(format!("_sourcedir {}", self.sources.canonicalize()?.display())) 553 | .arg("--define") 554 | .arg(format!("_srcrpmdir {}", tmp.path().display())); 555 | 556 | cmd.log().await?; 557 | 558 | // find srpm in resultdir using walkdir 559 | 560 | for entry in walkdir::WalkDir::new(tmp.path()) { 561 | let entry = entry?; 562 | debug!("entry: {:?}", entry.file_name()); 563 | if entry.file_name().to_string_lossy().ends_with(".src.rpm") { 564 | // srpm = Some(entry.path().to_path_buf()); 565 | // eprintln!("found srpm: {:?}", srpm); 566 | 567 | info!("Moving srpm to resultdir..."); 568 | // create srpm dir if it doesnt exist 569 | let srpm_dir = self.resultdir.join("rpm/srpm"); 570 | std::fs::create_dir_all(&srpm_dir)?; 571 | let dest = srpm_dir.join(entry.file_name()); 572 | std::fs::copy(entry.path(), &dest)?; 573 | return Ok(dest); 574 | } 575 | } 576 | 577 | todo!() 578 | } 579 | 580 | async fn build_rpm(&self, spec: &Path) -> Result> { 581 | let mut cmd = self.rpmbuild(); 582 | let tmp = tempfile::Builder::new().prefix("anda-rpm").tempdir()?; 583 | 584 | cmd.arg("-bb") 585 | .arg(spec) 586 | .arg("--define") 587 | .arg(format!("_sourcedir {}", self.sources.canonicalize()?.display())) 588 | .arg("--define") 589 | .arg(format!("_rpmdir {}", tmp.path().display())); 590 | 591 | cmd.log().await?; 592 | 593 | let mut rpms = Vec::new(); 594 | 595 | // find rpms in resultdir using walkdir 596 | 597 | for entry in walkdir::WalkDir::new(tmp.path()) { 598 | let entry = entry?; 599 | //eprintln!("entry: {:?}", entry.file_name()); 600 | if entry.file_name().to_string_lossy().ends_with(".rpm") { 601 | 
//rpms.push(entry.path().to_path_buf()); 602 | // eprintln!("found rpm: {:?}", rpms); 603 | 604 | let rpms_dir = self.resultdir.join("rpm/rpms"); 605 | std::fs::create_dir_all(&rpms_dir)?; 606 | let dest = rpms_dir.join(entry.file_name()); 607 | std::fs::copy(entry.path(), dest)?; 608 | rpms.push(rpms_dir.join(entry.file_name())); 609 | } 610 | } 611 | 612 | //println!("rpms: {:?}", rpms); 613 | Ok(rpms) 614 | } 615 | 616 | async fn build(&self, spec: &Path) -> Result> { 617 | let mut cmd = self.rpmbuild(); 618 | let tmp = TempDir::with_prefix("anda-rpmbuild")?; 619 | cmd.arg("-ba") 620 | .arg(spec) 621 | .arg("--define") 622 | .arg(format!("_sourcedir {}", self.sources.canonicalize()?.display())) 623 | .arg("--define") 624 | .arg(format!("_srcrpmdir {}", tmp.path().display())) 625 | .arg("--define") 626 | .arg(format!("_rpmdir {}", tmp.path().display())); 627 | cmd.log().await?; 628 | 629 | let mut rpms = Vec::new(); 630 | 631 | // find rpms in resultdir using walkdir 632 | 633 | for entry in walkdir::WalkDir::new(tmp.path()) { 634 | let entry = entry?; 635 | let entry_filename = entry.file_name().to_string_lossy(); 636 | 637 | let (subdir, is_rpm) = if entry_filename.ends_with(".src.rpm") { 638 | ("rpm/srpm", false) 639 | } else if entry_filename.ends_with(".rpm") { 640 | ("rpm/rpms", true) 641 | } else { 642 | continue; 643 | }; 644 | 645 | let target_dir = self.resultdir.join(subdir); 646 | std::fs::create_dir_all(&target_dir)?; 647 | let dest = target_dir.join(entry.file_name()); 648 | std::fs::copy(entry.path(), &dest)?; 649 | 650 | if is_rpm { 651 | rpms.push(dest); 652 | } 653 | } 654 | 655 | //println!("rpms: {:?}", rpms); 656 | Ok(rpms) 657 | } 658 | } 659 | -------------------------------------------------------------------------------- /src/update.rs: -------------------------------------------------------------------------------- 1 | use anda_config::Manifest; 2 | use andax::{run, RPMSpec}; 3 | use color_eyre::{Result, Section}; 4 | use 
itertools::Itertools; 5 | use std::io::Write; 6 | use std::{ 7 | collections::BTreeMap, 8 | thread::{self, Builder}, 9 | }; 10 | use tracing::{debug, error, instrument, trace}; 11 | 12 | /// Return true only if the project `lbls` does not have the key or the value does not match. 13 | fn filter_project(lbls: &BTreeMap) -> impl Fn(&(String, String)) -> bool + '_ { 14 | |(k, v)| lbls.get(k) != Some(v) 15 | } 16 | 17 | /// Return true only if `lbls` have the key and the value matches. 18 | fn exclude_project(lbls: &BTreeMap) -> impl Fn(&(String, String)) -> bool + '_ { 19 | |(k, v)| lbls.get(k) == Some(v) 20 | } 21 | 22 | #[allow(clippy::arithmetic_side_effects)] 23 | #[instrument(skip(cfg))] 24 | pub fn update( 25 | cfg: Manifest, 26 | global_lbls: Vec<(String, String)>, 27 | fls: Vec>, 28 | excls: Vec>, 29 | ) -> Result<()> { 30 | let mut handlers = vec![]; 31 | let proj_len = cfg.project.len(); 32 | let mut scr_len = 0; 33 | for (name, mut proj) in cfg.project { 34 | let Some(scr) = proj.update else { continue }; 35 | scr_len += 1; 36 | let mut lbls = std::mem::take(&mut proj.labels); 37 | lbls.extend(global_lbls.clone()); 38 | if !fls.is_empty() && fls.iter().all(|fls| fls.iter().any(filter_project(&lbls))) { 39 | continue; 40 | } 41 | if excls.iter().any(|excls| excls.iter().all(exclude_project(&lbls))) { 42 | continue; 43 | } 44 | trace!(name, scr = scr.to_str(), "Th start"); 45 | let fls = fls.clone(); 46 | let alias = proj.alias.into_iter().flatten().next().clone().unwrap_or(name); 47 | handlers.push(Builder::new().name(alias).spawn(move || { 48 | let th = thread::current(); 49 | let name = th.name().expect("No name for andax thread??"); 50 | let start = std::time::Instant::now(); 51 | let sc = run(name, &scr, lbls.iter(), |sc| { 52 | // we have to do it here as `Dynamic` in andax::Map nu Sync impl 53 | let filters = fls 54 | .into_iter() 55 | .flat_map(|fls| fls.into_iter().map(|(k, v)| (k.into(), v.into()))) 56 | .collect::(); 57 | sc.push("filters", 
filters); 58 | if let Some(rpm) = &proj.rpm { 59 | sc.push("rpm", RPMSpec::new(name.to_owned(), &scr, &rpm.spec)); 60 | } 61 | }); 62 | let duration = start.elapsed().as_millis(); 63 | if let Some(sc) = sc { 64 | let rpm: RPMSpec = sc.get_value("rpm").expect("No rpm object in rhai scope"); 65 | if let Err(e) = rpm.write() { 66 | error!("{name}: Failed to write RPM: {e}"); 67 | } 68 | } 69 | duration 70 | })?); 71 | } 72 | 73 | let hdl_len = handlers.len(); 74 | if hdl_len == 0 { 75 | tracing::info!("No tasks were run."); 76 | return Ok(()); 77 | } 78 | debug!("Joining {hdl_len} threads"); 79 | let mut panicked = Vec::with_capacity(0); 80 | 81 | let tasks = handlers 82 | .into_iter() 83 | .filter_map(|hdl| { 84 | let th = hdl.thread(); 85 | let name = th.name().expect("No name for andax thread??").to_owned(); 86 | if let Ok(duration) = hdl.join() { 87 | Some((name, duration)) 88 | } else { 89 | error!("Thread `{name}` panicked. This is most likely a bug."); 90 | panicked.push(name); 91 | None 92 | } 93 | }) 94 | .sorted_unstable_by(|(_, duration0), (_, duration1)| duration1.cmp(duration0)); 95 | let task_len = tasks.len(); 96 | let pname_len = tasks 97 | .clone() 98 | .max_by(|(name0, _), (name1, _)| name0.len().cmp(&name1.len())) 99 | .map_or(13, |(name, _)| name.len()); 100 | let mut stdout = std::io::stdout(); 101 | 102 | writeln!( 103 | stdout, 104 | "\nFinished running {task_len}/{scr_len} scripts out of {proj_len} projects, {} failed fatally.", 105 | hdl_len - task_len 106 | ) 107 | .unwrap(); 108 | writeln!(stdout, "Here is a list of unfiltered tasks:\n").unwrap(); 109 | writeln!(stdout, "No. 
Time/ms Project/alias").unwrap(); 110 | writeln!(stdout, "═════╤════════╤═{}", "═".repeat(pname_len.max(13))).unwrap(); 111 | 112 | for (n, (name, duration)) in tasks.enumerate() { 113 | let sep = if n % 2 == 0 { '┃' } else { '│' }; 114 | writeln!(stdout, "{:<5}{sep}{:>7} {sep} {name}", n + 1, duration).unwrap(); 115 | } 116 | 117 | if !panicked.is_empty() { 118 | return Err(panicked.into_iter().fold( 119 | color_eyre::Report::msg("One of the threads panicked while running the update script") 120 | .suggestion("Bug report: https://github.com/FyraLabs/anda/issues"), 121 | |err, name| err.warning(format!("Project/alias: {name}")), 122 | )); 123 | } 124 | 125 | Ok(()) 126 | } 127 | 128 | #[instrument] 129 | pub fn run_scripts(scripts: &[String], labels: Vec<(String, String)>) -> Result<()> { 130 | let mut handlers = vec![]; 131 | for scr in scripts { 132 | trace!(scr, "Th start"); 133 | let labels = labels.clone(); 134 | handlers.push(Builder::new().name(scr.to_owned()).spawn(move || { 135 | let th = thread::current(); 136 | let name = th.name().expect("No name for andax thread??"); 137 | run(name, &std::path::PathBuf::from(name), labels.into_iter(), |_| {}); 138 | })?); 139 | } 140 | 141 | debug!("Joining {} threads", handlers.len()); 142 | 143 | for hdl in handlers { 144 | let th = hdl.thread(); 145 | let name = th.name().expect("No name for andax thread??").to_owned(); 146 | if let Err(e) = hdl.join() { 147 | error!("Panic @ `{name}` : {e:#?}"); 148 | } 149 | } 150 | 151 | Ok(()) 152 | } 153 | 154 | #[cfg(test)] 155 | mod tests { 156 | use super::*; 157 | #[test] 158 | fn test_filter() { 159 | let transform = |arr: &[(&str, &str)]| { 160 | arr.iter() 161 | .map(|(l, r)| ((*l).to_owned(), (*r).to_owned())) 162 | .collect::>() 163 | }; 164 | // update only nightly packages 165 | let lbls = std::iter::once(("nightly", "1")).map(|(l, r)| (l.into(), r.into())).collect(); 166 | let test1 = filter_project(&lbls); 167 | for (k, v) in transform(&[("nightly", "0"), ("hai", 
"bai"), ("large", "1")]) { 168 | assert!(test1(&(k, v))); 169 | } 170 | for (k, v) in transform(&[("nightly", "1")]) { 171 | assert!(!test1(&(k, v))); 172 | } 173 | } 174 | } 175 | -------------------------------------------------------------------------------- /src/util.rs: -------------------------------------------------------------------------------- 1 | //! Utility functions and types 2 | use anda_config::{Docker, DockerImage, Manifest, Project, RpmBuild}; 3 | use clap_verbosity_flag::log::LevelFilter; 4 | use color_eyre::{eyre::eyre, Result, Section}; 5 | use console::style; 6 | use itertools::Itertools; 7 | use nix::{sys::signal, unistd::Pid}; 8 | use regex::Regex; 9 | use serde::{Deserialize, Serialize}; 10 | use std::{ 11 | collections::BTreeMap, 12 | io::{IsTerminal, Write}, 13 | path::Path, 14 | }; 15 | use tokio::{io::AsyncBufReadExt, process::Command}; 16 | use tracing::{debug, info}; 17 | 18 | lazy_static::lazy_static! { 19 | static ref BUILDARCH_REGEX: Regex = Regex::new("BuildArch:\\s*(.+)").unwrap(); 20 | static ref EXCLUSIVEARCH_REGEX: Regex = Regex::new("ExclusiveArch:\\s*(.+)").unwrap(); 21 | static ref DEFAULT_ARCHES: [String; 2] = ["x86_64".to_owned(), "aarch64".to_owned()]; 22 | } 23 | 24 | #[derive(Copy, Clone)] 25 | enum ConsoleOut { 26 | Stdout, 27 | Stderr, 28 | } 29 | // Build entry for GHA 30 | #[derive(Debug, Clone, Serialize, Deserialize, Ord, Eq, PartialEq, PartialOrd)] 31 | pub struct BuildEntry { 32 | pub pkg: String, 33 | pub arch: String, 34 | pub labels: BTreeMap, 35 | } 36 | 37 | pub fn fetch_build_entries(config: Manifest) -> Vec { 38 | let changed_files = get_changed_files(Path::new(".")).unwrap_or_default(); 39 | let changed_dirs: std::collections::HashSet<_> = changed_files 40 | .iter() 41 | .map(|f| f.trim_end_matches(|x| x != '/').trim_end_matches('/')) 42 | .collect(); 43 | let suffix = config.config.strip_suffix.clone().unwrap_or_default(); 44 | 45 | let mut entries = Vec::new(); 46 | for (mut name, project) in 
config.project { 47 | let dir = name.trim_end_matches(&suffix); 48 | if !changed_dirs.contains(dir) { 49 | continue; 50 | } 51 | 52 | if let Some(rpm) = project.rpm { 53 | if rpm.enable_scm.unwrap_or(false) { 54 | entries.extend(DEFAULT_ARCHES.iter().map(|arch| BuildEntry { 55 | pkg: std::mem::take(&mut name), 56 | arch: arch.clone(), 57 | labels: project.labels.clone(), 58 | })); 59 | continue; 60 | } 61 | } 62 | entries.extend( 63 | project 64 | .arches 65 | .unwrap_or_else(|| DEFAULT_ARCHES.to_vec()) 66 | .into_iter() 67 | .map(|arch| BuildEntry { pkg: name.clone(), arch, labels: project.labels.clone() }), 68 | ); 69 | } 70 | 71 | entries 72 | } 73 | 74 | /// Command Logging 75 | /// 76 | /// This trait implements custom logging for commands in a format of `{command} | {line}` 77 | /// It also implements Ctrl-C handling for the command, and will send a SIGINT to the command 78 | #[async_trait::async_trait] 79 | pub trait CommandLog { 80 | async fn log(&mut self) -> Result<()>; 81 | } 82 | fn print_log(process: &str, output: &[u8], out: ConsoleOut) { 83 | // check if no_color is set 84 | let no_color = std::env::var("NO_COLOR").is_ok(); 85 | 86 | let process = { 87 | if no_color { 88 | style(process) 89 | } else { 90 | match out { 91 | ConsoleOut::Stdout => style(process).cyan(), 92 | ConsoleOut::Stderr => style(process).yellow(), 93 | } 94 | } 95 | }; 96 | let mut output2 = Vec::with_capacity(output.len().saturating_add(10)); 97 | output2.extend_from_slice(format!("{process} │ ").as_bytes()); 98 | for &c in output { 99 | if c == b'\r' { 100 | // check if is terminal 101 | if std::io::stdout().is_terminal() { 102 | output2.extend_from_slice(format!("\r{process} │ ").as_bytes()); 103 | } else { 104 | // format!("{process} │ ").as_bytes().clone_into(&mut output2); 105 | break; 106 | } 107 | } else { 108 | output2.push(c); 109 | } 110 | } 111 | output2.push(b'\n'); 112 | std::io::stdout().write_all(&output2).unwrap(); 113 | } 114 | #[async_trait::async_trait] 115 | 
impl CommandLog for Command {
    /// Run the command, streaming its stdout and stderr through `print_log`
    /// and forwarding Ctrl-C to the child as SIGINT.
    ///
    /// # Errors
    /// Fails if `script(1)` is missing, if the child exits non-zero, or on
    /// Ctrl-C.
    async fn log(&mut self) -> Result<()> {
        // make process name a constant string that we can reuse every time we call print_log
        let process = self.as_std().get_program().to_owned().into_string().unwrap();
        // Shell-quote each argument so the whole command can be re-parsed by
        // the shell `script -c` spawns.
        let args = (self.as_std().get_args())
            .map(shell_quote::Sh::quote_vec)
            .map(|s| String::from_utf8(s).unwrap())
            .join(" ");
        debug!("Running command: {process} {args}",);

        // Wrap the command in `script` to force it to give it a TTY
        // (-e: return child's exit code, -q: quiet, typescript to /dev/null).
        let mut c = Self::new("script");

        c.args(["-e", "-f", "/dev/null", "-q", "-c"])
            .arg(format!("{process} {args}"))
            .stdin(std::process::Stdio::null())
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped());

        trace!(?c, "Running command");

        let mut output = c.spawn().map_err(|e| {
            eyre!("Cannot run `script`")
                .wrap_err(e)
                .suggestion("You might need to install `script` via a package manager.")
        })?;

        // HACK: Rust ownership is very fun.
        // `t` is a second owned copy of the name because each spawned task
        // needs to move its own `String`.
        let t = process.clone();
        let stdout = output.stdout.take().unwrap();
        let mut stdout_lines = tokio::io::BufReader::new(stdout).split(b'\n');
        let stderr = output.stderr.take().unwrap();
        let mut stderr_lines = tokio::io::BufReader::new(stderr).split(b'\n');

        // handles so we can run both at the same time
        for task in [
            tokio::spawn(async move {
                while let Some(line) = stdout_lines.next_segment().await.unwrap() {
                    print_log(&t, &line, ConsoleOut::Stdout);
                }
                Ok(())
            }),
            tokio::spawn(async move {
                while let Some(line) = stderr_lines.next_segment().await.unwrap() {
                    print_log(&process, &line, ConsoleOut::Stderr);
                }
                Ok(())
            }),
            // Third task: wait for either the child to exit or Ctrl-C,
            // whichever comes first.
            tokio::spawn(async move {
                tokio::select! {
                    _ = tokio::signal::ctrl_c() => {
                        info!("Received ctrl-c, sending sigint to child process");
                        #[allow(clippy::cast_possible_wrap)]
                        signal::kill(Pid::from_raw(output.id().unwrap() as i32), signal::Signal::SIGINT).unwrap();
                        eprintln!("Received ctrl-c, exiting");
                        // std::process::exit(127);
                        Err(eyre!("Received ctrl-c, exiting"))
                    }
                    w = output.wait() => {
                        let status = w.unwrap();
                        if status.success() {
                            info!("Command exited successfully");
                            Ok(())
                        } else {
                            info!("Command exited with status: {status}");
                            Err(eyre!("Command exited with status: {status}"))
                        }
                    }
                }
            }),
        ] {
            // Awaiting in order is fine: the pipe readers finish when the
            // child closes its streams, i.e. around when the waiter resolves.
            task.await??;
        }

        Ok(())
    }
}

// utility functions for spec templating

use git2::Repository;
/// Get the current commit id from the current git repository (cwd)
pub fn get_commit_id_cwd() -> Option<String> {
    let repo = Repository::open(".").ok()?;
    let head = repo.head().ok()?;
    let commit = head.peel_to_commit().ok()?;
    let id = commit.id();
    Some(id.to_string())
}

/// Get the current commit id from a git repository
pub fn _get_commit_id(path: &str) -> Option<String> {
    let repo = Repository::open(path).ok()?;
    let head = repo.head().ok()?;
    let commit = head.peel_to_commit().ok()?;
    let id = commit.id();
    Some(id.to_string())
}

// git diff --name-only HEAD^
/// List paths changed between HEAD and its first parent.
/// Returns `None` for repos without a parent commit (e.g. initial commit)
/// or when `path` is not a git repository.
pub fn get_changed_files(path: &Path) -> Option<Vec<String>> {
    let repo = Repository::open(path).ok()?;
    let head = repo.head().ok()?;
    let commit = head.peel_to_commit().ok()?;
    let parent = commit.parent(0).ok()?;
    let diff = repo
        .diff_tree_to_tree(Some(&parent.tree().ok()?), Some(&commit.tree().ok()?), None)
        .ok()?;
    let mut changed_files = vec![];
    diff.foreach(
        &mut |delta, _| {
            changed_files.push(delta.new_file().path().unwrap().to_str().unwrap().to_owned());
            true
        },
        None,
        None,
        None,
    )
    .ok()?;
    trace!("changed files: {changed_files:?}");
    Some(changed_files)
}
changed_files.push(delta.new_file().path().unwrap().to_str().unwrap().to_owned()); 227 | true 228 | }, 229 | None, 230 | None, 231 | None, 232 | ) 233 | .ok()?; 234 | trace!("changed files: {changed_files:?}"); 235 | Some(changed_files) 236 | } 237 | 238 | /// Formats the current time in the format of YYYYMMDD 239 | pub fn get_date() -> String { 240 | let now: chrono::DateTime = chrono::Utc::now(); 241 | now.format("%Y%m%d").to_string() 242 | } 243 | 244 | use promptly::prompt_default; 245 | use tracing::trace; 246 | 247 | /// Initializes a new anda project 248 | pub fn init(path: &Path, yes: bool) -> Result<()> { 249 | // create the directory if not exists 250 | if !path.exists() { 251 | std::fs::create_dir_all(path)?; 252 | } 253 | 254 | let mut config = Manifest { project: BTreeMap::new(), config: anda_config::Config::default() }; 255 | 256 | // use ignore to scan for files 257 | let walk = ignore::WalkBuilder::new(path).build(); 258 | 259 | for entry in walk { 260 | let entry = entry?; 261 | let path = entry.path().strip_prefix("./").unwrap(); 262 | 263 | if !path.is_file() { 264 | continue; 265 | } 266 | 267 | match path.extension().unwrap_or_default().as_encoded_bytes() { 268 | b"spec" => { 269 | debug!("Found spec file: {}", path.display()); 270 | if yes 271 | || prompt_default( 272 | format!("Add spec file `{}` to manifest?", path.display()), 273 | true, 274 | )? 
275 | { 276 | let project_name = path.file_stem().unwrap().to_str().unwrap(); 277 | let project = Project { 278 | rpm: Some(RpmBuild { spec: path.to_path_buf(), ..Default::default() }), 279 | ..Default::default() 280 | }; 281 | config.project.insert(project_name.to_owned(), project); 282 | } 283 | } 284 | b"dockerfile" => add_dockerfile_to_manifest(yes, path, &mut config)?, 285 | _ if path.file_name().is_some_and(|f| f.eq("Dockerfile")) => { 286 | add_dockerfile_to_manifest(yes, path, &mut config)?; 287 | } 288 | _ => {} 289 | } 290 | } 291 | println!("{}", anda_config::config::to_string(&config)?); 292 | 293 | Ok(()) 294 | } 295 | 296 | fn add_dockerfile_to_manifest( 297 | yes: bool, 298 | path: &Path, 299 | config: &mut Manifest, 300 | ) -> Result<(), color_eyre::eyre::Error> { 301 | let add_oci = 302 | yes || prompt_default(format!("Add Dockerfile `{}` to manifest?", path.display()), true)?; 303 | if add_oci { 304 | // create a new project called docker 305 | 306 | let mut docker = Docker::default(); 307 | 308 | let image = 309 | DockerImage { dockerfile: Some(path.display().to_string()), ..Default::default() }; 310 | let image_name = "docker-1".to_owned(); 311 | docker.image.insert(image_name, image); 312 | 313 | let project = Project { docker: Some(docker), ..Default::default() }; 314 | 315 | // increment counter 316 | config.project.insert("docker".to_owned(), project); 317 | } 318 | Ok(()) 319 | } 320 | 321 | pub const fn convert_filter(filter: LevelFilter) -> tracing_subscriber::filter::LevelFilter { 322 | match filter { 323 | LevelFilter::Off => tracing_subscriber::filter::LevelFilter::OFF, 324 | LevelFilter::Error => tracing_subscriber::filter::LevelFilter::ERROR, 325 | LevelFilter::Warn => tracing_subscriber::filter::LevelFilter::WARN, 326 | LevelFilter::Info => tracing_subscriber::filter::LevelFilter::INFO, 327 | LevelFilter::Debug => tracing_subscriber::filter::LevelFilter::DEBUG, 328 | LevelFilter::Trace => 
/// Build a `std::process::Command` from a command name and a mixed list of
/// argument forms, or (with a leading `?`) build *and run* it via
/// [`crate::util::cmd`].
///
/// Argument forms:
/// * `[[expr]]`          — splat: `expr` is an iterable passed to `.args()`
/// * `["fmt" a, b]`      — a `format!`-ed argument
/// * `{{expr}}`          — an argument formatted with `Display`
/// * `expr` / `literal`  — a plain argument
#[macro_export]
macro_rules! cmd {
    (@ $cmd:ident [[$expr:expr]]) => { $cmd.args($expr); };
    (@ $cmd:ident $tt:tt) => { $cmd.arg(cmd!(# $tt)); };
    (# [$expr:literal $($arg:expr),*]) => { format!($expr, $($arg),*) };
    (# {{$expr:expr}}) => { format!("{}", $expr) };
    (# $expr:expr) => { &$expr };
    (# $expr:literal) => { $expr };

    // Run the command and capture stdout (lossy UTF-8).
    (stdout $cmd:literal $($t:tt)+) => {{
        #[allow(unused_braces)]
        let cmd = cmd!($cmd $($t)+).output()?;
        String::from_utf8_lossy(&cmd.stdout)
    }};
    // Command name given as a literal.
    ($cmd:literal $($t:tt)*) => {{
        #[allow(unused_braces)]
        let mut cmd = std::process::Command::new($cmd);
        $(
            cmd!(@ cmd $t);
        )*
        cmd
    }};
    // Command name given as a block expression.
    ($cmd:block $($t:tt)*) => {{
        #[allow(unused_braces)]
        let mut cmd = std::process::Command::new(cmd!(# $cmd));
        $(
            cmd!(@ cmd $t);
        )*
        cmd
    }};
    // `?` prefix: build the command, then run it through `util::cmd` for
    // logging and error reporting. The boxed array mirrors the arguments so
    // the error message can show the full command line.
    (?$cmd:tt $($t:tt)*) => {{
        #[allow(unused_braces)]
        $crate::util::cmd(cmd!($cmd $($t)*), &[Box::new($cmd), $(Box::new(cmd!(# $t))),*])
    }};
}

/// Run a command and perform logging.
///
/// `cmd_arr` holds `Display`-able copies of the program and its arguments,
/// used only to reconstruct the command line for error messages.
///
/// # Errors
/// This function transform command failures into better error messages.
#[inline]
pub fn cmd<const N: usize>(
    mut cmd: std::process::Command,
    cmd_arr: &[Box<dyn std::fmt::Display>; N],
) -> color_eyre::Result<()> {
    use color_eyre::Help;
    use itertools::Itertools;
    let cmd_str = cmd_arr.iter().join(" ");
    tracing::trace!("Running command: `{cmd_str}`");
    let status = cmd.status()?;
    Err(match (status, status.code()) {
        _ if status.success() => return Ok(()),
        (_, Some(rc)) => color_eyre::Report::msg("Command exited")
            .warning(lazy_format::lazy_format!("Status code: {rc}"))
            .with_note(|| format!("Command: `{cmd_str}`"))
            .note(lazy_format::lazy_format!("Status: {status}")),
        // No exit code means the process was killed by a signal.
        _ => color_eyre::Report::msg("Script terminated unexpectedly")
            .note(lazy_format::lazy_format!("Status: {status}")),
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn test_head() {
        println!("{:?}", get_changed_files(Path::new(".")));
    }
    #[test]
    fn test_entries() {
        let config = anda_config::load_from_file(&PathBuf::from("anda.hcl"));

        fetch_build_entries(config.unwrap());
    }
}
command: hello.sh 6 | modules: 7 | - name: hello 8 | buildsystem: simple 9 | build-commands: 10 | - install -D hello.sh /app/bin/hello.sh 11 | sources: 12 | - type: file 13 | path: hello.sh -------------------------------------------------------------------------------- /tests/test.rhai: -------------------------------------------------------------------------------- 1 | // let req = new_req("https://api.snapcraft.io/v2/snaps/info/authy"); 2 | // req.head("Snap-Device-Series", "16"); 3 | // req.head("User-Agent", USER_AGENT); 4 | // let obj = json(req.get()) @ "channel-map" @ 0 @ "version"; 5 | // rpm.version(obj.str()); 6 | // rpm.f = sub("2.2.2", "3.3.3", rpm.f); 7 | 8 | // print(gh("Bonandry/adwaita-plus")); 9 | print(env("PATH")); 10 | 11 | // rpm.version("0.3.63"); 12 | // rpm.source(0, "https://github.com/Ultramarine-Linux/umpkg/archive/refs/tags/%{version}.tar.gz"); 13 | 14 | // let manifest = anda::cfg::load_file("/stuff/chubby/terra/anda.hcl"); 15 | // print(manifest); 16 | 17 | // print(npm("discord.js")); 18 | 19 | let obj = #{ 20 | "a": 1, 21 | "b": 2 , 22 | "c": #{ 23 | "d": "e", 24 | }, 25 | "bar": npm("discord.js") 26 | }; 27 | 28 | // print(obj); 29 | print(obj); 30 | // funny template test 31 | let t = template_file(obj,"tests/umpkg.spec.in"); 32 | 33 | print(t); -------------------------------------------------------------------------------- /tests/umpkg.spec: -------------------------------------------------------------------------------- 1 | %undefine _disable_source_fetch 2 | 3 | Name: umpkg 4 | Version: 0.3.63 5 | Release: 2%{?dist} 6 | Summary: The Ultramarine Packager tool 7 | URL: https://ultramarine-linux.org 8 | Source0: https://github.com/Ultramarine-Linux/umpkg/archive/refs/tags/%{version}.tar.gz 9 | License: MIT 10 | BuildRequires: python3-devel 11 | Requires: mock 12 | Requires: python3-arrow 13 | Group: Applications/Internet 14 | BuildArch: noarch 15 | %description 16 | umpkg is an RPM packaging tool for Ultramarine Linux. 
It can be used to quickly create RPMs from source code, and pushing them to a repository. 17 | Instead of writing long and complex commandline arguments for RPMBuild and Mock, umpkg uses a configuration file to specify the build process for a reproducible build. 18 | 19 | 20 | %prep 21 | %autosetup -n umpkg-%{version} 22 | 23 | %generate_buildrequires 24 | %pyproject_buildrequires 25 | 26 | 27 | %build 28 | %pyproject_wheel 29 | 30 | 31 | %install 32 | %pyproject_install 33 | %pyproject_save_files umpkg 34 | 35 | %files -f %{pyproject_files} 36 | %{_bindir}/umpkg 37 | 38 | %changelog 39 | * Mon May 30 2022 Cappy Ishihara - 0.3.3-2.um36 40 | - Updated Packaging 41 | 42 | * Sat May 28 2022 Cappy Ishihara - 0.3.1-1.um36 43 | - Initial Rewrite 44 | -------------------------------------------------------------------------------- /tests/umpkg.spec.in: -------------------------------------------------------------------------------- 1 | %undefine _disable_source_fetch 2 | 3 | 4 | Name: node-discord.js 5 | Version: ${bar} 6 | Release: 2@{?dist} 7 | Summary: Discord API for Node.js 8 | URL: https://ultramarine-linux.org 9 | Source0: https://github.com/Ultramarine-Linux/umpkg/archive/refs/tags/@{version}.tar.gz 10 | License: MIT 11 | BuildRequires: python3-devel 12 | Requires: mock 13 | Requires: python3-arrow 14 | Group: Applications/Internet 15 | BuildArch: noarch 16 | %description 17 | umpkg is an RPM packaging tool for Ultramarine Linux. It can be used to quickly create RPMs from source code, and pushing them to a repository. 18 | Instead of writing long and complex commandline arguments for RPMBuild and Mock, umpkg uses a configuration file to specify the build process for a reproducible build. 
19 | 20 | 21 | %prep 22 | %autosetup -n umpkg-@{version} 23 | 24 | %generate_buildrequires 25 | %pyproject_buildrequires 26 | 27 | 28 | %build 29 | %pyproject_wheel 30 | 31 | 32 | %install 33 | %pyproject_install 34 | %pyproject_save_files umpkg 35 | 36 | %files -f @{pyproject_files} 37 | @{_bindir}/umpkg 38 | 39 | %changelog 40 | * Mon May 30 2022 Cappy Ishihara - 0.3.3-2.um36 41 | - Updated Packaging 42 | 43 | * Sat May 28 2022 Cappy Ishihara - 0.3.1-1.um36 44 | - Initial Rewrite 45 | -------------------------------------------------------------------------------- /xtask/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "xtask" 3 | version = "0.2.0" 4 | edition = "2021" 5 | 6 | publish = false 7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 8 | 9 | [dependencies] 10 | clap_mangen = "0.2.26" 11 | anda = { path = ".." } 12 | clap = { workspace = true } 13 | anyhow = "1.0.98" 14 | clap_complete = { workspace = true } 15 | -------------------------------------------------------------------------------- /xtask/src/main.rs: -------------------------------------------------------------------------------- 1 | use anda::cli::Cli; 2 | use anyhow::Result; 3 | use clap::{Command, CommandFactory}; 4 | use clap_complete::{generate_to, shells::Shell}; 5 | use std::env; 6 | use std::fs::create_dir_all; 7 | use std::fs::File; 8 | use std::io::Write; 9 | use std::path::{Path, PathBuf}; 10 | use std::rc::Rc; 11 | 12 | fn main() -> Result<()> { 13 | let task = env::args().nth(1); 14 | match task.as_deref() { 15 | Some("manpage") => manpage()?, 16 | Some("completion") => completion()?, 17 | _ => print_help(), 18 | } 19 | Ok(()) 20 | } 21 | 22 | fn print_help() { 23 | eprintln!( 24 | "Tasks: 25 | manpage builds application and man pages 26 | completion builds shell completions 27 | " 28 | ) 29 | } 30 | 31 | /// WARN: Consumes subcommands 32 | fn gen_manpage(cmd: Rc, 
man_dir: &Path) { 33 | let name = cmd 34 | .get_display_name() 35 | .map(|s| s.to_string()) 36 | .unwrap_or_else(|| cmd.clone().get_name().to_string()); 37 | if name.starts_with("anda-help") { 38 | return; 39 | } 40 | let mut out = File::create(man_dir.join(format!("{name}.1"))).unwrap(); 41 | { 42 | // HACK 'static 43 | let name: &'static str = Box::leak(Box::new(name)); 44 | let man_cmd = (*cmd).clone().name(name); 45 | clap_mangen::Man::new(man_cmd).render(&mut out).unwrap(); 46 | } 47 | out.flush().unwrap(); 48 | 49 | for sub in (*cmd).clone().get_subcommands_mut() { 50 | // let sub = sub.clone().display_name("anda-b"); 51 | gen_manpage(Rc::new(std::mem::take(sub)), man_dir) 52 | } 53 | } 54 | 55 | fn manpage() -> Result<()> { 56 | let app = Rc::new({ 57 | let mut cmd = Cli::command(); 58 | cmd.build(); 59 | cmd 60 | }); 61 | let out_dir = "target"; 62 | let man_dir = PathBuf::from(&out_dir).join("man_pages"); 63 | 64 | create_dir_all(&man_dir).unwrap(); 65 | 66 | gen_manpage(app.clone(), &man_dir); 67 | 68 | let path = PathBuf::from(&out_dir).join("assets"); 69 | 70 | let man_dir = path.join("man_pages"); 71 | std::fs::create_dir_all(&man_dir).unwrap(); 72 | gen_manpage(app, &man_dir); 73 | 74 | Ok(()) 75 | } 76 | 77 | fn completion() -> Result<()> { 78 | let mut app = Cli::command(); 79 | app.build(); 80 | 81 | let out_dir = "target"; 82 | let completion_dir = PathBuf::from(&out_dir).join("assets/completion"); 83 | 84 | let shells: Vec<(Shell, &str)> = vec![ 85 | (Shell::Bash, "bash"), 86 | (Shell::Fish, "fish"), 87 | (Shell::Zsh, "zsh"), 88 | (Shell::PowerShell, "pwsh"), 89 | (Shell::Elvish, "elvish"), 90 | ]; 91 | 92 | for (shell, name) in shells { 93 | let dir = completion_dir.join(name); 94 | std::fs::create_dir_all(&dir).unwrap(); 95 | generate_to(shell, &mut app, "anda", dir)?; 96 | } 97 | 98 | Ok(()) 99 | } 100 | --------------------------------------------------------------------------------