├── asu ├── routers │ ├── __init__.py │ ├── stats.py │ └── api.py ├── __init__.py ├── static │ ├── logo.png │ └── style.css ├── package_changes.py ├── config.py ├── build_request.py ├── main.py ├── templates │ └── overview.html ├── build.py └── util.py ├── misc ├── auc.png ├── luci.png ├── ofs.png ├── owut.png ├── prometheus.yml ├── asu.service ├── worker@.service ├── Caddyfile ├── squid.conf ├── setup.sh ├── stats_modernize.py ├── nginx.conf └── grafana.json ├── tests ├── upstream │ ├── snapshots │ │ ├── .targets.json │ │ ├── targets │ │ │ ├── testtarget │ │ │ │ └── testsubtarget │ │ │ │ │ ├── openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64 │ │ │ │ │ ├── repositories.conf │ │ │ │ │ ├── pseudo_kernel_build_dir │ │ │ │ │ │ └── tmp │ │ │ │ │ │ │ └── fake_trash │ │ │ │ │ ├── openwrt-testtarget-testsubtarget-testprofile-sysupgrade.bin │ │ │ │ │ ├── .config │ │ │ │ │ ├── Dockerfile │ │ │ │ │ ├── sha256sums │ │ │ │ │ ├── Makefile │ │ │ │ │ ├── profiles.json │ │ │ │ │ ├── openwrt-testtarget-testsubtarget-testprofile.info │ │ │ │ │ └── openwrt-testtarget-testsubtarget-testprofile.manifest │ │ │ │ │ ├── openwrt-testtarget-testsubtarget-testprofile.json │ │ │ │ │ ├── packages │ │ │ │ │ ├── Packages │ │ │ │ │ └── Packages.manifest │ │ │ │ │ └── profiles.json │ │ │ └── ath79 │ │ │ │ └── generic │ │ │ │ └── profiles.json │ │ └── packages │ │ │ └── testarch │ │ │ └── base │ │ │ ├── Packages │ │ │ └── Packages.manifest │ ├── releases │ │ ├── 1.2.3 │ │ │ ├── .targets.json │ │ │ └── targets │ │ │ │ └── testtarget │ │ │ │ └── testsubtarget │ │ │ │ └── profiles.json │ │ └── 23.05.5 │ │ │ ├── targets │ │ │ ├── ath79 │ │ │ │ └── generic │ │ │ │ │ └── profiles.json │ │ │ └── x86 │ │ │ │ └── 64 │ │ │ │ └── profiles.json │ │ │ └── .targets.json │ └── .versions.json ├── ci │ └── openwrt-one-24.10.0.json ├── test_store.py ├── test_package_changes.py ├── conftest.py ├── test_stats.py └── test_util.py ├── .gitignore ├── Containerfile ├── .github ├── dependabot.yml └── workflows │ 
├── test.yml │ ├── podman.yml │ └── publish.yml ├── pyproject.toml ├── podman-compose.yml ├── README.md └── LICENSE /asu/routers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /asu/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.0.0" 2 | -------------------------------------------------------------------------------- /misc/auc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openwrt/asu/main/misc/auc.png -------------------------------------------------------------------------------- /misc/luci.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openwrt/asu/main/misc/luci.png -------------------------------------------------------------------------------- /misc/ofs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openwrt/asu/main/misc/ofs.png -------------------------------------------------------------------------------- /misc/owut.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openwrt/asu/main/misc/owut.png -------------------------------------------------------------------------------- /tests/upstream/snapshots/.targets.json: -------------------------------------------------------------------------------- 1 | {"testtarget/testsubtarget": "testarch"} 2 | -------------------------------------------------------------------------------- /asu/static/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openwrt/asu/main/asu/static/logo.png -------------------------------------------------------------------------------- 
/tests/upstream/releases/1.2.3/.targets.json: -------------------------------------------------------------------------------- 1 | {"testtarget/testsubtarget": "testarch"} 2 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/repositories.conf: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/pseudo_kernel_build_dir/tmp/fake_trash: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/openwrt-testtarget-testsubtarget-testprofile-sysupgrade.bin: -------------------------------------------------------------------------------- 1 | fake image 2 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/.config: -------------------------------------------------------------------------------- 1 | CONFIG_TARGET_ROOTFS_EXT4FS=y 2 | CONFIG_TARGET_ROOTFS_SQUASHFS=y 3 | -------------------------------------------------------------------------------- /tests/upstream/.versions.json: -------------------------------------------------------------------------------- 1 | { 2 | "stable_version": "23.05.5", 3 | "oldstable_version": "22.03.7", 4 | "upcoming_version": "24.10.0-rc6", 5 | "versions_list": ["1.2.3", "22.03.7", "23.05.5", "24.10.0-rc6"] 6 | } 7 | -------------------------------------------------------------------------------- 
/tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine 2 | 3 | RUN apk add make bash 4 | 5 | RUN adduser -D buildbot -h /builder/ 6 | 7 | USER buildbot 8 | 9 | ADD ./ /builder/ 10 | 11 | WORKDIR /builder/ 12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | var/ 2 | store/ 3 | site/ 4 | redis/ 5 | public/ 6 | squid/ 7 | poetry.lock 8 | json/ 9 | instance/ 10 | htmlcov/ 11 | dist/ 12 | coverage.xml 13 | cache/ 14 | asu.egg-info/ 15 | *$py.class 16 | *.swp 17 | *.sig 18 | *.sec 19 | *.py[cod] 20 | *.pub 21 | .vscode/ 22 | .venv/ 23 | .env 24 | .DS_Store 25 | .direnv/ 26 | .coverage.* 27 | .coverage 28 | **/__pycache__/ 29 | *.pyc 30 | -------------------------------------------------------------------------------- /Containerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12-slim 2 | 3 | WORKDIR /app/ 4 | 5 | RUN pip install poetry 6 | 7 | COPY poetry.lock pyproject.toml README.md ./ 8 | 9 | RUN poetry config virtualenvs.create false 10 | 11 | RUN poetry install --only main --no-root --no-directory 12 | 13 | COPY ./asu/ ./asu/ 14 | 15 | RUN poetry install --only main 16 | 17 | CMD uvicorn --host 0.0.0.0 'asu.main:app' 18 | -------------------------------------------------------------------------------- /misc/prometheus.yml: -------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 1m 3 | scrape_timeout: 10s 4 | evaluation_interval: 1m 5 | scrape_configs: 6 | - job_name: asu 7 | honor_timestamps: true 8 | scrape_interval: 2m 9 | scrape_timeout: 5s 10 | metrics_path: /metrics 11 | scheme: http 12 | follow_redirects: true 13 | enable_http2: true 14 | 
static_configs: 15 | - targets: 16 | - sysupgrade.openwrt.org:80 17 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/sha256sums: -------------------------------------------------------------------------------- 1 | 8fda670a73d59fd0632970f4b5c1b8063203a908e13131a1f7cf44dd71a5b30a *openwrt-testtarget-testsubtarget-testprofile.json 2 | 78f241feaf440e5d5f806013b955473d7a04bb9be71c8a848b8c4c871e03b306 *openwrt-testtarget-testsubtarget-testprofile.manifest 3 | fb63c1efa50246f48bd9cce66f5b9af817f1d55c783ac8242e1547b3fc81fa86 *openwrt-testtarget-testsubtarget-testprofile-sysupgrade.bin 4 | -------------------------------------------------------------------------------- /misc/asu.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Gunicorn instance to serve ASU 3 | After=network.target 4 | 5 | [Service] 6 | User=debian 7 | Group=debian 8 | WorkingDirectory=/home/debian/asu/ 9 | Environment="BRANCHES_FILE=./branches.yml" 10 | ExecStart=/home/debian/.local/bin/gunicorn \ 11 | --bind 127.0.0.1:8000 \ 12 | --extra-reload-file "$BRANCHES_FILE" \ 13 | --reload \ 14 | --umask 007 \ 15 | --workers 3 \ 16 | "asu.asu:create_app()" 17 | 18 | [Install] 19 | WantedBy=multi-user.target 20 | 21 | -------------------------------------------------------------------------------- /misc/worker@.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=ASU Worker Number %i 3 | After=network.target 4 | 5 | [Service] 6 | User=debian 7 | Group=debian 8 | Type=simple 9 | WorkingDirectory=/home/debian/asu/worker%i/ 10 | Environment=LANG=en_US.UTF-8 11 | Environment=LC_ALL=en_US.UTF-8 12 | Environment=LC_LANG=en_US.UTF-8 13 | ExecStart=/home/debian/.local/bin/rqworker --name asu-worker-%i 14 | ExecReload=/bin/kill -s HUP $MAINPID 
15 | ExecStop=/bin/kill -s TERM $MAINPID 16 | PrivateTmp=true 17 | Restart=always 18 | 19 | [Install] 20 | WantedBy=multi-user.target 21 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "daily" 12 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/Makefile: -------------------------------------------------------------------------------- 1 | 2 | image: 3 | mkdir -p $(BIN_DIR)/ 4 | cp ./openwrt-testtarget-testsubtarget-testprofile-sysupgrade.bin $(BIN_DIR)/ 5 | cp ./openwrt-testtarget-testsubtarget-testprofile.manifest $(BIN_DIR)/ 6 | cp ./profiles.json $(BIN_DIR)/ 7 | 8 | manifest: 9 | @cat ./openwrt-testtarget-testsubtarget-testprofile.manifest 10 | 11 | info: 12 | @cat ./openwrt-testtarget-testsubtarget-testprofile.info 13 | 14 | val.KERNEL_BUILD_DIR: 15 | @echo "$$(pwd)/pseudo_kernel_build_dir/" 16 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-testtarget-testsubtarget-testprofile.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "testprofile", 3 | "image_prefix": "openwrt-testtarget-testsubtarget-testprofile", 4 
| "images": [ 5 | { 6 | "name": "openwrt-testtarget-testsubtarget-testprofile-sysupgrade.bin", 7 | "sha256": "000", 8 | "type": "sysupgrade" 9 | } 10 | ], 11 | "metadata_version": 1, 12 | "supported_devices": [ 13 | "testprofile" 14 | ], 15 | "target": "testtarget/testsubtarget", 16 | "titles": [ 17 | { 18 | "model": "Test1", 19 | "vendor": "The Test Comp" 20 | } 21 | ], 22 | "version_code": "r99999-999999999", 23 | "version_number": "SNAPSHOT" 24 | } 25 | -------------------------------------------------------------------------------- /misc/Caddyfile: -------------------------------------------------------------------------------- 1 | #{ 2 | # auto_https disable_redirects 3 | # preferred_chains { 4 | # root_common_name "ISRG Root X1" 5 | # } 6 | #} 7 | # 8 | #sysupgrade.openwrt.org sysupgrade.openwrt.org:80 { 9 | # root * /path/to/asu/ 10 | # file_server /json/ 11 | # file_server /store/ 12 | # header Access-Control-Allow-Methods "POST, GET, OPTIONS" 13 | # header Access-Control-Allow-Headers "*" 14 | # header Access-Control-Allow-Origin "*" 15 | # reverse_proxy * localhost:8000 16 | #} 17 | 18 | :80 { 19 | root * /site/ 20 | file_server /json/ 21 | file_server /store/ 22 | header Access-Control-Allow-Methods "POST, GET, OPTIONS" 23 | header Access-Control-Allow-Headers "*" 24 | header Access-Control-Allow-Origin "*" 25 | reverse_proxy * server:8000 26 | reverse_proxy /stats grafana:3000 27 | } 28 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/profiles.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata_version": 1, 3 | "source_date_epoch": 1612136917, 4 | "target": "testtarget/testsubtarget", 5 | "version_code": "r15666-8019c54d8a", 6 | "version_number": "SNAPSHOT", 7 | "profiles": { 8 | "testprofile": { 9 | "supported_devices": [ 10 | "testprofile" 11 | ], 
12 | "image_prefix": "openwrt-testtarget-testsubtarget-testprofile", 13 | "images": [ 14 | { 15 | "name": "openwrt-testtarget-testsubtarget-testprofile-sysupgrade.bin", 16 | "sha256": "000", 17 | "type": "sysupgrade" 18 | } 19 | ], 20 | "titles": [ 21 | { 22 | "model": "Test1", 23 | "vendor": "The Test Comp" 24 | } 25 | ] 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /misc/squid.conf: -------------------------------------------------------------------------------- 1 | # Always revalidate cached content with the origin server 2 | refresh_pattern . 0 100% 0 refresh-ims 3 | 4 | # Set memory cache size 5 | cache_mem 5 GB 6 | # Allow caching of objects up to 200 MB 7 | maximum_object_size 200 MB 8 | # Spool larger than usual objects to disk only 9 | maximum_object_size_in_memory 100 MB 10 | # While permit serving larger objects bypassing cache 11 | reply_body_max_size 500 MB 12 | 13 | # Optimizing file hit rate in memory 14 | memory_replacement_policy heap GSDF 15 | # optimizing byte hit rate / network efficiency on disk 16 | cache_replacement_policy heap LFUDA 17 | 18 | # Set the disk cache size to 50GB 19 | # This is default cache location on Debian/Ubuntu package 20 | # Where squid will use 25% more disk space at worst 21 | cache_dir ufs /var/spool/squid 50000 16 256 22 | 23 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/openwrt-testtarget-testsubtarget-testprofile.info: -------------------------------------------------------------------------------- 1 | Current Target: "testtarget/testsubtarget" 2 | Current Revision: "r12647-cb44ab4f5d" 3 | Default Packages: base-files libc libgcc busybox dropbear mtd uci opkg netifd fstools uclient-fetch logd urandom-seed urngd kmod-gpio-button-hotplug swconfig kmod-ath9k uboot-envtools wpad-basic dnsmasq iptables 
ip6tables ppp ppp-mod-pppoe firewall odhcpd-ipv6only odhcp6c kmod-ipt-offload 4 | Available Profiles: 5 | 6 | Default: 7 | Default Profile (all drivers) 8 | Packages: iwinfo 9 | hasImageMetadata: 0 10 | 8dev_carambola2: 11 | 8devices Carambola2 12 | Packages: kmod-usb2 kmod-usb-chipidea2 13 | hasImageMetadata: 1 14 | SupportedDevices: 8dev,carambola2 carambola2 15 | testprofile: 16 | Testprofile 17 | Packages: kmod-usb2 kmod-usb-chipidea2 kmod-usb-storage -swconfig 18 | hasImageMetadata: 1 19 | SupportedDevices: testvendor,testprofile testprofile 20 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "asu" 3 | version = "0.0.0" 4 | description = "A firmware on demand server for OpenWrt based distributions" 5 | authors = ["Paul Spooren "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.11" 10 | fastapi = { extras = ["standard"], version = "^0.119.0" } 11 | pynacl = "^1.6.0" 12 | podman = "^5.6.0" 13 | redis = "^6.4.0" 14 | pydantic-settings = "^2.12.0" 15 | rq = "^2.6.0" 16 | uvicorn = "^0.37.0" 17 | fastapi-cache2 = "^0.2.2" 18 | httpx = "^0.28.1" 19 | hishel = "^0.1.3" 20 | 21 | [tool.poetry.group.dev.dependencies] 22 | pytest = "^8.4.2" 23 | ruff = "^0.14.9" 24 | coverage = "^7.13.0" 25 | isort = "^7.0.0" 26 | fakeredis = "^2.32.0" 27 | pytest-httpserver = "^1.1.3" 28 | 29 | 30 | [tool.coverage.run] 31 | dynamic_context = "test_function" 32 | 33 | [tool.coverage.report] 34 | precision = 1 35 | 36 | [tool.coverage.html] 37 | show_contexts = "true" 38 | title = "ASU Server Regression Test Coverage" 39 | 40 | 41 | [tool.pytest.ini_options] 42 | testpaths = [ 43 | "tests" 44 | ] 45 | 46 | [build-system] 47 | requires = ["poetry-core"] 48 | build-backend = "poetry.core.masonry.api" 49 | -------------------------------------------------------------------------------- 
/tests/ci/openwrt-one-24.10.0.json: -------------------------------------------------------------------------------- 1 | { 2 | "profile": "openwrt_one", 3 | "target": "mediatek/filogic", 4 | "packages": [ 5 | "base-files", 6 | "ca-bundle", 7 | "dnsmasq", 8 | "dropbear", 9 | "firewall4", 10 | "fitblk", 11 | "fstools", 12 | "kmod-crypto-hw-safexcel", 13 | "kmod-gpio-button-hotplug", 14 | "kmod-leds-gpio", 15 | "kmod-nft-offload", 16 | "kmod-phy-aquantia", 17 | "libc", 18 | "libgcc", 19 | "libustream-mbedtls", 20 | "logd", 21 | "mtd", 22 | "netifd", 23 | "nftables", 24 | "odhcp6c", 25 | "odhcpd-ipv6only", 26 | "opkg", 27 | "ppp", 28 | "ppp-mod-pppoe", 29 | "procd-ujail", 30 | "uboot-envtools", 31 | "uci", 32 | "uclient-fetch", 33 | "urandom-seed", 34 | "urngd", 35 | "wpad-basic-mbedtls", 36 | "kmod-mt7915e", 37 | "kmod-mt7981-firmware", 38 | "mt7981-wo-firmware", 39 | "kmod-rtc-pcf8563", 40 | "kmod-usb3", 41 | "kmod-nvme", 42 | "kmod-phy-airoha-en8811h", 43 | "luci" 44 | ], 45 | "defaults": "", 46 | "version_code": "r28427-6df0e3d02a", 47 | "version": "24.10.0", 48 | "diff_packages": true, 49 | "client": "ci" 50 | } 51 | -------------------------------------------------------------------------------- /tests/test_store.py: -------------------------------------------------------------------------------- 1 | from asu.config import settings 2 | 3 | # store_path = settings.public_path / "store" 4 | 5 | 6 | def test_store_content_type_img(client): 7 | (settings.public_path / "store").mkdir(parents=True, exist_ok=True) 8 | (settings.public_path / "store" / "test_store_content_type.img").touch() 9 | 10 | response = client.head("/store/test_store_content_type.img") 11 | 12 | assert response.status_code == 200 13 | 14 | headers = response.headers 15 | assert headers["Content-Type"] == "application/octet-stream" 16 | 17 | 18 | def test_store_content_type_imggz(client): 19 | (settings.public_path / "store").mkdir(parents=True, exist_ok=True) 20 | (settings.public_path / "store" / 
"test_store_content_type.img.gz").touch() 21 | 22 | response = client.head("/store/test_store_content_type.img.gz") 23 | 24 | assert response.status_code == 200 25 | 26 | headers = response.headers 27 | assert headers["Content-Type"] == "application/octet-stream" 28 | 29 | 30 | def test_store_file_missing(client): 31 | response = client.head("/store/test_store_file_missing.bin") 32 | 33 | assert response.status_code == 404 34 | 35 | headers = response.headers 36 | assert headers["Content-Type"] != "application/octet-stream" 37 | -------------------------------------------------------------------------------- /misc/setup.sh: -------------------------------------------------------------------------------- 1 | set -e 2 | 3 | # the inputs: 4 | TARGET="${TARGET:-x86/64}" 5 | VERSION_PATH="${VERSION_PATH:-snapshots}" 6 | UPSTREAM_URL="${UPSTREAM_URL:-http://downloads.openwrt.org}" 7 | 8 | # use "*.Linux-x86_64.*" to create the imagebuilder 9 | DOWNLOAD_FILE="imagebuilder-.*x86_64.tar.[xz|zst]" 10 | DOWNLOAD_PATH="$VERSION_PATH/targets/$TARGET" 11 | 12 | curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=gpg/626471F1.asc' | gpg --import \ 13 | && gpg --fingerprint --with-colons '' | grep '^fpr:::::::::54CC74307A2C6DC9CE618269CD84BCED626471F1:$' \ 14 | && echo '54CC74307A2C6DC9CE618269CD84BCED626471F1:6:' | gpg --import-ownertrust 15 | 16 | wget "$UPSTREAM_URL/$DOWNLOAD_PATH/sha256sums" -O sha256sums 17 | wget "$UPSTREAM_URL/$DOWNLOAD_PATH/sha256sums.asc" -O sha256sums.asc 18 | 19 | gpg --with-fingerprint --verify sha256sums.asc sha256sums 20 | 21 | # determine archive name 22 | file_name="$(grep "$DOWNLOAD_FILE" sha256sums | cut -d "*" -f 2)" 23 | 24 | # download imagebuilder/sdk archive 25 | wget "$UPSTREAM_URL/$DOWNLOAD_PATH/$file_name" 26 | 27 | # shrink checksum file to single desired file and verify downloaded archive 28 | grep "$file_name" sha256sums > sha256sums_min 29 | cat sha256sums_min 30 | sha256sum -c sha256sums_min 31 | 32 | # cleanup 33 | rm -vrf 
sha256sums{,_min,.asc} keys/ 34 | 35 | tar xf "$file_name" --strip=1 --no-same-owner -C . 36 | rm -vrf "$file_name" 37 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/packages/testarch/base/Packages: -------------------------------------------------------------------------------- 1 | Package: base-files 2 | Version: 214-r12467-5ea1b1ecd1 3 | Depends: libc, netifd, procd, jsonfilter, usign, openwrt-keyring, fstools, fwtool 4 | License: GPL-2.0 5 | Section: base 6 | Architecture: mips_mips32 7 | Installed-Size: 45996 8 | Filename: base-files_214-r12467-5ea1b1ecd1_mips_mips32.ipk 9 | Size: 46979 10 | SHA256sum:81184dbc1753154f6420eb7f9f20b5cc50aab071d9ee65b51d91a14a6817b1e6 11 | Description: This package contains a base filesystem and system scripts for OpenWrt. 12 | 13 | Package: block-mount 14 | Version: 2020-01-21-deb745f8-1 15 | Depends: libc, ubox, libubox20191228, libuci20130104, libblobmsg-json, libjson-c4 16 | License: GPL-2.0 17 | Section: base 18 | Architecture: mips_mips32 19 | Installed-Size: 22892 20 | Filename: block-mount_2020-01-21-deb745f8-1_mips_mips32.ipk 21 | Size: 23512 22 | SHA256sum:5eaa1bb7e3df6c354fa8dec3c30e77d0e8dc0b601923eba22b5ae3319b868a8a 23 | Description: Block device mounting and checking 24 | 25 | Package: blockd 26 | Version: 2020-01-21-deb745f8-1 27 | Depends: libc, block-mount, fstools, libubus20191227, kmod-fs-autofs4, libblobmsg-json, libjson-c4 28 | License: GPL-2.0 29 | Section: base 30 | Architecture: mips_mips32 31 | Installed-Size: 5153 32 | Filename: blockd_2020-01-21-deb745f8-1_mips_mips32.ipk 33 | Size: 5919 34 | SHA256sum:4186637e16373c4485bcb5a8d70193dd398d5f74810e30c5268e75af3c6e0535 35 | Description: Block device automounting 36 | 37 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/packages/Packages: 
-------------------------------------------------------------------------------- 1 | Package: base-files 2 | Version: 214-r12467-5ea1b1ecd1 3 | Depends: libc, netifd, procd, jsonfilter, usign, openwrt-keyring, fstools, fwtool 4 | License: GPL-2.0 5 | Section: base 6 | Architecture: mips_mips32 7 | Installed-Size: 45996 8 | Filename: base-files_214-r12467-5ea1b1ecd1_mips_mips32.ipk 9 | Size: 46979 10 | SHA256sum:81184dbc1753154f6420eb7f9f20b5cc50aab071d9ee65b51d91a14a6817b1e6 11 | Description: This package contains a base filesystem and system scripts for OpenWrt. 12 | 13 | Package: block-mount 14 | Version: 2020-01-21-deb745f8-1 15 | Depends: libc, ubox, libubox20191228, libuci20130104, libblobmsg-json, libjson-c4 16 | License: GPL-2.0 17 | Section: base 18 | Architecture: mips_mips32 19 | Installed-Size: 22892 20 | Filename: block-mount_2020-01-21-deb745f8-1_mips_mips32.ipk 21 | Size: 23512 22 | SHA256sum:5eaa1bb7e3df6c354fa8dec3c30e77d0e8dc0b601923eba22b5ae3319b868a8a 23 | Description: Block device mounting and checking 24 | 25 | Package: blockd 26 | Version: 2020-01-21-deb745f8-1 27 | Depends: libc, block-mount, fstools, libubus20191227, kmod-fs-autofs4, libblobmsg-json, libjson-c4 28 | License: GPL-2.0 29 | Section: base 30 | Architecture: mips_mips32 31 | Installed-Size: 5153 32 | Filename: blockd_2020-01-21-deb745f8-1_mips_mips32.ipk 33 | Size: 5919 34 | SHA256sum:4186637e16373c4485bcb5a8d70193dd398d5f74810e30c5268e75af3c6e0535 35 | Description: Block device automounting 36 | 37 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test Pull Request 2 | 3 | concurrency: 4 | group: ${{ github.workflow }}-${{ github.ref }} 5 | cancel-in-progress: true 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | pull_request: 12 | branches: 13 | - main 14 | 15 | env: 16 | TERM: xterm-color 17 | PY_COLORS: 1 18 | LOG_LEVEL: 
DEBUG 19 | 20 | jobs: 21 | build: 22 | runs-on: ubuntu-24.04 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | python-version: 27 | - "3.11" 28 | - "3.12" 29 | 30 | name: Python ${{ matrix.python-version }} 31 | 32 | steps: 33 | - uses: actions/checkout@v4 34 | 35 | - uses: actions/setup-python@v1 36 | with: 37 | python-version: ${{ matrix.python-version }} 38 | 39 | - name: Display Python version 40 | run: python -c "import sys; print(sys.version)" 41 | 42 | - name: Install dependencies 43 | run: | 44 | python -m pip install --upgrade poetry 45 | poetry install 46 | 47 | - name: Lint with ruff 48 | run: | 49 | poetry run ruff check . 50 | 51 | - name: Stylecheck with ruff 52 | run: | 53 | poetry run ruff format --check . 54 | 55 | - name: Test with pytest 56 | run: | 57 | export CONTAINER_SOCKET_PATH="/tmp/podman.sock" 58 | podman system service --time=0 "unix://$CONTAINER_SOCKET_PATH" & 59 | poetry run coverage run -m pytest -vv --runslow 60 | poetry run coverage xml 61 | 62 | - name: Codecov 63 | uses: codecov/codecov-action@v4 64 | -------------------------------------------------------------------------------- /misc/stats_modernize.py: -------------------------------------------------------------------------------- 1 | from asu.util import get_redis_ts 2 | 3 | stat_types = ( 4 | "cache-hits", 5 | "cache-misses", 6 | "failures", 7 | "requests", 8 | "successes", 9 | ) 10 | 11 | ts = get_redis_ts() 12 | rc = ts.client 13 | 14 | converted = rc.exists("stats:build:requests") 15 | force = False 16 | if converted and not force: 17 | print("Already converted =====================") 18 | else: 19 | print("Converting ============================") 20 | 21 | if rc.exists("stats:cache-misses"): 22 | # Note: "rename" overwrites any existing destination... 23 | rc.rename("stats:cache-misses", "stats:build:cache-misses") 24 | if rc.exists("stats:cache-hits"): 25 | # Old stats are completely incorrect, so just delete them. 
26 | rc.delete("stats:cache-hits") 27 | 28 | for stat_type in stat_types: 29 | key = f"stats:build:{stat_type}" 30 | func = ts.alter if rc.exists(key) else ts.create 31 | func(key, labels={"stats": "summary"}, duplicate_policy="sum") 32 | 33 | # Attempt to repopulate total requests and success values as 34 | # accurately as possible using existing stats:builds:* data. 35 | ts.delete("stats:build:requests", "-", "+") # Empty them out. 36 | ts.delete("stats:build:successes", "-", "+") 37 | all_builds = ts.mrange("-", "+", filters=["stats=builds"]) 38 | for build in all_builds: 39 | _, data = build.popitem() 40 | series = data[1] 41 | for stamp, value in series: 42 | ts.add("stats:build:requests", timestamp=stamp, value=value) 43 | ts.add("stats:build:successes", timestamp=stamp, value=value) 44 | 45 | for stat_type in stat_types: 46 | key = f"stats:build:{stat_type}" 47 | print(f"{key:<25} - {ts.info(key).total_samples} samples") 48 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/packages/Packages.manifest: -------------------------------------------------------------------------------- 1 | Package: base-files 2 | Version: 1403-r15666-8019c54d8a 3 | Depends: libc, netifd, jsonfilter, usign, openwrt-keyring, fstools, fwtool 4 | Source: package/base-files 5 | SourceName: base-files 6 | License: GPL-2.0 7 | Section: base 8 | SourceDateEpoch: 1611955596 9 | Architecture: x86_64 10 | Installed-Size: 43933 11 | Filename: base-files_1403-r15666-8019c54d8a_x86_64.ipk 12 | Size: 44961 13 | SHA256sum: 9f980084a0ee1337f63c0fe30ea9d76e0fc7596dc57ceafa295f2a80d743f78e 14 | Description: This package contains a base filesystem and system scripts for OpenWrt. 
15 | 16 | Package: block-mount 17 | Version: 2021-01-04-c53b1882-1 18 | Depends: libc, ubox, libubox20201212, libuci20130104, libblobmsg-json, libjson-c5 19 | Source: package/system/fstools 20 | SourceName: block-mount 21 | License: GPL-2.0 22 | Section: base 23 | SourceDateEpoch: 1609794455 24 | Maintainer: John Crispin 25 | Architecture: x86_64 26 | Installed-Size: 22438 27 | Filename: block-mount_2021-01-04-c53b1882-1_x86_64.ipk 28 | Size: 23214 29 | SHA256sum: 9f5e1d69aeb95e3a501f0e6afc134f902d19d8088ecf440762219293babe45ac 30 | Description: Block device mounting and checking 31 | 32 | Package: blockd 33 | Version: 2021-01-04-c53b1882-1 34 | Depends: libc, block-mount, fstools, libubus20191227, kmod-fs-autofs4, libblobmsg-json, libjson-c5 35 | Source: package/system/fstools 36 | SourceName: blockd 37 | License: GPL-2.0 38 | Section: base 39 | SourceDateEpoch: 1609794455 40 | Maintainer: John Crispin 41 | Architecture: x86_64 42 | Installed-Size: 5810 43 | Filename: blockd_2021-01-04-c53b1882-1_x86_64.ipk 44 | Size: 6596 45 | SHA256sum: e170bfe5c99cad6c23bd289aba0ff7fff700c4d1a550851afa1080f5578926f5 46 | Description: Block device automounting 47 | 48 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/packages/testarch/base/Packages.manifest: -------------------------------------------------------------------------------- 1 | Package: 464xlat 2 | Version: 12 3 | Depends: libc, kmod-nat46, ip 4 | Source: feeds/base/package/network/ipv6/464xlat 5 | SourceName: 464xlat 6 | License: GPL-2.0 7 | Section: net 8 | SourceDateEpoch: 1594830836 9 | Maintainer: Hans Dedecker 10 | Architecture: x86_64 11 | Installed-Size: 4120 12 | Filename: 464xlat_12_x86_64.ipk 13 | Size: 4940 14 | SHA256sum: ee063ac6f151ed6524ca39c6a3de410b4b656037add5cc963108fe053da7b51b 15 | Description: 464xlat provides support to deploy limited IPv4 access services to mobile 16 | and wireline IPv6-only edge networks without encapsulation 
(RFC6877) 17 | 18 | Package: 6in4 19 | Version: 26 20 | Depends: libc, kmod-sit, uclient-fetch 21 | Source: feeds/base/package/network/ipv6/6in4 22 | SourceName: 6in4 23 | License: GPL-2.0 24 | Section: net 25 | SourceDateEpoch: 1594830836 26 | Maintainer: Jo-Philipp Wich 27 | Architecture: all 28 | Installed-Size: 1698 29 | Filename: 6in4_26_all.ipk 30 | Size: 2515 31 | SHA256sum: 019edf72aa30f83fb8cc8acd2655074c546be15090ab794b27aeedd1180c202f 32 | Description: Provides support for 6in4 tunnels in /etc/config/network. 33 | Refer to http://wiki.openwrt.org/doc/uci/network for 34 | configuration details. 35 | 36 | Package: 6rd 37 | Version: 10 38 | Depends: libc, kmod-sit 39 | Source: feeds/base/package/network/ipv6/6rd 40 | SourceName: 6rd 41 | License: GPL-2.0 42 | Section: net 43 | SourceDateEpoch: 1594830836 44 | Maintainer: Steven Barth 45 | Architecture: all 46 | Installed-Size: 2904 47 | Filename: 6rd_10_all.ipk 48 | Size: 3704 49 | SHA256sum: 5a4165edfd033ae363aeffde7d67ca7c0a6f3e5e0f0898a24ad2284b13e738c3 50 | Description: Provides support for 6rd tunnels in /etc/config/network. 51 | Refer to http://wiki.openwrt.org/doc/uci/network for 52 | configuration details. 
53 | 54 | -------------------------------------------------------------------------------- /.github/workflows/podman.yml: -------------------------------------------------------------------------------- 1 | name: Test podman-compose.yml 2 | 3 | concurrency: 4 | group: ${{ github.workflow }}-${{ github.ref }} 5 | cancel-in-progress: true 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | pull_request: 12 | branches: 13 | - main 14 | 15 | env: 16 | TERM: xterm-color 17 | PY_COLORS: 1 18 | LOG_LEVEL: DEBUG 19 | 20 | jobs: 21 | build: 22 | runs-on: ubuntu-latest 23 | 24 | name: podman-compose integration test 25 | 26 | steps: 27 | - uses: actions/checkout@v4 28 | 29 | - name: Install ImageBuilder prereqs 30 | run: sudo apt-get install -y podman-compose jq 31 | 32 | - name: Build the containers 33 | run: | 34 | podman-compose build 35 | 36 | - name: Start the containers 37 | run: | 38 | export CONTAINER_SOCKET_PATH="/tmp/podman.sock" 39 | podman system service --time=0 "unix://$CONTAINER_SOCKET_PATH" & 40 | echo "PUBLIC_PATH=$(pwd)/public" > .env 41 | echo "CONTAINER_SOCKET_PATH=$CONTAINER_SOCKET_PATH" >> .env 42 | podman-compose up -d 43 | 44 | - name: Let the containers start 45 | run: sleep 30 46 | 47 | - name: Test startup 48 | run: | 49 | curl -s http://localhost:8000/api/v1/stats | tee response.json | jq 50 | [ "$(jq -r '.queue_length' response.json)" -eq 0 ] || exit 1 51 | 52 | - name: Test build 53 | run: | 54 | for i in {1..20}; do 55 | curl 'http://localhost:8000/api/v1/build' \ 56 | --request 'POST' \ 57 | --header 'Content-Type: application/json' \ 58 | --data @tests/ci/openwrt-one-24.10.0.json | tee response.json | jq 59 | if [ "$(jq -r '.status' response.json)" -eq 200 ]; then 60 | break 61 | fi 62 | if [ $i -eq 20 ]; then 63 | exit 1 64 | fi 65 | sleep 10 66 | done 67 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: 
-------------------------------------------------------------------------------- 1 | name: Upload Python Package and Docker Container 2 | permissions: 3 | contents: read 4 | 5 | on: 6 | push: 7 | branches: 8 | - main 9 | release: 10 | types: 11 | - created 12 | 13 | jobs: 14 | deploy: 15 | runs-on: ubuntu-24.04 16 | steps: 17 | - uses: actions/checkout@v4 18 | with: 19 | fetch-depth: 0 20 | 21 | - name: Set up Python 22 | uses: actions/setup-python@v2 23 | with: 24 | python-version: "3.x" 25 | 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install poetry 30 | 31 | - name: Set __version__ and poetry version 32 | run: | 33 | TAG="$(git describe --tags --always | awk -F"-" '{if (NF>1) {print substr($1, 2)".post"$2} else {print substr($1, 2)}}')" 34 | echo "__version__ = \"$TAG\"" > asu/__init__.py 35 | poetry version "$TAG" 36 | 37 | - name: Build and publish PyPi package 38 | if: github.event_name == 'release' 39 | run: | 40 | poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }} 41 | poetry publish --build 42 | 43 | - name: Docker meta 44 | id: meta 45 | uses: docker/metadata-action@v5 46 | with: 47 | images: openwrt/asu 48 | tags: | 49 | type=semver,pattern={{version}} 50 | latest 51 | 52 | - name: Login to DockerHub 53 | if: github.repository_owner == 'openwrt' 54 | uses: docker/login-action@v3 55 | with: 56 | username: ${{ secrets.DOCKER_USERNAME }} 57 | password: ${{ secrets.DOCKER_PASSWORD }} 58 | 59 | - name: Build and push ASU to Docker Hub 60 | uses: docker/build-push-action@v5 61 | with: 62 | context: . 
63 | file: Containerfile 64 | push: ${{ github.repository_owner == 'openwrt' }} 65 | tags: ${{ steps.meta.outputs.tags }} 66 | labels: ${{ steps.meta.outputs.labels }} 67 | -------------------------------------------------------------------------------- /tests/upstream/releases/23.05.5/targets/ath79/generic/profiles.json: -------------------------------------------------------------------------------- 1 | { 2 | "arch_packages": "mips_24kc", 3 | "default_packages": [ 4 | "base-files", 5 | "busybox", 6 | "ca-bundle", 7 | "dnsmasq", 8 | "dropbear", 9 | "firewall4", 10 | "fstools", 11 | "kmod-ath9k", 12 | "kmod-gpio-button-hotplug", 13 | "kmod-nft-offload", 14 | "libc", 15 | "libgcc", 16 | "libustream-mbedtls", 17 | "logd", 18 | "mtd", 19 | "netifd", 20 | "nftables", 21 | "odhcp6c", 22 | "odhcpd-ipv6only", 23 | "opkg", 24 | "ppp", 25 | "ppp-mod-pppoe", 26 | "procd", 27 | "procd-seccomp", 28 | "procd-ujail", 29 | "swconfig", 30 | "uboot-envtools", 31 | "uci", 32 | "uclient-fetch", 33 | "urandom-seed", 34 | "urngd", 35 | "wpad-basic-mbedtls" 36 | ], 37 | "metadata_version": 1, 38 | "profiles": { 39 | "8dev_carambola2": { 40 | "device_packages": ["kmod-usb-chipidea2"], 41 | "image_prefix": "openwrt-23.05.5-ath79-generic-8dev_carambola2", 42 | "images": [ 43 | { 44 | "filesystem": "squashfs", 45 | "name": "openwrt-23.05.5-ath79-generic-8dev_carambola2-squashfs-sysupgrade.bin", 46 | "sha256": "08c9eb2d6998fd6faf9ee0b1e9415d2aca20d00c2236e99c51bb0a9dff2056fc", 47 | "sha256_unsigned": "12d6b21303a784468aecf611d19b46abc69557510c329531080351f1fd777c73", 48 | "type": "sysupgrade" 49 | }, 50 | { 51 | "filesystem": "initramfs", 52 | "name": "openwrt-23.05.5-ath79-generic-8dev_carambola2-initramfs-kernel.bin", 53 | "sha256": "737643f972ed8eed3101c00fed490f14659af7c4ff910a63fe1e614e1b359eb5", 54 | "sha256_unsigned": "737643f972ed8eed3101c00fed490f14659af7c4ff910a63fe1e614e1b359eb5", 55 | "type": "kernel" 56 | } 57 | ], 58 | "supported_devices": ["8dev,carambola2", "carambola2"], 59 
| "titles": [{ "model": "Carambola2", "vendor": "8devices" }] 60 | } 61 | }, 62 | "source_date_epoch": 1727094886, 63 | "target": "ath79/generic", 64 | "version_code": "r24106-10cc5fcd00", 65 | "version_number": "23.05.5" 66 | } 67 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/ath79/generic/profiles.json: -------------------------------------------------------------------------------- 1 | { 2 | "arch_packages": "mips_24kc", 3 | "default_packages": [ 4 | "apk-mbedtls", 5 | "base-files", 6 | "ca-bundle", 7 | "dnsmasq", 8 | "dropbear", 9 | "firewall4", 10 | "fstools", 11 | "kmod-ath9k", 12 | "kmod-gpio-button-hotplug", 13 | "kmod-nft-offload", 14 | "libc", 15 | "libgcc", 16 | "libustream-mbedtls", 17 | "logd", 18 | "mtd", 19 | "netifd", 20 | "nftables", 21 | "odhcp6c", 22 | "odhcpd-ipv6only", 23 | "ppp", 24 | "ppp-mod-pppoe", 25 | "procd-ujail", 26 | "swconfig", 27 | "uboot-envtools", 28 | "uci", 29 | "uclient-fetch", 30 | "urandom-seed", 31 | "urngd", 32 | "wpad-basic-mbedtls" 33 | ], 34 | "linux_kernel": { 35 | "release": "1", 36 | "vermagic": "75d549fb5cfabb78b8a91bec585e5ecb", 37 | "version": "6.6.74" 38 | }, 39 | "metadata_version": 1, 40 | "profiles": { 41 | "8dev_carambola2": { 42 | "device_packages": ["kmod-usb-chipidea2"], 43 | "image_prefix": "openwrt-ath79-generic-8dev_carambola2", 44 | "images": [ 45 | { 46 | "filesystem": "initramfs", 47 | "name": "openwrt-ath79-generic-8dev_carambola2-initramfs-kernel.bin", 48 | "sha256": "c5a6f09645091430e4a273cc6ebe9d237abc4bf547b9c491c6733d2551dc770b", 49 | "sha256_unsigned": "c5a6f09645091430e4a273cc6ebe9d237abc4bf547b9c491c6733d2551dc770b", 50 | "type": "kernel" 51 | }, 52 | { 53 | "filesystem": "squashfs", 54 | "name": "openwrt-ath79-generic-8dev_carambola2-squashfs-sysupgrade.bin", 55 | "sha256": "41a74f28d1cf20a25812822417651604699d33bdcf09998d4e459a4603e41fb6", 56 | "sha256_unsigned": 
"31f29cf6a7573c63f0cdf5c1d220afe7dd6cf7f40dc46949f604198d8694dabb", 57 | "type": "sysupgrade" 58 | } 59 | ], 60 | "supported_devices": ["8dev,carambola2", "carambola2"], 61 | "titles": [{ "model": "Carambola2", "vendor": "8devices" }] 62 | } 63 | }, 64 | "source_date_epoch": 1737902564, 65 | "target": "ath79/generic", 66 | "version_code": "r28695-22d1e6cd26", 67 | "version_number": "SNAPSHOT" 68 | } 69 | -------------------------------------------------------------------------------- /misc/nginx.conf: -------------------------------------------------------------------------------- 1 | # based on https://docs.gunicorn.org/en/stable/deploy.html#nginx-configuration 2 | 3 | worker_processes 1; 4 | 5 | user nobody nogroup; 6 | # 'user nobody nobody;' for systems with 'nobody' as a group instead 7 | error_log /var/log/nginx/error.log warn; 8 | pid /var/run/nginx.pid; 9 | 10 | events { 11 | worker_connections 1024; # increase if you have lots of clients 12 | accept_mutex off; # set to 'on' if nginx worker_processes > 1 13 | # 'use epoll;' to enable for Linux 2.6+ 14 | # 'use kqueue;' to enable for FreeBSD, OSX 15 | } 16 | 17 | http { 18 | include mime.types; 19 | # fallback in case we can't determine a type 20 | default_type application/octet-stream; 21 | access_log /var/log/nginx/access.log combined; 22 | sendfile on; 23 | 24 | upstream app_server { 25 | # fail_timeout=0 means we always retry an upstream even if it failed 26 | # to return a good HTTP response 27 | server localhost:8000 fail_timeout=0; 28 | } 29 | 30 | server { 31 | # if no Host match, close the connection to prevent host spoofing 32 | listen 80 default_server; 33 | return 444; 34 | } 35 | 36 | server { 37 | # use 'listen 80 deferred;' for Linux 38 | # use 'listen 80 accept_filter=httpready;' for FreeBSD 39 | listen 80; 40 | client_max_body_size 4G; 41 | 42 | # set the correct host(s) for your site 43 | server_name chef.libremesh.org; 44 | 45 | keepalive_timeout 5; 46 | 47 | # path for static files 48 | 
#root /home/aparcar/asu/var/asu-instance/public; 49 | 50 | location / { 51 | alias /var/cache/asu/public/; 52 | autoindex on; 53 | autoindex_exact_size off; 54 | } 55 | 56 | location /api { 57 | # checks for static file, if not found proxy to app 58 | try_files $uri @proxy_to_app; 59 | } 60 | 61 | location @proxy_to_app { 62 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 63 | proxy_set_header X-Forwarded-Proto $scheme; 64 | proxy_set_header Host $http_host; 65 | # we don't want nginx trying to do something clever with 66 | # redirects, we set the Host: header above already. 67 | proxy_redirect off; 68 | proxy_pass http://app_server; 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /podman-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | server: 3 | image: "docker.io/openwrt/asu:latest" 4 | build: 5 | context: . 6 | dockerfile: Containerfile 7 | restart: unless-stopped 8 | command: uvicorn --host 0.0.0.0 asu.main:app 9 | env_file: .env 10 | environment: 11 | REDIS_URL: "redis://redis:6379/0" 12 | volumes: 13 | - $PUBLIC_PATH/store:$PUBLIC_PATH/store:ro 14 | ports: 15 | - "127.0.0.1:8000:8000" 16 | depends_on: 17 | - redis 18 | 19 | worker: 20 | image: "docker.io/openwrt/asu:latest" 21 | build: 22 | context: . 
23 | dockerfile: Containerfile 24 | restart: unless-stopped 25 | command: rqworker --logging_level INFO 26 | env_file: .env 27 | environment: 28 | REDIS_URL: "redis://redis:6379/0" 29 | volumes: 30 | - $PUBLIC_PATH:$PUBLIC_PATH:rw 31 | - $CONTAINER_SOCKET_PATH:$CONTAINER_SOCKET_PATH:rw 32 | depends_on: 33 | - redis 34 | 35 | redis: 36 | image: "docker.io/redis/redis-stack-server" 37 | restart: unless-stopped 38 | volumes: 39 | - ./redis-data:/data/:rw 40 | ports: 41 | - "127.0.0.1:6379:6379" 42 | 43 | # Optionally add more workers 44 | # worker2: 45 | # image: "docker.io/openwrt/asu:latest" 46 | # restart: unless-stopped 47 | # command: rqworker --logging_level INFO 48 | # env_file: .env 49 | # environment: 50 | # REDIS_URL: "redis://redis:6379/0" 51 | # volumes: 52 | # - $PUBLIC_PATH:$PUBLIC_PATH:rw 53 | # - $CONTAINER_SOCKET_PATH:$CONTAINER_SOCKET_PATH:rw 54 | # depends_on: 55 | # - redis 56 | # 57 | # Optionally add a Squid cache container when using `SQUID_CACHE` 58 | # squid: 59 | # image: "docker.io/ubuntu/squid:latest" 60 | # restart: unless-stopped 61 | # ports: 62 | # - "127.0.0.1:3128:3128" 63 | # volumes: 64 | # - ".squid.conf:/etc/squid/conf.d/snippet.conf:ro" 65 | # - "./squid-data/:/var/spool/squid/:rw" 66 | 67 | # Optionally add a Grafana container when using `SERVER_STATS` 68 | # grafana: 69 | # image: docker.io/grafana/grafana-oss 70 | # container_name: grafana 71 | # restart: unless-stopped 72 | # ports: 73 | # - "127.0.0.1:3000:3000" 74 | # depends_on: 75 | # - redis 76 | # environment: 77 | # GF_SERVER_DOMAIN: sysupgrade.openwrt.org 78 | # GF_SERVER_ROOT_URL: https://sysupgrade.openwrt.org/stats/ 79 | # GF_SERVER_SERVE_FROM_SUB_PATH: "true" 80 | # volumes: 81 | # - ./grafana-data:/var/lib/grafana 82 | -------------------------------------------------------------------------------- /asu/package_changes.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from asu.build_request import 
log = logging.getLogger("rq.worker")


# Language pack replacements are done generically on a per-version basis.
# Note that the version comparison below applies to all versions the same
# or newer, so for example "24.10" applies to snapshots, too.
language_packs = {
    "24.10": {
        "luci-i18n-opkg-": "luci-i18n-package-manager-",
    },
}


def apply_package_changes(build_request: "BuildRequest"):
    """
    Apply version/target specific package changes to the request, in place.

    Mutates ``build_request.packages`` directly: adds target-specific
    kmods on 23.05, swaps ``auc`` for ``owut`` on 24.10, and renames
    language packs per ``language_packs``.

    Args:
        build_request (BuildRequest): The image request to adjust.
    """

    def _add_if_missing(package):
        # Append only when absent so repeated application stays idempotent.
        if package not in build_request.packages:
            build_request.packages.append(package)
            log.debug(f"Added {package} to packages")

    # 23.05 specific changes
    if build_request.version.startswith("23.05"):
        # mediatek/mt7622 needs its firmware package selected explicitly
        if build_request.target == "mediatek/mt7622":
            _add_if_missing("kmod-mt7622-firmware")

        # ath79/generic devices with an RTL8366 switch need the driver kmod
        elif build_request.target == "ath79/generic":
            if build_request.profile in {
                "buffalo_wzr-hp-g300nh-s",
                "dlink_dir-825-b1",
                "netgear_wndr3700",
                "netgear_wndr3700-v2",
                "netgear_wndr3800",
                "netgear_wndr3800ch",
                "netgear_wndrmac-v1",
                "netgear_wndrmac-v2",
                "trendnet_tew-673gru",
            }:
                _add_if_missing("kmod-switch-rtl8366s")

            elif build_request.profile == "buffalo_wzr-hp-g300nh-rb":
                _add_if_missing("kmod-switch-rtl8366rb")

    if build_request.version.startswith("24.10"):
        # `auc` no longer exists here; `owut` is its replacement
        if "auc" in build_request.packages:
            build_request.packages.remove("auc")
            _add_if_missing("owut")

    # TODO: if we ever fully implement 'packages_versions', this needs rework
    for version, packages in language_packs.items():
        # Plain lexicographic comparison: both later releases and
        # "SNAPSHOT" compare greater than the key, so snapshots are
        # covered as intended.
        if build_request.version >= version:  # Includes snapshots
            for i, package in enumerate(build_request.packages):
                for old, new in packages.items():
                    if package.startswith(old):
                        # removeprefix strips exactly the leading marker,
                        # keeping only the language suffix (e.g. "zh-cn").
                        lang = package.removeprefix(old)
                        build_request.packages[i] = f"{new}{lang}"
"f1ab9b40193df4bacfc41d2724b35790013074628c44e5dfe83ef683ae465db8", 69 | "type": "combined-efi" 70 | }, 71 | { 72 | "filesystem": "ext4", 73 | "name": "openwrt-x86-64-generic-ext4-combined-efi.img.gz", 74 | "sha256": "b00a557dce04c0679001c764099fac620acd56c6a245c1a218a6df5c02bdd0ed", 75 | "type": "combined-efi" 76 | }, 77 | { 78 | "filesystem": "ext4", 79 | "name": "openwrt-x86-64-generic-ext4-combined.img.gz", 80 | "sha256": "cd4aadeadbce6ea3209cbea6bc066850a7cf526c63ae137d882601665dd3fc4e", 81 | "type": "combined" 82 | }, 83 | { 84 | "filesystem": "squashfs", 85 | "name": "openwrt-x86-64-generic-squashfs-combined.img.gz", 86 | "sha256": "43b7b5233b096d794fd3a52b4ac0e352e690fd9b4cd45a29efc828973e2c36b3", 87 | "type": "combined" 88 | } 89 | ], 90 | "supported_devices": [], 91 | "titles": [ 92 | { 93 | "title": "Generic x86/64" 94 | } 95 | ] 96 | } 97 | }, 98 | "source_date_epoch": "1612136917", 99 | "target": "testtarget/testsubtarget", 100 | "version_code": "r15666-8019c54d8a", 101 | "version_number": "SNAPSHOT" 102 | } 103 | -------------------------------------------------------------------------------- /tests/upstream/releases/23.05.5/.targets.json: -------------------------------------------------------------------------------- 1 | { 2 | "kirkwood/generic": "arm_xscale", 3 | "apm821xx/sata": "powerpc_464fp", 4 | "apm821xx/nand": "powerpc_464fp", 5 | "realtek/rtl838x": "mips_4kec", 6 | "realtek/rtl839x": "mips_24kc", 7 | "realtek/rtl930x": "mips_24kc", 8 | "mpc85xx/p2020": "powerpc_8548", 9 | "mpc85xx/p1010": "powerpc_8548", 10 | "mpc85xx/p1020": "powerpc_8548", 11 | "bcm4908/generic": "aarch64_cortex-a53", 12 | "octeon/generic": "mips64_octeonplus", 13 | "sifiveu/generic": "riscv64_riscv64", 14 | "pistachio/generic": "mipsel_24kc_24kf", 15 | "tegra/generic": "arm_cortex-a9_vfpv3-d16", 16 | "imx/cortexa7": "arm_cortex-a7_neon-vfpv4", 17 | "imx/cortexa9": "arm_cortex-a9_neon", 18 | "ramips/mt7621": "mipsel_24kc", 19 | "ramips/rt3883": "mipsel_74kc", 20 | 
"ramips/mt76x8": "mipsel_24kc", 21 | "ramips/mt7620": "mipsel_24kc", 22 | "ramips/rt305x": "mipsel_24kc", 23 | "ath79/mikrotik": "mips_24kc", 24 | "ath79/generic": "mips_24kc", 25 | "ath79/tiny": "mips_24kc", 26 | "ath79/nand": "mips_24kc", 27 | "bcm27xx/bcm2711": "aarch64_cortex-a72", 28 | "bcm27xx/bcm2710": "aarch64_cortex-a53", 29 | "bcm27xx/bcm2709": "arm_cortex-a7_neon-vfpv4", 30 | "bcm27xx/bcm2708": "arm_arm1176jzf-s_vfp", 31 | "bmips/bcm6318": "mips_mips32", 32 | "bmips/bcm6362": "mips_mips32", 33 | "bmips/bcm63268": "mips_mips32", 34 | "bmips/bcm6368": "mips_mips32", 35 | "bmips/bcm6358": "mips_mips32", 36 | "bmips/bcm6328": "mips_mips32", 37 | "layerscape/armv7": "arm_cortex-a7_neon-vfpv4", 38 | "layerscape/armv8_64b": "aarch64_generic", 39 | "bcm47xx/generic": "mipsel_mips32", 40 | "bcm47xx/legacy": "mipsel_mips32", 41 | "bcm47xx/mips74k": "mipsel_74kc", 42 | "mvebu/cortexa53": "aarch64_cortex-a53", 43 | "mvebu/cortexa72": "aarch64_cortex-a72", 44 | "mvebu/cortexa9": "arm_cortex-a9_vfpv3-d16", 45 | "lantiq/xway": "mips_24kc", 46 | "lantiq/xrx200": "mips_24kc", 47 | "sunxi/cortexa8": "arm_cortex-a8_vfpv3", 48 | "sunxi/cortexa53": "aarch64_cortex-a53", 49 | "sunxi/cortexa7": "arm_cortex-a7_neon-vfpv4", 50 | "at91/sama7": "arm_cortex-a7_vfpv4", 51 | "at91/sam9x": "arm_arm926ej-s", 52 | "at91/sama5": "arm_cortex-a5_vfpv4", 53 | "armsr/armv8": "aarch64_generic", 54 | "armsr/armv7": "arm_cortex-a15_neon-vfpv4", 55 | "mxs/generic": "arm_arm926ej-s", 56 | "ipq40xx/mikrotik": "arm_cortex-a7_neon-vfpv4", 57 | "ipq40xx/generic": "arm_cortex-a7_neon-vfpv4", 58 | "ipq40xx/chromium": "arm_cortex-a7_neon-vfpv4", 59 | "bcm63xx/generic": "mips_mips32", 60 | "bcm63xx/smp": "mips_mips32", 61 | "mediatek/mt7623": "arm_cortex-a7_neon-vfpv4", 62 | "mediatek/mt7629": "arm_cortex-a7", 63 | "mediatek/mt7622": "aarch64_cortex-a53", 64 | "mediatek/filogic": "aarch64_cortex-a53", 65 | "x86/generic": "i386_pentium4", 66 | "x86/64": "x86_64", 67 | "x86/legacy": "i386_pentium-mmx", 68 
| "x86/geode": "i386_pentium-mmx", 69 | "ipq807x/generic": "aarch64_cortex-a53", 70 | "zynq/generic": "arm_cortex-a9_neon", 71 | "rockchip/armv8": "aarch64_generic", 72 | "oxnas/ox820": "arm_mpcore", 73 | "gemini/generic": "arm_fa526", 74 | "ipq806x/generic": "arm_cortex-a15_neon-vfpv4", 75 | "ipq806x/chromium": "arm_cortex-a15_neon-vfpv4", 76 | "bcm53xx/generic": "arm_cortex-a9" 77 | } 78 | -------------------------------------------------------------------------------- /tests/upstream/releases/1.2.3/targets/testtarget/testsubtarget/profiles.json: -------------------------------------------------------------------------------- 1 | { 2 | "arch_packages": "testarch", 3 | "default_packages": [ 4 | "base-files", 5 | "ca-bundle", 6 | "dropbear", 7 | "fstools", 8 | "libc", 9 | "libgcc", 10 | "libustream-wolfssl", 11 | "logd", 12 | "mtd", 13 | "netifd", 14 | "opkg", 15 | "uci", 16 | "uclient-fetch", 17 | "urandom-seed", 18 | "urngd", 19 | "busybox", 20 | "procd", 21 | "partx-utils", 22 | "mkf2fs", 23 | "e2fsprogs", 24 | "kmod-button-hotplug", 25 | "dnsmasq", 26 | "firewall", 27 | "ip6tables", 28 | "iptables", 29 | "kmod-ipt-offload", 30 | "odhcp6c", 31 | "odhcpd-ipv6only", 32 | "ppp", 33 | "ppp-mod-pppoe", 34 | "partx-utils", 35 | "mkf2fs", 36 | "e2fsprogs", 37 | "kmod-button-hotplug" 38 | ], 39 | "metadata_version": 1, 40 | "profiles": { 41 | "testprofile": { 42 | "device_packages": [ 43 | "kmod-bnx2", 44 | "kmod-e1000e", 45 | "kmod-e1000", 46 | "kmod-forcedeth", 47 | "kmod-igb", 48 | "kmod-ixgbe", 49 | "kmod-r8169" 50 | ], 51 | "image_prefix": "openwrt-testtarget-testsubtarget-testprofile", 52 | "images": [ 53 | { 54 | "filesystem": "squashfs", 55 | "name": "openwrt-testtarget-testsubtarget-testprofile-squashfs-rootfs.img.gz", 56 | "sha256": "dad02f19c22a0f95a5ea8af6203f9be0582b7ad92ae261f5851c471a494c8163", 57 | "type": "rootfs" 58 | }, 59 | { 60 | "filesystem": "ext4", 61 | "name": "openwrt-testtarget-testsubtarget-testprofile-ext4-rootfs.img.gz", 62 | "sha256": 
"4800c1df7d9207fcac97d0a175942dfd00de9317a1fcc275d0testsubtargetbf338b1ed1bf", 63 | "type": "rootfs" 64 | }, 65 | { 66 | "filesystem": "squashfs", 67 | "name": "openwrt-testtarget-testsubtarget-testprofile-squashfs-combined-efi.img.gz", 68 | "sha256": "f1ab9b40193df4bacfc41d2724b35790013074628c44e5dfe83ef683ae465db8", 69 | "type": "combined-efi" 70 | }, 71 | { 72 | "filesystem": "ext4", 73 | "name": "openwrt-testtarget-testsubtarget-testprofile-ext4-combined-efi.img.gz", 74 | "sha256": "b00a557dce04c0679001c7testsubtarget099fac620acd56c6a245c1a218a6df5c02bdd0ed", 75 | "type": "combined-efi" 76 | }, 77 | { 78 | "filesystem": "ext4", 79 | "name": "openwrt-testtarget-testsubtarget-testprofile-ext4-combined.img.gz", 80 | "sha256": "cd4aadeadbce6ea3209cbea6bc066850a7cf526c63ae137d882601665dd3fc4e", 81 | "type": "combined" 82 | }, 83 | { 84 | "filesystem": "squashfs", 85 | "name": "openwrt-testtarget-testsubtarget-testprofile-squashfs-combined.img.gz", 86 | "sha256": "43b7b5233b096d794fd3a52b4ac0e352e690fd9b4cd45a29efc828973e2c36b3", 87 | "type": "combined" 88 | } 89 | ], 90 | "supported_devices": [], 91 | "titles": [ 92 | { 93 | "title": "Not Real Device" 94 | } 95 | ] 96 | } 97 | }, 98 | "source_date_epoch": "1612136917", 99 | "target": "testtarget/testsubtarget", 100 | "version_code": "r15666-8019c54d8a", 101 | "version_number": "SNAPSHOT" 102 | } 103 | -------------------------------------------------------------------------------- /tests/upstream/releases/23.05.5/targets/x86/64/profiles.json: -------------------------------------------------------------------------------- 1 | { 2 | "arch_packages": "x86_64", 3 | "default_packages": [ 4 | "base-files", 5 | "busybox", 6 | "ca-bundle", 7 | "dnsmasq", 8 | "dropbear", 9 | "e2fsprogs", 10 | "firewall4", 11 | "fstools", 12 | "grub2-bios-setup", 13 | "kmod-button-hotplug", 14 | "kmod-nft-offload", 15 | "libc", 16 | "libgcc", 17 | "libustream-mbedtls", 18 | "logd", 19 | "mkf2fs", 20 | "mtd", 21 | "netifd", 22 | "nftables", 
23 | "odhcp6c", 24 | "odhcpd-ipv6only", 25 | "opkg", 26 | "partx-utils", 27 | "ppp", 28 | "ppp-mod-pppoe", 29 | "procd", 30 | "procd-seccomp", 31 | "procd-ujail", 32 | "uci", 33 | "uclient-fetch", 34 | "urandom-seed", 35 | "urngd" 36 | ], 37 | "metadata_version": 1, 38 | "profiles": { 39 | "generic": { 40 | "device_packages": [ 41 | "kmod-amazon-ena", 42 | "kmod-amd-xgbe", 43 | "kmod-bnx2", 44 | "kmod-e1000e", 45 | "kmod-e1000", 46 | "kmod-forcedeth", 47 | "kmod-fs-vfat", 48 | "kmod-igb", 49 | "kmod-igc", 50 | "kmod-ixgbe", 51 | "kmod-r8169", 52 | "kmod-tg3" 53 | ], 54 | "image_prefix": "openwrt-23.05.5-x86-64-generic", 55 | "images": [ 56 | { 57 | "filesystem": "ext4", 58 | "name": "openwrt-23.05.5-x86-64-generic-ext4-combined-efi.img.gz", 59 | "sha256": "7da4854ce867474708bf562e003ea3924fe880acb6522e019a9a155a9b4d53d0", 60 | "sha256_unsigned": "2c3bd32bb79288f56c0c0805eb11b83d48dc2debfe9dda4dc8151f6c793cf6bc", 61 | "type": "combined-efi" 62 | }, 63 | { 64 | "filesystem": "ext4", 65 | "name": "openwrt-23.05.5-x86-64-generic-ext4-combined.img.gz", 66 | "sha256": "f5c77659a33bd43cba105cf2f75e56d054fa9e0b9a73dc9f2a5bcb0e126ff7d5", 67 | "sha256_unsigned": "922f386c08cd0c60a79cdfa27d051dcc030008de2ce3fbb9f5235f9caddfe0d2", 68 | "type": "combined" 69 | }, 70 | { 71 | "filesystem": "squashfs", 72 | "name": "openwrt-23.05.5-x86-64-generic-squashfs-rootfs.img.gz", 73 | "sha256": "478601ab0f5176372e6e0079614240dd25049c74167572ca9bc1b91e9261fe17", 74 | "sha256_unsigned": "478601ab0f5176372e6e0079614240dd25049c74167572ca9bc1b91e9261fe17", 75 | "type": "rootfs" 76 | }, 77 | { 78 | "filesystem": "ext4", 79 | "name": "openwrt-23.05.5-x86-64-generic-ext4-rootfs.img.gz", 80 | "sha256": "3aaeaba100299bd71f10c461e4bcb32cf8cae76769e4ab4334db875009057f33", 81 | "sha256_unsigned": "3aaeaba100299bd71f10c461e4bcb32cf8cae76769e4ab4334db875009057f33", 82 | "type": "rootfs" 83 | }, 84 | { 85 | "filesystem": "squashfs", 86 | "name": 
"openwrt-23.05.5-x86-64-generic-squashfs-combined-efi.img.gz", 87 | "sha256": "e5300e3cb4c32029c3eec5cf8e85ac99222233a231b35e2175608badb10a7638", 88 | "sha256_unsigned": "cc310d331d3d341bd5606537c73a732989ccaf5bad4e8f7162f1b6b2749c91f5", 89 | "type": "combined-efi" 90 | }, 91 | { 92 | "filesystem": "squashfs", 93 | "name": "openwrt-23.05.5-x86-64-generic-squashfs-combined.img.gz", 94 | "sha256": "788a786a7c2692cce1ac8922b71c4030958cb8ddd7f6b507fbba79c757a04fdf", 95 | "sha256_unsigned": "fc3f45b8413d5848b61891c5564108018cef452fa73c4cc375466192e6f00661", 96 | "type": "combined" 97 | } 98 | ], 99 | "supported_devices": [], 100 | "titles": [{ "model": "x86/64", "vendor": "Generic" }] 101 | } 102 | }, 103 | "source_date_epoch": 1727094886, 104 | "target": "x86/64", 105 | "version_code": "r24106-10cc5fcd00", 106 | "version_number": "23.05.5" 107 | } 108 | -------------------------------------------------------------------------------- /asu/config.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Union 3 | 4 | from pydantic_settings import BaseSettings, SettingsConfigDict 5 | 6 | # Adding a new entry to `package_changes_list` requires determining 7 | # the revision at which the package appears, is removed or has been 8 | # renamed/replaced. To find the revision number: 9 | # 10 | # 1) Look up the date of the commit for the package change, 11 | # - either in the package repo itself (say, when auc was deleted); or 12 | # - in the openwrt repo where the package swap occurred (firewall4 13 | # in include/target.mk is a good example of this one). 14 | # 15 | # 2) Use 'scripts/getver.sh yyyy-mm-dd' in buildroot to get the revision. 
16 | # See https://github.com/openwrt/openwrt/commit/e56845fae3c0 17 | # 18 | # Clients should interpret the table as follows: 19 | # 20 | # rename/replace = if 'source' and 'target' both specified 21 | # added = if only 'source' is specified 22 | # deleted = if only 'target' is specified 23 | # 24 | # If 'mandatory' is true, this package must be added or deleted (probably 25 | # because the default package list has changed). 26 | 27 | package_changes_list = [ 28 | {"source": "firewall", "target": "firewall4", "revision": 18611}, 29 | {"source": "kmod-nft-nat6", "revision": 20282, "mandatory": True}, 30 | {"source": "libustream-wolfssl", "target": "libustream-mbedtls", "revision": 21994}, 31 | {"source": "px5g-wolfssl", "target": "px5g-mbedtls", "revision": 21994}, 32 | {"source": "wpad-basic-wolfssl", "target": "wpad-basic-mbedtls", "revision": 21994}, 33 | {"source": "luci-app-diag-core", "revision": 25984, "mandatory": True}, 34 | {"source": "auc", "target": "owut", "revision": 26792}, 35 | { 36 | "source": "luci-app-opkg", 37 | "target": "luci-app-package-manager", 38 | "revision": 27897, 39 | }, 40 | {"source": "opkg", "target": "apk-mbedtls", "revision": 28056}, 41 | ] 42 | 43 | 44 | def package_changes(before=None): 45 | changes = [] 46 | for change in package_changes_list: 47 | if before is None or change["revision"] <= before: 48 | changes.append(change) 49 | return changes 50 | 51 | 52 | def release(branch_off_rev, enabled=True): 53 | return { 54 | "path": "releases/{version}", 55 | "enabled": enabled, 56 | "snapshot": False, 57 | "path_packages": "DEPRECATED", 58 | "branch_off_rev": branch_off_rev, 59 | "package_changes": package_changes(branch_off_rev), 60 | } 61 | 62 | 63 | class Settings(BaseSettings): 64 | model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8") 65 | 66 | public_path: Path = Path.cwd() / "public" 67 | redis_url: str = "redis://localhost:6379" 68 | upstream_url: str = "https://downloads.openwrt.org" 69 | 
allow_defaults: bool = False 70 | async_queue: bool = True 71 | branches_file: Union[str, Path, None] = None 72 | max_custom_rootfs_size_mb: int = 1024 73 | max_defaults_length: int = 20480 74 | repository_allow_list: list = [] 75 | base_container: str = "ghcr.io/openwrt/imagebuilder" 76 | container_socket_path: str = "" 77 | container_identity: str = "" 78 | branches: dict = { 79 | "SNAPSHOT": { 80 | "path": "snapshots", 81 | "enabled": True, 82 | "snapshot": True, 83 | "path_packages": "DEPRECATED", 84 | "package_changes": package_changes(), 85 | }, 86 | "25.12": release(32295), 87 | "24.10": release(27990), 88 | "23.05": release(23069), 89 | "22.03": release(19160), 90 | "21.02": release(15812, enabled=True), # Enabled for now... 91 | } 92 | server_stats: str = "" 93 | log_level: str = "INFO" 94 | squid_cache: bool = False 95 | build_ttl: str = "3h" 96 | build_defaults_ttl: str = "30m" 97 | build_failure_ttl: str = "10m" 98 | max_pending_jobs: int = 200 99 | job_timeout: str = "10m" 100 | 101 | 102 | settings = Settings() 103 | -------------------------------------------------------------------------------- /tests/test_package_changes.py: -------------------------------------------------------------------------------- 1 | from asu.build_request import BuildRequest 2 | from asu.package_changes import apply_package_changes 3 | 4 | 5 | def test_apply_package_changes_adds_kmod_switch_rtl8366s(): 6 | build_request = BuildRequest( 7 | **{ 8 | "version": "23.05", 9 | "target": "ath79/generic", 10 | "profile": "buffalo_wzr-hp-g300nh-s", 11 | "packages": ["kmod-ath9k-htc"], 12 | } 13 | ) 14 | apply_package_changes(build_request) 15 | 16 | assert "kmod-switch-rtl8366s" in build_request.packages 17 | 18 | 19 | def test_apply_package_changes_does_not_add_duplicate_packages(): 20 | build_request = BuildRequest( 21 | **{ 22 | "version": "23.05", 23 | "target": "ath79/generic", 24 | "profile": "buffalo_wzr-hp-g300nh-s", 25 | "packages": ["kmod-ath9k-htc", 
"kmod-switch-rtl8366s"], 26 | } 27 | ) 28 | apply_package_changes(build_request) 29 | 30 | assert build_request.packages == ["kmod-ath9k-htc", "kmod-switch-rtl8366s"] 31 | 32 | 33 | def test_apply_package_changes_does_not_modify_input_dict(): 34 | build_request = BuildRequest( 35 | **{ 36 | "version": "23.05", 37 | "target": "ath79/generic", 38 | "profile": "buffalo_wzr-hp-g300nh-s", 39 | "packages": ["kmod-ath9k-htc"], 40 | } 41 | ) 42 | original_req = build_request.model_copy() 43 | apply_package_changes(build_request) 44 | 45 | assert build_request == original_req 46 | 47 | 48 | def test_apply_package_changes_release(): 49 | build_request = BuildRequest( 50 | **{ 51 | "version": "21.02.0-rc1", 52 | "target": "ath79/generic", 53 | "profile": "buffalo_wzr-hp-g300nh-s", 54 | "packages": ["kmod-ath9k-htc"], 55 | } 56 | ) 57 | apply_package_changes(build_request) 58 | 59 | original_build_request = build_request.model_copy() 60 | apply_package_changes(build_request) 61 | 62 | assert build_request == original_build_request 63 | 64 | 65 | def test_apply_package_changes_mediatek(): 66 | build_request = BuildRequest( 67 | **{ 68 | "version": "23.05", 69 | "target": "mediatek/mt7622", 70 | "profile": "foobar", 71 | "packages": ["ubus"], 72 | } 73 | ) 74 | apply_package_changes(build_request) 75 | 76 | assert "kmod-mt7622-firmware" in build_request.packages 77 | 78 | 79 | def test_apply_package_changes_lang_packs(): 80 | build_request = BuildRequest( 81 | **{ 82 | "version": "23.05.5", 83 | "target": "mediatek/mt7622", 84 | "profile": "foobar", 85 | "packages": [ 86 | "luci-i18n-opkg-ko", # Should be replaced 87 | "luci-i18n-xinetd-lt", # Should be untouched 88 | "luci-i18n-opkg-zh-cn", # Should be replaced 89 | ], 90 | } 91 | ) 92 | 93 | assert len(build_request.packages) == 3 94 | assert build_request.packages[0] == "luci-i18n-opkg-ko" 95 | assert build_request.packages[1] == "luci-i18n-xinetd-lt" 96 | assert build_request.packages[2] == "luci-i18n-opkg-zh-cn" 97 | 98 | 
apply_package_changes(build_request) 99 | 100 | assert len(build_request.packages) == 4 101 | assert build_request.packages[0] == "luci-i18n-opkg-ko" 102 | assert build_request.packages[1] == "luci-i18n-xinetd-lt" 103 | assert build_request.packages[2] == "luci-i18n-opkg-zh-cn" 104 | assert build_request.packages[3] == "kmod-mt7622-firmware" 105 | 106 | build_request.version = "24.10.0-rc5" 107 | apply_package_changes(build_request) 108 | 109 | assert len(build_request.packages) == 4 110 | assert build_request.packages[0] == "luci-i18n-package-manager-ko" 111 | assert build_request.packages[1] == "luci-i18n-xinetd-lt" 112 | assert build_request.packages[2] == "luci-i18n-package-manager-zh-cn" 113 | assert build_request.packages[3] == "kmod-mt7622-firmware" 114 | -------------------------------------------------------------------------------- /asu/routers/stats.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime as dt, timedelta, UTC 2 | 3 | from fastapi import APIRouter 4 | 5 | from asu.util import get_redis_ts 6 | 7 | router = APIRouter() 8 | 9 | 10 | DAY_MS = 24 * 60 * 60 * 1000 11 | N_DAYS = 30 12 | 13 | 14 | def start_stop(duration, interval): 15 | """Calculate the time series boundaries and bucket values.""" 16 | 17 | # "stop" is next midnight to define buckets on exact day boundaries. 
18 | stop = dt.now(UTC).replace(hour=0, minute=0, second=0, microsecond=0) 19 | stop += timedelta(days=1) 20 | stop = int(stop.timestamp() * 1000) 21 | start = stop - duration * interval 22 | 23 | stamps = list(range(start, stop, interval)) 24 | labels = [str(dt.fromtimestamp(stamp // 1000, UTC))[:10] + "Z" for stamp in stamps] 25 | 26 | return start, stop, stamps, labels 27 | 28 | 29 | @router.get("/builds-per-day") 30 | def get_builds_per_day() -> dict: 31 | """ 32 | References: 33 | https://redis.readthedocs.io/en/latest/redismodules.html#redis.commands.timeseries.commands.TimeSeriesCommands.range 34 | https://www.chartjs.org/docs/latest/charts/line.html 35 | """ 36 | 37 | start, stop, stamps, labels = start_stop(N_DAYS, DAY_MS) 38 | 39 | ts = get_redis_ts() 40 | rc = ts.client 41 | range_options = dict( 42 | from_time=start, 43 | to_time=stop, 44 | align=start, # Ensures alignment of X values with "stamps". 45 | aggregation_type="sum", 46 | bucket_size_msec=DAY_MS, 47 | ) 48 | 49 | def get_dataset(event: str, color: str) -> dict: 50 | """Fills "data" array completely, supplying 0 for missing values.""" 51 | key = f"stats:build:{event}" 52 | result = ts.range(key, **range_options) if rc.exists(key) else [] 53 | data_map = dict(result) 54 | return { 55 | "label": event.title(), 56 | "data": [data_map.get(stamp, 0) for stamp in stamps], 57 | "color": color, 58 | } 59 | 60 | return { 61 | "labels": labels, 62 | "datasets": [ 63 | # See add_build_event for valid "event" values. 64 | get_dataset("requests", "green"), 65 | get_dataset("cache-hits", "orange"), 66 | get_dataset("failures", "red"), 67 | ], 68 | } 69 | 70 | 71 | @router.get("/builds-by-version") 72 | def get_builds_by_version(branch: str = None) -> dict(): 73 | """If 'branch' is None, then data will be returned "by branch", 74 | so you get one curve for each of 23.05, 24.10, 25.12 etc. 
75 | 76 | If you specify a branch, say "24.10", then the results are for 77 | all versions on that branch, 24.10.0, 24.1.1 and so on.""" 78 | 79 | interval = 7 * DAY_MS # Each bucket is a week. 80 | duration = 26 # Number of weeks of data, about 6 months. 81 | 82 | start, stop, stamps, labels = start_stop(duration, interval) 83 | 84 | bucket = {} 85 | 86 | def sum_data(version, data): 87 | data_map = dict(data) 88 | if version not in bucket: 89 | bucket[version] = [0.0] * len(stamps) 90 | for i, stamp in enumerate(stamps): 91 | bucket[version][i] += data_map.get(stamp, 0) 92 | 93 | range_options = dict( 94 | filters=["stats=builds"], 95 | with_labels=True, 96 | from_time=start, 97 | to_time=stop, 98 | align=start, # Ensures alignment of X values with "stamps". 99 | aggregation_type="sum", 100 | bucket_size_msec=interval, 101 | ) 102 | 103 | result = get_redis_ts().mrange(**range_options) 104 | for row in result: 105 | for data in row.values(): 106 | version = data[0]["version"] 107 | if branch and not version.startswith(branch): 108 | continue 109 | elif branch is None and "." 
in version: 110 | version = version[:5] 111 | sum_data(version, data[1]) 112 | 113 | return { 114 | "labels": labels, 115 | "datasets": [ 116 | { 117 | "label": version, 118 | "data": bucket[version], 119 | } 120 | for version in sorted(bucket) 121 | ], 122 | } 123 | -------------------------------------------------------------------------------- /tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64/openwrt-testtarget-testsubtarget-testprofile.manifest: -------------------------------------------------------------------------------- 1 | test1 - 1.0 2 | test2 - 2.0 3 | base-files - 213-r12288-1173719817 4 | busybox - 1.31.1-1 5 | cgi-io - 17 6 | dnsmasq - 2.80-18 7 | dropbear - 2019.78-3 8 | firewall - 2019-11-22-8174814a-1 9 | fstools - 2020-01-21-deb745f8-1 10 | fwtool - 2019-11-12-8f7fe925-1 11 | getrandom - 2019-12-31-0e34af14-3 12 | hostapd-common - 2019-08-08-ca8c2bd2-6 13 | ip6tables - 1.8.3-2 14 | iptables - 1.8.3-2 15 | iw-full - 5.3-2 16 | jshn - 2020-01-20-43a103ff-1 17 | jsonfilter - 2018-02-04-c7e938d6-1 18 | kernel - 4.14.169-1-0b48c80107a57fc6efd13f54e5fffa14 19 | kmod-cfg80211 - 4.14.169+5.4-rc8-1-1 20 | kmod-gpio-button-hotplug - 4.14.169-3 21 | kmod-ip6tables - 4.14.169-1 22 | kmod-ipt-conntrack - 4.14.169-1 23 | kmod-ipt-core - 4.14.169-1 24 | kmod-ipt-nat - 4.14.169-1 25 | kmod-ipt-offload - 4.14.169-1 26 | kmod-leds-gpio - 4.14.169-1 27 | kmod-lib-crc-ccitt - 4.14.169-1 28 | kmod-mac80211 - 4.14.169+5.4-rc8-1-1 29 | kmod-mt76-core - 4.14.169+2020-02-20-fd892bc0-1 30 | kmod-mt76x02-common - 4.14.169+2020-02-20-fd892bc0-1 31 | kmod-mt76x2 - 4.14.169+2020-02-20-fd892bc0-1 32 | kmod-mt76x2-common - 4.14.169+2020-02-20-fd892bc0-1 33 | kmod-nf-conntrack - 4.14.169-1 34 | kmod-nf-conntrack6 - 4.14.169-1 35 | kmod-nf-flow - 4.14.169-1 36 | kmod-nf-ipt - 4.14.169-1 37 | kmod-nf-ipt6 - 4.14.169-1 38 | kmod-nf-nat - 4.14.169-1 39 | kmod-nf-reject - 4.14.169-1 40 | kmod-nf-reject6 - 4.14.169-1 
41 | kmod-nls-base - 4.14.169-1 42 | kmod-ppp - 4.14.169-1 43 | kmod-pppoe - 4.14.169-1 44 | kmod-pppox - 4.14.169-1 45 | kmod-rt2800-lib - 4.14.169+5.4-rc8-1-1 46 | kmod-rt2800-mmio - 4.14.169+5.4-rc8-1-1 47 | kmod-rt2800-soc - 4.14.169+5.4-rc8-1-1 48 | kmod-rt2x00-lib - 4.14.169+5.4-rc8-1-1 49 | kmod-rt2x00-mmio - 4.14.169+5.4-rc8-1-1 50 | kmod-slhc - 4.14.169-1 51 | kmod-usb-core - 4.14.169-1 52 | kmod-usb-ehci - 4.14.169-1 53 | kmod-usb-ohci - 4.14.169-1 54 | kmod-usb2 - 4.14.169-1 55 | libblobmsg-json - 2020-01-20-43a103ff-1 56 | libc - 1.1.24-2 57 | libgcc1 - 8.3.0-2 58 | libip4tc2 - 1.8.3-2 59 | libip6tc2 - 1.8.3-2 60 | libiwinfo-lua - 2020-01-05-bf2c1069-1 61 | libiwinfo20200105 - 2020-01-05-bf2c1069-1 62 | libjson-c4 - 0.13.1-1 63 | libjson-script - 2020-01-20-43a103ff-1 64 | liblua5.1.5 - 5.1.5-7 65 | liblucihttp-lua - 2019-07-05-a34a17d5-1 66 | liblucihttp0 - 2019-07-05-a34a17d5-1 67 | libnl-tiny - 2019-10-29-0219008c-1 68 | libpthread - 1.1.24-2 69 | libubox20191228 - 2020-01-20-43a103ff-1 70 | libubus-lua - 2020-01-05-d35df8ad-1 71 | libubus20191227 - 2020-01-05-d35df8ad-1 72 | libuci20130104 - 2020-01-27-e8d83732-3 73 | libuclient20160123 - 2020-01-05-fef6d3d3-1 74 | libxtables12 - 1.8.3-2 75 | logd - 2019-12-31-0e34af14-3 76 | lua - 5.1.5-7 77 | luci - git-20.047.38617-2fa9885-1 78 | luci-app-firewall - git-20.047.38617-2fa9885-1 79 | luci-app-opkg - git-20.047.38617-2fa9885-1 80 | luci-base - git-20.047.38617-2fa9885-1 81 | luci-lib-ip - git-20.047.38617-2fa9885-1 82 | luci-lib-jsonc - git-20.047.38617-2fa9885-1 83 | luci-lib-nixio - git-20.047.38617-2fa9885-1 84 | luci-mod-admin-full - git-20.047.38617-2fa9885-1 85 | luci-mod-network - git-20.047.38617-2fa9885-1 86 | luci-mod-status - git-20.047.38617-2fa9885-1 87 | luci-mod-system - git-20.047.38617-2fa9885-1 88 | luci-proto-ipv6 - git-20.047.38617-2fa9885-1 89 | luci-proto-ppp - git-20.047.38617-2fa9885-1 90 | luci-theme-bootstrap - git-20.047.38617-2fa9885-1 91 | mtd - 25 92 | netifd - 
2020-01-18-1321c1bd-1 93 | odhcp6c - 2019-01-11-e199804b-16 94 | odhcpd-ipv6only - 2020-01-14-6db312a6-3 95 | openwrt-keyring - 2019-07-25-8080ef34-1 96 | opkg - 2020-01-25-c09fe209-1 97 | ppp - 2.4.8-1 98 | ppp-mod-pppoe - 2.4.8-1 99 | procd - 2020-02-11-c30b23e3-1 100 | rpcd - 2020-01-05-efe51f41-2 101 | rpcd-mod-file - 2020-01-05-efe51f41-2 102 | rpcd-mod-iwinfo - 2020-01-05-efe51f41-2 103 | rpcd-mod-luci - 20191114 104 | rpcd-mod-rrdns - 20170710 105 | swconfig - 12 106 | uboot-envtools - 2019.07-2 107 | ubox - 2019-12-31-0e34af14-3 108 | ubus - 2020-01-05-d35df8ad-1 109 | ubusd - 2020-01-05-d35df8ad-1 110 | uci - 2020-01-27-e8d83732-3 111 | uclient-fetch - 2020-01-05-fef6d3d3-1 112 | uhttpd - 2020-02-12-2ee323c0-1 113 | urandom-seed - 1.0-1 114 | urngd - 2020-01-21-c7f7b6b6-1 115 | usign - 2019-09-21-f34a383e-1 116 | wireless-regdb - 2019.06.03 117 | wpad-basic - 2019-08-08-ca8c2bd2-6 118 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | import tempfile 3 | from pathlib import Path 4 | 5 | import pytest 6 | from fakeredis import FakeStrictRedis 7 | from rq import Queue 8 | from fastapi.testclient import TestClient 9 | 10 | from asu.config import settings 11 | 12 | 13 | def redis_load_mock_data(redis): 14 | return 15 | redis.sadd( 16 | "packages:1.2:1.2.3:testtarget/testsubtarget", 17 | "test1", 18 | "test2", 19 | "test3", 20 | "valid_new_package", 21 | ) 22 | redis.sadd("profiles:1.2:1.2.3:testtarget/testsubtarget", "testprofile") 23 | redis.sadd("profiles:SNAPSHOT:SNAPSHOT:ath79/generic", "tplink_tl-wdr4300-v1") 24 | redis.sadd("packages:SNAPSHOT:SNAPSHOT:ath79/generic", "vim", "tmux") 25 | redis.sadd("packages:SNAPSHOT:SNAPSHOT:x86/64", "vim", "tmux") 26 | 27 | redis.sadd("branches", "SNAPSHOT", "1.2", "21.02", "19.07") 28 | redis.sadd("versions:SNAPSHOT", "SNAPSHOT") 29 | redis.sadd("versions:1.2", 
"1.2.3") 30 | redis.sadd("versions:21.02", "21.02.7", "21.02.0", "21.02.0-rc4", "21.02-SNAPSHOT") 31 | redis.sadd("versions:19.07", "19.07.7", "19.07.6") 32 | 33 | redis.sadd("profiles:21.02:21.02.7:ath79/generic", "tplink_tl-wdr4300-v1") 34 | redis.sadd("packages:21.02:21.02.7:ath79/generic", "vim", "tmux") 35 | redis.sadd("packages:21.02:21.02.7:x86/64", "vim", "tmux") 36 | 37 | redis.sadd("profiles:21.02:21.02.7:x86/64", "generic") 38 | redis.set("revision:21.02.7:x86/64", "r16847-f8282da11e") 39 | 40 | redis.hset( 41 | "mapping:1.2:1.2.3:testtarget/testsubtarget", 42 | mapping={"testvendor,testprofile": "testprofile"}, 43 | ) 44 | redis.hset("targets:1.2", mapping={"testtarget/testsubtarget": "testarch"}) 45 | redis.hset("targets:SNAPSHOT", mapping={"ath79/generic": "", "x86/64": ""}) 46 | redis.hset( 47 | "targets:21.02", 48 | mapping={ 49 | "testtarget/testsubtarget": "testarch", 50 | "ath79/generic": "", 51 | "x86/64": "", 52 | }, 53 | ) 54 | redis.hset("mapping-abi", mapping={"test1-1": "test1"}) 55 | 56 | 57 | @pytest.fixture 58 | def redis_server(): 59 | r = FakeStrictRedis() 60 | redis_load_mock_data(r) 61 | yield r 62 | r.flushall() 63 | 64 | 65 | def pytest_addoption(parser): 66 | parser.addoption( 67 | "--runslow", action="store_true", default=False, help="run slow tests" 68 | ) 69 | 70 | 71 | def pytest_configure(config): 72 | config.addinivalue_line("markers", "slow: mark test as slow to run") 73 | 74 | 75 | def pytest_collection_modifyitems(config, items): 76 | if config.getoption("--runslow"): 77 | # --runslow given in cli: do not skip slow tests 78 | return 79 | skip_slow = pytest.mark.skip(reason="need --runslow option to run") 80 | for item in items: 81 | if "slow" in item.keywords: 82 | item.add_marker(skip_slow) 83 | 84 | 85 | @pytest.fixture 86 | def test_path(): 87 | test_path = tempfile.mkdtemp(dir=Path.cwd() / "tests") 88 | yield test_path 89 | shutil.rmtree(test_path) 90 | 91 | 92 | @pytest.fixture 93 | def app(redis_server, test_path, 
monkeypatch, upstream): 94 | def mocked_redis_client(*args, **kwargs): 95 | return redis_server 96 | 97 | def mocked_redis_queue(): 98 | return Queue(connection=redis_server, is_async=settings.async_queue) 99 | 100 | settings.public_path = Path(test_path) / "public" 101 | settings.async_queue = False 102 | settings.upstream_url = "http://localhost:8123" 103 | settings.server_stats = "stats" 104 | for branch in "1.2", "19.07", "21.02": 105 | if branch not in settings.branches: 106 | settings.branches[branch] = { 107 | "path": "releases/{version}", 108 | "enabled": True, 109 | } 110 | 111 | monkeypatch.setattr("asu.util.get_queue", mocked_redis_queue) 112 | monkeypatch.setattr("asu.routers.api.get_queue", mocked_redis_queue) 113 | monkeypatch.setattr("asu.util.get_redis_client", mocked_redis_client) 114 | 115 | from asu.main import app as real_app 116 | 117 | yield real_app 118 | 119 | 120 | @pytest.fixture 121 | def client(app, upstream): 122 | yield TestClient(app) 123 | 124 | 125 | @pytest.fixture(scope="session") 126 | def httpserver_listen_address(): 127 | return ("127.0.0.1", 8123) 128 | 129 | 130 | @pytest.fixture 131 | def upstream(httpserver): 132 | base_url = "" 133 | upstream_path = Path("./tests/upstream/") 134 | expected_file_requests = [ 135 | ".versions.json", 136 | "releases/1.2.3/.targets.json", 137 | "releases/1.2.3/targets/testtarget/testsubtarget/profiles.json", 138 | "releases/23.05.5/.targets.json", 139 | "releases/23.05.5/targets/ath79/generic/profiles.json", 140 | "releases/23.05.5/targets/x86/64/profiles.json", 141 | "snapshots/.targets.json", 142 | "snapshots/packages/testarch/base/Packages.manifest", 143 | "snapshots/targets/ath79/generic/profiles.json", 144 | "snapshots/targets/testtarget/testsubtarget/packages/Packages.manifest", 145 | "snapshots/targets/testtarget/testsubtarget/profiles.json", 146 | ] 147 | 148 | for f in expected_file_requests: 149 | httpserver.expect_request(f"{base_url}/{f}").respond_with_data( 150 | (upstream_path / 
f).read_bytes() 151 | ) 152 | -------------------------------------------------------------------------------- /asu/build_request.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated 2 | 3 | from pydantic import BaseModel, Field 4 | 5 | from asu.config import settings 6 | 7 | STRING_PATTERN = r"^[\w.,-]*$" 8 | TARGET_PATTERN = r"^[\w]*/[\w]*$" 9 | PKG_VERSION_PATTERN = r"^[\w+.,~-]*$" 10 | 11 | 12 | class BuildRequest(BaseModel): 13 | distro: Annotated[ 14 | str, 15 | Field( 16 | description=""" 17 | This parameter is currently optional since no other 18 | distributions are supported. 19 | """.strip(), 20 | pattern=STRING_PATTERN, 21 | ), 22 | ] = "openwrt" 23 | version: Annotated[ 24 | str, 25 | Field( 26 | examples=["23.05.2"], 27 | description=""" 28 | It is recommended to always upgrade to the latest version, 29 | however it is possible to request older images for testing. 30 | """.strip(), 31 | pattern=STRING_PATTERN, 32 | ), 33 | ] 34 | version_code: Annotated[ 35 | str, 36 | Field( 37 | examples=["r26741-dcc4307205"], 38 | description=""" 39 | It is possible to send the expected revision. This allows to 40 | show the revision within clients before the request. If the 41 | resulting firmware is a different revision, the build results 42 | in an error. 43 | """.strip(), 44 | pattern=STRING_PATTERN, 45 | ), 46 | ] = "" 47 | target: Annotated[ 48 | str, 49 | Field( 50 | examples=["ath79/generic"], 51 | description=""" 52 | It is recommended to always upgrade to the latest version, however 53 | it is possible to request older images for testing. 54 | """.strip(), 55 | pattern=TARGET_PATTERN, 56 | ), 57 | ] 58 | profile: Annotated[ 59 | str, 60 | Field( 61 | examples=["8dev_carambola2"], 62 | description=""" 63 | The ImageBuilder `PROFILE`. Can be found with `ubus call 64 | system board` as the `board_name` value. 
65 | """.strip(), 66 | pattern=STRING_PATTERN, 67 | ), 68 | ] 69 | packages: Annotated[ 70 | list[Annotated[str, Field(pattern=STRING_PATTERN)]], 71 | Field( 72 | examples=[["vim", "tmux"]], 73 | description=""" 74 | List of packages, either *additional* or *absolute* depending 75 | of the `diff_packages` parameter. This is augmented by the 76 | `packages_versions` field, which allow you to additionally 77 | specify the versions of the packages to be installed. 78 | """.strip(), 79 | ), 80 | ] = [] 81 | packages_versions: Annotated[ 82 | dict[ 83 | Annotated[str, Field(pattern=STRING_PATTERN)], 84 | Annotated[str, Field(pattern=PKG_VERSION_PATTERN)], 85 | ], 86 | Field( 87 | examples=[{"vim": "1.2.3", "tmux": "2.3.4"}], 88 | description=""" 89 | A dictionary of package names and versions. This is an 90 | alternate form of `packages`, in which the expected package 91 | versions are specified for verification after the build has 92 | completed. 93 | """.strip(), 94 | ), 95 | ] = {} 96 | diff_packages: Annotated[ 97 | bool, 98 | Field( 99 | description=""" 100 | This parameter determines if requested packages are seen as 101 | *additional* or *absolute*. If set to `true` the packages are 102 | seen as *absolute* and all default packages outside the 103 | requested packages are removed. \n\n It is possible to brick 104 | devices when requesting an incomplete list with this parameter 105 | enabled since it may remove WiFi drivers or other essential 106 | packages. 107 | """.strip(), 108 | ), 109 | ] = False 110 | defaults: Annotated[ 111 | str | None, 112 | Field( 113 | max_length=settings.max_defaults_length, 114 | examples=['echo "Hello world"\nwifi restart\n'], 115 | description=f""" 116 | Custom shell script embedded in firmware image to be run 117 | on first boot. This feature might be dropped in the future. 118 | Input file size is limited to {settings.max_defaults_length} 119 | bytes and cannot be exceeded. 
120 | """.strip(), 121 | ), 122 | ] = None 123 | rootfs_size_mb: Annotated[ 124 | int | None, 125 | Field( 126 | ge=1, 127 | le=settings.max_custom_rootfs_size_mb, 128 | examples=[256], 129 | description=""" 130 | Ability to specify a custom `CONFIG_TARGET_ROOTFS_PARTSIZE` 131 | for the resulting image. Attaching this optional parameter 132 | will cause ImageBuilder to build a rootfs with that size 133 | in MB. 134 | """.strip(), 135 | ), 136 | ] = None 137 | repositories: Annotated[ 138 | dict[str, str], 139 | Field( 140 | description=""" 141 | Additional repositories for user packages. 142 | """.strip() 143 | ), 144 | ] = {} 145 | repository_keys: Annotated[ 146 | list[str], 147 | Field( 148 | description=""" 149 | Verfication keys for the additional repositories. 150 | """.strip(), 151 | ), 152 | ] = [] 153 | client: Annotated[ 154 | str | None, 155 | Field( 156 | examples=["luci/git-22.073.39928-701ea94"], 157 | description=""" 158 | Client name and version that requests the image, 159 | """.strip(), 160 | ), 161 | ] = None 162 | -------------------------------------------------------------------------------- /tests/test_stats.py: -------------------------------------------------------------------------------- 1 | import time 2 | from fakeredis import FakeStrictRedis 3 | 4 | build_config_1 = dict( 5 | version="1.2.3", 6 | target="testtarget/testsubtarget", 7 | profile="testprofile", 8 | packages=["test1"], 9 | ) 10 | 11 | build_config_2 = dict( 12 | version="1.2.3", 13 | target="testtarget/testsubtarget", 14 | profile="testprofile", 15 | packages=["test1", "test2"], 16 | ) 17 | 18 | 19 | class Stats: 20 | def __init__(self, redis_server: FakeStrictRedis): 21 | self.ts = redis_server.ts() 22 | 23 | def summary(self, type): 24 | key = f"stats:build:{type}" 25 | if self.ts.client.exists(key): 26 | return self.ts.range(key, "-", "+") 27 | return [] 28 | 29 | def client(self, tag): 30 | clients = self.ts.mrange("-", "+", filters=["stats=clients"]) 31 | if not 
clients: 32 | return [] 33 | return clients[0][f"stats:clients:{tag}"] 34 | 35 | def builds(self, tag): 36 | builds = self.ts.mrange("-", "+", filters=["stats=builds"]) 37 | if not builds: 38 | return [] 39 | return builds[0][f"stats:builds:{tag}"] 40 | 41 | 42 | def test_stats_image_builds(client, redis_server: FakeStrictRedis): 43 | stats = Stats(redis_server) 44 | assert len(stats.builds("1.2.3:testtarget/testsubtarget:testprofile")) == 0 45 | 46 | response = client.post("/api/v1/build", json=build_config_1) 47 | assert response.status_code == 200 48 | assert len(stats.builds("1.2.3:testtarget/testsubtarget:testprofile")[1]) == 1 49 | 50 | 51 | def test_stats_summary(client, redis_server: FakeStrictRedis): 52 | stats = Stats(redis_server) 53 | 54 | assert len(stats.summary("hits")) == 0 55 | assert len(stats.summary("misses")) == 0 56 | 57 | response = client.post("/api/v1/build", json=build_config_2) 58 | assert response.status_code == 200 59 | assert len(stats.summary("requests")) == 1 60 | assert len(stats.summary("cache-hits")) == 0 61 | assert len(stats.summary("cache-misses")) == 1 62 | assert len(stats.summary("successes")) == 1 63 | assert len(stats.summary("failures")) == 0 64 | 65 | response = client.post("/api/v1/build", json=build_config_2) 66 | assert response.status_code == 200 67 | assert len(stats.summary("requests")) == 2 68 | assert len(stats.summary("cache-hits")) == 1 69 | assert len(stats.summary("cache-misses")) == 1 70 | assert len(stats.summary("successes")) == 1 71 | assert len(stats.summary("failures")) == 0 72 | 73 | time.sleep(1) # Ensure timestamp is on next second. 
74 | response = client.post("/api/v1/build", json=build_config_2) 75 | assert response.status_code == 200 76 | assert len(stats.summary("requests")) == 3 77 | assert len(stats.summary("cache-hits")) == 2 78 | assert len(stats.summary("cache-misses")) == 1 79 | assert len(stats.summary("successes")) == 1 80 | assert len(stats.summary("failures")) == 0 81 | 82 | response = client.post("/api/v1/build", json=build_config_1) 83 | assert response.status_code == 200 84 | assert len(stats.summary("requests")) == 4 85 | assert len(stats.summary("cache-hits")) == 2 86 | assert len(stats.summary("cache-misses")) == 2 87 | assert len(stats.summary("successes")) == 2 88 | assert len(stats.summary("failures")) == 0 89 | 90 | 91 | def test_stats_clients_luci(client, redis_server: FakeStrictRedis): 92 | asu_client = "luci/git-22.073.39928-701ea94" 93 | 94 | stats = Stats(redis_server) 95 | assert len(stats.client(asu_client)) == 0 96 | 97 | response = client.post( 98 | "/api/v1/build", json=dict(client=asu_client, **build_config_1) 99 | ) 100 | assert response.status_code == 200 101 | assert len(stats.client(asu_client)[1]) == 1 102 | 103 | 104 | def test_stats_clients_unknown(client, redis_server: FakeStrictRedis): 105 | asu_client = "unknown/0" 106 | 107 | stats = Stats(redis_server) 108 | assert len(stats.client(asu_client)) == 0 109 | 110 | response = client.post("/api/v1/build", json=build_config_2) 111 | assert response.status_code == 200 112 | assert len(stats.client(asu_client)[1]) == 1 113 | 114 | 115 | def test_stats_clients_auc(client, redis_server: FakeStrictRedis): 116 | asu_client = "auc/0.3.2" 117 | 118 | stats = Stats(redis_server) 119 | assert len(stats.client(asu_client)) == 0 120 | 121 | response = client.post( 122 | "/api/v1/build", json=build_config_2, headers={"User-Agent": "auc (0.3.2)"} 123 | ) 124 | assert response.status_code == 200 125 | assert len(stats.client(asu_client)[1]) == 1 126 | 127 | 128 | def test_stats_clients_auc_possible_new_format(client, 
redis_server: FakeStrictRedis): 129 | asu_client = "auc/0.3.2" 130 | 131 | stats = Stats(redis_server) 132 | assert len(stats.client(asu_client)) == 0 133 | 134 | response = client.post( 135 | "/api/v1/build", json=build_config_2, headers={"User-Agent": asu_client} 136 | ) 137 | assert response.status_code == 200 138 | assert len(stats.client(asu_client)[1]) == 1 139 | 140 | 141 | def test_stats_builds_per_day(client, redis_server: FakeStrictRedis): 142 | from asu.routers.stats import N_DAYS 143 | 144 | response = client.get("/api/v1/builds-per-day") 145 | assert response.status_code == 200 146 | 147 | data = response.json() 148 | assert "labels" in data 149 | assert len(data["labels"]) == N_DAYS 150 | assert "datasets" in data 151 | assert "data" in data["datasets"][0] 152 | assert len(data["datasets"][0]["data"]) == N_DAYS 153 | 154 | 155 | def test_stats_builds_by_version(client, redis_server: FakeStrictRedis): 156 | response = client.post("/api/v1/build", json=build_config_1) 157 | response = client.post("/api/v1/build", json=build_config_2) 158 | 159 | response = client.get("/api/v1/builds-by-version") 160 | assert response.status_code == 200 161 | 162 | data = response.json() 163 | assert "labels" in data 164 | assert len(data["labels"]) == 26 165 | assert "datasets" in data 166 | assert len(data["datasets"]) == 1 167 | assert len(data["datasets"][0]["data"]) == 26 168 | -------------------------------------------------------------------------------- /asu/main.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from collections import defaultdict 3 | from datetime import datetime 4 | from pathlib import Path 5 | from typing import Union 6 | 7 | from fastapi import FastAPI, HTTPException, Request 8 | from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse 9 | from fastapi.staticfiles import StaticFiles 10 | from fastapi.templating import Jinja2Templates 11 | 12 | from asu import 
__version__ 13 | from asu.config import settings 14 | from asu.routers import api, stats 15 | from asu.util import ( 16 | client_get, 17 | get_branch, 18 | is_post_kmod_split_build, 19 | parse_feeds_conf, 20 | parse_kernel_version, 21 | parse_packages_file, 22 | reload_targets, 23 | reload_versions, 24 | ) 25 | 26 | logging.basicConfig(encoding="utf-8", level=settings.log_level) 27 | 28 | base_path = Path(__file__).resolve().parent 29 | 30 | app = FastAPI() 31 | app.include_router(api.router, prefix="/api/v1") 32 | app.include_router(stats.router, prefix="/api/v1") 33 | 34 | (settings.public_path / "store").mkdir(parents=True, exist_ok=True) 35 | 36 | app.mount("/static", StaticFiles(directory=base_path / "static"), name="static") 37 | 38 | templates = Jinja2Templates(directory=base_path / "templates") 39 | 40 | app.latest = [] 41 | app.versions = [] 42 | reload_versions(app) 43 | logging.info(f"Found {len(app.versions)} versions") 44 | 45 | app.targets = defaultdict(list) 46 | app.profiles = defaultdict(lambda: defaultdict(dict)) 47 | 48 | 49 | @app.api_route("/store/{path:path}", methods=["GET", "HEAD"]) 50 | def store(path: str): 51 | path = (settings.public_path / "store" / path).resolve() 52 | if not path.is_file() or settings.public_path / "store" not in path.parents: 53 | raise HTTPException(status_code=404, detail="Not found") 54 | 55 | return FileResponse( 56 | path, 57 | media_type="application/octet-stream", 58 | filename=path.name, # adds Content-Disposition: attachment; filename="..." 
@app.get("/", response_class=HTMLResponse)
def index(request: Request):
    """Render the HTML overview page from asu/templates/overview.html."""
    return templates.TemplateResponse(
        request=request,
        name="overview.html",
        context=dict(
            versions=app.versions,
            defaults=settings.allow_defaults,
            version=__version__,
            server_stats=settings.server_stats,
            max_custom_rootfs_size_mb=settings.max_custom_rootfs_size_mb,
            max_defaults_length=settings.max_defaults_length,
        ),
    )


@app.get("/json/v1/{path:path}/index.json")
def json_v1_target_index(path: str) -> dict[str, Union[str, dict[str, str]]]:
    """Return the package index of a target, merging in kmods when split out.

    Args:
        path: Upstream path fragment identifying the version/target.

    Returns:
        dict: Parsed package index; its "packages" entry maps name -> version.
    """
    base_path: str = f"{settings.upstream_url}/{path}"
    base_packages: dict[str, str] = parse_packages_file(f"{base_path}/packages")
    if is_post_kmod_split_build(path):
        # After the kmod split, kernel modules live in a kmods/<kernel> dir.
        kmods_directory: str = parse_kernel_version(f"{base_path}/profiles.json")
        if kmods_directory:
            kmod_packages: dict[str, str] = parse_packages_file(
                f"{base_path}/kmods/{kmods_directory}"
            )
            base_packages["packages"].update(kmod_packages.get("packages", {}))
    return base_packages


@app.get("/json/v1/{path:path}/{arch:path}-index.json")
def json_v1_arch_index(path: str, arch: str):
    """Merge the package lists of all feeds configured for one architecture."""
    feed_url: str = f"{settings.upstream_url}/{path}/{arch}"
    feeds: list[str] = parse_feeds_conf(feed_url)
    packages: dict[str, str] = {}
    for feed in feeds:
        packages.update(parse_packages_file(f"{feed_url}/{feed}").get("packages", {}))
    return packages


@app.get("/json/v1/{path:path}/targets/{target:path}/{profile:path}.json")
def json_v1_profile(path: str, target: str, profile: str):
    """Return merged target+profile metadata, or {} for an unknown profile."""
    from datetime import timezone  # local import keeps this fix self-contained

    metadata: dict = client_get(
        f"{settings.upstream_url}/{path}/targets/{target}/profiles.json"
    ).json()
    profiles: dict = metadata.pop("profiles", {})
    if profile not in profiles:
        return {}

    # datetime.utcfromtimestamp() is deprecated since Python 3.12; an aware
    # UTC timestamp with the same explicit strftime format renders the exact
    # same string as before.
    build_at = datetime.fromtimestamp(
        int(metadata.get("source_date_epoch", 0)), tz=timezone.utc
    ).strftime("%Y-%m-%dT%H:%M:%S.%fZ")

    return {
        **metadata,
        **profiles[profile],
        "id": profile,
        "build_at": build_at,
    }


def generate_latest():
    """Return the latest release versions, refreshing from .versions.json."""
    reload_versions(app)  # Do a reload in case .versions.json has updated.
    return app.latest


@app.get("/json/v1/latest.json")
def json_v1_latest():
    return {"latest": generate_latest()}


def generate_branches():
    """Build a branch-name -> metadata mapping with versions and targets."""
    reload_versions(app)  # Do a reload in case .versions.json has updated.
    # NOTE(review): dict(**...) is a shallow copy, so the nested per-branch
    # dicts of settings.branches are mutated in place below — the "versions"
    # list is rebuilt on every call, which keeps this idempotent; confirm
    # that sharing is intended.
    branches = dict(**settings.branches)

    for branch in branches:
        branches[branch]["versions"] = []
        branches[branch]["name"] = branch

    for version in app.versions:
        branch_name = get_branch(version)["name"]
        branches[branch_name]["versions"].append(version)

    for branch in branches:
        # Assumes every configured branch has at least one version in
        # app.versions — TODO confirm, otherwise this raises IndexError.
        version = branches[branch]["versions"][0]
        if not app.targets[version]:
            reload_targets(app, version)

        branches[branch]["targets"] = app.targets[version]

    return branches


@app.get("/json/v1/branches.json")
def json_v1_branches():
    return list(generate_branches().values())


@app.get("/json/v1/overview.json")
def json_v1_overview():
    """Aggregate latest versions, branch data and server configuration."""
    return {
        "latest": generate_latest(),
        "branches": generate_branches(),
        "upstream_url": settings.upstream_url,
        "server": {
            "version": __version__,
            "contact": "mail@aparcar.org",
            "allow_defaults": settings.allow_defaults,
            "repository_allow_list": settings.repository_allow_list,
            "max_custom_rootfs_size_mb": settings.max_custom_rootfs_size_mb,
            "max_defaults_length": settings.max_defaults_length,
        },
    }


@app.get("//{path:path}")
def api_double_slash(path: str):
    """Redirect accidental double-slash URLs to their single-slash form."""
    print(f"Redirecting double slash to single slash: {path}")
    return RedirectResponse(f"/{path}", status_code=301)


# very legacy
@app.get("/overview")
def api_overview():
    return RedirectResponse("/json/v1/overview.json", status_code=301)
11 |
12 | OpenWrt Firmware logo 13 |
14 |
15 |
16 |

OpenWrt Sysupgrade Server ({{ version }})

17 | 18 |
19 |
20 |

About the Sysupgrade Server

21 |

22 | This server listens for requests to build custom OpenWrt 23 | sysupgrade images. It starts an ImageBuilder process for 24 | the specified target, incorporates any requested 25 | packages, and returns that image to the requestor. The 26 | server also provides an 27 | API (or 28 | Redoc) for common 29 | operations. Learn more on 30 | Github. 33 | Please 34 | report issues! 39 |

40 |

41 | How to use this server: There are at least three 42 | clients that can request a new image from the Sysupgrade 43 | Server: 44 |

45 | 73 | 74 | {% if server_stats %} 75 |
76 |

Builds per Day (last 30 days)

77 | 78 |
79 |
80 |

Weekly build counts by branch (last 6 months)

81 | 82 |
83 | {% endif %} 84 | 85 |
86 |

Server Configuration

87 | 88 |

89 | Allow custom UCI defaults: {{ defaults }}
90 | {% if defaults %} 91 | Maximum UCI defaults script size: {{ 92 | max_defaults_length }} bytes
93 | {% endif %} 94 | Maximum requested root filesystem size: {{ 95 | max_custom_rootfs_size_mb }} MB
96 | Available versions on this server: 97 | 98 |

    99 | {% for version in versions %} 100 |
  • {{ version }}
  • 101 | {%- endfor -%} 102 |
103 |

104 | 105 |
106 |
107 |
108 |
109 | 110 | {% if server_stats %} 111 | 112 | 153 | {% endif %} 154 | 155 | 156 | 157 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Attendedsysupgrade Server (GSoC 2017) 2 | 3 | [![codecov](https://codecov.io/gh/aparcar/asu/branch/master/graph/badge.svg)](https://codecov.io/gh/aparcar/asu) 4 | [![PyPi](https://badge.fury.io/py/asu.svg)](https://badge.fury.io/py/asu) 5 | 6 | This project simplifies the sysupgrade process for upgrading the firmware of 7 | devices running OpenWrt or distributions based on it. These tools offer an easy 8 | way to reflash the router with a new firmware version 9 | (including all packages) without the need to use `opkg`. 10 | 11 | ASU is based on an [API](#api) to request custom firmware images with any 12 | selection of packages pre-installed. This avoids the need to set up a build 13 | environment, and makes it possible to create a custom firmware image even using 14 | a mobile device. 15 | 16 | ## Clients of the Sysupgrade Server 17 | 18 | ### OpenWrt Firmware Selector 19 | 20 | Simple web interface using vanilla JavaScript currently developed by @mwarning. 21 | It offers a device search based on model names and show links either to 22 | [official images](https://downloads.openwrt.org/) or requests images via the 23 | _asu_ API. Please join in the development at 24 | [GitLab repository](https://gitlab.com/openwrt/web/firmware-selector-openwrt-org) 25 | 26 | - 27 | 28 | ![ofs](misc/ofs.png) 29 | 30 | ### LuCI app 31 | 32 | The package 33 | [`luci-app-attendedsysupgrade`](https://github.com/openwrt/luci/tree/master/applications/luci-app-attendedsysupgrade) 34 | offers a simple tool under `System > Attended Sysupgrade`. It requests a new 35 | firmware image that includes the current set of packages, waits until it's built 36 | and flashes it. 
If "Keep Configuration" is checked in the GUI, the device 37 | upgrades to the new firmware without any need to re-enter any configuration or 38 | re-install any packages. 39 | 40 | ![luci](misc/luci.png) 41 | 42 | ### CLI 43 | 44 | With `OpenWrt SNAPSHOT-r26792 or newer` (and in the 24.10 release) the CLI app 45 | [`auc`](https://github.com/openwrt/packages/tree/master/utils/auc) was replaced 46 | with [`owut`](https://openwrt.org/docs/guide-user/installation/sysupgrade.owut) 47 | as a more comprehensive CLI tool to provide an easy way to upgrade your device. 48 | 49 | ![owut](misc/owut.png) 50 | 51 | ## Server 52 | 53 | The server listens for image requests and, if valid, automatically generates 54 | them. It coordinates several OpenWrt ImageBuilders and caches the resulting 55 | images in a Redis database. If an image is cached, the server can provide it 56 | immediately without rebuilding. 57 | 58 | ### Active server 59 | 60 | > [!NOTE] 61 | > Official server using ImageBuilder published on [OpenWrt 62 | > Downloads](downloads.openwrt.org). 63 | 64 | - [sysupgrade.openwrt.org](https://sysupgrade.openwrt.org) 65 | 66 | > [!NOTE] 67 | > Unofficial servers, may run modified ImageBuilder 68 | 69 | - [ImmortalWrt](https://sysupgrade.kyarucloud.moe) 70 | - [LibreMesh](https://sysupgrade.antennine.org) (only `stable` and `oldstable` OpenWrt versions) 71 | - [sysupgrade.guerra24.net](https://sysupgrade.guerra24.net) 72 | - Create a pull request to add your server here 73 | 74 | ## Run your own server 75 | 76 | For security reasons each build happens inside a container so that one build 77 | can't affect another build. For this to work a Podman container runs an API 78 | service so workers can themselfs execute builds inside containers. 79 | 80 | ### Installation 81 | 82 | The server uses `podman-compose` to manage the containers. 
On a Debian based 83 | system, install the following packages: 84 | 85 | ```bash 86 | sudo apt install podman-compose 87 | ``` 88 | 89 | A [Python library](https://podman-py.readthedocs.io/en/latest/) is used to 90 | communicate with Podman over a socket. To enable the socket either `systemd` is 91 | required or the socket must be started manually using the Podman itself: 92 | 93 | ```bash 94 | # systemd 95 | systemctl --user enable podman.socket 96 | systemctl --user start podman.socket 97 | systemctl --user status podman.socket 98 | 99 | # manual (must stay open) 100 | podman system service --time=0 unix:/run/user/$(id -u)/podman/podman.sock 101 | ``` 102 | 103 | Now you can either use the latest ASU containers or build them yourself, run 104 | either of the following two commands: 105 | 106 | ```bash 107 | # use existing containers 108 | podman-compose pull 109 | 110 | # build containers locally 111 | podman-compose build 112 | ``` 113 | 114 | The services are configured via environment variables, which can be set in a 115 | `.env` file 116 | 117 | ```bash 118 | echo "PUBLIC_PATH=$(pwd)/public" > .env 119 | echo "CONTAINER_SOCKET_PATH=/run/user/$(id -u)/podman/podman.sock" >> .env 120 | # optionally allow custom scripts running on first boot 121 | echo "ALLOW_DEFAULTS=1" >> .env 122 | ``` 123 | 124 | Now it's possible to run all services via `podman-compose`: 125 | 126 | ```bash 127 | podman-compose up -d 128 | ``` 129 | 130 | This will start the server, the Podman API container and one worker. Once the 131 | server is running, it's possible to request images via the API on 132 | `http://localhost:8000`. Modify `podman-compose.yml` to change the port. 133 | 134 | ### Production 135 | 136 | For production it's recommended to use a reverse proxy like `nginx` or `caddy`. 137 | You can find a Caddy sample configuration in `misc/Caddyfile`. 
138 | 139 | If you want your server to remain active after you log out of the server, you 140 | must enable "linger" in `loginctl`: 141 | 142 | ```bash 143 | loginctl enable-linger 144 | ``` 145 | 146 | #### System requirements 147 | 148 | - 2 GB RAM (4 GB recommended) 149 | - 2 CPU cores (4 cores recommended) 150 | - 50 GB disk space (200 GB recommended) 151 | 152 | #### Squid Cache 153 | 154 | Instead of creating and uploading SNAPSHOT ImageBuilder containers everyday, 155 | only a container with installed dependencies and a `setup.sh` script is offered. 156 | ASU will automatically run that script and setup the latest ImageBuilder. To 157 | speed up the process, a Squid cache can be used to store the ImageBuilder 158 | archives locally. To enable the cache, set `SQUID_CACHE=1` in the `.env` file. 159 | 160 | To have the cache accessible from running containers, the Squid port 3128 inside 161 | a running container must be forwarded to the host. This can be done by adding 162 | the following line to the `.config/containers/containers.conf` file: 163 | 164 | ```toml 165 | [network] 166 | pasta_options = [ 167 | "-a", "10.0.2.0", 168 | "-n", "24", 169 | "-g", "10.0.2.2", 170 | "--dns-forward", "10.0.2.3", 171 | "-T", "3128:3128" 172 | ] 173 | ``` 174 | 175 | > If you know a better setup, please create a pull request. 176 | 177 | ### Development 178 | 179 | After cloning this repository, install `poetry` which manages the Python 180 | dependencies. 
181 | 182 | ```bash 183 | apt install python3-poetry 184 | poetry install 185 | ``` 186 | 187 | #### Running the server 188 | 189 | ```bash 190 | poetry run fastapi dev asu/main.py 191 | ``` 192 | 193 | #### Running a worker 194 | 195 | ```bash 196 | source .env # poetry does not load .env 197 | poetry run rq worker 198 | ``` 199 | 200 | ### API 201 | 202 | The API is documented via _OpenAPI_ and can be viewed interactively on the 203 | server: 204 | 205 | - [https://sysupgrade.openwrt.org/docs/](https://sysupgrade.openwrt.org/docs/) 206 | - [https://sysupgrade.openwrt.org/redoc](https://sysupgrade.openwrt.org/redoc/) 207 | -------------------------------------------------------------------------------- /misc/grafana.json: -------------------------------------------------------------------------------- 1 | { 2 | "annotations": { 3 | "list": [ 4 | { 5 | "builtIn": 1, 6 | "datasource": { 7 | "type": "datasource", 8 | "uid": "grafana" 9 | }, 10 | "enable": true, 11 | "hide": true, 12 | "iconColor": "rgba(0, 211, 255, 1)", 13 | "name": "Annotations & Alerts", 14 | "target": { 15 | "limit": 100, 16 | "matchAny": false, 17 | "tags": [], 18 | "type": "dashboard" 19 | }, 20 | "type": "dashboard" 21 | } 22 | ] 23 | }, 24 | "editable": true, 25 | "fiscalYearStartMonth": 0, 26 | "graphTooltip": 0, 27 | "id": 4, 28 | "links": [], 29 | "liveNow": false, 30 | "panels": [ 31 | { 32 | "datasource": { 33 | "type": "prometheus", 34 | "uid": "XwiupDP7k" 35 | }, 36 | "fieldConfig": { 37 | "defaults": { 38 | "color": { 39 | "mode": "thresholds" 40 | }, 41 | "mappings": [], 42 | "thresholds": { 43 | "mode": "absolute", 44 | "steps": [ 45 | { 46 | "color": "green", 47 | "value": null 48 | } 49 | ] 50 | } 51 | }, 52 | "overrides": [] 53 | }, 54 | "gridPos": { 55 | "h": 6, 56 | "w": 12, 57 | "x": 0, 58 | "y": 0 59 | }, 60 | "id": 4, 61 | "options": { 62 | "colorMode": "value", 63 | "graphMode": "area", 64 | "justifyMode": "auto", 65 | "orientation": "auto", 66 | "reduceOptions": { 67 | 
"calcs": [ 68 | "lastNotNull" 69 | ], 70 | "fields": "", 71 | "values": false 72 | }, 73 | "textMode": "auto" 74 | }, 75 | "pluginVersion": "9.0.2", 76 | "targets": [ 77 | { 78 | "datasource": { 79 | "type": "prometheus", 80 | "uid": "XwiupDP7k" 81 | }, 82 | "exemplar": true, 83 | "expr": "sum(increase(builds_total[$__range]))", 84 | "interval": "", 85 | "legendFormat": "", 86 | "refId": "A" 87 | } 88 | ], 89 | "title": "Builds Total", 90 | "type": "stat" 91 | }, 92 | { 93 | "datasource": { 94 | "type": "prometheus", 95 | "uid": "XwiupDP7k" 96 | }, 97 | "fieldConfig": { 98 | "defaults": { 99 | "color": { 100 | "mode": "thresholds" 101 | }, 102 | "mappings": [], 103 | "thresholds": { 104 | "mode": "absolute", 105 | "steps": [ 106 | { 107 | "color": "green", 108 | "value": null 109 | }, 110 | { 111 | "color": "red", 112 | "value": 80 113 | } 114 | ] 115 | }, 116 | "unit": "percentunit" 117 | }, 118 | "overrides": [] 119 | }, 120 | "gridPos": { 121 | "h": 6, 122 | "w": 12, 123 | "x": 12, 124 | "y": 0 125 | }, 126 | "id": 10, 127 | "options": { 128 | "colorMode": "value", 129 | "graphMode": "area", 130 | "justifyMode": "auto", 131 | "orientation": "auto", 132 | "reduceOptions": { 133 | "calcs": [ 134 | "lastNotNull" 135 | ], 136 | "fields": "", 137 | "values": false 138 | }, 139 | "textMode": "auto" 140 | }, 141 | "pluginVersion": "9.0.2", 142 | "targets": [ 143 | { 144 | "datasource": { 145 | "type": "prometheus", 146 | "uid": "XwiupDP7k" 147 | }, 148 | "exemplar": true, 149 | "expr": "increase(cache_hits[$__range]) / (increase(cache_hits[$__range]) + increase(cache_misses[$__range]))", 150 | "interval": "", 151 | "legendFormat": "", 152 | "refId": "A" 153 | } 154 | ], 155 | "title": "Cache Hits", 156 | "type": "stat" 157 | }, 158 | { 159 | "datasource": { 160 | "type": "prometheus", 161 | "uid": "XwiupDP7k" 162 | }, 163 | "fieldConfig": { 164 | "defaults": { 165 | "color": { 166 | "mode": "palette-classic" 167 | }, 168 | "custom": { 169 | "hideFrom": { 170 | 
"legend": false, 171 | "tooltip": false, 172 | "viz": false 173 | } 174 | }, 175 | "mappings": [], 176 | "unit": "short" 177 | }, 178 | "overrides": [] 179 | }, 180 | "gridPos": { 181 | "h": 11, 182 | "w": 12, 183 | "x": 0, 184 | "y": 6 185 | }, 186 | "id": 8, 187 | "options": { 188 | "legend": { 189 | "displayMode": "table", 190 | "placement": "right", 191 | "values": [ 192 | "percent" 193 | ] 194 | }, 195 | "pieType": "pie", 196 | "reduceOptions": { 197 | "calcs": [ 198 | "lastNotNull" 199 | ], 200 | "fields": "", 201 | "values": false 202 | }, 203 | "tooltip": { 204 | "mode": "single", 205 | "sort": "none" 206 | } 207 | }, 208 | "targets": [ 209 | { 210 | "datasource": { 211 | "type": "prometheus", 212 | "uid": "XwiupDP7k" 213 | }, 214 | "exemplar": true, 215 | "expr": "sum by (profile) (increase(builds_total[$__range])) > 1", 216 | "interval": "", 217 | "intervalFactor": 1, 218 | "legendFormat": "{{profile}}", 219 | "refId": "A" 220 | } 221 | ], 222 | "title": "Builds by profile", 223 | "transformations": [], 224 | "type": "piechart" 225 | }, 226 | { 227 | "datasource": { 228 | "type": "prometheus", 229 | "uid": "XwiupDP7k" 230 | }, 231 | "fieldConfig": { 232 | "defaults": { 233 | "color": { 234 | "mode": "palette-classic" 235 | }, 236 | "custom": { 237 | "hideFrom": { 238 | "legend": false, 239 | "tooltip": false, 240 | "viz": false 241 | } 242 | }, 243 | "mappings": [], 244 | "unit": "none" 245 | }, 246 | "overrides": [] 247 | }, 248 | "gridPos": { 249 | "h": 11, 250 | "w": 12, 251 | "x": 12, 252 | "y": 6 253 | }, 254 | "id": 2, 255 | "options": { 256 | "legend": { 257 | "displayMode": "table", 258 | "placement": "right", 259 | "values": [ 260 | "percent" 261 | ] 262 | }, 263 | "pieType": "pie", 264 | "reduceOptions": { 265 | "calcs": [ 266 | "lastNotNull" 267 | ], 268 | "fields": "", 269 | "values": false 270 | }, 271 | "tooltip": { 272 | "mode": "single", 273 | "sort": "none" 274 | } 275 | }, 276 | "pluginVersion": "8.4.3", 277 | "targets": [ 278 | { 279 | 
"datasource": { 280 | "type": "prometheus", 281 | "uid": "XwiupDP7k" 282 | }, 283 | "exemplar": true, 284 | "expr": "sum by (version) (increase(builds_total[$__range]))", 285 | "interval": "", 286 | "legendFormat": "{{version}}", 287 | "refId": "A" 288 | } 289 | ], 290 | "title": "Builds Total by Release", 291 | "type": "piechart" 292 | } 293 | ], 294 | "refresh": "1m", 295 | "schemaVersion": 36, 296 | "style": "dark", 297 | "tags": [], 298 | "templating": { 299 | "list": [] 300 | }, 301 | "time": { 302 | "from": "now-7d", 303 | "to": "now" 304 | }, 305 | "timepicker": {}, 306 | "timezone": "", 307 | "title": "Attended Sysupgrade Server", 308 | "uid": "LM1HE4E7k", 309 | "version": 3, 310 | "weekStart": "" 311 | } 312 | -------------------------------------------------------------------------------- /asu/static/style.css: -------------------------------------------------------------------------------- 1 | /* 2 | @media (min-resolution: 200dpi), (max-width: 1000px) { 3 | .container { 4 | width: 100% !important; 5 | } 6 | } 7 | */ 8 | 9 | /* move version selection in a new line*/ 10 | /* 11 | @media (min-resolution: 400dpi), (max-width: 600px) { 12 | #models-autocomplete { 13 | flex-direction: column; 14 | } 15 | #versions { 16 | margin-top: 0.5em; 17 | } 18 | .autocomplete > input { 19 | margin-right: 0px !important; 20 | } 21 | } 22 | */ 23 | 24 | body { 25 | font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; 26 | font-size: 14px; 27 | line-height: 1.4; 28 | color: #333; 29 | padding: 0; 30 | margin: 0; 31 | } 32 | 33 | /* OpenWrt Wiki link color is #337ab7; */ 34 | /* Original little-steps link color is very dark: #337ab7; */ 35 | /* OpenWrt Logo Guidelines says bright blue color should be #00B5E2; */ 36 | /* But... 
the actual SVG in 43b0e8; */ 37 | 38 | a { 39 | color: #00b5e2; 40 | text-decoration: none; 41 | } 42 | 43 | a:hover { 44 | text-decoration: underline; 45 | } 46 | 47 | #models-autocomplete { 48 | display: flex; 49 | padding-top: 20px; 50 | } 51 | 52 | .autocomplete { 53 | position: relative; 54 | } 55 | 56 | .autocomplete > input { 57 | border: 0.06em solid transparent; 58 | background-color: #f1f1f1; 59 | padding: 0.8em; 60 | flex-grow: 1; 61 | margin-right: 5px; 62 | border-radius: 0.2em; 63 | font-size: inherit; 64 | } 65 | 66 | .autocomplete-items { 67 | position: absolute; 68 | border: 0.1em solid #d4d4d4; 69 | border-bottom: none; 70 | font-size: inherit; 71 | border-top: none; 72 | z-index: 99; 73 | /*position the autocomplete items to be the same width as the container:*/ 74 | top: 100%; 75 | left: 0; 76 | right: 0; 77 | } 78 | 79 | .autocomplete-items div { 80 | padding: 0.8em; 81 | cursor: pointer; 82 | background-color: #fff; 83 | border-bottom: 0.1em solid #d4d4d4; 84 | } 85 | 86 | /*when hovering an item:*/ 87 | .autocomplete-items div:hover { 88 | background-color: #e9e9e9; 89 | } 90 | 91 | /*when navigating through the items using the arrow keys:*/ 92 | .autocomplete-active { 93 | background-color: DodgerBlue !important; 94 | color: #ffffff; 95 | } 96 | 97 | header { 98 | margin-right: auto; 99 | margin-left: auto; 100 | font-weight: 500; 101 | width: 100%; 102 | z-index: 1100; 103 | box-shadow: 104 | 0em 0.125em 0.25em -0.06em rgba(0, 0, 0, 0.2), 105 | 0em 0.25em 0.3em 0em rgba(0, 0, 0, 0.14), 106 | 0em 0.06em 0.625em 0em rgba(0, 0, 0, 0.12); 107 | background-color: #002b49; 108 | } 109 | 110 | header > div { 111 | margin-right: auto; 112 | margin-left: auto; 113 | width: auto; 114 | display: flex; 115 | position: relative; 116 | align-items: center; 117 | color: #fff; 118 | } 119 | 120 | header > div > img { 121 | height: 3em; 122 | padding: 0.75em; 123 | } 124 | 125 | h6 { 126 | margin: 0; 127 | font-size: 1.25rem; 128 | font-weight: 500; 129 | 
line-height: 1.6; 130 | letter-spacing: 0.0075em; 131 | } 132 | 133 | header > div { 134 | padding-left: 1.5em; 135 | padding-right: 1.5em; 136 | min-height: 4em; 137 | 138 | display: flex; 139 | position: relative; 140 | align-items: center; 141 | color: #fff; 142 | } 143 | 144 | .container { 145 | padding-left: 1.2em; 146 | padding-right: 1.2em; 147 | width: 60%; 148 | box-sizing: border-box; 149 | margin-top: 1.2em; 150 | margin-right: auto; 151 | margin-left: auto; 152 | } 153 | 154 | /*.container > div { 155 | padding: 0.625em 20px; 156 | text-align: left; 157 | box-shadow: 0em 0.06em 0.19em 0em rgba(0, 0, 0, 0.2), 158 | 0em 0.06em 0.06em 0em rgba(0, 0, 0, 0.14), 159 | 0em 0.125em 0.06em -0.06em rgba(0, 0, 0, 0.12); 160 | border-radius: 0.25em; 161 | color: rgba(0, 0, 0, 0.87); 162 | transition: box-shadow 300ms cubic-bezier(0.4, 0, 0.2, 1) 0ms; 163 | background-color: #fff; 164 | }*/ 165 | 166 | #versions { 167 | border: 0.06em solid transparent; 168 | background-color: #f1f1f1; 169 | padding: 0.8em; 170 | min-width: 4em; 171 | border-radius: 0.25em; 172 | font-size: inherit; 173 | } 174 | 175 | .download-link { 176 | text-decoration: none; 177 | border-radius: 0.2em; 178 | padding: 0.8em; 179 | margin: 0.2em; 180 | font-size: inherit; 181 | cursor: pointer; 182 | letter-spacing: 0.05em; 183 | display: inline-flex; 184 | align-items: center; 185 | box-shadow: 186 | 0em 0.06em 0.3em 0em rgba(0, 0, 0, 0.2), 187 | 0em 0.125em 0.125em 0em rgba(0, 0, 0, 0.14), 188 | 0em 0.19em 0.06em -0.125em rgba(0, 0, 0, 0.12); 189 | color: #fff; 190 | background-color: #00a3e1; 191 | } 192 | 193 | .download-link-hover { 194 | background-color: #038fc6; 195 | } 196 | 197 | .download-link :first-child { 198 | width: 30px; 199 | margin-right: 15px; 200 | margin-top: -0.125em; 201 | content: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'%3E%3Cpath d='M19.35 10.04C18.67 6.59 15.64 4 12 4 9.11 4 6.6 5.64 5.35 8.04 2.34 8.36 0 10.91 0 14c0 3.31 
2.69 6 6 6h13c2.76 0 5-2.24 5-5 0-2.64-2.05-4.78-4.65-4.96zM17 13l-5 5-5-5h3V9h4v4h3z' fill='%23fff'%3E%3C/path%3E%3Cpath d='M0 0h24v24H0z' fill='none'/%3E%3C/svg%3E"); 202 | } 203 | 204 | #image-info { 205 | width: 1em; 206 | content: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 -256 1850 1850' width='100%25' height='100%25'%3E%3Cg transform='matrix(1,0,0,-1,30.372881,1426.9492)'%3E%3Cpath d='M 1408,608 V 288 Q 1408,169 1323.5,84.5 1239,0 1120,0 H 288 Q 169,0 84.5,84.5 0,169 0,288 v 832 Q 0,1239 84.5,1323.5 169,1408 288,1408 h 704 q 14,0 23,-9 9,-9 9,-23 v -64 q 0,-14 -9,-23 -9,-9 -23,-9 H 288 q -66,0 -113,-47 -47,-47 -47,-113 V 288 q 0,-66 47,-113 47,-47 113,-47 h 832 q 66,0 113,47 47,47 47,113 v 320 q 0,14 9,23 9,9 23,9 h 64 q 14,0 23,-9 9,-9 9,-23 z m 384,864 V 960 q 0,-26 -19,-45 -19,-19 -45,-19 -26,0 -45,19 L 1507,1091 855,439 q -10,-10 -23,-10 -13,0 -23,10 L 695,553 q -10,10 -10,23 0,13 10,23 l 652,652 -176,176 q -19,19 -19,45 0,26 19,45 19,19 45,19 h 512 q 26,0 45,-19 19,-19 19,-45 z' /%3E%3C/g%3E%3C/svg%3E%0A"); 207 | } 208 | 209 | .row { 210 | display: flex; 211 | line-height: 1.5; 212 | overflow: hidden; 213 | } 214 | 215 | .col1 { 216 | display: inline-block; 217 | width: 6em; 218 | } 219 | 220 | .col2 { 221 | display: inline-block; 222 | overflow: hidden; 223 | text-overflow: ellipsis; 224 | } 225 | 226 | #image-model { 227 | font-weight: bold; 228 | } 229 | 230 | .hide { 231 | display: none; 232 | } 233 | 234 | .download-help { 235 | margin-top: 0.5em; 236 | } 237 | 238 | #help { 239 | margin: 20px 0; 240 | } 241 | 242 | #footer { 243 | font-size: 0.8em; 244 | text-align: right; 245 | margin-top: 1em; 246 | } 247 | 248 | #footer a { 249 | text-decoration: none; 250 | } 251 | 252 | #notfound h3 { 253 | text-align: center; 254 | } 255 | 256 | table { 257 | /*table-layout: fixed;*/ 258 | font-size: 14px; 259 | box-shadow: 0 0 0.5em #999; 260 | margin: 0; 261 | border: none !important; 262 | margin-bottom: 2em; 263 | 
border-collapse: collapse; 264 | border-spacing: 0; 265 | } 266 | 267 | th { 268 | background: #002b49; 269 | line-height: 24px; 270 | border: none; 271 | text-align: left; 272 | color: #fff; 273 | } 274 | 275 | tr { 276 | background: rgba(255, 255, 255, 0.8); 277 | } 278 | 279 | tr:hover { 280 | background: rgba(255, 255, 255, 0.6); 281 | } 282 | 283 | tr:nth-child(2n + 1) { 284 | background: #eee; 285 | } 286 | 287 | th, 288 | td { 289 | height: 20px; 290 | vertical-align: middle; 291 | white-space: nowrap; 292 | padding: 0.2em 0.5em; 293 | border-bottom: 1px solid #ccc; 294 | } 295 | 296 | .n { 297 | font-weight: bold; 298 | } 299 | 300 | .s { 301 | text-align: right; 302 | } 303 | 304 | .d { 305 | text-align: center; 306 | } 307 | 308 | .sh { 309 | font-family: monospace; 310 | } 311 | 312 | /* We start writing out flexbox here. The above is just page setup */ 313 | .flex-container { 314 | display: flex; 315 | flex-wrap: wrap; 316 | flex-direction: row; 317 | /*border:10px solid white;*/ 318 | /*height:100vh;*/ 319 | } 320 | 321 | .flex-item { 322 | /*height: 300px;*/ 323 | /* width: 350px; */ 324 | flex: 1; 325 | background-color: white; 326 | border: 1px solid lightgrey; 327 | padding: 10px 20px; 328 | margin: 2px; 329 | } 330 | 331 | .grid-container { 332 | display: grid; 333 | grid-gap: 20px; 334 | } 335 | 336 | .grid-item { 337 | /* border: 1px solid lightgrey; */ 338 | margin: 2px; 339 | width: 1fr; 340 | } 341 | -------------------------------------------------------------------------------- /asu/routers/api.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Union 3 | 4 | from fastapi import APIRouter, Header, Request 5 | from fastapi.responses import RedirectResponse, Response 6 | from rq.job import Job 7 | 8 | from asu.build import build 9 | from asu.build_request import BuildRequest 10 | from asu.config import settings 11 | from asu.util import ( 12 | add_timestamp, 13 | 
add_build_event, 14 | client_get, 15 | get_branch, 16 | get_queue, 17 | get_request_hash, 18 | reload_profiles, 19 | reload_targets, 20 | reload_versions, 21 | ) 22 | 23 | router = APIRouter() 24 | 25 | 26 | def get_distros() -> list: 27 | """Return available distributions 28 | 29 | Returns: 30 | list: Available distributions 31 | """ 32 | return ["openwrt"] 33 | 34 | 35 | @router.get("/revision/{version}/{target}/{subtarget}") 36 | def api_v1_revision( 37 | version: str, target: str, subtarget: str, response: Response, request: Request 38 | ): 39 | branch_data = get_branch(version) 40 | version_path = branch_data["path"].format(version=version) 41 | req = client_get( 42 | settings.upstream_url 43 | + f"/{version_path}/targets/{target}/{subtarget}/profiles.json" 44 | ) 45 | 46 | if req.status_code != 200: 47 | response.status_code = req.status_code 48 | return { 49 | "detail": f"Failed to fetch revision for {version}/{target}/{subtarget}", 50 | "status": req.status_code, 51 | } 52 | 53 | return {"revision": req.json()["version_code"]} 54 | 55 | 56 | @router.get("/latest") 57 | def api_latest(): 58 | return RedirectResponse("/json/v1/latest.json", status_code=301) 59 | 60 | 61 | @router.get("/overview") 62 | def api_v1_overview(): 63 | return RedirectResponse("/json/v1/overview.json", status_code=301) 64 | 65 | 66 | def validation_failure(detail: str) -> tuple[dict[str, Union[str, int]], int]: 67 | logging.info(f"Validation failure {detail = }") 68 | return {"detail": detail, "status": 400}, 400 69 | 70 | 71 | def validate_request( 72 | app, 73 | build_request: BuildRequest, 74 | ) -> tuple[dict[str, Union[str, int]], int]: 75 | """Validate an image request and return found errors with status code 76 | 77 | Instead of building every request it is first validated. This checks for 78 | existence of requested profile, distro, version and package. 
79 | 80 | Args: 81 | req (dict): The image request 82 | 83 | Returns: 84 | (dict, int): Status message and code, empty if no error appears 85 | 86 | """ 87 | 88 | if build_request.defaults and not settings.allow_defaults: 89 | return validation_failure("Handling `defaults` not enabled on server") 90 | 91 | if build_request.distro not in get_distros(): 92 | return validation_failure(f"Unsupported distro: {build_request.distro}") 93 | 94 | branch = get_branch(build_request.version)["name"] 95 | 96 | if branch not in settings.branches: 97 | return validation_failure(f"Unsupported branch: {build_request.version}") 98 | 99 | if build_request.version not in app.versions: 100 | reload_versions(app) 101 | if build_request.version not in app.versions: 102 | return validation_failure(f"Unsupported version: {build_request.version}") 103 | 104 | build_request.packages: list[str] = [ 105 | x.removeprefix("+") 106 | for x in (build_request.packages_versions.keys() or build_request.packages) 107 | ] 108 | 109 | if build_request.target not in app.targets[build_request.version]: 110 | reload_targets(app, build_request.version) 111 | if build_request.target not in app.targets[build_request.version]: 112 | return validation_failure( 113 | f"Unsupported target: {build_request.target}. The requested " 114 | "target was either dropped, is still being built or is not " 115 | "supported by the selected version. Please check the forums or " 116 | "try again later." 117 | ) 118 | 119 | def valid_profile(profile: str, build_request: BuildRequest) -> bool: 120 | profiles = app.profiles[build_request.version][build_request.target] 121 | if profile in profiles: 122 | return True 123 | if len(profiles) == 1 and "generic" in profiles: 124 | # Handles the x86, armsr and other generic variants. 
125 | build_request.profile = "generic" 126 | return True 127 | return False 128 | 129 | if not valid_profile(build_request.profile, build_request): 130 | reload_profiles(app, build_request.version, build_request.target) 131 | if not valid_profile(build_request.profile, build_request): 132 | return validation_failure( 133 | f"Unsupported profile: {build_request.profile}. The requested " 134 | "profile was either dropped or never existed. Please check the " 135 | "forums for more information." 136 | ) 137 | 138 | build_request.profile = app.profiles[build_request.version][build_request.target][ 139 | build_request.profile 140 | ] 141 | return ({}, None) 142 | 143 | 144 | def return_job_v1(job: Job) -> tuple[dict, int, dict]: 145 | response: dict = job.get_meta() 146 | imagebuilder_status: str = "done" 147 | queue_position: int = 0 148 | 149 | if job.meta: 150 | response.update(job.meta) 151 | 152 | if job.is_failed: 153 | error_message: str = job.latest_result().exc_string 154 | if "stderr" in response: 155 | error_message = response["stderr"] + "\n" + error_message 156 | detail: str = response.get("detail", "failed") 157 | if detail == "init": # Happens when container startup fails. 
@router.head("/build/{request_hash}")
@router.get("/build/{request_hash}")
def api_v1_build_get(request: Request, request_hash: str, response: Response) -> dict:
    """Return the state of a previously requested build by its request hash."""
    job: Job = get_queue().fetch_job(request_hash)
    if not job:
        response.status_code = 404
        return {
            "status": 404,
            "title": "Not Found",
            "detail": "could not find provided request hash",
        }

    content, status, headers = return_job_v1(job)
    response.headers.update(headers)
    response.status_code = status

    return content


@router.post("/build")
def api_v1_build_post(
    build_request: BuildRequest,
    response: Response,
    request: Request,
    user_agent: str = Header(None),
):
    """Validate a build request and enqueue it (or return the cached job).

    Args:
        build_request: The requested image description.
        response: FastAPI response object (status/headers are set on it).
        request: FastAPI request object (provides access to the app caches).
        user_agent: The client's User-Agent header; may be None.

    Returns:
        dict: Job state as produced by return_job_v1(), or an error body.
    """
    # Sanitize the profile in case the client did not (bug in older LuCI app).
    build_request.profile = build_request.profile.replace(",", "_")

    add_build_event("requests")

    request_hash: str = get_request_hash(build_request)
    job: Job = get_queue().fetch_job(request_hash)
    status: int = 200
    result_ttl: str = settings.build_ttl
    if build_request.defaults:
        # Images with custom first-boot defaults are kept for a shorter time.
        result_ttl = settings.build_defaults_ttl
    failure_ttl: str = settings.build_failure_ttl

    if build_request.client:
        client = build_request.client
    # Fix: user_agent is Header(None), so it can be None when the client
    # sends no User-Agent header; guard before calling startswith().
    elif user_agent and user_agent.startswith("auc"):
        client = user_agent.replace(" (", "/").replace(")", "")
    else:
        client = "unknown/0"

    add_timestamp(
        f"stats:clients:{client}",
        {"stats": "clients", "client": client},
    )

    if job is None:
        add_build_event("cache-misses")

        content, status = validate_request(request.app, build_request)
        if content:
            response.status_code = status
            return content

        job_queue_length = len(get_queue())
        if job_queue_length > settings.max_pending_jobs:
            response.status_code = 529
            return {
                "status": 529,  # "Site is overloaded"
                "title": "Server overloaded",
                "detail": f"server overload, queue contains too many build requests: {job_queue_length}",
            }

        job = get_queue().enqueue(
            build,
            build_request,
            job_id=request_hash,
            result_ttl=result_ttl,
            failure_ttl=failure_ttl,
            job_timeout=settings.job_timeout,
        )
    else:
        if job.is_finished:
            add_build_event("cache-hits")

    content, status, headers = return_job_v1(job)
    response.headers.update(headers)
    response.status_code = status

    return content


@router.get("/stats")
def api_v1_builder_stats():
    """Return status of builders

    Returns:
        queue_length: Number of jobs currently in build queue
    """
    return {
        "queue_length": len(get_queue()),
    }
def test_get_str_hash():
    """get_str_hash() returns the SHA-256 hex digest of the input string."""
    assert (
        get_str_hash("test")
        == "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
    )


def test_get_file_hash():
    """get_file_hash() hashes file contents; digest equals the string hash.

    Cleanup moved into try/finally so the temp file is removed even when the
    assertion fails (previously it leaked on failure).
    """
    file_fd, file_path = tempfile.mkstemp()
    try:
        os.write(file_fd, b"test")

        assert (
            get_file_hash(file_path)
            == "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
        )
    finally:
        os.close(file_fd)
        os.unlink(file_path)


def test_get_packages_hash():
    """get_packages_hash() yields a stable digest for a package list."""
    assert (
        get_packages_hash(["test1", "test2"])
        == "57aab5949a36e66b535a8cb13e39e9e093181c9000c016990d7be9eb86a9b9e8"
    )


def test_get_request_hash():
    """get_request_hash() yields a stable digest for a BuildRequest."""
    request = BuildRequest(
        **{
            "distro": "test",
            "version": "test",
            "target": "testtarget/testsubtarget",
            "profile": "test",
            "packages": ["test"],
        }
    )

    assert (
        get_request_hash(request)
        == "99ff721439cd696f7da259541a07d7bfc7eb6c45a844db532e0384b464e23f46"
    )
def test_fingerprint_pubkey_usign():
    """fingerprint_pubkey_usign() extracts the key id from a usign pubkey."""
    pub_key = "RWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SU89"
    assert fingerprint_pubkey_usign(pub_key) == "ab1df166947b2551"


def test_verify_usign():
    """verify_usign() accepts a valid signature and rejects a bad key.

    Cleanup moved into try/finally so both temp files are removed even when
    an assertion fails (previously they leaked on failure).
    """
    sig = b"\nRWSrHfFmlHslUQ9dCB1AJr/PoIIbBJJKtofZ5frLOuG03SlwAwgU1tYOaJs2eVGdo1C8S9LNcMBLPIfDDCWSdrLK3WJ6JV6HNQM="
    msg_fd, msg_path = tempfile.mkstemp()
    sig_fd, sig_path = tempfile.mkstemp()
    try:
        os.write(msg_fd, b"test\n")
        os.write(sig_fd, sig)

        pub_key = "RWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SU89"
        pub_key_bad = "rWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SXXX"

        assert verify_usign(Path(sig_path), Path(msg_path), pub_key)
        assert not verify_usign(Path(sig_path), Path(msg_path), pub_key_bad)
    finally:
        os.close(msg_fd)
        os.close(sig_fd)
        os.unlink(msg_path)
        os.unlink(sig_path)


def test_get_version_container_tag():
    """get_container_version_tag() maps release versions to image tags."""
    assert get_container_version_tag("1.0.0") == "v1.0.0"
    assert get_container_version_tag("SNAPSHOT") == "master"
    assert get_container_version_tag("1.0.0-SNAPSHOT") == "openwrt-1.0.0"
    assert get_container_version_tag("23.05.0-rc3") == "v23.05.0-rc3"
    assert get_container_version_tag("SNAPP-SNAPSHOT") == "openwrt-SNAPP"
def test_get_packages_versions():
    """Exercise parse_packages_file() across all index format combinations.

    Four scenarios are simulated by stubbing asu.util.client_get with fake
    response objects: legacy opkg "Packages" text only, text plus a v1
    index.json, v1 index.json only, and the v2 index.json format.

    NOTE(review): asu.util.client_get is replaced globally and never
    restored — later tests in this module that also stub it are unaffected,
    but this leaks into any other module run afterwards. TODO confirm
    whether a monkeypatch fixture should be used instead.
    """
    # Package maps with and without the opkg ABI-version suffix in the name.
    packages_with_abi = {
        "libusb-1.0-0": "1.2.3",
        "libpython-3.3-3": "1.2.3",
        "bort": "9.9.9",
    }
    packages_without_abi = {
        "libusb-1.0": "1.2.3",
        "libpython-3.3": "1.2.3",
        "bort": "9.9.9",
    }

    # Fake HTTP responses mimicking the minimal interface parse_packages_file
    # reads: status_code plus either .json() or .text.
    class Response404:
        status_code = 404

    class ResponseJson1:
        # v1 index.json: no "version" key, packages keep ABI suffixes.
        status_code = 200

        def json(self):
            return {
                "architecture": "aarch_generic",
                "packages": packages_with_abi,
            }

    class ResponseJson2:
        # v2 index.json: explicit "version": 2, ABI suffixes already stripped.
        status_code = 200

        def json(self):
            return {
                "version": 2,
                "architecture": "aarch_generic",
                "packages": packages_without_abi,
            }

    class ResponseText:
        # Legacy opkg-style Packages file with ABIVersion fields.
        status_code = 200
        text = (
            "Package: libusb-1.0-0\n"
            "ABIVersion: -0\n"
            "Version: 1.2.3\n"
            "Architecture: x86_64\n"
            "\n"
            "Package: libpython-3.3-3\n"
            "ABIVersion: -3\n"
            "Version: 1.2.3\n"
            "\n"
            "Package: bort\n"
            "Version: 9.9.9\n"
            "\n"
            "\n"  # Add two more to fake malformed input.
            "\n"
        )

    # Old opkg-style Packages format, no index.json
    asu.util.client_get = lambda url: Response404() if "json" in url else ResponseText()
    index = parse_packages_file("httpx://fake_url")
    packages = index["packages"]

    assert index["architecture"] == "x86_64"
    assert packages == packages_without_abi

    # Old opkg-style Packages format, but with v1 index.json
    asu.util.client_get = (
        lambda url: ResponseJson1() if "json" in url else ResponseText()
    )
    index = parse_packages_file("httpx://fake_url")
    packages = index["packages"]

    assert index["architecture"] == "x86_64"
    assert packages == packages_without_abi

    # New apk-style without Packages, but old v1 index.json
    asu.util.client_get = (
        lambda url: ResponseJson1() if "json" in url else Response404()
    )
    index = parse_packages_file("httpx://fake_url")
    packages = index["packages"]

    assert index["architecture"] == "aarch_generic"
    assert packages == packages_with_abi

    # New index.json v2 format
    asu.util.client_get = lambda url: ResponseJson2()
    index = parse_packages_file("httpx://fake_url")
    packages = index["packages"]

    assert index["architecture"] == "aarch_generic"
    assert packages == packages_without_abi

    # Everything fails
    asu.util.client_get = lambda url: Response404()
    index = parse_packages_file("abc://fake")
    assert index == {}
def test_check_kmod_split():
    """Kmod split applies from 23.05.6 / 24.10 onwards and to snapshots."""
    expectations = [
        ("releases/22.07.3/targets/x86/64", False),
        ("releases/23.05.0-rc3/targets/x86/64", False),
        ("releases/23.05.2/targets/x86/64", False),
        ("releases/23.05.5/targets/x86/64", False),
        ("releases/23.05.6/targets/x86/64", True),
        ("releases/23.05-SNAPSHOT/targets/x86/64", True),
        ("releases/24.10.0-rc1/targets/x86/64", True),
        ("releases/24.10.2/targets/x86/64", True),
        ("releases/24.10-SNAPSHOT/targets/x86/64", True),
        ("releases/25.12.2/targets/x86/64", True),
        ("releases/26.10-SNAPSHOT/targets/x86/64", True),
        ("snapshots/targets/x86/64", True),
    ]

    for build_path, want in expectations:
        assert is_post_kmod_split_build(build_path) == want
def test_check_snapshot_versions():
    """is_snapshot_build() is True only for SNAPSHOT-suffixed versions.

    Release, rc, and point versions are not snapshots; "-SNAPSHOT" branch
    builds and plain "SNAPSHOT" are. (Leftover debug print() removed.)
    """
    cases = {
        "22.07.3": False,
        "23.05.0-rc3": False,
        "23.05.2": False,
        "23.05.5": False,
        "23.05.6": False,
        "23.05-SNAPSHOT": True,
        "24.10.0-rc1": False,
        "24.10.2": False,
        "24.10-SNAPSHOT": True,
        "SNAPSHOT": True,
    }

    for version, expected in cases.items():
        result: bool = is_snapshot_build(version)
        assert result == expected


def test_get_feeds():
    """parse_feeds_conf() returns feed names, or [] on HTTP failure."""

    class Response:
        status_code = 200
        text = (
            "src-git packages https://git.openwrt.org/feed/packages.git^b1635b8\n"
            "src-git luci https://git.openwrt.org/project/luci.git^63d8b79\n"
        )

    asu.util.client_get = lambda url: Response()

    feeds = parse_feeds_conf("httpx://fake_url")
    assert len(feeds) == 2
    assert feeds[0] == "packages"
    assert feeds[1] == "luci"

    Response.status_code = 404
    feeds = parse_feeds_conf("httpx://fake_url")
    assert feeds == []
def test_parse_manifest_opkg():
    """parse_manifest() handles opkg-style 'name - version' lines."""
    expected = {
        "test": "1.0",
        "test2": "2.0",
        "test3": "3.0",
        "test4": "3.0",
    }
    raw = "test - 1.0\ntest2 - 2.0\ntest3 - 3.0\ntest4 - 3.0\n"

    assert parse_manifest(raw) == expected


def test_parse_manifest_apk():
    """parse_manifest() handles apk-style 'name version' lines."""
    expected = {
        "test": "1.0",
        "test2": "2.0",
        "test3": "3.0",
        "test4": "3.0",
    }
    raw = "test 1.0\ntest2 2.0\ntest3 3.0\ntest4 3.0\n"

    assert parse_manifest(raw) == expected
def _build(build_request: BuildRequest, job=None):
    """Build the requested image inside a podman ImageBuilder container.

    Pulls the matching ImageBuilder container image, validates the request
    against `make info` output, runs `make manifest` and `make image`,
    optionally signs the resulting images, and records build statistics.
    Progress is reported through `job.meta["imagebuilder_status"]`.

    Args:
        build_request (BuildRequest): Properties of the requested image.
        job: RQ job to attach status/metadata to; defaults to the current job.

    Returns:
        dict: profiles.json content for the built profile, augmented with
        manifest, bin_dir, build command and timestamps.
    """

    build_start: float = perf_counter()

    request_hash = get_request_hash(build_request)
    # Output directory served to clients; keyed by the request hash.
    bin_dir: Path = settings.public_path / "store" / request_hash
    bin_dir.mkdir(parents=True, exist_ok=True)
    log.debug(f"Bin dir: {bin_dir}")

    job = job or get_current_job()
    job.meta["detail"] = "init"
    job.meta["imagebuilder_status"] = "init"
    job.meta["request"] = build_request
    job.save_meta()

    log.debug(f"Building {build_request}")

    podman = get_podman()

    log.debug(f"Podman version: {podman.version()}")

    container_version_tag = get_container_version_tag(build_request.version)
    log.debug(
        f"Container version: {container_version_tag} (requested {build_request.version})"
    )

    mounts: list[dict[str, Union[str, bool]]] = []
    environment: dict[str, str] = {}

    # e.g. <base>:x86-64-v23.05.5 — target slashes become dashes in the tag.
    image = f"{settings.base_container}:{build_request.target.replace('/', '-')}-{container_version_tag}"

    if is_snapshot_build(build_request.version):
        # Snapshot containers fetch the ImageBuilder at runtime via setup.sh,
        # so they need to know the target and the upstream version path.
        environment.update(
            {
                "TARGET": build_request.target,
                "VERSION_PATH": get_branch(build_request.version)
                .get("path", "")
                .replace("{version}", build_request.version),
            }
        )

    if settings.squid_cache:
        # Route downloads through the local squid proxy (plain HTTP so the
        # proxy can cache them).
        environment.update(
            {
                "UPSTREAM_URL": settings.upstream_url.replace("https", "http"),
                "use_proxy": "on",
                "http_proxy": "http://127.0.0.1:3128",
            }
        )

    job.meta["imagebuilder_status"] = "container_setup"
    job.save_meta()

    log.info(f"Pulling {image}...")
    try:
        podman.images.pull(image)
    except errors.ImageNotFound:
        report_error(
            job,
            f"Image not found: {image}. If this version was just released, please try again in a few hours as it may take some time to become fully available.",
        )
    log.info(f"Pulling {image}... done")

    bin_dir.mkdir(parents=True, exist_ok=True)
    log.debug("Created store path: %s", bin_dir)

    if build_request.repository_keys:
        log.debug("Found extra keys")

        (bin_dir / "keys").mkdir(parents=True, exist_ok=True)

        # Each extra usign key is written to disk and bind-mounted read-only
        # into the builder's keys directory, named by its fingerprint.
        for key in build_request.repository_keys:
            fingerprint = fingerprint_pubkey_usign(key)
            log.debug(f"Found key {fingerprint}")

            (bin_dir / "keys" / fingerprint).write_text(
                f"untrusted comment: {fingerprint}\n{key}"
            )

            mounts.append(
                {
                    "type": "bind",
                    "source": str(bin_dir / "keys" / fingerprint),
                    "target": "/builder/keys/" + fingerprint,
                    "read_only": True,
                },
            )

    if build_request.repositories:
        log.debug("Found extra repos")
        repositories = ""
        # Only repositories from the configured allow list are accepted.
        for name, repo in build_request.repositories.items():
            if repo.startswith(tuple(settings.repository_allow_list)):
                repositories += f"src/gz {name} {repo}\n"
            else:
                report_error(job, f"Repository {repo} not allowed")

        repositories += "src imagebuilder file:packages\noption check_signature"

        (bin_dir / "repositories.conf").write_text(repositories)

        mounts.append(
            {
                "type": "bind",
                "source": str(bin_dir / "repositories.conf"),
                "target": "/builder/repositories.conf",
                "read_only": True,
            },
        )

    if build_request.defaults:
        log.debug("Found defaults")

        # uci-defaults script applied on first boot of the built image.
        defaults_file = bin_dir / "files/etc/uci-defaults/99-asu-defaults"
        defaults_file.parent.mkdir(parents=True, exist_ok=True)
        defaults_file.write_text(build_request.defaults)
        mounts.append(
            {
                "type": "bind",
                "source": str(bin_dir / "files"),
                "target": str(bin_dir / "files"),
                "read_only": True,
            },
        )

    log.debug("Mounts: %s", mounts)

    # The container idles via sleep for the job timeout; commands are then
    # exec'd into it one at a time.
    container = podman.containers.create(
        image,
        command=["sleep", str(parse_timeout(settings.job_timeout))],
        mounts=mounts,
        cap_drop=["all"],
        no_new_privileges=True,
        privileged=False,
        networks={"pasta": {}},
        auto_remove=True,
        environment=environment,
    )
    container.start()

    if is_snapshot_build(build_request.version):
        # Snapshot containers download the actual ImageBuilder here.
        log.info("Running setup.sh for ImageBuilder")
        returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
            container, ["sh", "setup.sh"]
        )
        if returncode:
            container.kill()
            report_error(job, "Could not set up ImageBuilder")

    returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
        container, ["make", "info"]
    )

    job.meta["imagebuilder_status"] = "validate_revision"
    job.save_meta()

    # NOTE(review): re.search() returns None if "make info" output lacks the
    # revision line, which would raise AttributeError here — TODO confirm
    # this is caught by the caller's generic failure handling.
    version_code = re.search('Current Revision: "(r.+)"', job.meta["stdout"]).group(1)

    if requested := build_request.version_code:
        if version_code != requested:
            report_error(
                job,
                f"Received incorrect version {version_code} (requested {requested})",
            )

    # Packages installed on every image of this target, per "make info".
    default_packages = set(
        re.search(r"Default Packages: (.*)\n", job.meta["stdout"]).group(1).split()
    )
    log.debug(f"Default packages: {default_packages}")

    # Packages specific to the requested profile, per "make info".
    profile_packages = set(
        re.search(
            r"{}:\n    .+\n    Packages: (.*?)\n".format(build_request.profile),
            job.meta["stdout"],
            re.MULTILINE,
        )
        .group(1)
        .split()
    )

    apply_package_changes(build_request)

    build_cmd_packages = build_request.packages

    if build_request.diff_packages:
        # Convert the absolute package list into +/- deltas against the
        # target/profile defaults.
        build_cmd_packages: list[str] = diff_packages(
            build_request.packages, default_packages | profile_packages
        )
        log.debug(f"Diffed packages: {build_cmd_packages}")

    job.meta["imagebuilder_status"] = "validate_manifest"
    job.save_meta()

    if settings.squid_cache and not is_snapshot_build(build_request.version):
        log.info("Disabling HTTPS for repositories")
        # Once APK is used for a stable release, handle `repositories`, too
        run_cmd(container, ["sed", "-i", "s|https|http|g", "repositories.conf"])

    # Dry-run package resolution before the actual image build.
    returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
        container,
        [
            "make",
            "manifest",
            f"PROFILE={build_request.profile}",
            f"PACKAGES={' '.join(build_cmd_packages)}",
            "STRIP_ABI=1",
        ],
    )

    job.save_meta()

    if returncode:
        container.kill()
        report_error(job, "Impossible package selection")

    manifest: dict[str, str] = parse_manifest(job.meta["stdout"])
    log.debug(f"Manifest: {manifest}")

    # Check if all requested packages are in the manifest
    if err := check_manifest(manifest, build_request.packages_versions):
        report_error(job, err)

    packages_hash: str = get_packages_hash(manifest.keys())
    log.debug(f"Packages Hash: {packages_hash}")

    job.meta["build_cmd"] = [
        "make",
        "image",
        f"PROFILE={build_request.profile}",
        f"PACKAGES={' '.join(build_cmd_packages)}",
        f"EXTRA_IMAGE_NAME={packages_hash[:12]}",
        f"BIN_DIR=/builder/{request_hash}",
    ]

    if build_request.defaults:
        job.meta["build_cmd"].append(f"FILES={bin_dir}/files")

    # Check if custom rootfs size is requested
    if build_request.rootfs_size_mb:
        log.debug("Found custom rootfs size %d", build_request.rootfs_size_mb)
        job.meta["build_cmd"].append(f"ROOTFS_PARTSIZE={build_request.rootfs_size_mb}")

    log.debug("Build command: %s", job.meta["build_cmd"])

    job.meta["imagebuilder_status"] = "building_image"
    job.save_meta()

    # Run the actual build and copy the results out of the container.
    returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
        container,
        job.meta["build_cmd"],
        copy=["/builder/" + request_hash, bin_dir.parent],
    )

    container.kill()

    job.save_meta()

    if any(err in job.meta["stderr"] for err in ["is too big", "out of space?"]):
        report_error(job, "Selected packages exceed device storage")

    if returncode:
        report_error(job, "Error while building firmware. See stdout/stderr")

    json_file = bin_dir / "profiles.json"

    if not json_file.is_file():
        report_error(job, "No JSON file found")

    json_content = json.loads(json_file.read_text())

    # Check if profile is in JSON file
    if build_request.profile not in json_content["profiles"]:
        report_error(job, "Profile not found in JSON file")

    # get list of installable images to sign (i.e. don't sign kernel)
    images = list(
        map(
            lambda i: i["name"],
            filter(
                lambda i: i["type"]
                in ["sysupgrade", "factory", "combined", "combined-efi", "sdcard"],
                json_content["profiles"][build_request.profile]["images"],
            ),
        )
    )

    log.info(f"Signing images: {images}")

    # job.meta["imagebuilder_status"] = "signing_images"
    job.save_meta()

    build_key = getenv("BUILD_KEY") or str(Path.cwd() / "key-build")

    if Path(build_key).is_file():
        log.info(f"Signing images with key {build_key}")
        # A second, short-lived container signs the produced images with
        # usign/ucert and embeds the signature via fwtool.
        container = podman.containers.create(
            image,
            mounts=[
                {
                    "type": "bind",
                    "source": build_key,
                    "target": "/builder/key-build",
                    "read_only": True,
                },
                {
                    "type": "bind",
                    "source": build_key + ".ucert",
                    "target": "/builder/key-build.ucert",
                    "read_only": True,
                },
                {
                    "type": "bind",
                    "source": str(bin_dir),
                    "target": request_hash,
                    "read_only": False,
                },
            ],
            user="root",  # running as root to have write access to the mounted volume
            working_dir=request_hash,
            environment={
                "IMAGES_TO_SIGN": " ".join(images),
                "PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/builder/staging_dir/host/bin",
            },
            auto_remove=True,
        )
        returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
            container,
            [
                "bash",
                "-c",
                (
                    "env;"
                    "for IMAGE in $IMAGES_TO_SIGN; do "
                    "touch ${IMAGE}.test;"
                    'fwtool -t -s /dev/null "$IMAGE" && echo "sign entfern";'
                    'cp "/builder/key-build.ucert" "$IMAGE.ucert" && echo "moved";'
                    'usign -S -m "$IMAGE" -s "/builder/key-build" -x "$IMAGE.sig" && echo "usign";'
                    'ucert -A -c "$IMAGE.ucert" -x "$IMAGE.sig" && echo "ucert";'
                    'fwtool -S "$IMAGE.ucert" "$IMAGE" && echo "fwtool";'
                    "done"
                ),
            ],
        )
        container.stop()
        job.save_meta()
    else:
        log.warning("No build key found, skipping signing")

    # Flatten the profile data into the response and attach build metadata.
    json_content.update({"manifest": manifest})
    json_content.update(json_content["profiles"][build_request.profile])
    json_content["id"] = build_request.profile
    json_content["bin_dir"] = request_hash
    json_content["build_cmd_packages"] = build_cmd_packages
    json_content.pop("profiles")
    json_content["build_at"] = datetime.datetime.fromtimestamp(
        int(json_content.get("source_date_epoch", 0))
    ).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
    json_content["detail"] = "done"

    log.debug("JSON content %s", json_content)

    add_timestamp(
        f"stats:builds:{build_request.version}:{build_request.target}:{build_request.profile}",
        {
            "stats": "builds",
            "version": build_request.version,
            "target": build_request.target,
            "profile": build_request.profile,
        },
    )

    # Calculate build duration and log it
    build_duration: float = round(perf_counter() - build_start)
    add_timestamp(
        f"stats:time:{build_request.version}:{build_request.target}:{build_request.profile}",
        {
            "stats": "time",
            "version": build_request.version,
            "target": build_request.target,
            "profile": build_request.profile,
        },
        build_duration,
    )

    job.meta["imagebuilder_status"] = "done"
    job.save_meta()

    return json_content
def build(build_request: BuildRequest, job=None):
    """Run _build() and record a success or failure event for statistics.

    Args:
        build_request (BuildRequest): Properties of the requested image.
        job: Optional RQ job; forwarded to _build().

    Returns:
        dict: The profiles.json-derived build result from _build().
    """
    try:
        outcome = _build(build_request, job)
    except Exception:
        # Count every build error, including internal server errors,
        # then let the exception propagate to the worker.
        add_build_event("failures")
        raise
    add_build_event("successes")
    return outcome
32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. 
The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 
97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 
128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. (This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. 
However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. 
Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. 
Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. 
For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 
279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 292 | 293 | 294 | Copyright (C) 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 305 | 306 | You should have received a copy of the GNU General Public License along 307 | with this program; if not, write to the Free Software Foundation, Inc., 308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 309 | 310 | Also add information on how to contact you by electronic and paper mail. 311 | 312 | If the program is interactive, make it output a short notice like this 313 | when it starts in an interactive mode: 314 | 315 | Gnomovision version 69, Copyright (C) year name of author 316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 317 | This is free software, and you are welcome to redistribute it 318 | under certain conditions; type `show c' for details. 
319 | 320 | The hypothetical commands `show w' and `show c' should show the appropriate 321 | parts of the General Public License. Of course, the commands you use may 322 | be called something other than `show w' and `show c'; they could even be 323 | mouse-clicks or menu items--whatever suits your program. 324 | 325 | You should also get your employer (if you work as a programmer) or your 326 | school, if any, to sign a "copyright disclaimer" for the program, if 327 | necessary. Here is a sample; alter the names: 328 | 329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 331 | 332 | , 1 April 1989 333 | Ty Coon, President of Vice 334 | 335 | This General Public License does not permit incorporating your program into 336 | proprietary programs. If your program is a subroutine library, you may 337 | consider it more useful to permit linking proprietary applications with the 338 | library. If this is what you want to do, use the GNU Lesser General 339 | Public License instead of this License. 
import base64
import email
import hashlib
import json
import logging
import struct
from io import BytesIO
from os import getgid, getuid
from pathlib import Path
from re import match
from tarfile import TarFile
from typing import Optional

import hishel
import nacl.signing
from fastapi import FastAPI
from httpx import Response
from podman import PodmanClient
from podman.domain.containers import Container
from rq import Queue
from rq.job import Job

import redis
from asu.build_request import BuildRequest
from asu.config import settings

log: logging.Logger = logging.getLogger("rq.worker")
log.propagate = False  # Suppress duplicate log messages.


def get_redis_client(unicode: bool = True) -> redis.client.Redis:
    """Return a Redis client for the configured ``settings.redis_url``.

    Args:
        unicode (bool): When True, responses are decoded to ``str``;
            binary consumers (RQ queue, hishel cache) pass False.

    Returns:
        redis.client.Redis: connected client
    """
    return redis.from_url(settings.redis_url, decode_responses=unicode)


def get_redis_ts():
    """Return the RedisTimeSeries interface used for server statistics."""
    return get_redis_client().ts()


def client_get(url: str, ttl: int = 3600) -> Response:
    """HTTP GET ``url`` through a Redis-backed hishel cache.

    Args:
        url (str): URL to fetch
        ttl (int): cache entry lifetime in seconds

    Returns:
        Response: httpx response; ``response.extensions["from_cache"]``
            tells callers whether the cached copy was served.
    """
    return hishel.CacheClient(
        storage=hishel.RedisStorage(client=get_redis_client(False), ttl=ttl),
        controller=hishel.Controller(always_revalidate=True, allow_heuristics=True),
    ).get(url)


def add_timestamp(
    key: str, labels: Optional[dict[str, str]] = None, value: int = 1
) -> None:
    """Record a time-series sample, if server statistics are enabled.

    Args:
        key (str): time-series key
        labels (dict): labels attached to the series; ``None`` (default)
            means no labels. A ``None`` sentinel replaces the original
            mutable ``{}`` default argument.
        value (int): sample value; duplicates at one timestamp are summed
    """
    if not settings.server_stats:
        return
    labels = labels or {}
    log.debug(f"Adding timestamp to {key}: {labels}")
    get_redis_ts().add(
        key,
        value=value,
        timestamp="*",
        labels=labels,
        duplicate_policy="sum",
    )


def add_build_event(event: str) -> None:
    """
    Logs summary statistics for build events:

    - requests - total number of calls to /build API, logged for all build
      requests, irrespective of validity, success or failure
    - cache-hits - count of build requests satisfied by already-existing builds
    - cache-misses - count of build requests sent to builder
    - successes - count of builder runs with successful completion
    - failures - count of builder runs that failed

    Note that for validation, you can check that:
    - cache-misses = successes + failures
    - requests = cache-hits + cache-misses

    The summary stats key prefix is 'stats:build:*'.
    """
    assert event in {"requests", "cache-hits", "cache-misses", "successes", "failures"}

    key: str = f"stats:build:{event}"
    add_timestamp(key, {"stats": "summary"})


def get_queue() -> Queue:
    """Return the current queue

    Returns:
        Queue: The current RQ work queue
    """
    return Queue(connection=get_redis_client(False), is_async=settings.async_queue)


def get_branch(version_or_branch: str) -> dict[str, str]:
    """Map a version string (or branch name) to its branch settings.

    Args:
        version_or_branch (str): e.g. "SNAPSHOT", "21.02-SNAPSHOT",
            "21.02.0-rc1" or "19.07.7"

    Returns:
        dict: the branch's settings plus a "name" key; only "name" is
            present when the branch is unknown to ``settings.branches``.
    """
    if version_or_branch not in settings.branches:
        if version_or_branch.endswith("-SNAPSHOT"):
            # e.g. 21.02-snapshot -> branch "21.02"
            branch_name = version_or_branch.rsplit("-", maxsplit=1)[0]
        else:
            # e.g. snapshot, 21.02.0-rc1 or 19.07.7 -> strip last ".N"
            branch_name = version_or_branch.rsplit(".", maxsplit=1)[0]
    else:
        branch_name = version_or_branch

    return {**settings.branches.get(branch_name, {}), "name": branch_name}


def get_str_hash(string: str) -> str:
    """Return sha256sum of a string

    Args:
        string (str): input string; ``None`` is treated as ""

    Returns:
        str: hex digest of the string
    """
    return hashlib.sha256(bytes(string or "", "utf-8")).hexdigest()


def get_file_hash(path: str) -> str:
    """Return sha256sum of given path

    Args:
        path (str): path to file

    Returns:
        str: hash of file
    """
    BLOCK_SIZE: int = 65536

    h = hashlib.sha256()
    with open(path, "rb") as f:
        fb: bytes = f.read(BLOCK_SIZE)
        while len(fb) > 0:
            h.update(fb)
            fb = f.read(BLOCK_SIZE)

    return h.hexdigest()


def get_manifest_hash(manifest: dict[str, str]) -> str:
    """Return sha256sum of a package manifest

    The manifest is serialized with sorted keys so the hash is
    reproducible regardless of insertion order.

    Args:
        manifest (dict): mapping of package name to version

    Returns:
        str: hash of the manifest
    """
    return get_str_hash(json.dumps(manifest, sort_keys=True))


def get_request_hash(build_request: BuildRequest) -> str:
    """Return sha256sum of an image request

    Creates a reproducible hash of the request by sorting the arguments

    Args:
        build_request (BuildRequest): the image request

    Returns:
        str: hash of the request
    """
    return get_str_hash(
        "".join(
            [
                build_request.distro,
                build_request.version,
                build_request.version_code,
                build_request.target,
                build_request.profile,
                # Pinned versions take precedence over the plain list.
                get_packages_hash(
                    build_request.packages_versions.keys() or build_request.packages
                ),
                get_manifest_hash(build_request.packages_versions),
                str(build_request.diff_packages),
                "",  # build_request.filesystem (placeholder, kept for hash stability)
                get_str_hash(build_request.defaults),
                str(build_request.rootfs_size_mb),
                str(build_request.repository_keys),
                str(build_request.repositories),
            ]
        ),
    )


def get_packages_hash(packages: list[str]) -> str:
    """Return sha256sum of package list

    Duplicate packages are automatically removed and the list is sorted to be
    reproducible

    Args:
        packages (list): list of packages

    Returns:
        str: hash of `packages`
    """
    return get_str_hash(
        " ".join(sorted({p.removeprefix("+") for p in packages}))
    )


def fingerprint_pubkey_usign(pubkey: str) -> str:
    """Return fingerprint of signify/usign public key

    Args:
        pubkey (str): signify/usign public key

    Returns:
        str: string containing the fingerprint
    """
    # Bytes 2..9 of the decoded key are the 8-byte keynum.
    keynum = base64.b64decode(pubkey.splitlines()[-1])[2:10]
    return "".join(format(x, "02x") for x in keynum)


def verify_usign(sig_file: Path, msg_file: Path, pub_key: str) -> bool:
    """Verify a signify/usign signature

    This implementation uses pynacl

    Args:
        sig_file (Path): signature file
        msg_file (Path): message file to be verified
        pub_key (str): public key to use for verification

    Returns:
        bool: Successful verification

    Todo:
        Currently ignores keynum and pkalg

    """
    _pkalg, _keynum, pubkey = struct.unpack("!2s8s32s", base64.b64decode(pub_key))
    sig = base64.b64decode(sig_file.read_text().splitlines()[-1])

    _pkalg, _keynum, sig = struct.unpack("!2s8s64s", sig)

    verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.RawEncoder)
    try:
        verify_key.verify(msg_file.read_bytes(), sig)
        return True
    except nacl.exceptions.CryptoError:
        return False


def get_container_version_tag(input_version: str) -> str:
    """Translate an OpenWrt version string to an imagebuilder container tag.

    Releases (e.g. "24.10.0", "24.10.0-rc1") become "v<version>"; the
    "SNAPSHOT" branch maps to "master"; other branch snapshots map to
    "openwrt-<branch>".
    """
    if match(r"^\d+\.\d+\.\d+(-rc\d+)?$", input_version):
        log.debug("Version is a release version")
        version: str = "v" + input_version
    else:
        log.debug(f"Version {input_version} is a branch")
        if input_version == "SNAPSHOT":
            version = "master"
        else:
            version = "openwrt-" + input_version.removesuffix("-SNAPSHOT")

    return version


def get_podman() -> PodmanClient:
    """Return a Podman client for the configured socket and identity."""
    return PodmanClient(
        base_url=f"unix://{settings.container_socket_path}",
        identity=settings.container_identity,
    )


def diff_packages(
    requested_packages: list[str], default_packages: set[str]
) -> list[str]:
    """Return a list of packages to install and remove

    Args:
        requested_packages (list): List of requested packages in user-specified order
        default_packages (set): Set of default packages

    Returns:
        list: List of packages to install and remove"""
    remove_packages = default_packages - set(requested_packages)
    # "-pkg" marks a removal; collapse "--" in case a default already
    # starts with "-".
    return (
        sorted(set(map(lambda p: f"-{p}".replace("--", "-"), remove_packages)))
        + requested_packages
    )


def run_cmd(
    container: Container,
    command: list[str],
    copy: Optional[list[str]] = None,
    environment: Optional[dict[str, str]] = None,
) -> tuple[int, str, str]:
    """Run ``command`` inside ``container`` as user "buildbot".

    Args:
        container (Container): Podman container to execute in
        command (list): command and arguments
        copy (list): optional [container_source, host_destination] pair;
            when given, the source path is copied out after the command.
            (``None`` sentinel replaces the original mutable ``[]`` default.)
        environment (dict): extra environment variables for the command.
            The original accepted this parameter but never forwarded it
            to ``exec_run``; it is now passed through.

    Returns:
        tuple: (returncode, stdout, stderr)
    """
    returncode, output = container.exec_run(
        command, demux=True, user="buildbot", environment=environment
    )

    stdout: str = output[0].decode("utf-8") if output[0] else ""
    stderr: str = output[1].decode("utf-8") if output[1] else ""

    log.debug(f"returncode: {returncode}")
    log.debug(f"stdout: {stdout}")
    log.debug(f"stderr: {stderr}")

    if copy:
        log.debug(f"Copying {copy[0]} from container to {copy[1]}")
        container_tar, _ = container.get_archive(copy[0])

        with TarFile(fileobj=BytesIO(b"".join(container_tar))) as tar_file:
            uid: int = getuid()
            gid: int = getgid()
            for member in tar_file:
                # Fix the owner of the copied files, change to "us".
                member.uid = uid
                member.gid = gid
                member.mode = 0o755 if member.isdir() else 0o644
            tar_file.extractall(copy[1])

    return returncode, stdout, stderr


def report_error(job: Job, msg: str) -> None:
    """Record ``msg`` on the RQ job's metadata and abort via RuntimeError.

    Raises:
        RuntimeError: always, so the worker marks the job as failed.
    """
    log.warning(f"Error: {msg}")
    job.meta["detail"] = f"Error: {msg}"
    job.meta["imagebuilder_status"] = "failed"
    job.save_meta()
    raise RuntimeError(msg)


def parse_manifest(manifest_content: str) -> dict[str, str]:
    """Parse a manifest file and return a dictionary

    Args:
        manifest_content (str): Manifest file content

    Returns:
        dict: Dictionary of packages and versions
    """
    if " - " in manifest_content:
        separator = " - "  # OPKG format
    else:
        separator = " "  # APK format

    # maxsplit=1 keeps the parse robust when a version string itself
    # contains the separator (the original unbounded split would make
    # dict() raise ValueError on such lines).
    return dict(
        line.split(separator, 1) for line in manifest_content.splitlines()
    )


def check_manifest(
    manifest: dict[str, str], packages_versions: dict[str, str]
) -> Optional[str]:
    """Validate requested package versions against a built manifest.

    Args:
        manifest (dict): packages and versions parsed from the manifest
        packages_versions (dict): requested packages and versions

    Returns:
        str: Error message or None when all requests are satisfied
    """
    for package, version in packages_versions.items():
        if package not in manifest:
            return f"Impossible package selection: {package} not in manifest"
        if version != manifest[package]:
            return (
                f"Impossible package selection: {package} version not as requested: "
                f"{version} vs. {manifest[package]}"
            )
    return None


def parse_packages_file(url: str) -> dict[str, str]:
    """Any index.json without a "version" tag is assumed to be v1, containing
    ABI-versioned package names, which may cause issues for those packages.
    If index.json contains "version: 2", then the package names are ABI-free,
    and the contents may be returned as-is.

    So, first we try to use the modern v2 index.json. If the json is not v2,
    then fall back to trying opkg-based Packages. If that fails on a 404,
    we'll just return the v1 index.json."""

    res: Response = client_get(f"{url}/index.json")
    # Named "index" rather than "json" to avoid shadowing the json module.
    index = res.json() if res.status_code == 200 else {}
    if index.get("version", 1) >= 2:
        del index["version"]
        return index

    res = client_get(f"{url}/Packages")  # For pre-v2, opkg-based releases
    if res.status_code != 200:
        return index  # Bail out - probably with v1 index.json

    packages: dict[str, str] = {}
    architecture: str = ""

    # Packages files are RFC-822-style "Key: value" stanzas separated by
    # blank lines, so the stdlib email parser handles them.
    parser: email.parser.Parser = email.parser.Parser()
    chunks: list[str] = res.text.strip().split("\n\n")
    for chunk in chunks:
        package: dict[str, str] = parser.parsestr(chunk, headersonly=True)
        if not architecture:
            package_arch = package["Architecture"]
            if package_arch != "all":
                architecture = package_arch

        package_name: str = package["Package"]
        # Strip the ABI suffix (e.g. "libfoo1" -> "libfoo").
        if package_abi := package.get("ABIVersion"):
            package_name = package_name.removesuffix(package_abi)

        packages[package_name] = package["Version"]

    return {"architecture": architecture, "packages": packages}


def parse_feeds_conf(url: str) -> list[str]:
    """Return the feed names from a target's feeds.conf, or [] on failure."""
    res: Response = client_get(f"{url}/feeds.conf")
    return (
        [line.split()[1] for line in res.text.splitlines()]
        if res.status_code == 200
        else []
    )


def is_snapshot_build(version: str) -> bool:
    """For imagebuilder containers using 'setup.sh' instead of fully populated."""
    return version.lower().endswith("snapshot")


def is_post_kmod_split_build(path: str) -> bool:
    """Root cause of what's going on here can be found at
    https://github.com/openwrt/buildbot/commit/a75ce026

    The short version is that kmods are no longer in the packages/index.json
    for the versions listed below, so we need to find 'linux_kernel' in the
    profiles.json and do some extra work.

    Versions for which kmods are in 'kmods//index.json' and not
    in 'packages/index.json':

    - SNAPSHOT
    - all of 24.10 and later
    - 23.05 builds for 23.05-SNAPSHOT, and 23.05.6 and later
    """

    if path.startswith("snapshots"):
        return True

    version: str = path.split("/")[1]
    major_version: int = int(version.split(".")[0]) if "." in version else 0

    if major_version >= 24:
        return True
    if major_version == 23:
        minor_version = version.split(".")[-1]
        if minor_version == "05-SNAPSHOT":
            return True
        # Compare numerically: the original lexicographic '>= "6"' check
        # misordered two-digit point releases such as 23.05.10.
        if minor_version.isdigit() and int(minor_version) >= 6:
            return True

    return False


def parse_kernel_version(url: str) -> str:
    """Download a target's profiles.json and return the kernel version string."""
    res: Response = client_get(url)
    if res.status_code != 200:
        return ""

    profiles: dict = res.json()
    kernel_info: dict = profiles.get("linux_kernel")
    if kernel_info:
        kernel_version: str = kernel_info["version"]
        kernel_release: str = kernel_info["release"]
        kernel_vermagic: str = kernel_info["vermagic"]
        return f"{kernel_version}-{kernel_release}-{kernel_vermagic}"
    return ""


def reload_versions(app: FastAPI) -> bool:
    """Set the values of both `app.versions` and `app.latest` using the
    upstream `.versions.json` file.

    We check for updates to the versions by examining the response's
    `from_cache` attribute. This is safe because `reload_versions` is the
    only function that downloads that file, so no race conditions can exist.

    Returns `True` if data has changed, `False` when cache was used.
    """

    def in_supported_branch(version: str) -> bool:
        # A version is supported when its prefix matches an enabled branch.
        for branch_name, branch in settings.branches.items():
            if branch["enabled"] and version.startswith(branch_name):
                return True
        return False

    def add_versions(version_list: list, *versions: str) -> None:
        # Append supported, non-empty versions, preserving order and
        # skipping duplicates.
        for version in versions:
            if not version:
                continue
            if version in version_list:
                continue
            if in_supported_branch(version):
                version_list.append(version)

    response = client_get(settings.upstream_url + "/.versions.json")
    if response.status_code != 200:
        log.info(f".versions.json: failed to download {response.status_code}")
        return False

    if response.extensions["from_cache"] and app.versions:
        log.debug(".versions.json: cache hit")
        return False

    log.debug(".versions.json: cache miss, reloading")

    versions_upstream = response.json()
    upcoming_version = versions_upstream["upcoming_version"]

    app.latest = []
    add_versions(
        app.latest,
        upcoming_version,
        versions_upstream["stable_version"],
        versions_upstream["oldstable_version"],
    )

    app.versions = []
    add_versions(
        app.versions,
        upcoming_version,
        *versions_upstream["versions_list"],
        "SNAPSHOT",
        *[
            f"{branch_name}-SNAPSHOT"
            for branch_name in settings.branches
            if branch_name != "SNAPSHOT"
        ],
    )

    # Create a key that puts -rcN between -SNAPSHOT and releases.
    app.versions.sort(reverse=True, key=lambda v: v.replace(".0-rc", "-rc"))

    return True


def reload_targets(app: FastAPI, version: str) -> bool:
    """Set a specific target value in `app.targets` using data from the
    upstream `.targets.json` file.

    No race conditions occur due to `reload_targets` being the sole user of
    the `.targets.json` file.

    Returns `True` if data has changed, `False` when cache was used.
    """

    branch_data = get_branch(version)
    version_path = branch_data["path"].format(version=version)
    response = client_get(settings.upstream_url + f"/{version_path}/.targets.json")

    # .get() instead of [] so a first-ever call for this version cannot
    # raise KeyError before the entry has been populated.
    if response.extensions["from_cache"] and app.targets.get(version):
        return False

    app.targets[version] = response.json() if response.status_code == 200 else {}

    return True


def reload_profiles(app: FastAPI, version: str, target: str) -> bool:
    """Set the `app.profiles` for a specific version and target derived from
    the data in the corresponding `profiles.json` file.

    This function is subject to race conditions as various other functions
    also use the `profiles.json` files for other metadata, hence we do not
    check for recaching and always recompute the profiles when requested.

    Returns `True` indicating that we have reloaded the profile.
    """

    branch_data = get_branch(version)
    version_path = branch_data["path"].format(version=version)
    response = client_get(
        settings.upstream_url + f"/{version_path}/targets/{target}/profiles.json"
    )

    # Map every supported device name (commas normalized to underscores)
    # and the profile's own name back to the profile id.
    app.profiles[version][target] = {
        name.replace(",", "_"): profile
        for profile, data in response.json()["profiles"].items()
        for name in data.get("supported_devices", []) + [profile]
    }

    return True