└── .github
    ├── dockerfiles_feeds
    │   ├── Dockerfile
    │   └── entrypoint.sh
    ├── dockerfiles
    │   ├── Dockerfile.toolchain
    │   └── Dockerfile.tools
    ├── workflows
    │   ├── formal-this-repo.yaml
    │   ├── scripts
    │   │   ├── ci_helpers.sh
    │   │   └── show_build_failures.sh
    │   ├── reusable_determine_changed_files.yml
    │   ├── reusable_upload-file-s3.yml
    │   ├── reusable_determine_changed_packages.yml
    │   ├── label-target.yml
    │   ├── coverity.yml
    │   ├── toolchain.yml
    │   ├── reusable_build-tools.yml
    │   ├── packages.yml
    │   ├── formal.yml
    │   ├── tools.yml
    │   ├── reusable_check-tools.yml
    │   ├── label-kernel.yml
    │   ├── reusable_check-kernel-patches.yml
    │   ├── push-containers.yml
    │   ├── multi-arch-test-build.yml
    │   ├── kernel.yml
    │   ├── issue-labeller.yml
    │   └── reusable_build.yml
    ├── dependabot.yml
    └── scripts
        ├── show_build_failures.sh
        ├── ci_helpers.sh
        ├── get_changed_packages.pl
        ├── process_formalities.js
        └── check_formalities.sh
/.github/dockerfiles_feeds/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG ARCH=x86-64
2 | FROM openwrt/rootfs:$ARCH
3 |
4 | ADD entrypoint.sh /entrypoint.sh
5 |
6 | CMD ["/entrypoint.sh"]
7 |
--------------------------------------------------------------------------------
/.github/dockerfiles/Dockerfile.toolchain:
--------------------------------------------------------------------------------
1 | ARG OWNER_LC
2 | ARG CONTAINER_TAG
3 |
4 | FROM ghcr.io/$OWNER_LC/tools:$CONTAINER_TAG
5 |
6 | ARG TOOLCHAIN_NAME
7 |
8 | ADD $TOOLCHAIN_NAME /external-toolchain/
9 |
--------------------------------------------------------------------------------
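Only push-containers.yml builds this toolchain image, passing OWNER_LC, CONTAINER_TAG and TOOLCHAIN_NAME as build args. A rough local equivalent, run from a directory containing the external toolchain archive, might look like this (the TOOLCHAIN_NAME value is hypothetical):

    # sketch only: TOOLCHAIN_NAME is a made-up external-toolchain archive name;
    # the workflow derives the real one from the build job's artifact
    docker build -f .github/dockerfiles/Dockerfile.toolchain \
      --build-arg OWNER_LC=openwrt \
      --build-arg CONTAINER_TAG=latest \
      --build-arg TOOLCHAIN_NAME=openwrt-toolchain-x86-64_gcc-13.3.0_musl.Linux-x86_64 \
      -t ghcr.io/openwrt/toolchain:x86-64-latest .
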
/.github/dockerfiles/Dockerfile.tools:
--------------------------------------------------------------------------------
1 | FROM ghcr.io/openwrt/buildbot/buildworker-v3.11.8:v23
2 |
3 | COPY --chown=buildbot staging_dir/host /prebuilt_tools/staging_dir/host
4 | COPY --chown=buildbot build_dir/host /prebuilt_tools/build_dir/host
5 |
--------------------------------------------------------------------------------
/.github/workflows/formal-this-repo.yaml:
--------------------------------------------------------------------------------
1 | name: Test Formalities
2 |
3 | on:
4 | pull_request_target:
5 |
6 | permissions:
7 | contents: read
8 | pull-requests: write
9 |
10 | jobs:
11 | build:
12 | name: Test Formalities
13 | uses: openwrt/actions-shared-workflows/.github/workflows/formal.yml@main
14 | with:
15 | post_comment: true
16 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # Set update schedule for GitHub Actions
2 |
3 | version: 2
4 | updates:
5 |
6 | - package-ecosystem: "github-actions"
7 | directory: "/"
8 | schedule:
9 | # Check for updates to GitHub Actions every week
10 | interval: "weekly"
11 | # Prefix all commit messages with "CI"
12 | commit-message:
13 | prefix: "CI"
14 |
--------------------------------------------------------------------------------
/.github/workflows/scripts/ci_helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | color_out() {
4 | printf "\e[0;$1m%s\e[0;0m\n" "$2"
5 | }
6 |
7 | success() {
8 | color_out 32 "$1"
9 | }
10 |
11 | info() {
12 | color_out 36 "$1"
13 | }
14 |
15 | err() {
16 | color_out 31 "$1"
17 | }
18 |
19 | warn() {
20 | color_out 33 "$1"
21 | }
22 |
23 | err_die() {
24 | err "$1"
25 | exit 1
26 | }
27 |
--------------------------------------------------------------------------------
/.github/scripts/show_build_failures.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | original_exit_code="${ret:-1}"
4 | log_dir_path="${1:-logs}"
5 | context="${2:-10}"
6 |
7 | show_make_build_errors() {
8 | grep -slr 'make\[[[:digit:]]\+\].*Error [[:digit:]]\+$' "$log_dir_path" | while IFS= read -r log_file; do
9 | printf "====== Make errors from %s ======\n" "$log_file";
10 | grep -r -C"$context" 'make\[[[:digit:]]\+\].*Error [[:digit:]]\+$' "$log_file" ;
11 | done
12 | }
13 |
14 | show_make_build_errors
15 | exit "$original_exit_code"
16 |
--------------------------------------------------------------------------------
/.github/workflows/scripts/show_build_failures.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | original_exit_code="${ret:-1}"
4 | log_dir_path="${1:-logs}"
5 | context="${2:-20}"
6 |
7 | show_make_build_errors() {
8 | grep -slr 'make\[[[:digit:]]\+\].*Error [[:digit:]]\+$' "$log_dir_path" | while IFS= read -r log_file; do
9 | printf "====== Make errors from %s ======\n" "$log_file";
10 | grep -r -C"$context" 'make\[[[:digit:]]\+\].*Error [[:digit:]]\+$' "$log_file" ;
11 | done
12 | }
13 |
14 | show_make_build_errors
15 | exit "$original_exit_code"
16 |
--------------------------------------------------------------------------------
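Both copies of this helper are meant to be chained after a failing make: the caller passes make's exit code in the ret environment variable, the script greps the saved build logs for make error lines with some context, then re-exits with the original code so the step still fails. A minimal sketch of that invocation, mirroring the build steps in the workflows below:

    # run a logged build; on failure, print the make errors with 20 lines of context from ./logs
    make tools/install -j"$(nproc)" BUILD_LOG=1 \
      || ret=$? .github/workflows/scripts/show_build_failures.sh logs 20
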
/.github/workflows/reusable_determine_changed_files.yml:
--------------------------------------------------------------------------------
1 | name: Determine Changed Files
2 |
3 | on:
4 | workflow_call:
5 | outputs:
6 | all_changed_files:
7 | value: ${{ jobs.determine_changed_files.outputs.all_changed_files }}
8 |
9 | jobs:
10 | determine_changed_files:
11 | name: Determine Changed Files
12 | runs-on: ubuntu-slim
13 | outputs:
14 | all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
15 |
16 | steps:
17 | - name: Checkout
18 | uses: actions/checkout@v6
19 | with:
20 | fetch-depth: 2
21 |
22 | - name: Get changed files
23 | id: changed-files
24 | uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
25 |
--------------------------------------------------------------------------------
/.github/scripts/ci_helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | color_out() {
4 | printf "\e[0;$1m%s%s\e[0m\n" "${PKG_NAME:+$PKG_NAME: }" "$2"
5 | }
6 |
7 | success() {
8 | color_out 32 "$1"
9 | }
10 |
11 | info() {
12 | color_out 36 "$1"
13 | }
14 |
15 | err() {
16 | color_out 31 "$1"
17 | }
18 |
19 | warn() {
20 | color_out 33 "$1"
21 | }
22 |
23 | err_die() {
24 | err "$1"
25 | exit 1
26 | }
27 |
28 | # Prints the string and colors the part after the given length in red
29 | split_fail() {
30 | printf "%s\e[1;31m%s\e[0m\n" "${2:0:$1}" "${2:$1}"
31 | }
32 |
33 | # Prints `[$2] $3` with status colored according to `$1`
34 | status() {
35 | printf "%s[\e[1;$1m%s\e[0m] %s\n" "${PKG_NAME:+$PKG_NAME: }" "$2" "$3"
36 | }
37 |
38 | # Prints `[pass] $1` with green pass (or blue on GitHub)
39 | status_pass() {
40 | status 32 pass "$1"
41 | }
42 |
43 | # Prints `[warn] $1` with yellow warn
44 | status_warn() {
45 | status 33 warn "$1"
46 | }
47 |
48 | # Prints `[fail] $1` with red fail
49 | status_fail() {
50 | status 31 fail "$1"
51 | }
52 |
--------------------------------------------------------------------------------
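A short usage sketch of these helpers as a check script might source them (the package name and messages are hypothetical):

    . .github/scripts/ci_helpers.sh
    PKG_NAME=dnsmasq   # optional: prefixes every line with "dnsmasq: "
    status_pass "SPDX license tag present"
    status_warn "commit description is rather short"
    split_fail 50 "A commit subject line that runs well past the fifty character mark"
    err_die "formality checks failed"
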
/.github/workflows/reusable_upload-file-s3.yml:
--------------------------------------------------------------------------------
1 | name: Upload File to S3
2 |
3 | on:
4 | workflow_call:
5 | secrets:
6 | s3_endpoint:
7 | s3_bucket:
8 | s3_access_key:
9 | s3_secret_key:
10 | inputs:
11 | download_id:
12 | required: true
13 | type: string
14 | filename:
15 | required: true
16 | type: string
17 |
18 | jobs:
19 | upload-file-in-s3:
20 | name: Upload file in S3
21 | runs-on: ubuntu-latest
22 |
23 | steps:
24 | - name: Install minio
25 | run: |
26 | curl https://dl.min.io/client/mc/release/linux-amd64/mc \
27 | --create-dirs \
28 | -o $GITHUB_WORKSPACE/minio-binaries/mc
29 |
30 | chmod +x $GITHUB_WORKSPACE/minio-binaries/mc
31 | echo $GITHUB_WORKSPACE/minio-binaries/ >> $GITHUB_PATH
32 |
33 | - name: Setup minio
34 | run: mc alias set s3 ${{ secrets.s3_endpoint }} ${{ secrets.s3_access_key }} ${{ secrets.s3_secret_key }}
35 |
36 | - name: Download file
37 | uses: actions/download-artifact@v6
38 | with:
39 | name: ${{ inputs.download_id }}
40 |
41 | - name: Upload file to s3
42 | run: mc cp ${{ inputs.filename }} s3/${{ secrets.s3_bucket }}/
43 |
--------------------------------------------------------------------------------
/.github/workflows/reusable_determine_changed_packages.yml:
--------------------------------------------------------------------------------
1 | name: Determine Changed Files
2 |
3 | on:
4 | workflow_call:
5 | outputs:
6 | changed_packages:
7 | value: ${{ jobs.determine_changed_packages.outputs.changed_packages }}
8 | inputs:
9 | all_changed_files:
10 | required: true
11 | type: string
12 |
13 | jobs:
14 | determine_changed_packages:
15 | name: Determine Changed Packages
16 | runs-on: ubuntu-slim
17 | outputs:
18 | changed_packages: ${{ steps.get_packages.outputs.changed_packages }}
19 |
20 | steps:
21 | - name: Checkout
22 | uses: actions/checkout@v6
23 | with:
24 | sparse-checkout: |
25 | package
26 |
27 | - name: Checkout shared workflows tools
28 | uses: actions/checkout@v6
29 | with:
30 | repository: openwrt/actions-shared-workflows
31 | path: shared-workflows
32 | sparse-checkout: .github/scripts/get_changed_packages.pl
33 |
34 | - name: Determine changed packages
35 | id: get_packages
36 | run: |
37 | CHANGED_PACKAGES="$(./shared-workflows/.github/scripts/get_changed_packages.pl "${{ inputs.all_changed_files }}")"
38 |
39 | echo "changed_packages="$CHANGED_PACKAGES"" >> $GITHUB_OUTPUT
40 |
--------------------------------------------------------------------------------
/.github/workflows/label-target.yml:
--------------------------------------------------------------------------------
1 | # ci:target:x86:64 is going to trigger CI target check jobs for x86/64 target
2 |
3 | name: Build check target specified in labels
4 | on:
5 | workflow_call:
6 |
7 | jobs:
8 | set_target:
9 | if: startsWith(github.event.label.name, 'ci:target:')
10 | name: Set target
11 | runs-on: ubuntu-slim
12 | outputs:
13 | target: ${{ steps.set_target.outputs.target }}
14 | subtarget: ${{ steps.set_target.outputs.subtarget }}
15 |
16 | steps:
17 | - name: Set target
18 | id: set_target
19 | env:
20 | CI_EVENT_LABEL_NAME: ${{ github.event.label.name }}
21 | run: |
22 | echo "$CI_EVENT_LABEL_NAME" | sed -n 's/.*:\(.*\):\(.*\)$/target=\1/p' | tee --append $GITHUB_OUTPUT
23 | echo "$CI_EVENT_LABEL_NAME" | sed -n 's/.*:\(.*\):\(.*\)$/subtarget=\2/p' | tee --append $GITHUB_OUTPUT
24 |
25 | build_target:
26 | name: Build target
27 | needs: set_target
28 | permissions:
29 | contents: read
30 | packages: read
31 | actions: write
32 | uses: ./.github/workflows/reusable_build.yml
33 | with:
34 | container_name: toolchain
35 | target: ${{ needs.set_target.outputs.target }}
36 | subtarget: ${{ needs.set_target.outputs.subtarget }}
37 | build_full: true
38 | build_all_kmods: true
39 | build_all_boards: true
40 | build_all_modules: true
41 |
--------------------------------------------------------------------------------
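For reference, these are the values the two sed expressions in set_target emit for the label format described in the comment at the top of this file:

    CI_EVENT_LABEL_NAME="ci:target:x86:64"
    echo "$CI_EVENT_LABEL_NAME" | sed -n 's/.*:\(.*\):\(.*\)$/target=\1/p'      # prints: target=x86
    echo "$CI_EVENT_LABEL_NAME" | sed -n 's/.*:\(.*\):\(.*\)$/subtarget=\2/p'   # prints: subtarget=64
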
/.github/workflows/coverity.yml:
--------------------------------------------------------------------------------
1 | name: Coverity scan build
2 |
3 | on:
4 | workflow_call:
5 | secrets:
6 | coverity_api_token:
7 |
8 | jobs:
9 | coverity_build:
10 | if: github.repository_owner == 'openwrt'
11 | name: Coverity x86/64 build
12 | secrets:
13 | coverity_api_token: ${{ secrets.coverity_api_token }}
14 | permissions:
15 | contents: read
16 | packages: read
17 | actions: write
18 | uses: ./.github/workflows/reusable_build.yml
19 | with:
20 | container_name: toolchain
21 | target: x86
22 | subtarget: 64
23 | build_full: true
24 | include_feeds: true
25 | use_ccache_cache: false
26 | coverity_compiler_template_list: >-
27 | x86_64-openwrt-linux-gcc
28 | x86_64-openwrt-linux-musl-gcc
29 | # qosify fails to build with cov-build
30 | coverity_check_packages: >-
31 | cgi-io
32 | dnsmasq
33 | dropbear
34 | firewall
35 | fstools
36 | fwtool
37 | iwinfo
38 | jsonfilter
39 | libnl-tiny
40 | libubox
41 | mtd
42 | netifd
43 | odhcp6c
44 | odhcpd
45 | opkg
46 | procd
47 | relayd
48 | rpcd
49 | swconfig
50 | ubox
51 | ubus
52 | ucert
53 | uci
54 | uclient
55 | ucode
56 | ugps
57 | uhttpd
58 | umbim
59 | umdns
60 | unetd
61 | uqmi
62 | urngd
63 | usbmode
64 | usign
65 | usteer
66 | ustp
67 | ustream-ssl
68 |
--------------------------------------------------------------------------------
/.github/workflows/toolchain.yml:
--------------------------------------------------------------------------------
1 | name: Build Toolchains
2 |
3 | on:
4 | workflow_call:
5 |
6 | jobs:
7 | determine_targets:
8 | name: Set targets
9 | runs-on: ubuntu-slim
10 | outputs:
11 | target: ${{ steps.find_targets.outputs.target }}
12 |
13 | steps:
14 | - name: Checkout
15 | uses: actions/checkout@v6
16 |
17 | - name: Set targets
18 | id: find_targets
19 | run: |
20 | export TARGETS="$(perl ./scripts/dump-target-info.pl targets 2>/dev/null \
21 | | sort -u -t '/' -k1,1 \
22 | | awk '{ print $1 }')"
23 |
24 | JSON='['
25 | FIRST=1
26 | for TARGET in $TARGETS; do
27 | TUPLE='{"target":"'"$(echo $TARGET | cut -d "/" -f 1)"'","subtarget":"'"$(echo $TARGET | cut -d "/" -f 2)"'"}'
28 | [[ $FIRST -ne 1 ]] && JSON="$JSON"','
29 | JSON="$JSON""$TUPLE"
30 | FIRST=0
31 | done
32 | JSON="$JSON"']'
33 |
34 | echo -e "\n---- targets ----\n"
35 | echo "$JSON"
36 | echo -e "\n---- targets ----\n"
37 |
38 | echo "target=$JSON" >> $GITHUB_OUTPUT
39 |
40 | build:
41 | name: Build Target Toolchain
42 | needs: determine_targets
43 | permissions:
44 | contents: read
45 | packages: read
46 | actions: write
47 | strategy:
48 | fail-fast: False
49 | matrix:
50 | include: ${{fromJson(needs.determine_targets.outputs.target)}}
51 | uses: ./.github/workflows/reusable_build.yml
52 | with:
53 | target: ${{ matrix.target }}
54 | subtarget: ${{ matrix.subtarget }}
55 | build_toolchain: true
56 |
--------------------------------------------------------------------------------
/.github/scripts/get_changed_packages.pl:
--------------------------------------------------------------------------------
1 | #! /usr/bin/perl
2 |
3 | use strict;
4 | use warnings;
5 |
6 | my $PACKAGE_DIR = "package";
7 | my $SCAN_DEPTH = 5;
8 |
9 | my @PACKAGES_PATH = ();
10 | my @PACKAGES_CHANGED = ();
11 |
12 | # Lovely shorthand from https://stackoverflow.com/questions/31724503/most-efficient-way-to-check-if-string-starts-with-needle-in-perl
13 | # A largely pointless micro-optimization that shaves maybe 10ms off the execution time
14 | sub begins_with
15 | {
16 | return substr($_[0], 0, length($_[1])) eq $_[1];
17 | }
18 |
19 | sub scan_dir
20 | {
21 | my ($dir, $depth) = @_;
22 |
23 | return if $depth == $SCAN_DEPTH;
24 |
25 | opendir(DIR,"$dir");
26 | my @files = readdir(DIR);
27 | closedir(DIR);
28 | foreach my $file (@files) {
29 | next if $file eq '.' or $file eq '..' or $file eq 'src';
30 | my $path = "$dir/$file";
31 | if (-d $path) {
32 | scan_dir("$path", $depth + 1);
33 | }
34 | 		# Search only for Makefile files and ignore the Makefile directly in package/
35 | next if not ($file eq "Makefile") or ($dir eq "package");
36 | push @PACKAGES_PATH, substr $path, 0, -length("Makefile");
37 | }
38 | }
39 |
40 | sub get_changed_packages
41 | {
42 | my ($CHANGED_FILES) = @_;
43 |
44 | 	# Traverse the whole package directory in search of Makefiles
45 | scan_dir $PACKAGE_DIR, 0;
46 |
47 | foreach my $file (split ' ', $CHANGED_FILES) {
48 | next unless begins_with $file, "package/";
49 |
50 | foreach my $package (@PACKAGES_PATH) {
51 | if (begins_with $file, $package and not grep {$_ eq $package} @PACKAGES_CHANGED) {
52 | push @PACKAGES_CHANGED, $package;
53 | }
54 | }
55 | }
56 |
57 | foreach my $package (@PACKAGES_CHANGED) {
58 | # Get the package name from package path
59 | # Example libfido2 from package/feeds/packages/libfido2
60 | my ($name) = (split '/', $package)[-1];
61 | print "$name\n";
62 | }
63 | }
64 |
65 | # Pass a list of changed files and return the list of affected packages
66 | # We manually traverse the package directory searching for Makefiles.
67 | # We follow the same logic used in scan.mk, where the max SCAN_DEPTH is 5.
68 | if (@ARGV == 1) {
69 | get_changed_packages $ARGV[0];
70 | }
71 | else {
72 | print "Usage: $0 \"changed_files\"\n";
73 | }
--------------------------------------------------------------------------------
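A usage sketch, matching how reusable_determine_changed_packages.yml calls it from the top of an openwrt checkout with this repository checked out as shared-workflows (the changed-file paths are illustrative); the script prints one affected package name per line:

    ./shared-workflows/.github/scripts/get_changed_packages.pl \
      "package/network/services/dnsmasq/Makefile package/network/services/dnsmasq/patches/100-example.patch"
    # prints: dnsmasq
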
/.github/workflows/reusable_build-tools.yml:
--------------------------------------------------------------------------------
1 | name: Build host tools
2 |
3 | on:
4 | workflow_call:
5 | inputs:
6 | generate_prebuilt_artifacts:
7 | type: boolean
8 |
9 | permissions:
10 | contents: read
11 |
12 | jobs:
13 | build:
14 | name: Build tools
15 | runs-on: ubuntu-latest
16 | container: ghcr.io/openwrt/buildbot/buildworker-v3.11.8:v23
17 |
18 | steps:
19 | - name: Checkout
20 | uses: actions/checkout@v6
21 | with:
22 | path: openwrt
23 |
24 | - name: Fix permission
25 | run: chown -R buildbot:buildbot openwrt
26 |
27 | - name: Set configs for tools container
28 | shell: su buildbot -c "sh -e {0}"
29 | working-directory: openwrt
30 | run: |
31 | touch .config
32 | echo CONFIG_DEVEL=y >> .config
33 | echo CONFIG_AUTOREMOVE=y >> .config
34 | echo CONFIG_CCACHE=y >> .config
35 | echo CONFIG_BUILD_ALL_HOST_TOOLS=y >> .config
36 |
37 | - name: Make prereq
38 | shell: su buildbot -c "sh -e {0}"
39 | working-directory: openwrt
40 | run: make defconfig
41 |
42 | - name: Build tools
43 | shell: su buildbot -c "sh -e {0}"
44 | working-directory: openwrt
45 | run: make tools/install -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
46 |
47 | - name: Upload logs
48 | if: always()
49 | uses: actions/upload-artifact@v5
50 | with:
51 | name: linux-buildbot-logs
52 | path: openwrt/logs
53 |
54 | - name: Upload config
55 | if: always()
56 | uses: actions/upload-artifact@v5
57 | with:
58 | name: linux-buildbot-config
59 | path: openwrt/.config
60 |
61 | - name: Archive prebuilt tools
62 | if: inputs.generate_prebuilt_artifacts == true
63 | shell: su buildbot -c "sh -e {0}"
64 | working-directory: openwrt
65 | run: tar -cf tools.tar staging_dir/host build_dir/host
66 |
67 | - name: Upload prebuilt tools
68 | if: inputs.generate_prebuilt_artifacts == true
69 | uses: actions/upload-artifact@v5
70 | with:
71 | name: linux-buildbot-prebuilt-tools
72 | path: openwrt/tools.tar
73 | retention-days: 1
74 |
--------------------------------------------------------------------------------
/.github/workflows/packages.yml:
--------------------------------------------------------------------------------
1 | name: Build all core packages
2 |
3 | on:
4 | workflow_call:
5 | secrets:
6 | ccache_s3_endpoint:
7 | ccache_s3_bucket:
8 | ccache_s3_access_key:
9 | ccache_s3_secret_key:
10 |
11 | jobs:
12 | determine_changed_files:
13 | name: Determine Changed Files
14 | uses: ./.github/workflows/reusable_determine_changed_files.yml
15 |
16 | determine_changed_packages:
17 | name: Determine Changed Packages
18 | needs: determine_changed_files
19 | uses: ./.github/workflows/reusable_determine_changed_packages.yml
20 | with:
21 | all_changed_files: ${{ needs.determine_changed_files.outputs.all_changed_files }}
22 |
23 | build:
24 | name: Build Packages with external toolchain
25 | needs: determine_changed_packages
26 | permissions:
27 | contents: read
28 | packages: read
29 | actions: write
30 | strategy:
31 | fail-fast: False
32 | matrix:
33 | include:
34 | - target: malta
35 | subtarget: be
36 | - target: x86
37 | subtarget: 64
38 | uses: ./.github/workflows/reusable_build.yml
39 | with:
40 | container_name: toolchain
41 | target: ${{ matrix.target }}
42 | subtarget: ${{ matrix.subtarget }}
43 | build_kernel: true
44 | build_all_kmods: true
45 | build_all_modules: true
46 | build_full: true
47 | ccache_type: packages
48 | upload_ccache_cache: ${{ github.repository_owner == 'openwrt' }}
49 | check_packages_list: ${{ needs.determine_changed_packages.outputs.changed_packages }}
50 |
51 | upload-ccache-cache-in-s3:
52 | if: github.event_name == 'push' && github.repository_owner == 'openwrt'
53 | name: Upload ccache cache to s3
54 | needs: build
55 | strategy:
56 | fail-fast: False
57 | matrix:
58 | include:
59 | - target: malta
60 | subtarget: be
61 | - target: x86
62 | subtarget: 64
63 | secrets:
64 | s3_endpoint: ${{ secrets.ccache_s3_endpoint }}
65 | s3_bucket: ${{ secrets.ccache_s3_bucket }}
66 | s3_access_key: ${{ secrets.ccache_s3_access_key }}
67 | s3_secret_key: ${{ secrets.ccache_s3_secret_key }}
68 | uses: ./.github/workflows/reusable_upload-file-s3.yml
69 | with:
70 | download_id: ${{ matrix.target }}-${{ matrix.subtarget }}${{ matrix.testing != '' && '-testing' || '' }}-ccache-cache
71 | filename: ccache-packages-${{ matrix.target }}-${{ matrix.subtarget }}${{ matrix.testing != '' && '-testing' || '' }}${{ needs.build.outputs.ccache_tag }}.tar
72 |
73 |
--------------------------------------------------------------------------------
/.github/workflows/formal.yml:
--------------------------------------------------------------------------------
1 | name: Test Formalities
2 |
3 | on:
4 | workflow_call:
5 | inputs:
6 | exclude_dependabot:
7 | description: 'Exclude commits authored by dependabot from some checks'
8 | default: true
9 | required: false
10 | type: boolean
11 | exclude_weblate:
12 | description: 'Exclude commits authored by Weblate from some checks'
13 | required: false
14 | type: boolean
15 | post_comment:
16 | description: 'Post summaries to the pull request'
17 | required: false
18 | type: boolean
19 | warn_on_no_modify:
20 | description: 'Warn when PR edits by maintainers are not allowed. Requires post_comment to be true.'
21 | required: false
22 | type: boolean
23 |
24 | permissions:
25 | pull-requests: write
26 |
27 | jobs:
28 | formalities:
29 | name: Test Formalities
30 | runs-on: ubuntu-slim
31 |
32 | steps:
33 | - name: Checkout source code
34 | uses: actions/checkout@v6
35 | with:
36 | ref: ${{ github.event.pull_request.head.sha }}
37 | fetch-depth: 0
38 |
39 | - name: Checkout formalities
40 | uses: actions/checkout@v6
41 | with:
42 | repository: openwrt/actions-shared-workflows
43 | path: workflow_context
44 | sparse-checkout: |
45 | .github/scripts/check_formalities.sh
46 | .github/scripts/ci_helpers.sh
47 | .github/scripts/process_formalities.js
48 | sparse-checkout-cone-mode: false
49 |
50 | - name: Test formalities
51 | id: formalities
52 | run: workflow_context/.github/scripts/check_formalities.sh
53 | env:
54 | BRANCH: ${{ github.base_ref }}
55 | EXCLUDE_DEPENDABOT: ${{ inputs.exclude_dependabot }}
56 | EXCLUDE_WEBLATE: ${{ inputs.exclude_weblate }}
57 |
58 | - name: Process GitHub formality check results
59 | if: always() && inputs.post_comment == true
60 | uses: actions/github-script@v8
61 | env:
62 | JOB_ID: ${{ job.check_run_id }}
63 | SUMMARY: ${{ steps.formalities.outputs.content }}
64 | WARN_ON_NO_MODIFY: ${{ inputs.warn_on_no_modify }}
65 | with:
66 | script: |
67 | const processFormalities = require('./workflow_context/.github/scripts/process_formalities.js')
68 | await processFormalities({
69 | github,
70 | context,
71 | jobId: process.env.JOB_ID,
72 | summary: process.env.SUMMARY,
73 | warnOnNoModify: process.env.WARN_ON_NO_MODIFY,
74 | });
75 |
--------------------------------------------------------------------------------
/.github/workflows/tools.yml:
--------------------------------------------------------------------------------
1 | name: Build host tools
2 |
3 | on:
4 | workflow_call:
5 |
6 | jobs:
7 | build-macos-latest:
8 | name: Build tools with macos latest
9 | runs-on: macos-latest
10 |
11 | steps:
12 | - name: Checkout
13 | uses: actions/checkout@v6
14 | with:
15 | path: openwrt
16 |
17 | - name: Set XCode to latest
18 | uses: maxim-lobanov/setup-xcode@v1
19 | with:
20 | xcode-version: latest-stable
21 |
22 | - name: Setup MacOS
23 | run: |
24 | echo "WORKPATH=/Volumes/OpenWrt" >> "$GITHUB_ENV"
25 | hdiutil create -size 20g -type SPARSE -fs "Case-sensitive HFS+" -volname OpenWrt OpenWrt.sparseimage
26 | hdiutil attach OpenWrt.sparseimage
27 | mv "$GITHUB_WORKSPACE/openwrt" /Volumes/OpenWrt/
28 |
29 | - name: Install required prereq on MacOS
30 | working-directory: ${{ env.WORKPATH }}/openwrt
31 | run: |
32 | brew install \
33 | python@3.12 \
34 | automake \
35 | coreutils \
36 | diffutils \
37 | findutils \
38 | gawk \
39 | git-extras \
40 | gnu-getopt \
41 | gnu-sed \
42 | grep \
43 | gpatch \
44 | make \
45 | python-setuptools \
46 | swig
47 |
48 | echo "/bin" >> "$GITHUB_PATH"
49 | echo "/sbin/Library/Apple/usr/bin" >> "$GITHUB_PATH"
50 | echo "/usr/bin" >> "$GITHUB_PATH"
51 | echo "/opt/homebrew/bin" >> "$GITHUB_PATH"
52 | echo "/opt/homebrew/opt/coreutils/bin" >> "$GITHUB_PATH"
53 | echo "/opt/homebrew/opt/findutils/libexec/gnubin" >> "$GITHUB_PATH"
54 | echo "/opt/homebrew/opt/gnu-getopt/bin" >> "$GITHUB_PATH"
55 | echo "/opt/homebrew/opt/make/libexec/gnubin" >> "$GITHUB_PATH"
56 | echo "/usr/sbin" >> "$GITHUB_PATH"
57 |
58 | - name: Make prereq
59 | working-directory: ${{ env.WORKPATH }}/openwrt
60 | run: make defconfig
61 |
62 | - name: Build tools MacOS
63 | working-directory: ${{ env.WORKPATH }}/openwrt
64 | run: make tools/install -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
65 |
66 | - name: Upload logs
67 | if: always()
68 | uses: actions/upload-artifact@v5
69 | with:
70 | name: macos-latest-logs
71 | path: ${{ env.WORKPATH }}/openwrt/logs
72 |
73 | - name: Upload config
74 | if: always()
75 | uses: actions/upload-artifact@v5
76 | with:
77 | name: macos-latest-config
78 | path: ${{ env.WORKPATH }}/openwrt/.config
79 |
80 | check-linux-buildbot:
81 | name: Check tools with buildbot container
82 | uses: ./.github/workflows/reusable_check-tools.yml
83 |
84 | build-linux-buildbot:
85 | name: Build tools with buildbot container
86 | needs: check-linux-buildbot
87 | uses: ./.github/workflows/reusable_build-tools.yml
88 |
--------------------------------------------------------------------------------
/.github/dockerfiles_feeds/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | # not enabling `errtrace` and `pipefail` since those are bash specific
4 | set -o errexit # failing commands causes script to fail
5 | set -o nounset # undefined variables causes script to fail
6 |
7 | mkdir -p /var/lock/
8 | mkdir -p /var/log/
9 |
10 | if [ $PKG_MANAGER = "opkg" ]; then
11 | echo "src/gz packages_ci file:///ci" >> /etc/opkg/distfeeds.conf
12 | # Disable checking signature for all opkg feeds, since it doesn't look like
13 | # it's possible to do it for the local feed only, which has signing removed.
14 | # This fixes running CI tests.
15 | sed -i '/check_signature/d' /etc/opkg.conf
16 | opkg update
17 | elif [ $PKG_MANAGER = "apk" ]; then
18 | echo "/ci/packages.adb" >> /etc/apk/repositories.d/distfeeds.list
19 | apk update
20 | fi
21 |
22 | CI_HELPER="${CI_HELPER:-/ci/.github/workflows/ci_helpers.sh}"
23 |
24 | for PKG in /ci/*.[ai]pk; do
25 | if [ $PKG_MANAGER = "opkg" ]; then
26 | tar -xzOf "$PKG" ./control.tar.gz | tar xzf - ./control
27 | # package name including variant
28 | PKG_NAME=$(sed -ne 's#^Package: \(.*\)$#\1#p' ./control)
29 | # package version without release
30 | PKG_VERSION=$(sed -ne 's#^Version: \(.*\)$#\1#p' ./control)
31 | PKG_VERSION="${PKG_VERSION%-[!-]*}"
32 | # package source containing test.sh script
33 | PKG_SOURCE=$(sed -ne 's#^Source: \(.*\)$#\1#p' ./control)
34 | PKG_SOURCE="${PKG_SOURCE#/feed/}"
35 | elif [ $PKG_MANAGER = "apk" ]; then
36 | # package name including variant
37 | PKG_NAME=$(apk adbdump --format json "$PKG" | jsonfilter -e '@["info"]["name"]')
38 | # package version without release
39 | PKG_VERSION=$(apk adbdump --format json "$PKG" | jsonfilter -e '@["info"]["version"]')
40 | PKG_VERSION="${PKG_VERSION%-[!-]*}"
41 | # package source containing test.sh script
42 | PKG_SOURCE=$(apk adbdump --format json "$PKG" | jsonfilter -e '@["info"]["origin"]')
43 | PKG_SOURCE="${PKG_SOURCE#/feed/}"
44 | fi
45 |
46 | echo
47 | echo "Testing package $PKG_NAME in version $PKG_VERSION from $PKG_SOURCE"
48 |
49 | if ! [ -d "/ci/$PKG_SOURCE" ]; then
50 | echo "$PKG_SOURCE is not a directory"
51 | exit 1
52 | fi
53 |
54 | PRE_TEST_SCRIPT="/ci/$PKG_SOURCE/pre-test.sh"
55 | TEST_SCRIPT="/ci/$PKG_SOURCE/test.sh"
56 |
57 | if ! [ -f "$TEST_SCRIPT" ]; then
58 | echo "No test.sh script available"
59 | continue
60 | fi
61 |
62 | export PKG_NAME PKG_VERSION CI_HELPER
63 |
64 | if [ -f "$PRE_TEST_SCRIPT" ]; then
65 | echo "Use package specific pre-test.sh"
66 | if sh "$PRE_TEST_SCRIPT" "$PKG_NAME" "$PKG_VERSION"; then
67 | echo "Pre-test successful"
68 | else
69 | echo "Pre-test failed"
70 | exit 1
71 | fi
72 | else
73 | echo "No pre-test.sh script available"
74 | fi
75 |
76 | if [ $PKG_MANAGER = "opkg" ]; then
77 | opkg install "$PKG"
78 | elif [ $PKG_MANAGER = "apk" ]; then
79 | apk add --allow-untrusted "$PKG"
80 | fi
81 |
82 | echo "Use package specific test.sh"
83 | if sh "$TEST_SCRIPT" "$PKG_NAME" "$PKG_VERSION"; then
84 | echo "Test successful"
85 | else
86 | echo "Test failed"
87 | exit 1
88 | fi
89 |
90 | if [ $PKG_MANAGER = "opkg" ]; then
91 | opkg remove "$PKG_NAME" --force-removal-of-dependent-packages --force-remove --autoremove || true
92 | elif [ $PKG_MANAGER = "apk" ]; then
93 | apk del -r "$PKG_NAME"
94 | fi
95 | done
96 |
--------------------------------------------------------------------------------
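Packages opt into this runtime test by shipping a test.sh (and optionally a pre-test.sh) next to their Makefile; the script receives the package name and version as arguments and can source the helpers pointed to by CI_HELPER. A minimal, hypothetical test.sh:

    #!/bin/sh
    # $1 = package name (including variant), $2 = package version without release
    . "$CI_HELPER"

    # "example-tool" is a made-up binary; real scripts check whatever the package installs
    example-tool --version 2>&1 | grep -F "$2" || err_die "unexpected version"
    success "example-tool reports version $2"
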
/.github/scripts/process_formalities.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const BOT_LOGIN = "github-actions";
4 | const STEP_ANCHOR = "step:4:1";
5 |
6 | const GET_COMMENTS_QUERY = `query($owner: String!, $repo: String!, $issueNumber: Int!) {
7 | repository(owner: $owner, name: $repo) {
8 | pullRequest(number: $issueNumber) {
9 | comments(last: 100) {
10 | nodes {
11 | id
12 | author {
13 | login
14 | }
15 | body
16 | isMinimized
17 | }
18 | }
19 | }
20 | }
21 | }`;
22 |
23 | // BUG: Classifiers are broken and they do nothing, but they must be set.
24 | // https://github.com/orgs/community/discussions/19865
25 | const MINIMIZE_COMMENT_MUTATION = `
26 | mutation($id: ID!) {
27 | minimizeComment(input: {subjectId: $id, classifier: OUTDATED}) {
28 | clientMutationId
29 | }
30 | }
31 | `;
32 |
33 | const COMMENT_LOOKUP = "";
34 |
35 | const SUMMARY_HEADER=`
36 | > [!WARNING]
37 | >
38 | > Some formality checks failed.
39 | >
40 | > Consider (re)reading [submissions guidelines](
41 | https://openwrt.org/submitting-patches#submission_guidelines).
42 |
43 |
44 | Failed checks
45 |
46 | Issues marked with an :x: are failing checks.
47 | `;
48 |
49 | const SUMMARY_FOOTER=`
50 |
51 | `;
52 |
53 | const NO_MODIFY=`
54 | > [!TIP]
55 | >
56 | > PR has _Allow edits and access to secrets by maintainers_ disabled. Consider allowing edits to simplify review.
57 | >
58 | > [More info](
59 | https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork)
60 | `;
61 |
62 | const FEEDBACK=`
63 | Something broken? Consider [providing feedback](
64 | https://github.com/openwrt/actions-shared-workflows/issues).
65 | `;
66 |
67 | async function hideOldSummaries({ github, owner, repo, issueNumber }) {
68 | const result = await github.graphql(GET_COMMENTS_QUERY, { owner, repo, issueNumber });
69 |
70 | const commentsToHide = result.repository.pullRequest.comments.nodes.filter(comment => !comment.isMinimized &&
71 | comment.author?.login === BOT_LOGIN &&
72 | comment.body.includes(COMMENT_LOOKUP)
73 | );
74 |
75 | for (const { id } of commentsToHide) {
76 | console.log(`Hiding outdated summary comment ${id}`);
77 | await github.graphql(MINIMIZE_COMMENT_MUTATION, { id });
78 | }
79 | }
80 |
81 | function getJobUrl({ context, jobId }) {
82 | return `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}/job/${jobId}?pr=${context.issue.number}#${STEP_ANCHOR}`;
83 | }
84 |
85 | function getSummaryMessage({ context, jobId, summary }) {
86 | return `
87 | ${SUMMARY_HEADER}
88 | ${summary}
89 | ${SUMMARY_FOOTER}
90 | For more details, see the [full job log](${getJobUrl({ context, jobId })}).
91 | `;
92 | }
93 |
94 | function getCommentMessage({ context, jobId, noModify, summary }) {
95 | return `
96 | ${summary.length > 0 ? getSummaryMessage({ context, jobId, summary }) : ''}
97 | ${noModify ? NO_MODIFY : ''}
98 | ${FEEDBACK}
99 | ${COMMENT_LOOKUP}
100 | `;
101 | }
102 |
103 | async function processFormalities({
104 | context,
105 | github,
106 | jobId,
107 | summary,
108 | warnOnNoModify,
109 | }) {
110 | const { owner, repo, number: issueNumber } = context.issue;
111 |
112 | await hideOldSummaries({ github, owner, repo, issueNumber });
113 |
114 | // Explicitly check maintainer_can_modify as it might not be set at all
115 |   const { pull_request: pr } = context.payload;
116 | const noModify = warnOnNoModify && pr?.maintainer_can_modify === false;
117 | summary = summary.trim();
118 | if (summary.length === 0 && !noModify) {
119 | console.log('Summary is empty and modify checks passed, skipping posting a comment');
120 | return;
121 | }
122 |
123 | console.log("Posting new summary comment");
124 | const body = getCommentMessage({ context, jobId, noModify, summary });
125 | return github.rest.issues.createComment({
126 | issue_number: issueNumber,
127 | owner,
128 | repo,
129 | body,
130 | });
131 | }
132 |
133 | module.exports = processFormalities;
134 |
--------------------------------------------------------------------------------
/.github/workflows/reusable_check-tools.yml:
--------------------------------------------------------------------------------
1 | name: Check host tools
2 |
3 | on:
4 | workflow_call:
5 | inputs:
6 | use_openwrt_container:
7 | type: boolean
8 | default: true
9 |
10 | permissions:
11 | contents: read
12 |
13 | jobs:
14 | determine-container-info:
15 | name: Determine needed info to push containers
16 | runs-on: ubuntu-slim
17 | outputs:
18 | owner-lc: ${{ steps.generate-owner-lc.outputs.owner-lc }}
19 | container-tag: ${{ steps.determine-container-tag.outputs.container-tag }}
20 |
21 | steps:
22 | - name: Set lower case owner name
23 | id: generate-owner-lc
24 | run: |
25 | OWNER_LC=$(echo "${{ github.repository_owner }}" \
26 | | tr '[:upper:]' '[:lower:]')
27 |
28 | if [ ${{ inputs.use_openwrt_container }} == "true" ]; then
29 | OWNER_LC=openwrt
30 | fi
31 |
32 | echo "owner-lc=$OWNER_LC" >> $GITHUB_OUTPUT
33 |
34 | # Per branch tools container tag
35 | # By default stick to latest
36 |       # For official tests targeting an openwrt stable branch,
37 |       # get the branch or parse the tag and push dedicated tools containers.
38 |       # Any branch that matches the pattern openwrt-[0-9][0-9].[0-9][0-9]
39 |       # will refresh the tools container with the matching tag.
40 | # (example branch openwrt-22.03 -> tools:openwrt-22.03)
41 | # (example branch openwrt-22.03-test -> tools:openwrt-22.03)
42 | - name: Determine tools container tag
43 | id: determine-container-tag
44 | run: |
45 | CONTAINER_TAG=latest
46 |
47 | if [ ${{ github.ref_type }} == "branch" ]; then
48 | if echo "${{ github.ref_name }}" | grep -q -E 'openwrt-[0-9][0-9]\.[0-9][0-9]'; then
49 | CONTAINER_TAG="$(echo ${{ github.ref_name }} | sed 's/^\(openwrt-[0-9][0-9]\.[0-9][0-9]\).*/\1/')"
50 | fi
51 | elif [ ${{ github.ref_type }} == "tag" ]; then
52 | if echo "${{ github.ref_name }}" | grep -q -E 'v[0-9][0-9]\.[0-9][0-9]\..+'; then
53 | CONTAINER_TAG=openwrt-"$(echo ${{ github.ref_name }} | sed 's/v\([0-9][0-9]\.[0-9][0-9]\)\..\+/\1/')"
54 | fi
55 | fi
56 |
57 | echo "Container tag to push for tools and toolchain is $CONTAINER_TAG"
58 | echo "container-tag=$CONTAINER_TAG" >> "$GITHUB_OUTPUT"
59 |
60 | check:
61 | name: Check tools
62 | needs: determine-container-info
63 | runs-on: ubuntu-latest
64 | container: ghcr.io/${{ needs.determine-container-info.outputs.owner-lc }}/tools:${{ needs.determine-container-info.outputs.container-tag }}
65 |
66 | steps:
67 | - name: Checkout
68 | uses: actions/checkout@v6
69 | with:
70 | path: openwrt
71 |
72 | - name: Fix permission
73 | run: chown -R buildbot:buildbot openwrt
74 |
75 | - name: Prepare prebuilt tools
76 | shell: su buildbot -c "sh -e {0}"
77 | working-directory: openwrt
78 | run: |
79 | mkdir -p staging_dir build_dir
80 | ln -s /prebuilt_tools/staging_dir/host staging_dir/host
81 | ln -s /prebuilt_tools/build_dir/host build_dir/host
82 |
83 | ./scripts/ext-tools.sh --refresh
84 |
85 | - name: Set configs for tools container
86 | shell: su buildbot -c "sh -e {0}"
87 | working-directory: openwrt
88 | run: |
89 | touch .config
90 | echo CONFIG_DEVEL=y >> .config
91 | echo CONFIG_AUTOREMOVE=y >> .config
92 | echo CONFIG_CCACHE=y >> .config
93 | echo CONFIG_BUILD_ALL_HOST_TOOLS=y >> .config
94 |
95 | - name: Make prereq
96 | shell: su buildbot -c "sh -e {0}"
97 | working-directory: openwrt
98 | run: make defconfig
99 |
100 | - name: Compile needed host tools
101 | shell: su buildbot -c "sh -e {0}"
102 | working-directory: openwrt
103 | run: make tools/zstd/compile tools/quilt/compile -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
104 |
105 | - name: Download and check tools
106 | shell: su buildbot -c "sh -e {0}"
107 | working-directory: openwrt
108 | run: make tools/download tools/check FIXUP=1 -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
109 |
110 | - name: Refresh tools
111 | shell: su buildbot -c "sh -e {0}"
112 | working-directory: openwrt
113 | run: make tools/refresh -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
114 |
115 | - name: Validate checked tools
116 | shell: su buildbot -c "sh -e {0}"
117 | working-directory: openwrt
118 | run: |
119 | . .github/workflows/scripts/ci_helpers.sh
120 |
121 | if git diff --name-only --exit-code; then
122 |             success "All tools seem ok"
123 |           else
124 |             err "Some tool Makefiles require a fix. (run 'make tools/check FIXUP=1' and force-push this PR)"
125 |             err "You can also check the provided artifacts with the fixed files from this CI run."
126 | mkdir tools-fixed
127 | for f in $(git diff --name-only); do
128 | cp --parents $f tools-fixed/
129 | done
130 | exit 1
131 | fi
132 |
133 | - name: Upload fixed tools
134 | if: failure()
135 | uses: actions/upload-artifact@v5
136 | with:
137 | name: tools-fixed
138 | path: openwrt/tools-fixed
139 |
--------------------------------------------------------------------------------
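The branch and tag examples given in the determine-container-info comments map to container tags as follows (same sed expressions as in the step; v23.05.2 is just a sample release tag):

    echo "openwrt-22.03"      | sed 's/^\(openwrt-[0-9][0-9]\.[0-9][0-9]\).*/\1/'   # -> openwrt-22.03
    echo "openwrt-22.03-test" | sed 's/^\(openwrt-[0-9][0-9]\.[0-9][0-9]\).*/\1/'   # -> openwrt-22.03
    echo "v23.05.2"           | sed 's/v\([0-9][0-9]\.[0-9][0-9]\)\..\+/\1/'        # -> 23.05, pushed as openwrt-23.05
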
/.github/workflows/label-kernel.yml:
--------------------------------------------------------------------------------
1 | # ci:kernel:x86:64 is going to trigger CI kernel check jobs for x86/64 target
2 |
3 | name: Build kernel and check patches for target specified in labels
4 | on:
5 | workflow_call:
6 |
7 | jobs:
8 | set_target:
9 | if: startsWith(github.event.label.name, 'ci:kernel:')
10 | name: Set target
11 | runs-on: ubuntu-slim
12 | outputs:
13 | targets_subtargets: ${{ steps.set_target.outputs.targets_subtargets }}
14 | targets: ${{ steps.set_target.outputs.targets }}
15 |
16 | steps:
17 | - name: Checkout
18 | uses: actions/checkout@v6
19 |
20 | - name: Parse label
21 | id: parse_label
22 | env:
23 | CI_EVENT_LABEL_NAME: ${{ github.event.label.name }}
24 | run: |
25 | echo "$CI_EVENT_LABEL_NAME" | sed -n 's/ci:kernel:\([^:]*\):\([^:]*\):*\([^:]*\)$/target=\1/p' | tee --append $GITHUB_OUTPUT
26 | echo "$CI_EVENT_LABEL_NAME" | sed -n 's/ci:kernel:\([^:]*\):\([^:]*\):*\([^:]*\)$/subtarget=\2/p' | tee --append $GITHUB_OUTPUT
27 | echo "$CI_EVENT_LABEL_NAME" | sed -n 's/ci:kernel:\([^:]*\):\([^:]*\):*\([^:]*\)$/testing=\3/p' | tee --append $GITHUB_OUTPUT
28 |
29 | - name: Set targets
30 | id: set_target
31 | run: |
32 | ALL_TARGETS="$(perl ./scripts/dump-target-info.pl kernels 2>/dev/null)"
33 |
34 | TARGETS_SUBTARGETS="$(echo "$ALL_TARGETS" | sort -u -t '/' -k1)"
35 | TARGETS="$(echo "$ALL_TARGETS" | sort -u -t '/' -k1,1)"
36 |
37 | [ "${{ steps.parse_label.outputs.subtarget }}" = "first" ] && TARGETS_SUBTARGETS=$TARGETS
38 |
39 | JSON_TARGETS_SUBTARGETS='['
40 | FIRST=1
41 | while IFS= read -r line; do
42 | TARGET_SUBTARGET=$(echo $line | cut -d " " -f 1)
43 | TARGET=$(echo $TARGET_SUBTARGET | cut -d "/" -f 1)
44 | SUBTARGET=$(echo $TARGET_SUBTARGET | cut -d "/" -f 2)
45 |
46 | [ "${{ steps.parse_label.outputs.target }}" != "all" ] && [ "${{ steps.parse_label.outputs.target }}" != "$TARGET" ] && continue
47 | [ "${{ steps.parse_label.outputs.subtarget }}" != "all" ] && [ "${{ steps.parse_label.outputs.subtarget }}" != "first" ] &&
48 | [ "${{ steps.parse_label.outputs.subtarget }}" != $SUBTARGET ] && continue
49 | if [ "${{ steps.parse_label.outputs.testing }}" = "testing" ]; then
50 | TESTING_KERNEL_VER=$(echo $line | cut -d " " -f 3)
51 | [ -z "$TESTING_KERNEL_VER" ] && continue
52 | fi
53 |
54 | TUPLE='{"target":"'"$TARGET"'","subtarget":"'"$SUBTARGET"'","testing":"'"$TESTING_KERNEL_VER"'"}'
55 | [[ $FIRST -ne 1 ]] && JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS"','
56 | JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS""$TUPLE"
57 | FIRST=0
58 | done <<< "$TARGETS_SUBTARGETS"
59 | JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS"']'
60 |
61 | JSON_TARGETS='['
62 | FIRST=1
63 | while IFS= read -r line; do
64 | TARGET_SUBTARGET=$(echo $line | cut -d " " -f 1)
65 | TARGET=$(echo $TARGET_SUBTARGET | cut -d "/" -f 1)
66 | SUBTARGET=$(echo $TARGET_SUBTARGET | cut -d "/" -f 2)
67 |
68 | [ "${{ steps.parse_label.outputs.target }}" != "all" ] && [ "${{ steps.parse_label.outputs.target }}" != $TARGET ] && continue
69 | if [ "${{ steps.parse_label.outputs.testing }}" = "testing" ]; then
70 | TESTING_KERNEL_VER=$(echo $line | cut -d " " -f 3)
71 | [ -z "$TESTING_KERNEL_VER" ] && continue
72 | fi
73 |
74 | TUPLE='{"target":"'"$TARGET"'","subtarget":"'"$SUBTARGET"'","testing":"'"$TESTING_KERNEL_VER"'"}'
75 | [[ $FIRST -ne 1 ]] && JSON_TARGETS="$JSON_TARGETS"','
76 | JSON_TARGETS="$JSON_TARGETS""$TUPLE"
77 | FIRST=0
78 | done <<< "$TARGETS"
79 | JSON_TARGETS="$JSON_TARGETS"']'
80 |
81 | echo -e "\n---- targets to build ----\n"
82 | echo "$JSON_TARGETS_SUBTARGETS"
83 | echo -e "\n---- targets to build ----\n"
84 |
85 | echo -e "\n---- targets to check patch ----\n"
86 | echo "$JSON_TARGETS"
87 | echo -e "\n---- targets to check patch ----\n"
88 |
89 | echo "targets_subtargets=$JSON_TARGETS_SUBTARGETS" >> $GITHUB_OUTPUT
90 | echo "targets=$JSON_TARGETS" >> $GITHUB_OUTPUT
91 |
92 | build_kernel:
93 | name: Build Kernel with external toolchain
94 | needs: set_target
95 | permissions:
96 | contents: read
97 | packages: read
98 | actions: write
99 | uses: ./.github/workflows/reusable_build.yml
100 | strategy:
101 | fail-fast: False
102 | matrix:
103 | include: ${{fromJson(needs.set_target.outputs.targets_subtargets)}}
104 | with:
105 | container_name: toolchain
106 | target: ${{ matrix.target }}
107 | subtarget: ${{ matrix.subtarget }}
108 | testing: ${{ matrix.testing != '' && true }}
109 | build_kernel: true
110 | build_all_kmods: true
111 | build_dtb: true
112 |
113 | check-kernel-patches:
114 | name: Check Kernel patches
115 | needs: set_target
116 | permissions:
117 | contents: read
118 | packages: read
119 | actions: write
120 | strategy:
121 | fail-fast: False
122 | matrix:
123 | include: ${{fromJson(needs.set_target.outputs.targets)}}
124 | uses: ./.github/workflows/reusable_check-kernel-patches.yml
125 | with:
126 | target: ${{ matrix.target }}
127 | subtarget: ${{ matrix.subtarget }}
128 | testing: ${{ matrix.testing != '' && true }}
129 |
--------------------------------------------------------------------------------
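The ci:kernel label carries an optional third field selecting the testing kernel; the parse_label seds split a sample label like this (all, first and testing are the special values handled by set_target):

    CI_EVENT_LABEL_NAME="ci:kernel:all:first:testing"
    echo "$CI_EVENT_LABEL_NAME" | sed -n 's/ci:kernel:\([^:]*\):\([^:]*\):*\([^:]*\)$/target=\1/p'     # -> target=all
    echo "$CI_EVENT_LABEL_NAME" | sed -n 's/ci:kernel:\([^:]*\):\([^:]*\):*\([^:]*\)$/subtarget=\2/p'  # -> subtarget=first
    echo "$CI_EVENT_LABEL_NAME" | sed -n 's/ci:kernel:\([^:]*\):\([^:]*\):*\([^:]*\)$/testing=\3/p'    # -> testing=testing
    # for a two-field label such as ci:kernel:x86:64 the testing output is simply empty
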
/.github/workflows/reusable_check-kernel-patches.yml:
--------------------------------------------------------------------------------
1 | name: Refresh kernel for target
2 |
3 | on:
4 | workflow_call:
5 | inputs:
6 | target:
7 | required: true
8 | type: string
9 | subtarget:
10 | required: true
11 | type: string
12 | testing:
13 | type: boolean
14 | use_openwrt_container:
15 | type: boolean
16 | default: true
17 |
18 | permissions:
19 | contents: read
20 |
21 | jobs:
22 | setup_build:
23 | name: Set up build
24 | runs-on: ubuntu-slim
25 | outputs:
26 | owner_lc: ${{ steps.lower_owner.outputs.owner_lc }}
27 | container_tag: ${{ steps.determine_tools_container.outputs.container_tag }}
28 |
29 | steps:
30 | - name: Set lower case owner name
31 | id: lower_owner
32 | run: |
33 | OWNER_LC=$(echo "${{ github.repository_owner }}" \
34 | | tr '[:upper:]' '[:lower:]')
35 |
36 | if [ ${{ inputs.use_openwrt_container }} == "true" ]; then
37 | OWNER_LC=openwrt
38 | fi
39 |
40 | echo "owner_lc=$OWNER_LC" >> $GITHUB_OUTPUT
41 |
42 | # Per branch tools container tag
43 | # By default stick to latest
44 |       # For official tests targeting an openwrt stable branch,
45 |       # get the branch or parse the tag and push dedicated tools containers.
46 |       # For local tests to use the correct container for stable release testing,
47 |       # use a branch name prefixed with openwrt-[0-9][0-9].[0-9][0-9]-
48 | - name: Determine tools container tag
49 | id: determine_tools_container
50 | run: |
51 | CONTAINER_TAG=latest
52 | if [ -n "${{ github.base_ref }}" ]; then
53 | if echo "${{ github.base_ref }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]$'; then
54 | CONTAINER_TAG="${{ github.base_ref }}"
55 | fi
56 | elif [ ${{ github.ref_type }} == "branch" ]; then
57 | if echo "${{ github.ref_name }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]$'; then
58 | CONTAINER_TAG=${{ github.ref_name }}
59 | elif echo "${{ github.ref_name }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]-'; then
60 | CONTAINER_TAG="$(echo ${{ github.ref_name }} | sed 's/^\(openwrt-[0-9][0-9]\.[0-9][0-9]\)-.*/\1/')"
61 | fi
62 | elif [ ${{ github.ref_type }} == "tag" ]; then
63 | if echo "${{ github.ref_name }}" | grep -q -E '^v[0-9][0-9]\.[0-9][0-9]\..+'; then
64 | CONTAINER_TAG=openwrt-"$(echo ${{ github.ref_name }} | sed 's/^v\([0-9][0-9]\.[0-9][0-9]\)\..\+/\1/')"
65 | fi
66 | fi
67 | echo "Tools container to use tools:$CONTAINER_TAG"
68 | echo "container_tag=$CONTAINER_TAG" >> $GITHUB_OUTPUT
69 |
70 | check-patch:
71 | name: Check Kernel patches
72 | needs: setup_build
73 | runs-on: ubuntu-latest
74 |
75 | container: ghcr.io/${{ needs.setup_build.outputs.owner_lc }}/tools:${{ needs.setup_build.outputs.container_tag }}
76 |
77 | permissions:
78 | contents: read
79 | packages: read
80 |
81 | steps:
82 | - name: Checkout master directory
83 | uses: actions/checkout@v6
84 | with:
85 | path: openwrt
86 |
87 | - name: Fix permission
88 | run: |
89 | chown -R buildbot:buildbot openwrt
90 |
91 | - name: Prepare prebuilt tools
92 | shell: su buildbot -c "sh -e {0}"
93 | working-directory: openwrt
94 | run: |
95 | mkdir -p staging_dir build_dir
96 | ln -sf /prebuilt_tools/staging_dir/host staging_dir/host
97 | ln -sf /prebuilt_tools/build_dir/host build_dir/host
98 |
99 | ./scripts/ext-tools.sh --refresh
100 |
101 | - name: Configure testing kernel
102 | if: inputs.testing == true
103 | shell: su buildbot -c "sh -e {0}"
104 | working-directory: openwrt
105 | run: |
106 | echo CONFIG_TESTING_KERNEL=y >> .config
107 |
108 | - name: Configure system
109 | shell: su buildbot -c "sh -e {0}"
110 | working-directory: openwrt
111 | run: |
112 | echo CONFIG_ALL_KMODS=y >> .config
113 | echo CONFIG_DEVEL=y >> .config
114 | echo CONFIG_AUTOREMOVE=y >> .config
115 | echo CONFIG_CCACHE=y >> .config
116 |
117 | echo "CONFIG_TARGET_${{ inputs.target }}=y" >> .config
118 | echo "CONFIG_TARGET_${{ inputs.target }}_${{ inputs.subtarget }}=y" >> .config
119 |
120 | make defconfig
121 |
122 | - name: Build tools
123 | shell: su buildbot -c "sh -e {0}"
124 | working-directory: openwrt
125 | run: make tools/quilt/compile -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
126 |
127 | - name: Refresh Kernel patches
128 | shell: su buildbot -c "sh -e {0}"
129 | working-directory: openwrt
130 | run: make target/linux/refresh V=s
131 |
132 | - name: Validate Refreshed Kernel Patches
133 | shell: su buildbot -c "sh -e {0}"
134 | working-directory: openwrt
135 | run: |
136 | . .github/workflows/scripts/ci_helpers.sh
137 |
138 | if git diff --name-only --exit-code; then
139 |             success "Kernel patches for ${{ inputs.target }}/${{ inputs.subtarget }} seem ok"
140 |           else
141 |             err "Kernel patches for ${{ inputs.target }}/${{ inputs.subtarget }} require a refresh. (run 'make target/linux/refresh' and force-push this PR)"
142 | err "You can also check the provided artifacts with the refreshed patch from this CI run."
143 | mkdir ${{ inputs.target }}-${{ inputs.subtarget }}-refreshed
144 | for f in $(git diff --name-only); do
145 | cp --parents $f ${{ inputs.target }}-${{ inputs.subtarget }}-refreshed/
146 | done
147 | exit 1
148 | fi
149 |
150 | - name: Upload Refreshed Patches
151 | if: failure()
152 | uses: actions/upload-artifact@v5
153 | with:
154 | name: ${{ inputs.target }}-${{ inputs.subtarget }}${{ inputs.testing == true && '-testing' || '' }}-refreshed
155 | path: openwrt/${{ inputs.target }}-${{ inputs.subtarget }}-refreshed
156 |
--------------------------------------------------------------------------------
/.github/workflows/push-containers.yml:
--------------------------------------------------------------------------------
1 | name: Build and Push prebuilt tools container
2 |
3 | on:
4 | workflow_call:
5 |
6 | jobs:
7 | determine-container-info:
8 | name: Determine needed info to push containers
9 | if: ${{ github.repository_owner == 'openwrt' }}
10 | runs-on: ubuntu-slim
11 | outputs:
12 | owner-lc: ${{ steps.generate-owner-lc.outputs.owner-lc }}
13 | container-tag: ${{ steps.determine-container-tag.outputs.container-tag }}
14 |
15 | steps:
16 | - name: Set lower case owner name
17 | id: generate-owner-lc
18 | env:
19 | OWNER: ${{ github.repository_owner }}
20 | run: |
21 | echo "owner-lc=${OWNER,,}" >> "$GITHUB_OUTPUT"
22 |
23 | # Per branch tools container tag
24 | # By default stick to latest
25 |       # For official tests targeting an openwrt stable branch,
26 |       # get the branch or parse the tag and push dedicated tools containers.
27 |       # Any branch that matches the pattern openwrt-[0-9][0-9].[0-9][0-9]
28 |       # will refresh the tools container with the matching tag.
29 | # (example branch openwrt-22.03 -> tools:openwrt-22.03)
30 | # (example branch openwrt-22.03-test -> tools:openwrt-22.03)
31 | - name: Determine tools container tag
32 | id: determine-container-tag
33 | run: |
34 | CONTAINER_TAG=latest
35 |
36 | if [ ${{ github.ref_type }} == "branch" ]; then
37 | if echo "${{ github.ref_name }}" | grep -q -E 'openwrt-[0-9][0-9]\.[0-9][0-9]'; then
38 | CONTAINER_TAG="$(echo ${{ github.ref_name }} | sed 's/^\(openwrt-[0-9][0-9]\.[0-9][0-9]\).*/\1/')"
39 | fi
40 | elif [ ${{ github.ref_type }} == "tag" ]; then
41 | if echo "${{ github.ref_name }}" | grep -q -E 'v[0-9][0-9]\.[0-9][0-9]\..+'; then
42 | CONTAINER_TAG=openwrt-"$(echo ${{ github.ref_name }} | sed 's/v\([0-9][0-9]\.[0-9][0-9]\)\..\+/\1/')"
43 | fi
44 | fi
45 |
46 | echo "Container tag to push for tools and toolchain is $CONTAINER_TAG"
47 | echo "container-tag=$CONTAINER_TAG" >> "$GITHUB_OUTPUT"
48 |
49 | build-linux-buildbot:
50 | name: Build tools with buildbot container
51 | if: ${{ github.repository_owner == 'openwrt' }}
52 | uses: ./.github/workflows/reusable_build-tools.yml
53 | with:
54 | generate_prebuilt_artifacts: true
55 |
56 | push-tools-container:
57 | needs: [ determine-container-info, build-linux-buildbot ]
58 | if: ${{ github.repository_owner == 'openwrt' }}
59 | name: Push prebuilt tools container
60 | runs-on: ubuntu-latest
61 |
62 | permissions:
63 | contents: read
64 | packages: write
65 |
66 | steps:
67 | - name: Checkout
68 | uses: actions/checkout@v6
69 | with:
70 | repository: openwrt/actions-shared-workflows
71 | sparse-checkout: .github/dockerfiles/Dockerfile.tools
72 | sparse-checkout-cone-mode: false
73 |
74 | - name: Download prebuilt tools from build job
75 | uses: actions/download-artifact@v6
76 | with:
77 | name: linux-buildbot-prebuilt-tools
78 |
79 |       - name: Extract prebuilt tools
80 | run: tar -xf tools.tar
81 |
82 | - name: Login to GitHub Container Registry
83 | uses: docker/login-action@v3
84 | with:
85 | registry: ghcr.io
86 | username: ${{ github.actor }}
87 | password: ${{ secrets.GITHUB_TOKEN }}
88 |
89 | - name: Build and push
90 | uses: docker/build-push-action@v6
91 | with:
92 | context: .
93 | push: true
94 | tags: ghcr.io/${{ needs.determine-container-info.outputs.owner-lc }}/tools:${{ needs.determine-container-info.outputs.container-tag }}
95 | file: .github/dockerfiles/Dockerfile.tools
96 |
97 | determine-targets:
98 | name: Set targets
99 | if: ${{ github.repository_owner == 'openwrt' }}
100 | runs-on: ubuntu-slim
101 | outputs:
102 | target: ${{ steps.find_targets.outputs.target }}
103 |
104 | steps:
105 | - name: Checkout
106 | uses: actions/checkout@v6
107 |
108 | - name: Set targets
109 | id: find_targets
110 | run: |
111 | export TARGETS="$(perl ./scripts/dump-target-info.pl targets 2>/dev/null \
112 | | awk '{ print $1 }')"
113 |
114 | JSON='['
115 | FIRST=1
116 | for TARGET in $TARGETS; do
117 | TUPLE='{"target":"'"$(echo $TARGET | cut -d "/" -f 1)"'","subtarget":"'"$(echo $TARGET | cut -d "/" -f 2)"'"}'
118 | [[ $FIRST -ne 1 ]] && JSON="$JSON"','
119 | JSON="$JSON""$TUPLE"
120 | FIRST=0
121 | done
122 | JSON="$JSON"']'
123 |
124 | echo -e "\n---- targets ----\n"
125 | echo "$JSON"
126 | echo -e "\n---- targets ----\n"
127 |
128 | echo "target=$JSON" >> $GITHUB_OUTPUT
129 |
130 | build:
131 | name: Build Target Toolchain
132 | if: ${{ github.repository_owner == 'openwrt' }}
133 | needs: [ determine-targets, push-tools-container ]
134 | permissions:
135 | contents: read
136 | packages: read
137 | actions: write
138 | strategy:
139 | fail-fast: False
140 | matrix:
141 | include: ${{fromJson(needs.determine-targets.outputs.target)}}
142 | uses: ./.github/workflows/reusable_build.yml
143 | with:
144 | target: ${{ matrix.target }}
145 | subtarget: ${{ matrix.subtarget }}
146 | build_toolchain: true
147 | build_external_toolchain: true
148 | upload_external_toolchain: true
149 |
150 | push-toolchain-container:
151 | name: Push Target Toolchain container
152 | if: ${{ github.repository_owner == 'openwrt' }}
153 | needs: [ determine-container-info, determine-targets, build ]
154 | runs-on: ubuntu-latest
155 |
156 | strategy:
157 | fail-fast: False
158 | matrix:
159 | include: ${{fromJson(needs.determine-targets.outputs.target)}}
160 |
161 | permissions:
162 | contents: read
163 | packages: write
164 |
165 | steps:
166 | - name: Checkout
167 | uses: actions/checkout@v6
168 | with:
169 | repository: openwrt/actions-shared-workflows
170 | sparse-checkout: .github/dockerfiles/Dockerfile.toolchain
171 | sparse-checkout-cone-mode: false
172 |
173 | - name: Download external toolchain from build job
174 | uses: actions/download-artifact@v6
175 | with:
176 | name: ${{ matrix.target }}-${{ matrix.subtarget }}-external-toolchain
177 |
178 | - name: Find external toolchain name
179 | id: get-toolchain-name
180 | run: |
181 | TOOLCHAIN_NAME=$(ls | grep toolchain-${{ matrix.target }}-${{ matrix.subtarget }})
182 | echo "toolchain-name=$TOOLCHAIN_NAME" >> $GITHUB_OUTPUT
183 |
184 | - name: Login to GitHub Container Registry
185 | uses: docker/login-action@v3
186 | with:
187 | registry: ghcr.io
188 | username: ${{ github.actor }}
189 | password: ${{ secrets.GITHUB_TOKEN }}
190 |
191 | - name: Build and push
192 | uses: docker/build-push-action@v6
193 | with:
194 | context: .
195 | push: true
196 | tags: ghcr.io/${{ needs.determine-container-info.outputs.owner-lc }}/toolchain:${{ matrix.target }}-${{ matrix.subtarget }}-${{ needs.determine-container-info.outputs.container-tag }}
197 | file: .github/dockerfiles/Dockerfile.toolchain
198 | build-args: |
199 | OWNER_LC=${{ needs.determine-container-info.outputs.owner-lc }}
200 | CONTAINER_TAG=${{ needs.determine-container-info.outputs.container-tag }}
201 | TOOLCHAIN_NAME=${{ steps.get-toolchain-name.outputs.toolchain-name }}
202 |
--------------------------------------------------------------------------------
/.github/workflows/multi-arch-test-build.yml:
--------------------------------------------------------------------------------
1 | name: Feeds Package Test Build
2 |
3 | on:
4 | workflow_call:
5 |
6 | concurrency:
7 | group: ${{ github.workflow }}-${{ github.ref }}
8 | cancel-in-progress: ${{ github.event_name == 'pull_request' }}
9 |
10 | jobs:
11 | build:
12 | name: Test ${{ matrix.arch }}
13 | runs-on: ubuntu-latest
14 | strategy:
15 | fail-fast: false
16 | matrix:
17 | include:
18 | - arch: aarch64_generic
19 | target: armsr-armv8
20 | runtime_test: true
21 |
22 | - arch: arm_cortex-a15_neon-vfpv4
23 | target: armsr-armv7
24 | runtime_test: true
25 |
26 | - arch: arm_cortex-a9_vfpv3-d16
27 | target: mvebu-cortexa9
28 | runtime_test: false
29 |
30 | - arch: i386_pentium-mmx
31 | target: x86-geode
32 | runtime_test: true
33 |
34 | - arch: mips_24kc
35 | target: ath79-generic
36 | runtime_test: true
37 |
38 | - arch: mipsel_24kc
39 | target: mt7621
40 | runtime_test: false
41 |
42 | - arch: powerpc_464fp
43 | target: apm821xx-nand
44 | runtime_test: false
45 |
46 | - arch: powerpc_8548
47 | target: mpc85xx-p1010
48 | runtime_test: false
49 |
50 | # Workaround: riscv64_riscv64 was renamed to riscv64_generic
51 | - arch: ${{ (github.base_ref == 'openwrt-24.10' || github.base_ref == 'openwrt-23.05') && 'riscv64_riscv64' || 'riscv64_generic' }}
52 | target: sifiveu-generic
53 | runtime_test: false
54 |
55 | - arch: x86_64
56 | target: x86-64
57 | runtime_test: true
58 |
59 | steps:
60 | - name: Remove unused Android SDK, .NET, Swift, GHC
61 | run: |
62 | sudo rm -rf /usr/local/lib/android
63 | sudo rm -rf /usr/local/.ghcup
64 | sudo rm -rf /usr/share/dotnet
65 | sudo rm -rf /usr/share/swift
66 |
67 | - uses: actions/checkout@v6
68 | with:
69 | fetch-depth: 0
70 |
71 | - name: Determine branch name
72 | run: |
73 | BRANCH="${GITHUB_BASE_REF#refs/heads/}"
74 | case "$BRANCH" in
75 | main|master|openwrt-[0-9]*\.[0-9]*)
76 | ;;
77 | *)
78 | BRANCH="master"
79 | ;;
80 | esac
81 | echo "Building for $BRANCH"
82 | echo "BRANCH=$BRANCH" >> $GITHUB_ENV
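    |         # Illustrative outcome (hypothetical refs): a PR against openwrt-24.10
    |         # keeps BRANCH=openwrt-24.10, while a PR against a feature branch such
    |         # as "my-feature" falls back to BRANCH=master.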
83 |
84 | - name: Determine changed packages
85 | run: |
86 | # only detect packages with changes
87 | PKG_ROOTS=$(find . -name Makefile | \
88 | grep -v ".*/src/Makefile" | \
89 | sed -e 's@./\(.*\)/Makefile@\1/@')
90 | CHANGES=$(git diff --diff-filter=d --name-only origin/$BRANCH...)
91 |
92 | for ROOT in $PKG_ROOTS; do
93 | for CHANGE in $CHANGES; do
94 | if [[ "$CHANGE" == "$ROOT"* ]]; then
95 | PACKAGES+=$(echo "$ROOT" | sed -e 's@\(.*/\)*\(.*\)/@\2 @')
96 | break
97 | fi
98 | done
99 | done
100 |
101 |         # fall back to test packages if nothing explicitly changed; this
102 |         # should run if other mechanics in packages.git changed
103 | REPOSITORY_NAME=${GITHUB_REPOSITORY#*/}
104 | if [ "$REPOSITORY_NAME" = "routing" ]; then
105 | PACKAGES="${PACKAGES:-bird2 cjdns olsrd}"
106 | elif [ "$REPOSITORY_NAME" = "telephony" ]; then
107 | PACKAGES="${PACKAGES:-asterisk siproxd freeswitch}"
108 | else
109 | PACKAGES="${PACKAGES:-vim attendedsysupgrade-common bmon}"
110 | fi
111 |
112 | echo "Building $PACKAGES"
113 | echo "PACKAGES=$PACKAGES" >> $GITHUB_ENV
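    |         # Illustrative example (hypothetical path): a change to
    |         # net/mosquitto/patches/010-fix.patch falls under the package root
    |         # net/mosquitto/, so PACKAGES would contain "mosquitto".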
114 |
115 | - name: Build
116 | uses: openwrt/gh-action-sdk@v10
117 | env:
118 | ARCH: ${{ matrix.arch }}-${{ env.BRANCH }}
119 | FEEDNAME: packages_ci
120 | INDEX: 1
121 | V: s
122 |
123 | - name: Move created packages to project dir
124 | if: always()
125 | run: cp -v bin/packages/${{ matrix.arch }}/packages_ci/* . || true
126 |
127 | - name: Collect metadata
128 | if: always()
129 | run: |
130 | MERGE_ID=$(git rev-parse --short HEAD)
131 | echo "MERGE_ID=$MERGE_ID" >> $GITHUB_ENV
132 | echo "BASE_ID=$(git rev-parse --short HEAD^1)" >> $GITHUB_ENV
133 | echo "HEAD_ID=$(git rev-parse --short HEAD^2)" >> $GITHUB_ENV
134 | PRNUMBER=${GITHUB_REF_NAME%/merge}
135 | echo "PRNUMBER=$PRNUMBER" >> $GITHUB_ENV
136 | echo "ARCHIVE_NAME=${{matrix.arch}}-PR$PRNUMBER-$MERGE_ID" >> $GITHUB_ENV
137 |
138 | - name: Generate metadata
139 | if: always()
140 | run: |
141 | cat << _EOF_ > PKG-INFO
142 | Metadata-Version: 2.1
143 | Name: ${{env.ARCHIVE_NAME}}
144 | Version: $BRANCH
145 | Author: $GITHUB_ACTOR
146 | Home-page: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/pull/$PRNUMBER
147 | Download-URL: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
148 | Summary: $PACKAGES
149 | Platform: ${{ matrix.arch }}
150 |
151 | Packages for OpenWrt $BRANCH running on ${{matrix.arch}}, built from PR $PRNUMBER
152 | at commit $HEAD_ID, against $BRANCH at commit $BASE_ID, with merge SHA $MERGE_ID.
153 |
154 | Modified packages:
155 | _EOF_
156 | for p in $PACKAGES
157 | do
158 | echo " "$p >> PKG-INFO
159 | done
160 | echo >> PKG-INFO
161 | echo Full file listing: >> PKG-INFO
162 | ls -al *.ipk >> PKG-INFO || true
163 | ls -al *.apk >> PKG-INFO || true
164 | cat PKG-INFO
165 |
166 | - name: Store packages
167 | if: always()
168 | uses: actions/upload-artifact@v5
169 | with:
170 | name: ${{env.ARCHIVE_NAME}}-packages
171 | path: |
172 | Packages
173 | Packages.*
174 | *.ipk
175 | packages.adb
176 | *.apk
177 | PKG-INFO
178 |
179 | - name: Store logs
180 | if: always()
181 | uses: actions/upload-artifact@v5
182 | with:
183 | name: ${{env.ARCHIVE_NAME}}-logs
184 | path: |
185 | logs/
186 | PKG-INFO
187 |
188 | - name: Remove logs
189 | if: always()
190 | run: sudo rm -rf logs/ || true
191 |
192 | - name: Check if any packages were built
193 | run: |
194 | if [ -n "$(find . -maxdepth 1 -type f -name '*.apk' -print -quit)" ]; then
195 | echo "Found *.apk files"
196 | HAVE_PKGS=true
197 | PKG_MANAGER=apk
198 | elif [ -n "$(find . -maxdepth 1 -type f -name '*.ipk' -print -quit)" ]; then
199 | echo "Found *.ipk files"
200 | HAVE_PKGS=true
201 | PKG_MANAGER=opkg
202 | else
203 | echo "No *.apk or *.ipk files found"
204 | HAVE_PKGS=false
205 | fi
206 | echo "HAVE_PKGS=$HAVE_PKGS" >> $GITHUB_ENV
207 | echo "PKG_MANAGER=$PKG_MANAGER" >> $GITHUB_ENV
208 |
209 | - name: Register QEMU
210 | if: ${{ matrix.runtime_test && fromJSON(env.HAVE_PKGS) }}
211 | run: |
212 | sudo apt-get update
213 | sudo apt-get install -y qemu-user-static binfmt-support
214 | sudo update-binfmts --import
215 |
216 | - name: Checkout
217 | if: ${{ matrix.runtime_test && fromJSON(env.HAVE_PKGS) }}
218 | uses: actions/checkout@v6
219 | with:
220 | repository: openwrt/actions-shared-workflows
221 | path: dockerfiles_feeds
222 | sparse-checkout: |
223 | .github/scripts/ci_helpers.sh
224 | .github/dockerfiles_feeds/Dockerfile
225 | .github/dockerfiles_feeds/entrypoint.sh
226 | sparse-checkout-cone-mode: false
227 |
228 | - name: Build Docker container
229 | if: ${{ matrix.runtime_test && fromJSON(env.HAVE_PKGS) }}
230 | run: |
231 | docker build --platform linux/${{ matrix.arch }} -t test-container \
232 | --build-arg ARCH dockerfiles_feeds/.github/dockerfiles_feeds/
233 | env:
234 | ARCH: ${{ matrix.arch }}-${{ env.BRANCH }}
235 |
236 | - name: Test via Docker container
237 | if: ${{ matrix.runtime_test && fromJSON(env.HAVE_PKGS) }}
238 | run: |
239 | docker run --platform linux/${{ matrix.arch }} --rm -v $GITHUB_WORKSPACE:/ci \
240 | -v $GITHUB_WORKSPACE/dockerfiles_feeds:/dockerfiles_feeds \
241 | -e CI_HELPER=/dockerfiles_feeds/scripts/ci_helpers.sh \
242 | -e PKG_MANAGER=${{ env.PKG_MANAGER }} \
243 | test-container
244 |
--------------------------------------------------------------------------------
/.github/workflows/kernel.yml:
--------------------------------------------------------------------------------
1 | name: Build Kernel
2 |
3 | on:
4 | workflow_call:
5 | secrets:
6 | ccache_s3_endpoint:
7 | ccache_s3_bucket:
8 | ccache_s3_access_key:
9 | ccache_s3_secret_key:
10 |
11 | jobs:
12 | determine_changed_files:
13 | name: Determine Changed Files
14 | uses: ./.github/workflows/reusable_determine_changed_files.yml
15 |
16 | determine_targets:
17 | name: Set targets
18 | needs: determine_changed_files
19 | runs-on: ubuntu-slim
20 | outputs:
21 | targets_subtargets: ${{ steps.find_targets.outputs.targets_subtargets }}
22 | targets: ${{ steps.find_targets.outputs.targets }}
23 |
24 | steps:
25 | - name: Checkout
26 | uses: actions/checkout@v6
27 | with:
28 | sparse-checkout: |
29 | include
30 | scripts/dump-target-info.pl
31 | target/linux
32 |
33 | - name: Determine Affected Kernel Versions
34 | id: determine_affected_kernel_versions
35 | run: |
36 | CHANGED_FILES="$(echo ${{ needs.determine_changed_files.outputs.all_changed_files }} | tr ' ' '\n')"
37 |
38 |           # Extract the pattern (patches|backport|hack|pending|files|kernel|config)-[0-9]+.[0-9]+ from the changed files list and compose the list of affected kernel versions ([0-9]+.[0-9]+)
39 | AFFECTED_KERNEL_VERSIONS_LIST="$(echo $CHANGED_FILES | grep -oP "(patches|backport|hack|pending|files|kernel|config)-[0-9]+\.[0-9]+" | sort | uniq | \
40 | sed -E 's/(patches|backport|hack|pending|files|kernel|config)-//' | sort | uniq)"
41 |
42 | echo "affected_kernel_versions="$AFFECTED_KERNEL_VERSIONS_LIST"" >> $GITHUB_OUTPUT
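    |           # Illustrative example (hypothetical path): a change to
    |           # target/linux/generic/pending-6.6/100-foo.patch matches "pending-6.6",
    |           # so the affected kernel versions list would contain "6.6".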
43 |
44 | - name: Set targets
45 | id: find_targets
46 | env:
47 | AFFECTED_KERNEL_VERSIONS: ${{ steps.determine_affected_kernel_versions.outputs.affected_kernel_versions }}
48 | run: |
49 | ALL_TARGETS="$(perl ./scripts/dump-target-info.pl targets 2>/dev/null)"
50 | TARGET_KERNELS="$(perl ./scripts/dump-target-info.pl kernels 2>/dev/null)"
51 | CHANGED_FILES="$(echo ${{ needs.determine_changed_files.outputs.all_changed_files }} | tr ' ' '\n')"
52 |
53 | TARGETS_SUBTARGETS="$(echo "$ALL_TARGETS" | sort -u -t '/' -k1 | cut -d " " -f 1)"
54 | TARGETS="$(echo "$ALL_TARGETS" | sort -u -t '/' -k1,1 | cut -d " " -f 1)"
55 |
56 |           # For changes that are not target-specific, only test one subtarget per target when the event is not a push (i.e. for PRs)
57 | if [ ${{ github.event_name }} != 'push' ]; then
58 | if echo "$CHANGED_FILES" | grep -v -q -P ^target/linux/.*/ ||
59 | echo "$CHANGED_FILES" | grep -q target/linux/generic; then
60 | TARGETS_SUBTARGETS=$TARGETS
61 | fi
62 | fi
63 |
64 | JSON_TARGETS_SUBTARGETS='['
65 | FIRST=1
66 | for TARGET_SUBTARGET in $TARGETS_SUBTARGETS; do
67 | TARGET_KERNEL_VER="$(echo "$TARGET_KERNELS" | grep -w $TARGET_SUBTARGET | cut -d " " -f 2)"
68 | TARGET_TESTING_KERNEL_VER="$(echo "$TARGET_KERNELS" | grep -w $TARGET_SUBTARGET | cut -d " " -f 3)"
69 |
70 | TARGET=$(echo $TARGET_SUBTARGET | cut -d "/" -f 1)
71 | SUBTARGET=$(echo $TARGET_SUBTARGET | cut -d "/" -f 2)
72 |
73 | if echo "$CHANGED_FILES" | grep -q target/linux/generic ||
74 | echo "$CHANGED_FILES" | grep -q "package/kernel" ||
75 | echo "$CHANGED_FILES" | grep -q "target/linux/$TARGET"; then
76 |
77 | # test target if kernel version is affected
78 |               # If AFFECTED_KERNEL_VERSIONS is empty, fall back to simple testing (based on changed files only)
79 | if [ -z "${{ env.AFFECTED_KERNEL_VERSIONS }}" ] || echo "${{ env.AFFECTED_KERNEL_VERSIONS }}" | grep -q "$TARGET_KERNEL_VER"; then
80 | TUPLE='{"target":"'"$TARGET"'","subtarget":"'"$SUBTARGET"'"}'
81 | [[ $FIRST -ne 1 ]] && JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS"','
82 | JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS""$TUPLE"
83 | FIRST=0
84 | fi
85 |
86 | # Also test testing kernel version if kernel version is affected
87 | if [ -n "$TARGET_TESTING_KERNEL_VER" ] && echo "${{ env.AFFECTED_KERNEL_VERSIONS }}" | grep -q "$TARGET_TESTING_KERNEL_VER"; then
88 | TUPLE='{"target":"'"$TARGET"'","subtarget":"'"$SUBTARGET"'","testing":"'"$TARGET_TESTING_KERNEL_VER"'"}'
89 | [[ $FIRST -ne 1 ]] && JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS"','
90 | JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS""$TUPLE"
91 | FIRST=0
92 | fi
93 | fi
94 | done
95 | JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS"']'
96 |
97 | JSON_TARGETS='['
98 | FIRST=1
99 | for TARGET_SUBTARGET in $TARGETS; do
100 | TARGET_KERNEL_VER="$(echo "$TARGET_KERNELS" | grep -w $TARGET_SUBTARGET | cut -d " " -f 2)"
101 | TARGET_TESTING_KERNEL_VER="$(echo "$TARGET_KERNELS" | grep -w $TARGET_SUBTARGET | cut -d " " -f 3)"
102 |
103 | TARGET=$(echo $TARGET_SUBTARGET | cut -d "/" -f 1)
104 | SUBTARGET=$(echo $TARGET_SUBTARGET | cut -d "/" -f 2)
105 |
106 | if echo "$CHANGED_FILES" | grep -q target/linux/generic ||
107 | echo "$CHANGED_FILES" | grep -q "package/kernel" ||
108 | echo "$CHANGED_FILES" | grep -q "target/linux/$TARGET"; then
109 |
110 | # test target if kernel version is affected
111 |               # If AFFECTED_KERNEL_VERSIONS is empty, fall back to simple testing (based on changed files only)
112 | if [ -z "${{ env.AFFECTED_KERNEL_VERSIONS }}" ] || echo "${{ env.AFFECTED_KERNEL_VERSIONS }}" | grep -q "$TARGET_KERNEL_VER"; then
113 | TUPLE='{"target":"'"$TARGET"'","subtarget":"'"$SUBTARGET"'"}'
114 | [[ $FIRST -ne 1 ]] && JSON_TARGETS="$JSON_TARGETS"','
115 | JSON_TARGETS="$JSON_TARGETS""$TUPLE"
116 | FIRST=0
117 | fi
118 |
119 | # Also test testing kernel version if kernel version is affected
120 | if [ -n "$TARGET_TESTING_KERNEL_VER" ] && echo "${{ env.AFFECTED_KERNEL_VERSIONS }}" | grep -q "$TARGET_TESTING_KERNEL_VER"; then
121 | TUPLE='{"target":"'"$TARGET"'","subtarget":"'"$SUBTARGET"'","testing":"'"$TARGET_TESTING_KERNEL_VER"'"}'
122 | [[ $FIRST -ne 1 ]] && JSON_TARGETS="$JSON_TARGETS"','
123 | JSON_TARGETS="$JSON_TARGETS""$TUPLE"
124 | FIRST=0
125 | fi
126 | fi
127 | done
128 | JSON_TARGETS="$JSON_TARGETS"']'
129 |
130 | echo -e "\n---- targets to build ----\n"
131 | echo "$JSON_TARGETS_SUBTARGETS"
132 | echo -e "\n---- targets to build ----\n"
133 |
134 | echo -e "\n---- targets to check patch ----\n"
135 | echo "$JSON_TARGETS"
136 | echo -e "\n---- targets to check patch ----\n"
137 |
138 | echo "targets_subtargets=$JSON_TARGETS_SUBTARGETS" >> $GITHUB_OUTPUT
139 | echo "targets=$JSON_TARGETS" >> $GITHUB_OUTPUT
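    |           # Illustrative shape (hypothetical targets): targets_subtargets may look like
    |           # [{"target":"ath79","subtarget":"generic"},{"target":"ath79","subtarget":"nand"}]
    |           # and is consumed via fromJson() by the matrix of the build job below.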
140 |
141 | determine_changed_packages:
142 | name: Determine Changed Packages
143 | needs: determine_changed_files
144 | uses: ./.github/workflows/reusable_determine_changed_packages.yml
145 | with:
146 | all_changed_files: ${{ needs.determine_changed_files.outputs.all_changed_files }}
147 |
148 | build:
149 | name: Build Kernel with external toolchain
150 | needs: [ determine_targets, determine_changed_packages ]
151 | permissions:
152 | contents: read
153 | packages: read
154 | actions: write
155 | strategy:
156 | fail-fast: False
157 | matrix:
158 | include: ${{fromJson(needs.determine_targets.outputs.targets_subtargets)}}
159 | uses: ./.github/workflows/reusable_build.yml
160 | with:
161 | container_name: toolchain
162 | target: ${{ matrix.target }}
163 | subtarget: ${{ matrix.subtarget }}
164 | testing: ${{ matrix.testing != '' }}
165 | build_kernel: true
166 | build_all_kmods: true
167 | build_dtb: true
168 | upload_ccache_cache: ${{ github.repository_owner == 'openwrt' }}
169 | check_packages_list: ${{ needs.determine_changed_packages.outputs.changed_packages }}
170 |
171 | check-kernel-patches:
172 | name: Check Kernel patches
173 | needs: determine_targets
174 | permissions:
175 | contents: read
176 | packages: read
177 | strategy:
178 | fail-fast: False
179 | matrix:
180 | include: ${{fromJson(needs.determine_targets.outputs.targets)}}
181 | uses: ./.github/workflows/reusable_check-kernel-patches.yml
182 | with:
183 | target: ${{ matrix.target }}
184 | subtarget: ${{ matrix.subtarget }}
185 | testing: ${{ matrix.testing != '' }}
186 |
187 | upload-ccache-cache-in-s3:
188 | if: github.event_name == 'push' && github.repository_owner == 'openwrt'
189 | name: Upload ccache cache to s3
190 | needs: [determine_targets, build]
191 | strategy:
192 | fail-fast: False
193 | matrix:
194 | include: ${{fromJson(needs.determine_targets.outputs.targets_subtargets)}}
195 | secrets:
196 | s3_endpoint: ${{ secrets.ccache_s3_endpoint }}
197 | s3_bucket: ${{ secrets.ccache_s3_bucket }}
198 | s3_access_key: ${{ secrets.ccache_s3_access_key }}
199 | s3_secret_key: ${{ secrets.ccache_s3_secret_key }}
200 | uses: ./.github/workflows/reusable_upload-file-s3.yml
201 | with:
202 | download_id: ${{ matrix.target }}-${{ matrix.subtarget }}${{ matrix.testing != '' && '-testing' || '' }}-ccache-cache
203 | filename: ccache-kernel-${{ matrix.target }}-${{ matrix.subtarget }}${{ matrix.testing != '' && '-testing' || '' }}${{ needs.build.outputs.ccache_tag }}.tar
204 |
--------------------------------------------------------------------------------
/.github/scripts/check_formalities.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Based on https://openwrt.org/submitting-patches#submission_guidelines
4 | # Hard limit is arbitrary
5 | MAX_SUBJECT_LEN_HARD=60
6 | MAX_SUBJECT_LEN_SOFT=50
7 | MAX_BODY_LINE_LEN=75
8 |
9 | DEPENDABOT_EMAIL="dependabot[bot]@users.noreply.github.com"
10 | GITHUB_NOREPLY_EMAIL='@users.noreply.github.com'
11 | WEBLATE_EMAIL=''
12 |
13 | EMOJI_WARN=':large_orange_diamond:'
14 | EMOJI_FAIL=':x:'
15 |
16 | RET=0
17 |
18 | REPO_PATH=${1:+-C "$1"}
19 | # shellcheck disable=SC2206
20 | REPO_PATH=($REPO_PATH)
21 |
22 | if [ -f 'workflow_context/.github/scripts/ci_helpers.sh' ]; then
23 | source workflow_context/.github/scripts/ci_helpers.sh
24 | else
25 | source .github/scripts/ci_helpers.sh
26 | fi
27 |
28 | # Use these global vars to improve header creation readability
29 | COMMIT=""
30 | HEADER_SET=0
31 |
32 | output() {
33 | [ -f "$GITHUB_OUTPUT" ] || return
34 |
35 | echo "$1" >> "$GITHUB_OUTPUT"
36 | }
37 |
38 | output_header() {
39 | [ "$HEADER_SET" = 0 ] || return
40 |
41 | [ -f "$GITHUB_OUTPUT" ] || return
42 |
43 | cat >> "$GITHUB_OUTPUT" <<-HEADER
44 |
45 | ### Commit $COMMIT
46 |
47 | HEADER
48 |
49 | HEADER_SET=1
50 | }
51 |
52 | output_warn() {
53 | output_header
54 | output "- $EMOJI_WARN $1"
55 | status_warn "$1"
56 | }
57 |
58 | output_fail_raw() {
59 | output_header
60 | output "$1"
61 | status_fail "$1"
62 | }
63 |
64 | output_fail() {
65 | output_header
66 | output "- $EMOJI_FAIL $1"
67 | status_fail "$1"
68 | }
69 |
70 | is_stable_branch() {
71 | [ "$1" != "main" ] && [ "$1" != "master" ]
72 | }
73 |
74 | is_dependabot() {
75 | echo "$1" | grep -iqF "$DEPENDABOT_EMAIL"
76 | }
77 |
78 | is_weblate() {
79 | echo "$1" | grep -iqF "$WEBLATE_EMAIL"
80 | }
81 |
82 | exclude_dependabot() {
83 | [ "$EXCLUDE_DEPENDABOT" = 'true' ]
84 | }
85 |
86 | exclude_weblate() {
87 | [ "$EXCLUDE_WEBLATE" = 'true' ]
88 | }
89 |
90 | check_name() {
91 | local type="$1"
92 | local name="$2"
93 | local email="$3"
94 |
95 | if exclude_dependabot && is_dependabot "$email"; then
96 | status_warn "$type email exception: authored by dependabot"
97 | elif exclude_weblate && is_weblate "$email"; then
98 | status_warn "$type email exception: authored by Weblate"
99 |     # Pattern \S\+\s\+\S\+ matches two or more names, so e.g. "John Von
100 |     # Doe" also matches
101 | elif echo "$name" | grep -q '\S\+\s\+\S\+'; then
102 | status_pass "$type name ($name) seems OK"
103 | # Pattern \S\+ matches single names, typical of nicknames or handles
104 | elif echo "$name" | grep -q '\S\+'; then
105 | output_warn "$type name ($name) seems to be a nickname or an alias"
106 | else
107 | output_fail "$type name ($name) must be one of:"
108 | output_fail_raw " - real name 'firstname lastname'"
109 | output_fail_raw ' - nickname/alias/handle'
110 | RET=1
111 | fi
112 | }
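    | # Illustrative examples (hypothetical names): "John Doe" passes as a real name,
    | # "jdoe" only triggers a nickname warning, and an empty name fails the check.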
113 |
114 | check_email() {
115 | local type="$1"
116 | local email="$2"
117 |
118 | if exclude_dependabot && is_dependabot "$email"; then
119 | status_warn "$type email exception: authored by dependabot"
120 | elif exclude_weblate && is_weblate "$email"; then
121 | status_warn "$type email exception: authored by Weblate"
122 | elif echo "$email" | grep -qF "$GITHUB_NOREPLY_EMAIL"; then
123 | output_fail "$type email cannot be a GitHub noreply email"
124 | RET=1
125 | else
126 | status_pass "$type email is not a GitHub noreply email"
127 | fi
128 | }
129 |
130 | check_subject() {
131 | local subject="$1"
132 | local author_email="$2"
133 |
134 | # Check subject format
135 | if exclude_dependabot && is_dependabot "$author_email"; then
136 | status_warn 'Commit subject line exception: authored by dependabot'
137 | elif exclude_weblate && is_weblate "$author_email"; then
138 | status_warn 'Commit subject line exception: authored by Weblate'
139 | elif echo "$subject" | grep -qE -e '^([0-9A-Za-z,+/._-]+: )+[a-z]' -e '^Revert '; then
140 | status_pass 'Commit subject line format seems OK'
141 | elif echo "$subject" | grep -qE -e '^([0-9A-Za-z,+/._-]+: )+[A-Z]'; then
142 | output_fail 'First word after prefix in subject should not be capitalized'
143 | RET=1
144 | elif echo "$subject" | grep -qE -e '^([0-9A-Za-z,+/._-]+: )+'; then
145 | # Handles cases when there's a prefix but the check for capitalization
146 | # fails (e.g. no word after prefix)
147 |         output_fail 'Commit subject line MUST start with `<area>: ` and be followed by a lower-case word'
148 | RET=1
149 | else
150 |         output_fail 'Commit subject line MUST start with `<area>: `'
151 | RET=1
152 | fi
153 |
154 | if echo "$subject" | grep -q '\.$'; then
155 | output_fail 'Commit subject line should not end with a period'
156 | RET=1
157 | fi
158 |
159 | # Don't append to the workflow output, since these are more of internal
160 | # warnings.
161 | if exclude_dependabot && is_dependabot "$author_email"; then
162 | status_warn 'Commit subject line length exception: authored by dependabot'
163 | return
164 | elif exclude_weblate && is_weblate "$author_email"; then
165 | status_warn 'Commit subject line length exception: authored by Weblate'
166 | return
167 | fi
168 |
169 | # Check subject length first for hard limit which results in an error and
170 | # otherwise for a soft limit which results in a warning. Show soft limit in
171 | # either case.
172 | local msg="Commit subject length: recommended max $MAX_SUBJECT_LEN_SOFT, required max $MAX_SUBJECT_LEN_HARD characters"
173 | if [ ${#subject} -gt "$MAX_SUBJECT_LEN_HARD" ]; then
174 | output_fail "$msg"
175 | split_fail "$MAX_SUBJECT_LEN_SOFT" "$subject"
176 | RET=1
177 | elif [ ${#subject} -gt "$MAX_SUBJECT_LEN_SOFT" ]; then
178 | output_warn "$msg"
179 | split_fail "$MAX_SUBJECT_LEN_SOFT" "$subject"
180 | else
181 | status_pass "$msg"
182 | fi
183 | }
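    | # Illustrative example (hypothetical subject): "ath79: add support for Foo Bar"
    | # passes the format check (prefix ending in ": " followed by a lower-case word)
    | # and stays well under both length limits.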
184 |
185 | check_body() {
186 | local body="$1"
187 | local sob="$2"
188 | local author_email="$3"
189 |
190 | # Check body line lengths
191 | if ! { exclude_weblate && is_weblate "$author_email"; } && ! { exclude_dependabot && is_dependabot "$author_email"; }; then
192 | body_line_too_long=0
193 | line_num=0
194 | while IFS= read -r line; do
195 | line_num=$((line_num + 1))
196 | if [ ${#line} -gt "$MAX_BODY_LINE_LEN" ]; then
197 | output_warn "Commit body line $line_num is longer than $MAX_BODY_LINE_LEN characters (is ${#line}):"
198 | output " $line"
199 | split_fail "$MAX_BODY_LINE_LEN" "$line"
200 | body_line_too_long=1
201 | fi
202 | done <<< "$body"
203 | if [ "$body_line_too_long" = 0 ]; then
204 | status_pass "Commit body lines are $MAX_BODY_LINE_LEN characters or less"
205 | fi
206 | else
207 | if exclude_dependabot && is_dependabot "$author_email"; then
208 | status_warn 'Commit body line length exception: authored by dependabot'
209 | elif exclude_weblate && is_weblate "$author_email"; then
210 | status_warn 'Commit body line length exception: authored by Weblate'
211 | fi
212 | fi
213 |
214 | if echo "$body" | grep -qF "$sob"; then
215 | status_pass '`Signed-off-by` matches author'
216 |
217 | # Don't append to the workflow output, since these are more of internal
218 | # warnings.
219 | elif exclude_dependabot && is_dependabot "$author_email"; then
220 | status_warn '`Signed-off-by` exception: authored by dependabot'
221 | elif exclude_weblate && is_weblate "$author_email"; then
222 | status_warn '`Signed-off-by` exception: authored by Weblate'
223 |
224 | else
225 | output_fail "\`Signed-off-by\` is missing or doesn't match author (should be \`$sob\`)"
226 | RET=1
227 | fi
228 |
229 | if ! ( exclude_dependabot && is_dependabot "$author_email" ) && ! ( exclude_weblate && is_weblate "$author_email" ); then
230 | if echo "$body" | grep -qF "$GITHUB_NOREPLY_EMAIL"; then
231 | output_fail '`Signed-off-by` email cannot be a GitHub noreply email'
232 | RET=1
233 | else
234 | status_pass '`Signed-off-by` email is not a GitHub noreply email'
235 | fi
236 | fi
237 |
238 | if echo "$body" | grep -v "Signed-off-by:" | grep -qv '^[[:space:]]*$'; then
239 | status_pass 'A commit message exists'
240 | else
241 | output_fail 'Commit message is missing. Please describe your changes.'
242 | RET=1
243 | fi
244 |
245 | if is_stable_branch "$BRANCH"; then
246 | if echo "$body" | grep -qF "(cherry picked from commit"; then
247 | status_pass "Commit is marked as cherry-picked"
248 | else
249 |             output_warn "Commit to stable branch \`$BRANCH\` should be cherry-picked"
250 | fi
251 | fi
252 | }
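    | # Illustrative example (hypothetical author): for "Jane Doe <jane@example.com>"
    | # the body must contain the exact line
    | # "Signed-off-by: Jane Doe <jane@example.com>" to pass the sign-off check.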
253 |
254 | main() {
255 | local author_email
256 | local author_name
257 | local body
258 | local commit
259 | local committer_name
260 | local subject
261 |
262 | # Initialize GitHub actions output
263 | output 'content<' "$commit")"
297 | committer_email="$(git "${REPO_PATH[@]}" show -s --format='<%cE>' "$commit")"
298 | check_name 'Author' "$author_name" "$author_email"
299 | check_email 'Author' "$author_email"
300 | check_name 'Committer' "$committer_name" "$committer_email"
301 | check_email 'Committer' "$committer_email"
302 |
303 | subject="$(git "${REPO_PATH[@]}" show -s --format=%s "$commit")"
304 | echo
305 | info 'Checking subject:'
306 | echo "$subject"
307 | check_subject "$subject" "$author_email"
308 |
309 | body="$(git "${REPO_PATH[@]}" show -s --format=%b "$commit")"
310 | sob="$(git "${REPO_PATH[@]}" show -s --format='Signed-off-by: %aN <%aE>' "$commit")"
311 | echo
312 | info 'Checking body:'
313 | echo "$body"
314 | echo
315 | check_body "$body" "$sob" "$author_email"
316 |
317 | info "=== Done checking commit '$commit'"
318 | echo
319 | done
320 |
321 | output 'EOF'
322 |
323 | exit $RET
324 | }
325 |
326 | main
327 |
--------------------------------------------------------------------------------
/.github/workflows/issue-labeller.yml:
--------------------------------------------------------------------------------
1 | name: Issue Labeller
2 |
3 | on:
4 | workflow_call:
5 |
6 | jobs:
7 | check-type:
8 | name: Parse Issue type
9 | runs-on: ubuntu-slim
10 |
11 | outputs:
12 | issue_type: ${{ steps.parse_labels.outputs.type }}
13 |
14 | steps:
15 | - name: Parse label from event
16 | id: parse_labels
17 | env:
18 | ISSUE_LABELS: ${{ toJSON(github.event.issue.labels) }}
19 | run: |
20 | labels="$(echo "$ISSUE_LABELS" | jq '.[] | .name' | tr -d '"')"
21 |
22 | # Exit if nothing to triage
23 | echo "$labels" | grep -q "to-triage" || exit 0
24 |
25 | for label in $labels; do
26 | if [ $label = "to-triage" ] || [ $label = "bug" ]; then
27 | continue
28 | fi
29 |
30 | # Stop at the first kind
31 | echo "type=$label" >> $GITHUB_OUTPUT
32 | break
33 | done
34 |
35 | triage-bug-report:
36 | name: Validate and Tag Bug Report
37 | needs: check-type
38 | if: needs.check-type.outputs.issue_type == 'bug-report'
39 | runs-on: ubuntu-slim
40 |
41 | permissions:
42 | issues: write
43 |
44 | steps:
45 | - name: Checkout main
46 | uses: actions/checkout@v6
47 | with:
48 | fetch-depth: 0
49 | show-progress: false
50 |
51 | - name: Parse issue form
52 | uses: Ansuel/github-issue-parser@v3
53 | id: issue-parser
54 | with:
55 | template-path: .github/ISSUE_TEMPLATE/bug-report.yml
56 |
57 | - name: Validate Release
58 | id: check_release
59 | env:
60 | RELEASE: ${{ steps.issue-parser.outputs.issueparser_release }}
61 | run: |
62 |           # Make sure this is a real release: SNAPSHOT, [0-9]+.[0-9]+-SNAPSHOT or [0-9]+.[0-9]+.[0-9](-rc[0-9]+)
63 | [ -z "$(echo "${{ env.RELEASE }}" | grep -Po '^SNAPSHOT$|^[0-9]+\.[0-9]+-SNAPSHOT$|^[0-9]+\.[0-9]+\.[0-9](-rc[0-9]+)*$')" ] && echo "invalid_release=true" >> "$GITHUB_OUTPUT" && exit 0
64 |
65 | release=${{ env.RELEASE }}
66 | # With release we need to add v for tag verification
67 | [ -n "$(echo ${{ env.RELEASE }} | grep -Po '^[0-9]+\.[0-9]+\.[0-9](-rc[0-9]+)*$')" ] && release=v${{ env.RELEASE }}
68 | [ -n "$(echo ${{ env.RELEASE }} | grep -Po '^[0-9]+\.[0-9]+-SNAPSHOT$')" ] && release=openwrt-$(echo ${{ env.RELEASE }} | grep -Po '^[0-9]+\.[0-9]+')
69 |
70 |           # Check if this release exists or is something downstream
71 | [ $release != "SNAPSHOT" ] && [ -z $(echo $release | grep -Po '^openwrt-[0-9]+\.[0-9]+$') ] && ! git show-ref --tags $release --quiet && echo "invalid_release=true" >> "$GITHUB_OUTPUT" && exit 0
72 |
73 | tag_name=${{ env.RELEASE }}
74 | [ $tag_name != "SNAPSHOT" ] && tag_name=release/$(echo ${{ env.RELEASE }} | grep -Po '^[0-9]+\.[0-9]+')
75 |
76 | echo "release=$release" >> "$GITHUB_OUTPUT"
77 | echo "tag_name=$tag_name" >> "$GITHUB_OUTPUT"
78 | echo "Detected Release $release"
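    |           # Illustrative mapping (hypothetical input): RELEASE=23.05.3 gives
    |           # release=v23.05.3 and tag_name=release/23.05, while RELEASE=SNAPSHOT
    |           # keeps both values as SNAPSHOT.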
79 |
80 | - name: Checkout Bug Release
81 | uses: actions/checkout@v6
82 | with:
83 | fetch-depth: 0
84 | show-progress: false
85 | ref: ${{ steps.check_release.outputs.release != 'SNAPSHOT' && steps.check_release.outputs.release || '' }}
86 |
87 | - name: Validate Version
88 | id: check_version
89 | env:
90 | VERSION: ${{ steps.issue-parser.outputs.issueparser_version }}
91 | run: |
92 |           # Make sure this is a real version following the pattern r[0-9]+-[0-9a-z]+
93 | [ -z "$(echo "${{ env.VERSION }}" | grep -Po '^r[0-9]+-[0-9a-z]+$')" ] && echo "invalid_version=true" >> "$GITHUB_OUTPUT" && exit 0
94 |
95 | hash=$(./scripts/getver.sh ${{ env.VERSION }})
96 |
97 |           # Check if this version exists or is something downstream
98 | [ $hash == "unknown" ] && echo "invalid_version=true" >> "$GITHUB_OUTPUT" && exit 0
99 |
100 | echo "Detected Hash $hash"
101 |
102 | - name: Validate Target/Subtarget
103 | id: check_target
104 | env:
105 | TARGET_SUBTARGET: ${{ steps.issue-parser.outputs.issueparser_target }}
106 | run: |
107 | [ -z "$(echo "${{ env.TARGET_SUBTARGET }}" | grep -Po '^[a-zA-Z0-9]+/[a-zA-Z0-9]+$')" ] && echo "invalid_target=true" >> "$GITHUB_OUTPUT" && exit 0
108 |
109 | TARGET=$(echo ${{ env.TARGET_SUBTARGET }} | cut -d "/" -f 1)
110 | SUBTARGET=$(echo ${{ env.TARGET_SUBTARGET }} | cut -d "/" -f 2)
111 |
112 |           # Check if the target exists
113 | [ ! -d target/linux/$TARGET ] && echo "invalid_target=true" >> "$GITHUB_OUTPUT" && exit 0
114 |
115 | SUBTARGETS="$(TOPDIR=$(pwd) make -C target/linux/$TARGET --no-print-directory DUMP=1 TARGET_BUILD=1 val.SUBTARGETS V=s 2>/dev/null)"
116 |
117 | [ -z "$(echo "$SUBTARGETS" | grep "$SUBTARGET")" ] && echo "invalid_target=true" >> "$GITHUB_OUTPUT" && exit 0
118 |
119 | echo "tag_name=target/$TARGET" >> "$GITHUB_OUTPUT"
120 |
121 | echo "Detected target $TARGET is valid"
122 | echo "Detected subtarget $SUBTARGET is valid"
123 |
124 | - name: Validate Device
125 | id: check_device
126 | if: steps.check_target.outputs.invalid_target != 'true'
127 | env:
128 | TARGET_SUBTARGET: ${{ steps.issue-parser.outputs.issueparser_target }}
129 | DEVICE: ${{ steps.issue-parser.outputs.issueparser_device }}
130 | BRANCH: ${{ steps.check_release.outputs.release != 'SNAPSHOT' && steps.check_release.outputs.release || 'main' }}
131 | DUMP_TARGET_INFO_SCRIPT: scripts/dump-target-info.pl
132 | run: |
133 | BRANCH=${{ env.BRANCH }}
134 | [ $BRANCH != "main" ] && [ -z $( echo $BRANCH | grep -Po '^openwrt-[0-9]+\.[0-9]+$' ) ] && BRANCH=openwrt-$(echo ${{ env.BRANCH }} | sed 's/^v\([0-9]\+\.[0-9]\+\)\.[0-9]\(-rc[0-9]\+\)*$/\1/')
135 |
136 | # Checkout upstream version of the DUMP_TARGET_INFO_SCRIPT
137 | git checkout -q origin/$BRANCH ${{ env.DUMP_TARGET_INFO_SCRIPT }}
138 |
139 | DEVICES=$(./${{ env.DUMP_TARGET_INFO_SCRIPT }} devices ${{ env.TARGET_SUBTARGET }} 2>/dev/null)
140 |
141 | [ -z "$(echo $DEVICES | grep -P '(?> "$GITHUB_OUTPUT" && exit 0
142 |
143 | echo "Detected model ${{ env.DEVICE }} is valid"
144 |
145 | - name: Post Invalid Version
146 | if: steps.check_version.outputs.invalid_version == 'true'
147 | uses: octokit/request-action@v2.x
148 | with:
149 | route: POST /repos/{repository}/issues/{issue_number}/comments
150 | body: ${{ toJSON(env.REQUEST_BODY) }}
151 | env:
152 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
153 | INPUT_REPOSITORY: ${{ github.repository }}
154 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
155 | REQUEST_BODY: |
156 | Invalid Version reported. `${{ steps.issue-parser.outputs.issueparser_version }}`
157 | Is this from a clean repository?
158 |
159 | - name: Post Invalid Release
160 | if: steps.check_release.outputs.invalid_release == 'true'
161 | uses: octokit/request-action@v2.x
162 | with:
163 | route: POST /repos/{repository}/issues/{issue_number}/comments
164 | body: ${{ toJSON(env.REQUEST_BODY) }}
165 | env:
166 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
167 | INPUT_REPOSITORY: ${{ github.repository }}
168 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
169 | REQUEST_BODY: |
170 | Invalid Release reported. `${{ steps.issue-parser.outputs.issueparser_release }}`
171 | Is this from a clean repository?
172 |
173 | - name: Post Invalid Target/Subtarget
174 | if: steps.check_target.outputs.invalid_target == 'true'
175 | uses: octokit/request-action@v2.x
176 | with:
177 | route: POST /repos/{repository}/issues/{issue_number}/comments
178 | body: ${{ toJSON(env.REQUEST_BODY) }}
179 | env:
180 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
181 | INPUT_REPOSITORY: ${{ github.repository }}
182 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
183 | REQUEST_BODY: |
184 | Invalid Target/Subtarget reported. `${{ steps.issue-parser.outputs.issueparser_target }}`
185 | Is this from a supported device?
186 |
187 |       # Disabled for now: there seems to be an inconsistency between the model name set in DT
188 |       # and the model name set in image.mk
189 | # - name: Post Invalid Model
190 | # if: steps.check_device.outputs.invalid_device == 'true'
191 | # uses: octokit/request-action@v2.x
192 | # with:
193 | # route: POST /repos/{repository}/issues/{issue_number}/comments
194 | # body: ${{ toJSON(env.REQUEST_BODY) }}
195 | # env:
196 | # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
197 | # INPUT_REPOSITORY: ${{ github.repository }}
198 | # INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
199 | # REQUEST_BODY: |
200 | # Invalid Device reported. `${{ steps.issue-parser.outputs.issueparser_device }}`
201 | # Is this a supported model?
202 |
203 | - name: Add Release tag
204 | if: steps.check_version.outputs.invalid_version != 'true' && steps.check_release.outputs.invalid_release != 'true' && steps.check_target.outputs.invalid_target != 'true'
205 | uses: octokit/request-action@v2.x
206 | with:
207 | route: POST /repos/{repository}/issues/{issue_number}/labels
208 | labels: ${{ env.REQUEST_BODY }}
209 | env:
210 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
211 | INPUT_REPOSITORY: ${{ github.repository }}
212 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
213 | REQUEST_BODY: |
214 | ["${{ steps.check_release.outputs.tag_name }}"]
215 |
216 | - name: Add Target/Subtarget tag
217 | if: steps.check_version.outputs.invalid_version != 'true' && steps.check_release.outputs.invalid_release != 'true' && steps.check_target.outputs.invalid_target != 'true'
218 | uses: octokit/request-action@v2.x
219 | with:
220 | route: POST /repos/{repository}/issues/{issue_number}/labels
221 | labels: ${{ env.REQUEST_BODY }}
222 | env:
223 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
224 | INPUT_REPOSITORY: ${{ github.repository }}
225 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
226 | REQUEST_BODY: |
227 | ["${{ steps.check_target.outputs.tag_name }}"]
228 |
229 | - name: Add tag Image Kind
230 | if: steps.check_version.outputs.invalid_version != 'true' && steps.check_release.outputs.invalid_release != 'true' && steps.check_target.outputs.invalid_target != 'true'
231 | uses: octokit/request-action@v2.x
232 | with:
233 | route: POST /repos/{repository}/issues/{issue_number}/labels
234 | labels: ${{ env.REQUEST_BODY }}
235 | env:
236 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
237 | INPUT_REPOSITORY: ${{ github.repository }}
238 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
239 | REQUEST_BODY: |
240 | ["${{ steps.issue-parser.outputs.issueparser_image_kind == 'Official downloaded image' && 'Official Image' || 'Self Built Image' }}"]
241 |
242 | - name: Add tag Supported Device
243 | if: steps.check_version.outputs.invalid_version != 'true' && steps.check_release.outputs.invalid_release != 'true' && steps.check_target.outputs.invalid_target != 'true' && steps.check_device.outputs.invalid_device != 'true'
244 | uses: octokit/request-action@v2.x
245 | with:
246 | route: POST /repos/{repository}/issues/{issue_number}/labels
247 | labels: ${{ env.REQUEST_BODY }}
248 | env:
249 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
250 | INPUT_REPOSITORY: ${{ github.repository }}
251 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
252 | REQUEST_BODY: |
253 | ["Supported Device"]
254 |
255 | - name: Add Invalid Tag
256 | if: steps.check_version.outputs.invalid_version == 'true' || steps.check_release.outputs.invalid_release == 'true' || steps.check_target.outputs.invalid_target == 'true'
257 | uses: octokit/request-action@v2.x
258 | with:
259 | route: POST /repos/{repository}/issues/{issue_number}/labels
260 | labels: ${{ env.REQUEST_BODY }}
261 | env:
262 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
263 | INPUT_REPOSITORY: ${{ github.repository }}
264 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
265 | REQUEST_BODY: |
266 | ["invalid"]
267 |
268 | remove-labels:
269 | name: Remove Issue Labels
270 | needs: [ check-type, triage-bug-report ]
271 | runs-on: ubuntu-slim
272 |
273 | permissions:
274 | issues: write
275 |
276 | steps:
277 | - name: Remove tag to-triage
278 | uses: octokit/request-action@v2.x
279 | with:
280 | route: DELETE /repos/{repository}/issues/{issue_number}/labels/{issue_label}
281 | env:
282 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
283 | INPUT_REPOSITORY: ${{ github.repository }}
284 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
285 | INPUT_ISSUE_LABEL: to-triage
286 |
287 | - name: Remove tag issue type
288 | uses: octokit/request-action@v2.x
289 | with:
290 | route: DELETE /repos/{repository}/issues/{issue_number}/labels/{issue_label}
291 | env:
292 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
293 | INPUT_REPOSITORY: ${{ github.repository }}
294 | INPUT_ISSUE_NUMBER: ${{ github.event.issue.number }}
295 | INPUT_ISSUE_LABEL: ${{ needs.check-type.outputs.issue_type }}
296 |
--------------------------------------------------------------------------------
/.github/workflows/reusable_build.yml:
--------------------------------------------------------------------------------
1 | name: Build sub target
2 |
3 | on:
4 | workflow_call:
5 | secrets:
6 | coverity_api_token:
7 | outputs:
8 | ccache_tag:
9 | value: ${{ jobs.setup_build.outputs.ccache_tag }}
10 | inputs:
11 | container_name:
12 | type: string
13 | default: tools
14 | target:
15 | required: true
16 | type: string
17 | subtarget:
18 | required: true
19 | type: string
20 | testing:
21 | type: boolean
22 | build_toolchain:
23 | type: boolean
24 | include_feeds:
25 | type: boolean
26 | build_full:
27 | type: boolean
28 | build_kernel:
29 | type: boolean
30 | build_all_modules:
31 | type: boolean
32 | build_all_kmods:
33 | type: boolean
34 | build_dtb:
35 | type: boolean
36 | build_all_boards:
37 | type: boolean
38 | use_openwrt_container:
39 | type: boolean
40 | default: true
41 | coverity_project_name:
42 | type: string
43 | default: OpenWrt
44 | coverity_check_packages:
45 | type: string
46 | coverity_compiler_template_list:
47 | type: string
48 | default: >-
49 | arm-openwrt-linux-gcc
50 | coverity_force_compile_packages:
51 | type: string
52 | default: >-
53 | curl
54 | libnl
55 | mbedtls
56 | wolfssl
57 | openssl
58 | build_external_toolchain:
59 | type: boolean
60 | upload_external_toolchain:
61 | type: boolean
62 | use_ccache_cache:
63 | type: boolean
64 | default: true
65 | ccache_type:
66 | type: string
67 | default: kernel
68 | upload_ccache_cache:
69 | type: boolean
70 | check:
71 | type: boolean
72 | default: true
73 | check_packages_list:
74 | type: string
75 |
76 | permissions:
77 | contents: read
78 |
79 | jobs:
80 | setup_build:
81 | name: Set up build ${{ inputs.target }}/${{ inputs.subtarget }}
82 | runs-on: ubuntu-slim
83 | outputs:
84 | container: ${{ steps.determine_container.outputs.container }}
85 | ccache_tag: ${{ steps.determine_ccache_tag.outputs.ccache_tag }}
86 | ccache_name: ${{ steps.determine_ccache_name.outputs.ccache_name }}
87 |
88 | steps:
89 | - name: Checkout
90 | uses: actions/checkout@v6
91 |
92 | - name: Set lower case owner name
93 | id: lower_owner
94 | run: |
95 | OWNER_LC=$(echo "${{ github.repository_owner }}" \
96 | | tr '[:upper:]' '[:lower:]')
97 |
98 | if [ ${{ inputs.use_openwrt_container }} == "true" ]; then
99 | OWNER_LC=openwrt
100 | fi
101 |
102 | echo "owner_lc=$OWNER_LC" >> $GITHUB_OUTPUT
103 |
104 | - name: Determine base branch tag
105 | id: determine_base_branch
106 | run: |
107 | BASE_BRANCH=main
108 | if [ -n "${{ github.base_ref }}" ]; then
109 | if echo "${{ github.base_ref }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]$'; then
110 | BASE_BRANCH="${{ github.base_ref }}"
111 | fi
112 | elif [ ${{ github.ref_type }} == "branch" ]; then
113 | if echo "${{ github.ref_name }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]$'; then
114 | BASE_BRANCH=${{ github.ref_name }}
115 | elif echo "${{ github.ref_name }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]-'; then
116 | BASE_BRANCH="$(echo ${{ github.ref_name }} | sed 's/^\(openwrt-[0-9][0-9]\.[0-9][0-9]\)-.*/\1/')"
117 | fi
118 | elif [ ${{ github.ref_type }} == "tag" ]; then
119 | if echo "${{ github.ref_name }}" | grep -q -E '^v[0-9][0-9]\.[0-9][0-9]\..+'; then
120 | BASE_BRANCH=openwrt-"$(echo ${{ github.ref_name }} | sed 's/^v\([0-9][0-9]\.[0-9][0-9]\)\..\+/\1/')"
121 | fi
122 | fi
123 |
124 | echo "Detected base branch as $BASE_BRANCH"
125 | echo "base_branch=$BASE_BRANCH" >> $GITHUB_OUTPUT
126 |
127 |       # Per-branch tools container tag
128 |       # By default stick to latest
129 |       # For official tests targeting an openwrt stable branch,
130 |       # get the branch or parse the tag and push dedicated tools containers
131 |       # For local tests to use the correct container for stable release testing,
132 |       # the branch name needs a prefix of openwrt-[0-9][0-9].[0-9][0-9]-
133 | - name: Determine container name
134 | id: determine_container_name
135 | env:
136 | CONTAINER_TAG: ${{ steps.determine_base_branch.outputs.base_branch != 'main' && steps.determine_base_branch.outputs.base_branch || 'latest' }}
137 | run: |
138 | CONTAINER_NAME=${{ inputs.container_name }}
139 | CONTAINER_TAG=${{ env.CONTAINER_TAG }}
140 |
141 | if [ "$CONTAINER_NAME" = "toolchain" ]; then
142 | GHCR_TOKEN=$(echo ${{ secrets.GITHUB_TOKEN }} | base64)
143 | GHCR_HEADER="Authorization: Bearer ${GHCR_TOKEN}"
144 | GHCR_MANIFEST_LINK=https://ghcr.io/v2/${{ steps.lower_owner.outputs.owner_lc }}/${{ inputs.container_name }}/manifests/${{ inputs.target }}-${{ inputs.subtarget }}-"$CONTAINER_TAG"
145 |           # Check if the container exists
146 | if [ $(curl -s -o /dev/null -w "%{http_code}" -H "$GHCR_HEADER" -I "$GHCR_MANIFEST_LINK") = 200 ]; then
147 | CONTAINER_TAG=${{ inputs.target }}-${{ inputs.subtarget }}-"$CONTAINER_TAG"
148 | else
149 | CONTAINER_NAME=tools
150 | fi
151 | fi
152 |
153 | echo "Tools container to use $CONTAINER_NAME:$CONTAINER_TAG"
154 | echo "container_name=$CONTAINER_NAME:$CONTAINER_TAG" >> $GITHUB_OUTPUT
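    |           # Illustrative result (hypothetical inputs): with container_name=toolchain,
    |           # target=ath79, subtarget=generic and base branch openwrt-24.10 this resolves
    |           # to toolchain:ath79-generic-openwrt-24.10 if that manifest exists on ghcr.io,
    |           # and falls back to tools:openwrt-24.10 otherwise.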
155 |
156 | - name: Determine container
157 | id: determine_container
158 | run: echo "container=${{ steps.lower_owner.outputs.owner_lc }}/${{ steps.determine_container_name.outputs.container_name }}" >> $GITHUB_OUTPUT
159 |
160 | - name: Determine ccache tag
161 | id: determine_ccache_tag
162 | if: steps.determine_base_branch.outputs.base_branch != 'main'
163 | run: echo "ccache_tag=${{ format('-{0}', steps.determine_base_branch.outputs.base_branch) }}" >> $GITHUB_OUTPUT
164 |
165 | - name: Determine ccache name
166 | id: determine_ccache_name
167 | run: echo "ccache_name=ccache-${{ inputs.ccache_type }}-${{ inputs.target }}-${{ inputs.subtarget }}${{ inputs.testing == true && '-testing' || '' }}${{ steps.determine_ccache_tag.outputs.ccache_tag }}" >> $GITHUB_OUTPUT
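    |       # Illustrative result (hypothetical inputs): ccache_type=kernel, target=ath79,
    |       # subtarget=generic and base branch openwrt-24.10 yield
    |       # ccache_name=ccache-kernel-ath79-generic-openwrt-24.10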
168 |
169 | check:
170 | name: Check packages for ${{ inputs.target }}/${{ inputs.subtarget }}
171 | needs: setup_build
172 | if: inputs.check == true && ( github.event_name == 'push' || inputs.check_packages_list != '' )
173 | runs-on: ubuntu-latest
174 |
175 | container: ghcr.io/${{ needs.setup_build.outputs.container }}
176 |
177 | permissions:
178 | contents: read
179 | packages: read
180 |
181 | steps:
182 | - name: Checkout master directory
183 | uses: actions/checkout@v6
184 | with:
185 | path: openwrt
186 |
187 | - name: Fix permission
188 | run: |
189 | chown -R buildbot:buildbot openwrt
190 |
191 | - name: Prepare prebuilt tools
192 | shell: su buildbot -c "sh -e {0}"
193 | working-directory: openwrt
194 | run: |
195 | mkdir -p staging_dir build_dir
196 | ln -s /prebuilt_tools/staging_dir/host staging_dir/host
197 | ln -s /prebuilt_tools/build_dir/host build_dir/host
198 |
199 | ./scripts/ext-tools.sh --refresh
200 |
201 | - name: Configure all modules
202 | shell: su buildbot -c "sh -e {0}"
203 | working-directory: openwrt
204 | run: |
205 | echo CONFIG_ALL=y >> .config
206 |
207 | echo CONFIG_TARGET_MULTI_PROFILE=y >> .config
208 | echo CONFIG_TARGET_PER_DEVICE_ROOTFS=y >> .config
209 | echo CONFIG_TARGET_ALL_PROFILES=y >> .config
210 |
211 | echo CONFIG_DEVEL=y >> .config
212 | echo CONFIG_AUTOREMOVE=y >> .config
213 |
214 | echo "CONFIG_TARGET_${{ inputs.target }}=y" >> .config
215 | echo "CONFIG_TARGET_${{ inputs.target }}_${{ inputs.subtarget }}=y" >> .config
216 |
217 | make defconfig
218 |
219 | - name: Compile needed host tools
220 | shell: su buildbot -c "sh -e {0}"
221 | working-directory: openwrt
222 | run: make tools/tar/compile -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
223 |
224 | - name: Download and check toolchain
225 | if: inputs.build_toolchain == true
226 | shell: su buildbot -c "sh -e {0}"
227 | working-directory: openwrt
228 | run: make toolchain/download toolchain/check FIXUP=1 -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
229 |
230 | - name: Download and check packages
231 | if: inputs.build_all_modules == true || inputs.build_all_kmods == true || inputs.build_full == true
232 | shell: su buildbot -c "sh -e {0}"
233 | working-directory: openwrt
234 | run: |
235 |           # With push events, or with check_packages_list set to "all", check all packages
236 | if [ "${{ github.event_name }}" = "push" ] || [ "${{ inputs.check_packages_list }}" = "all" ]; then
237 | make package/download package/check FIXUP=1 -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
238 |           # With every other event, check only the changed packages (if provided)
239 | elif [ -n "${{ inputs.check_packages_list }}" ]; then
240 | for package in ${{ inputs.check_packages_list }}; do
241 | make package/$package/download package/$package/check FIXUP=1 -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
242 | done
243 | fi
244 |
245 | - name: Validate checked packages
246 | shell: su buildbot -c "sh -e {0}"
247 | working-directory: openwrt
248 | run: |
249 | . .github/workflows/scripts/ci_helpers.sh
250 |
251 | if git diff --name-only --exit-code; then
252 |             success "All packages seem OK"
253 |           else
254 |             err "Some package Makefiles require fixes. (run 'make package/check FIXUP=1' and force-push this PR)"
255 | err "You can also check the provided artifacts with the refreshed patch from this CI run."
256 | mkdir packages-fixed
257 | for f in $(git diff --name-only); do
258 | cp --parents $f packages-fixed/
259 | done
260 | exit 1
261 | fi
262 |
263 | - name: Upload fixed Packages
264 | if: failure()
265 | uses: actions/upload-artifact@v5
266 | with:
267 | name: ${{ inputs.target }}-${{ inputs.subtarget }}${{ inputs.testing == true && '-testing' || '' }}-packages-fixed
268 | path: openwrt/packages-fixed
269 |
270 | build:
271 | name: Build ${{ inputs.target }}/${{ inputs.subtarget }}
272 | needs: setup_build
273 | runs-on: ubuntu-latest
274 |
275 | container: ghcr.io/${{ needs.setup_build.outputs.container }}
276 |
277 | permissions:
278 | contents: read
279 | packages: read
280 | actions: write
281 |
282 | steps:
283 | - name: Checkout master directory
284 | uses: actions/checkout@v6
285 | with:
286 | path: openwrt
287 |
288 | - name: Checkout packages feed
289 | if: inputs.include_feeds == true
290 | uses: actions/checkout@v6
291 | with:
292 | repository: openwrt/packages
293 | path: openwrt/feeds/packages
294 |
295 | - name: Checkout luci feed
296 | if: inputs.include_feeds == true
297 | uses: actions/checkout@v6
298 | with:
299 | repository: openwrt/luci
300 | path: openwrt/feeds/luci
301 |
302 | - name: Checkout routing feed
303 | if: inputs.include_feeds == true
304 | uses: actions/checkout@v6
305 | with:
306 | repository: openwrt/routing
307 | path: openwrt/feeds/routing
308 |
309 | - name: Checkout telephony feed
310 | if: inputs.include_feeds == true
311 | uses: actions/checkout@v6
312 | with:
313 | repository: openwrt/telephony
314 | path: openwrt/feeds/telephony
315 |
316 | - name: Parse toolchain path
317 | if: inputs.build_toolchain == false
318 | working-directory: openwrt
319 | run: |
320 | TOOLCHAIN_PATH=snapshots
321 |
322 | if [ -n "${{ github.base_ref }}" ]; then
323 | if echo "${{ github.base_ref }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]$'; then
324 | major_ver="$(echo ${{ github.base_ref }} | sed 's/^openwrt-/v/')"
325 | fi
326 | elif [ "${{ github.ref_type }}" = "branch" ]; then
327 | if echo "${{ github.ref_name }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]$'; then
328 | major_ver="$(echo ${{ github.ref_name }} | sed 's/^openwrt-/v/')"
329 | elif echo "${{ github.ref_name }}" | grep -q -E '^openwrt-[0-9][0-9]\.[0-9][0-9]-'; then
330 | major_ver="$(echo ${{ github.ref_name }} | sed 's/^openwrt-\([0-9][0-9]\.[0-9][0-9]\)-.*/v\1/')"
331 | fi
332 | elif [ "${{ github.ref_type }}" = "tag" ]; then
333 | if echo "${{ github.ref_name }}" | grep -q -E '^v[0-9][0-9]\.[0-9][0-9]\..+'; then
334 | major_ver="$(echo ${{ github.ref_name }} | sed 's/^\(v[0-9][0-9]\.[0-9][0-9]\)\..\+/\1/')"
335 | fi
336 | fi
337 |
338 | if [ -n "$major_ver" ]; then
339 | git fetch --tags -f
340 | latest_tag="$(git tag --sort=-creatordate -l $major_ver* | head -n1)"
341 | if [ -n "$latest_tag" ]; then
342 | TOOLCHAIN_PATH=releases/$(echo $latest_tag | sed 's/^v//')
343 | fi
344 | fi
345 |
346 | SUMS_FILE="https://downloads.cdn.openwrt.org/$TOOLCHAIN_PATH/targets/${{ inputs.target }}/${{ inputs.subtarget }}/sha256sums"
347 |
348 | echo "TOOLCHAIN_PATH=$TOOLCHAIN_PATH" >> "$GITHUB_ENV"
349 | echo "SUMS_FILE=$SUMS_FILE" >> "$GITHUB_ENV"
350 |
351 | - name: Parse toolchain file
352 | if: inputs.build_toolchain == false
353 | id: parse-toolchain
354 | run: |
355 | if [ -d /external-toolchain/ ]; then
356 | echo "toolchain-type=external_container" >> $GITHUB_OUTPUT
357 | exit 0
358 | fi
359 |
360 | if curl ${{ env.SUMS_FILE }} | grep -qP ".*openwrt-toolchain.*tar.(xz|zst)"; then
361 | TOOLCHAIN_STRING="$( curl ${{ env.SUMS_FILE }} | grep -P ".*openwrt-toolchain.*tar.(xz|zst)")"
362 | TOOLCHAIN_FILE=$(echo "$TOOLCHAIN_STRING" | sed -n -E -e 's/.*(openwrt-toolchain.*.tar.(xz|zst))$/\1/p')
363 |
364 | echo "toolchain-type=external_toolchain" >> $GITHUB_OUTPUT
365 | elif curl ${{ env.SUMS_FILE }} | grep -qP ".*openwrt-sdk.*tar.(xz|zst)"; then
366 | TOOLCHAIN_STRING="$( curl ${{ env.SUMS_FILE }} | grep -P ".*openwrt-sdk.*tar.(xz|zst)")"
367 | TOOLCHAIN_FILE=$(echo "$TOOLCHAIN_STRING" | sed -n -E -e 's/.*(openwrt-sdk.*.tar.(xz|zst))$/\1/p')
368 |
369 | echo "toolchain-type=external_sdk" >> $GITHUB_OUTPUT
370 | else
371 | echo "toolchain-type=internal" >> $GITHUB_OUTPUT
372 | fi
373 |
374 | echo "TOOLCHAIN_FILE=$TOOLCHAIN_FILE" >> "$GITHUB_ENV"
375 |           echo "TOOLCHAIN_NAME=$(echo $TOOLCHAIN_FILE | sed -E -e 's/.tar.(xz|zst)$//')" >> "$GITHUB_ENV"
376 |
377 |       - name: Parse prebuilt llvm file
378 | if: inputs.build_toolchain == false
379 | id: parse-prebuilt-llvm
380 | run: |
381 | if curl ${{ env.SUMS_FILE }} | grep -qP ".*llvm-bpf.*tar.(xz|zst)"; then
382 | LLVM_STRING="$( curl ${{ env.SUMS_FILE }} | grep -P ".*llvm-bpf.*tar.(xz|zst)")"
383 | LLVM_FILE=$(echo "$LLVM_STRING" | sed -n -E -e 's/.*(llvm-bpf.*.tar.(xz|zst))$/\1/p')
384 |
385 | echo "llvm-type=external" >> $GITHUB_OUTPUT
386 | fi
387 |
388 | echo "LLVM_FILE=$LLVM_FILE" >> "$GITHUB_ENV"
389 |
390 | - name: Download and extract ccache cache from s3
391 | id: restore-ccache-cache-s3
392 | if: inputs.use_ccache_cache == true
393 | working-directory: openwrt
394 | run: |
395 | S3_LINK=https://s3-ccache.openwrt-ci.ansuel.com
396 | CCACHE_TAR=${{ needs.setup_build.outputs.ccache_name }}.tar
397 |
398 | if curl -o /dev/null -s --head --fail $S3_LINK/$CCACHE_TAR; then
399 | wget -O - $S3_LINK/$CCACHE_TAR | tar -xf -
400 | echo "cache-hit=true" >> $GITHUB_OUTPUT
401 | fi
402 |
403 | - name: Fix permission
404 | run: |
405 | chown -R buildbot:buildbot openwrt
406 |
407 | - name: Prepare prebuilt tools
408 | shell: su buildbot -c "sh -e {0}"
409 | working-directory: openwrt
410 | run: |
411 | mkdir -p staging_dir build_dir
412 | ln -s /prebuilt_tools/staging_dir/host staging_dir/host
413 | ln -s /prebuilt_tools/build_dir/host build_dir/host
414 |
415 | ./scripts/ext-tools.sh --refresh
416 |
417 | - name: Update & Install feeds
418 | if: inputs.include_feeds == true
419 | shell: su buildbot -c "sh -e {0}"
420 | working-directory: openwrt
421 | run: |
422 | ./scripts/feeds update -a
423 | ./scripts/feeds install -a
424 |
425 | - name: Restore ccache cache
426 | id: restore-ccache-cache
427 | if: inputs.use_ccache_cache == true && steps.restore-ccache-cache-s3.outputs.cache-hit != 'true'
428 | uses: actions/cache/restore@v4
429 | with:
430 | path: openwrt/.ccache
431 | key: ${{ needs.setup_build.outputs.ccache_name }}-${{ hashFiles('openwrt/include/kernel-**') }}
432 | restore-keys: |
433 | ${{ needs.setup_build.outputs.ccache_name }}-
434 |
435 | - name: Checkout OpenWrt keyring
436 | if: inputs.build_toolchain == false && ((steps.parse-toolchain.outputs.toolchain-type != 'internal' && steps.parse-toolchain.outputs.toolchain-type != 'external_container') ||
437 | steps.parse-prebuilt-llvm.outputs.llvm-type == 'external')
438 | uses: actions/checkout@v6
439 | with:
440 | repository: openwrt/keyring
441 | path: keyring
442 | sparse-checkout: |
443 | gpg/CD54E82DADB3684D.asc
444 | gpg/0x1D53D1877742E911.asc
445 | gpg/626471F1.asc
446 | sparse-checkout-cone-mode: false
447 |
448 | - name: Import GPG keys
449 | shell: su buildbot -c "sh -e {0}"
450 | if: inputs.build_toolchain == false && ((steps.parse-toolchain.outputs.toolchain-type != 'internal' && steps.parse-toolchain.outputs.toolchain-type != 'external_container') ||
451 | steps.parse-prebuilt-llvm.outputs.llvm-type == 'external')
452 | run: gpg --import keyring/gpg/CD54E82DADB3684D.asc keyring/gpg/0x1D53D1877742E911.asc keyring/gpg/626471F1.asc
453 |
454 | - name: Download external toolchain/sdk
455 | if: inputs.build_toolchain == false && steps.parse-toolchain.outputs.toolchain-type != 'internal' && steps.parse-toolchain.outputs.toolchain-type != 'external_container'
456 | shell: su buildbot -c "sh -e {0}"
457 | working-directory: openwrt
458 | run: |
459 | wget https://downloads.cdn.openwrt.org/${{ env.TOOLCHAIN_PATH }}/targets/${{ inputs.target }}/${{ inputs.subtarget }}/${{ env.TOOLCHAIN_FILE }}
460 | wget https://downloads.cdn.openwrt.org/${{ env.TOOLCHAIN_PATH }}/targets/${{ inputs.target }}/${{ inputs.subtarget }}/sha256sums.asc
461 | wget https://downloads.cdn.openwrt.org/${{ env.TOOLCHAIN_PATH }}/targets/${{ inputs.target }}/${{ inputs.subtarget }}/sha256sums
462 | gpg --with-fingerprint --verify sha256sums.asc
463 | sha256sum --check --ignore-missing sha256sums
464 | tar -xf ${{ env.TOOLCHAIN_FILE }}
465 | rm ${{ env.TOOLCHAIN_FILE }} sha256sums
466 |
467 | - name: Download and extract prebuilt llvm
468 | if: inputs.build_toolchain == false && steps.parse-prebuilt-llvm.outputs.llvm-type == 'external'
469 | shell: su buildbot -c "sh -e {0}"
470 | working-directory: openwrt
471 | run: |
472 | wget https://downloads.cdn.openwrt.org/${{ env.TOOLCHAIN_PATH }}/targets/${{ inputs.target }}/${{ inputs.subtarget }}/${{ env.LLVM_FILE }}
473 | wget https://downloads.cdn.openwrt.org/${{ env.TOOLCHAIN_PATH }}/targets/${{ inputs.target }}/${{ inputs.subtarget }}/sha256sums.asc
474 | wget https://downloads.cdn.openwrt.org/${{ env.TOOLCHAIN_PATH }}/targets/${{ inputs.target }}/${{ inputs.subtarget }}/sha256sums
475 | gpg --with-fingerprint --verify sha256sums.asc
476 | sha256sum --check --ignore-missing sha256sums
477 | tar -xf ${{ env.LLVM_FILE }}
478 | rm ${{ env.LLVM_FILE }} sha256sums
479 |
480 | - name: Clean configuration
481 | shell: su buildbot -c "sh -e {0}"
482 | working-directory: openwrt
483 | run: |
484 | rm -rf .config
485 |
486 | - name: Configure testing kernel
487 | if: inputs.testing == true
488 | shell: su buildbot -c "sh -e {0}"
489 | working-directory: openwrt
490 | run: |
491 | echo CONFIG_TESTING_KERNEL=y >> .config
492 |
493 | - name: Configure KERNEL_WERROR
494 | shell: su buildbot -c "sh -e {0}"
495 | working-directory: openwrt
496 | run: |
497 | echo CONFIG_KERNEL_WERROR=y >> .config
498 |
499 | - name: Configure all kernel modules
500 | if: inputs.build_all_kmods == true
501 | shell: su buildbot -c "sh -e {0}"
502 | working-directory: openwrt
503 | run: |
504 | echo CONFIG_ALL_KMODS=y >> .config
505 |
506 | - name: Configure all modules
507 | if: inputs.build_all_modules == true
508 | shell: su buildbot -c "sh -e {0}"
509 | working-directory: openwrt
510 | run: |
511 | echo CONFIG_ALL=y >> .config
512 |
513 | - name: Configure all boards
514 | if: inputs.build_all_boards == true
515 | shell: su buildbot -c "sh -e {0}"
516 | working-directory: openwrt
517 | run: |
518 | echo CONFIG_TARGET_MULTI_PROFILE=y >> .config
519 | echo CONFIG_TARGET_PER_DEVICE_ROOTFS=y >> .config
520 | echo CONFIG_TARGET_ALL_PROFILES=y >> .config
521 |
522 | # For some reason ccache has trouble detecting the compiler type
523 | # when an external toolchain is used. This breaks ccache
524 | # completely and results in a flood of "unsupported compiler
525 | # option" errors.
526 | # To work around this, force the compiler type to gcc.
527 | - name: Configure ccache and apply fixes
528 | if: inputs.use_ccache_cache == true
529 | shell: su buildbot -c "sh -e {0}"
530 | working-directory: openwrt
531 | env:
532 | SYSTEM_CCACHE_CONF: staging_dir/host/etc/ccache.conf
533 | run: |
534 | touch $SYSTEM_CCACHE_CONF
535 |
536 | echo compiler_type=gcc >> $SYSTEM_CCACHE_CONF
537 | [ ${{ inputs.ccache_type }} = 'kernel' ] && echo max_size=400M >> $SYSTEM_CCACHE_CONF
538 |
539 | echo depend_mode=true >> $SYSTEM_CCACHE_CONF
540 | echo sloppiness=file_macro,locale,time_macros >> $SYSTEM_CCACHE_CONF
541 |
542 | echo CONFIG_CCACHE=y >> .config
543 |
544 | - name: Configure external toolchain in container
545 | if: inputs.build_toolchain == false && steps.parse-toolchain.outputs.toolchain-type == 'external_container'
546 | shell: su buildbot -c "sh -e {0}"
547 | working-directory: openwrt
548 | run: |
549 | echo CONFIG_DEVEL=y >> .config
550 | echo CONFIG_AUTOREMOVE=y >> .config
551 |
552 | ./scripts/ext-toolchain.sh \
553 | --toolchain /external-toolchain/$(ls /external-toolchain/ | grep openwrt-toolchain)/toolchain-* \
554 | --overwrite-config \
555 | --config ${{ inputs.target }}/${{ inputs.subtarget }}
556 |
557 | - name: Configure external toolchain
558 | if: inputs.build_toolchain == false && steps.parse-toolchain.outputs.toolchain-type == 'external_toolchain'
559 | shell: su buildbot -c "sh -e {0}"
560 | working-directory: openwrt
561 | run: |
562 | echo CONFIG_DEVEL=y >> .config
563 | echo CONFIG_AUTOREMOVE=y >> .config
564 |
565 | ./scripts/ext-toolchain.sh \
566 | --toolchain ${{ env.TOOLCHAIN_NAME }}/toolchain-* \
567 | --overwrite-config \
568 | --config ${{ inputs.target }}/${{ inputs.subtarget }}
569 |
570 | - name: Adapt external sdk to external toolchain format
571 | if: inputs.build_toolchain == false && steps.parse-toolchain.outputs.toolchain-type == 'external_sdk'
572 | shell: su buildbot -c "sh -e {0}"
573 | working-directory: openwrt
574 | run: |
575 | TOOLCHAIN_DIR=${{ env.TOOLCHAIN_NAME }}/staging_dir/$(ls ${{ env.TOOLCHAIN_NAME }}/staging_dir | grep toolchain)
576 | TOOLCHAIN_BIN=$TOOLCHAIN_DIR/bin
577 | OPENWRT_DIR=$(pwd)
578 |
579 | # Find target name from toolchain info.mk
580 | GNU_TARGET_NAME=$(cat $TOOLCHAIN_DIR/info.mk | grep TARGET_CROSS | sed 's/^TARGET_CROSS=\(.*\)-$/\1/')
581 |
582 | cd $TOOLCHAIN_BIN
583 |
584 | # Undo the sdk wrapper scripts that were applied to all the binaries
585 | for app in $(find . -name "*.bin"); do
586 | TARGET_APP=$(echo $app | sed 's/\.\/\.\(.*\)\.bin/\1/')
587 | rm $TARGET_APP
588 | mv .$TARGET_APP.bin $TARGET_APP
589 | done
590 |
591 | # Set up the wrapper script in the sdk toolchain dir to simulate an external toolchain build
592 | cp $OPENWRT_DIR/target/toolchain/files/wrapper.sh $GNU_TARGET_NAME-wrapper.sh
593 | for app in cc gcc g++ c++ cpp ld as ; do
594 | [ -f $GNU_TARGET_NAME-$app ] && mv $GNU_TARGET_NAME-$app $GNU_TARGET_NAME-$app.bin
595 | ln -sf $GNU_TARGET_NAME-wrapper.sh $GNU_TARGET_NAME-$app
596 | done
597 |
598 | - name: Configure external toolchain with sdk
599 | if: inputs.build_toolchain == false && steps.parse-toolchain.outputs.toolchain-type == 'external_sdk'
600 | shell: su buildbot -c "sh -e {0}"
601 | working-directory: openwrt
602 | run: |
603 | echo CONFIG_DEVEL=y >> .config
604 | echo CONFIG_AUTOREMOVE=y >> .config
605 |
606 | ./scripts/ext-toolchain.sh \
607 | --toolchain ${{ env.TOOLCHAIN_NAME }}/staging_dir/toolchain-* \
608 | --overwrite-config \
609 | --config ${{ inputs.target }}/${{ inputs.subtarget }}
610 |
611 | - name: Configure internal toolchain
612 | if: inputs.build_toolchain == true || steps.parse-toolchain.outputs.toolchain-type == 'internal'
613 | shell: su buildbot -c "sh -e {0}"
614 | working-directory: openwrt
615 | run: |
616 | echo CONFIG_DEVEL=y >> .config
617 | echo CONFIG_AUTOREMOVE=y >> .config
618 |
619 | echo "CONFIG_TARGET_${{ inputs.target }}=y" >> .config
620 | echo "CONFIG_TARGET_${{ inputs.target }}_${{ inputs.subtarget }}=y" >> .config
621 |
622 | make defconfig
623 |
624 | - name: Configure prebuilt llvm
625 | if: inputs.build_toolchain == false && steps.parse-prebuilt-llvm.outputs.llvm-type == 'external'
626 | shell: su buildbot -c "sh -e {0}"
627 | working-directory: openwrt
628 | run: |
629 | echo CONFIG_USE_LLVM_PREBUILT=y >> .config
630 |
631 | - name: Show configuration
632 | shell: su buildbot -c "sh -e {0}"
633 | working-directory: openwrt
634 | run: ./scripts/diffconfig.sh
635 |
636 | - name: Build tools
637 | shell: su buildbot -c "sh -e {0}"
638 | working-directory: openwrt
639 | run: make tools/install -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
640 |
641 | - name: Build toolchain
642 | shell: su buildbot -c "sh -e {0}"
643 | working-directory: openwrt
644 | run: make toolchain/install -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
645 |
646 | - name: Build Kernel
647 | if: inputs.build_kernel == true
648 | shell: su buildbot -c "sh -e {0}"
649 | working-directory: openwrt
650 | run: make target/compile -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
651 |
652 | - name: Build Kernel Kmods
653 | if: inputs.build_kernel == true
654 | shell: su buildbot -c "sh -e {0}"
655 | working-directory: openwrt
656 | run: make package/linux/compile -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
657 |
658 | - name: Build DTBs
659 | if: inputs.build_dtb == true
660 | shell: su buildbot -c "sh -e {0}"
661 | working-directory: openwrt
662 | run: |
663 | # Check whether the special dtb makefile target is available.
664 | # For this to work correctly a .config is required, but one has
665 | # already been generated by the previous steps.
666 | TOPDIR=$(pwd) make -C target/linux dtb -q >/dev/null 2>/dev/null || ret=$?
667 | [ "${ret:-0}" = 2 ] && exit 0
668 |
669 | make target/linux/dtb -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
670 |
671 | - name: Build everything
672 | if: inputs.build_full == true
673 | shell: su buildbot -c "sh -e {0}"
674 | working-directory: openwrt
675 | run: make -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
676 |
677 | - name: Build external toolchain
678 | if: inputs.build_external_toolchain == true
679 | shell: su buildbot -c "sh -e {0}"
680 | working-directory: openwrt
681 | run: make target/toolchain/compile -j$(nproc) BUILD_LOG=1 || ret=$? .github/workflows/scripts/show_build_failures.sh
682 |
683 | - name: Coverity prepare toolchain
684 | if: inputs.coverity_check_packages != ''
685 | shell: su buildbot -c "sh -e {0}"
686 | working-directory: openwrt
687 | run: |
688 | wget -q https://scan.coverity.com/download/linux64 --post-data "token=${{ secrets.coverity_api_token }}&project=${{ inputs.coverity_project_name }}" -O coverity.tar.gz
689 | wget -q https://scan.coverity.com/download/linux64 --post-data "token=${{ secrets.coverity_api_token }}&project=${{ inputs.coverity_project_name }}&md5=1" -O coverity.tar.gz.md5
690 | echo ' coverity.tar.gz' >> coverity.tar.gz.md5
691 | md5sum -c coverity.tar.gz.md5
692 |
693 | mkdir cov-analysis-linux64
694 | tar xzf coverity.tar.gz --strip 1 -C cov-analysis-linux64
695 | export PATH=$(pwd)/cov-analysis-linux64/bin:$PATH
696 |
697 | for template in ${{ inputs.coverity_compiler_template_list }}; do
698 | cov-configure --template --comptype gcc --compiler "$template"
699 | done
700 |
701 | - name: Clean and recompile packages with Coverity toolchain
702 | if: inputs.coverity_check_packages != ''
703 | shell: su buildbot -c "bash {0}"
704 | working-directory: openwrt
705 | run: |
706 | set -o pipefail -o errexit
707 |
708 | coverity_check_packages=(${{ inputs.coverity_check_packages }})
709 | printf -v clean_packages "package/%s/clean " "${coverity_check_packages[@]}"
710 | make -j$(nproc) BUILD_LOG=1 $clean_packages || ret=$? .github/workflows/scripts/show_build_failures.sh
711 |
712 | coverity_force_compile_packages=(${{ inputs.coverity_force_compile_packages }})
713 | printf -v force_compile_packages "package/%s/compile " "${coverity_force_compile_packages[@]}"
714 | make -j$(nproc) BUILD_LOG=1 $force_compile_packages || ret=$? .github/workflows/scripts/show_build_failures.sh
715 |
716 | printf -v compile_packages "package/%s/compile " "${coverity_check_packages[@]}"
717 | export PATH=$(pwd)/cov-analysis-linux64/bin:$PATH
718 | cov-build --dir cov-int make -j $(nproc) BUILD_LOG=1 $compile_packages || ret=$? .github/workflows/scripts/show_build_failures.sh
719 |
720 | - name: Upload build to Coverity for analysis
721 | if: inputs.coverity_check_packages != ''
722 | shell: su buildbot -c "sh -e {0}"
723 | working-directory: openwrt
724 | run: |
725 | tar czf cov-int.tar.gz ./cov-int
726 | curl \
727 | --form token="${{ secrets.coverity_api_token }}" \
728 | --form email="contact@openwrt.org" \
729 | --form file=@cov-int.tar.gz \
730 | --form version="${{ github.ref_name }}-${{ github.sha }}" \
731 | --form description="OpenWrt ${{ github.ref_name }}-${{ github.sha }}" \
732 | "https://scan.coverity.com/builds?project=${{ inputs.coverity_project_name }}"
733 |
734 | - name: Upload logs
735 | if: failure()
736 | uses: actions/upload-artifact@v5
737 | with:
738 | name: ${{ inputs.target }}-${{ inputs.subtarget }}${{ inputs.testing == true && '-testing' || '' }}-logs
739 | path: "openwrt/logs"
740 |
741 | - name: Cleanup dl/build_dir/staging_dir to make some space
742 | working-directory: openwrt
743 | if: github.event_name == 'push'
744 | run: rm -rf dl build_dir staging_dir
745 |
746 | - name: Delete already present ccache cache
747 | if: steps.restore-ccache-cache.outputs.cache-hit == 'true' && inputs.use_ccache_cache == true &&
748 | github.event_name == 'push' && steps.restore-ccache-cache-s3.outputs.cache-hit != 'true'
749 | uses: octokit/request-action@v2.x
750 | with:
751 | route: DELETE /repos/{repository}/actions/caches?key={key}
752 | env:
753 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
754 | INPUT_REPOSITORY: ${{ github.repository }}
755 | INPUT_KEY: ${{ steps.restore-ccache-cache.outputs.cache-primary-key }}
756 |
757 | - name: Save ccache cache
758 | if: inputs.use_ccache_cache == true && github.event_name == 'push' &&
759 | steps.restore-ccache-cache-s3.outputs.cache-hit != 'true'
760 | uses: actions/cache/save@v4
761 | with:
762 | path: openwrt/.ccache
763 | key: ${{ steps.restore-ccache-cache.outputs.cache-primary-key }}
764 |
765 | - name: Archive ccache
766 | if: inputs.use_ccache_cache == true && github.event_name == 'push' &&
767 | inputs.upload_ccache_cache == true
768 | shell: su buildbot -c "sh -e {0}"
769 | working-directory: openwrt
770 | run: tar -cf ${{ needs.setup_build.outputs.ccache_name }}.tar .ccache
771 |
772 | - name: Upload ccache cache
773 | if: inputs.use_ccache_cache == true && github.event_name == 'push' &&
774 | inputs.upload_ccache_cache == true
775 | uses: actions/upload-artifact@v5
776 | with:
777 | name: ${{ inputs.target }}-${{ inputs.subtarget }}${{ inputs.testing == true && '-testing' || '' }}-ccache-cache
778 | path: openwrt/${{ needs.setup_build.outputs.ccache_name }}.tar
779 | retention-days: 1
780 |
781 | - name: Find external toolchain name
782 | id: get-toolchain-name
783 | if: inputs.upload_external_toolchain == true
784 | working-directory: openwrt
785 | run: |
786 | TOOLCHAIN_NAME=$(ls bin/targets/${{ inputs.target }}/${{ inputs.subtarget }} | grep toolchain)
787 | echo "toolchain-name=$TOOLCHAIN_NAME" >> $GITHUB_OUTPUT
788 |
789 | - name: Upload prebuilt toolchain
790 | if: inputs.upload_external_toolchain == true
791 | uses: actions/upload-artifact@v5
792 | with:
793 | name: ${{ inputs.target }}-${{ inputs.subtarget }}-external-toolchain
794 | path: openwrt/bin/targets/${{ inputs.target }}/${{ inputs.subtarget }}/${{ steps.get-toolchain-name.outputs.toolchain-name }}
795 | retention-days: 1
796 |
--------------------------------------------------------------------------------
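For orientation, a minimal, hypothetical sketch of how a downstream workflow might invoke this reusable build workflow is shown below. The input names (target, subtarget, build_toolchain, build_all_kmods, build_dtb, testing, use_ccache_cache) are the ones referenced by the steps above; the caller's file name, the owner/repository path, the ref and all chosen values are illustrative assumptions, not taken from this repository.

    # Hypothetical caller, e.g. .github/workflows/build-example.yml in a
    # downstream repository; the owner/repo path, ref and values below are
    # assumptions for illustration only.
    name: Example target build

    on:
      pull_request:

    permissions:
      contents: read

    jobs:
      build-example:
        name: Build example target
        uses: example-org/shared-workflows/.github/workflows/reusable_build.yml@main
        with:
          target: malta              # illustrative target/subtarget pair
          subtarget: be
          build_toolchain: false     # rely on an external/prebuilt toolchain instead
          build_all_kmods: true
          build_dtb: true
          testing: false
          use_ccache_cache: true
        # Forwards repository secrets such as coverity_api_token when the
        # Coverity steps are enabled; GITHUB_TOKEN is provided automatically.
        secrets: inherit

Whether a given input takes effect depends on the conditions attached to each step above; for example, the external toolchain download steps only run when build_toolchain is false and the parsed toolchain type is neither 'internal' nor 'external_container'.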