├── .dockerignore
├── .flake8
├── .github
├── dependabot.yml
└── workflows
│ └── build-push.yml
├── .gitignore
├── .gitlab-ci.yml
├── .gitlab
├── docker.yml
└── docker
│ ├── README.md
│ ├── buildmaster
│ └── gitlab.yml
│ └── buildworker
│ └── gitlab.yml
├── .ruff.toml
├── README.md
├── build-docker-images.sh
├── docker
├── buildmaster
│ ├── Dockerfile
│ └── files
│ │ ├── entry.sh
│ │ └── start.sh
├── buildworker
│ ├── Dockerfile
│ └── files
│ │ ├── entry.sh
│ │ └── start.sh
├── certs
│ ├── buildmaster-phase1.crt
│ ├── buildmaster-phase1.key
│ ├── buildmaster-phase2.crt
│ ├── buildmaster-phase2.key
│ ├── ca.crt
│ └── ca.key
├── config.ini
├── docker-compose.yml
└── rsync
│ ├── Dockerfile
│ └── files
│ └── entry.sh
├── phase1
├── buildbot.tac
├── config.ini.example
└── master.cfg
├── phase2
├── buildbot.tac
├── config.ini.example
└── master.cfg
├── requirements-dev.txt
├── scripts
├── ccache.sh
├── cleanup.sh
├── findbin.pl
├── makebranch.sh
├── rsync.sh
├── sec2pubkey.pl
├── sha2rsync.pl
└── signall.sh
└── tests
└── cram
├── master
├── 01-logs.t
└── 02-apk.t
└── worker
└── 01-logs.t
/.dockerignore:
--------------------------------------------------------------------------------
1 | docker/rsync
2 | !docker/rsync/files
3 | docker/build
4 | docker/docker-compose.yml
5 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 140
3 | per-file-ignores =
4 | phase2/master.cfg: E101,E117,E128,E201,E202,E203,E221,E225,E251,E266,E302,E305,E501,W191
5 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # Set update schedule for GitHub Actions
2 |
3 | version: 2
4 | updates:
5 |
6 | - package-ecosystem: "github-actions"
7 | directory: "/"
8 | schedule:
9 | # Check for updates to GitHub Actions every week
10 | interval: "weekly"
11 | # Prefix all commit messages with "CI" plus its scope, that is, a
12 | # list of updated dependencies
13 | commit-message:
14 | prefix: "CI"
15 | include: "scope"
16 |
--------------------------------------------------------------------------------
/.github/workflows/build-push.yml:
--------------------------------------------------------------------------------
1 | name: Build and push containers
2 | on:
3 | push:
4 | branches:
5 | - main
6 | tags:
7 | - 'v*'
8 | pull_request:
9 |
10 | env:
11 | BUILDBOT_VERSION: 3.11.8
12 | GITHUB_SHA_LEN: 8
13 |
14 | concurrency:
15 | group: ${{ github.workflow }}-${{ github.ref }}
16 | cancel-in-progress: ${{ github.event_name == 'pull_request' }}
17 |
18 | jobs:
19 | test-lint:
20 | name: Test with Python ${{ matrix.python-version }}
21 | runs-on: ubuntu-latest
22 |
23 | strategy:
24 | matrix:
25 | python-version:
26 | - "3.11"
27 |
28 | steps:
29 | - name: Checkout
30 | uses: actions/checkout@v4
31 |
32 | - uses: actions/setup-python@v5
33 | with:
34 | python-version: ${{ matrix.python-version }}
35 |
36 | - name: Install dependencies
37 | run: pip install -r requirements-dev.txt
38 |
39 | - name: Lint with ruff
40 | run: ruff check phase*/master.cfg
41 |
42 | - name: Lint with flake8
43 | run: flake8 phase*/master.cfg
44 |
45 | - name: Stylecheck with black
46 | run: black --check --diff phase1/master.cfg
47 |
48 | build-test:
49 | name: Build and Test container
50 | runs-on: ubuntu-latest
51 | needs: test-lint
52 |
53 | permissions:
54 | packages: write
55 |
56 | strategy:
57 | fail-fast: ${{ github.event_name == 'pull_request' }}
58 | matrix:
59 | include:
60 | - container_flavor: master
61 | - container_flavor: worker
62 | container_test_command: "--env BUILDWORKER_TLS=1 --env BUILDWORKER_MASTER=Z:1922 --env BUILDWORKER_NAME=X --env BUILDWORKER_PASSWORD=Y"
63 |
64 | steps:
65 | - name: Checkout
66 | uses: actions/checkout@v4
67 |
68 | - name: Environment variables
69 | run: |
70 | echo "GIT_SHA_SHORT=${GITHUB_SHA::${{ env.GITHUB_SHA_LEN }}}" >> $GITHUB_ENV
71 |
72 | - name: Build container and export it to local Docker
73 | uses: docker/build-push-action@v6
74 | with:
75 | load: true
76 | tags: local/${{ matrix.container_flavor }}
77 | file: docker/build${{ matrix.container_flavor }}/Dockerfile
78 | build-args: |
79 | BUILDBOT_VERSION=${{ env.BUILDBOT_VERSION }}
80 | OPENWRT_VERSION=${{ env.GIT_SHA_SHORT }}
81 |
82 | - name: Test ${{ matrix.container_flavor }} Docker container
83 | run: |
84 | docker run --detach ${{ matrix.container_test_command }} --name test-${{ matrix.container_flavor }} local/${{ matrix.container_flavor }}
85 | sleep 5
86 | pip install cram
87 | cram --verbose "tests/cram/${{ matrix.container_flavor }}"
88 |
89 | deploy:
90 | name: Push Container
91 | if: github.event_name != 'pull_request' || github.repository_owner != 'openwrt'
92 | runs-on: ubuntu-latest
93 | needs: build-test
94 |
95 | environment: production
96 |
97 | permissions:
98 | packages: write
99 |
100 | strategy:
101 | matrix:
102 | container_flavor:
103 | - master
104 | - worker
105 |
106 | steps:
107 | - name: Checkout
108 | uses: actions/checkout@v4
109 |
110 | - name: Environment variables
111 | run: |
112 | echo "GIT_SHA_SHORT=${GITHUB_SHA::${{ env.GITHUB_SHA_LEN }}}" >> $GITHUB_ENV
113 |
114 | - name: Docker meta
115 | id: meta
116 | uses: docker/metadata-action@v5
117 | with:
118 | images: name=ghcr.io/${{ github.repository }}/build${{ matrix.container_flavor }}-v${{ env.BUILDBOT_VERSION }}
119 |
120 | - name: Login to GitHub Container Registry
121 | uses: docker/login-action@v3
122 | with:
123 | registry: ghcr.io
124 | username: ${{ github.actor }}
125 | password: ${{ secrets.GITHUB_TOKEN }}
126 |
127 | - name: Build container again and push it
128 | uses: docker/build-push-action@v6
129 | with:
130 | push: true
131 | tags: ${{ steps.meta.outputs.tags }}
132 | labels: ${{ steps.meta.outputs.labels }}
133 | file: docker/build${{ matrix.container_flavor }}/Dockerfile
134 | build-args: |
135 | BUILDBOT_VERSION=${{ env.BUILDBOT_VERSION }}
136 | OPENWRT_VERSION=${{ env.GIT_SHA_SHORT }}
137 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *
2 | !.gitignore
3 | !.dockerignore
4 | !build-docker-images.sh
5 | !docker
6 | !docker/*
7 | !docker/*/*
8 | !docker/*/*/*
9 | docker/build
10 | !scripts
11 | !scripts/*
12 | !phase[12]
13 | !phase[12]/*
14 | phase[12]/*/*
15 | phase[12]/config.ini
16 | phase[12]/http.log
17 | phase[12]/key-build*
18 | phase[12]/state.sqlite*
19 | phase[12]/twistd.*
20 | !.gitlab-ci.yml
21 | !.gitlab
22 | !.gitlab/*
23 | !.gitlab/**/*
24 | !.github
25 | !.github/**/*
26 | !requirements-dev.txt
27 | !.ruff.toml
28 | !tests
29 | !tests/**/*
30 | !.flake8
31 |
--------------------------------------------------------------------------------
/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | variables:
2 | BUILDBOT_VERSION: 3.5.0
3 | OPENWRT_VERSION: $CI_COMMIT_SHORT_SHA
4 |
5 | include:
6 | - local: .gitlab/docker.yml
7 | - local: .gitlab/docker/buildmaster/gitlab.yml
8 | - local: .gitlab/docker/buildworker/gitlab.yml
9 |
10 | stages:
11 | - docker
12 | - docker test
13 | - docker deploy
14 |
--------------------------------------------------------------------------------
/.gitlab/docker.yml:
--------------------------------------------------------------------------------
1 | .docker in docker:
2 | tags:
3 | - linux
4 | - openwrt
5 | - light-jobs
6 | - docker-privileged
7 | image: docker:19.03.7
8 | services:
9 | - docker:19.03.7-dind
10 | variables:
11 | DOCKER_DRIVER: overlay2
12 | DOCKER_TLS_CERTDIR: "/certs"
13 |
14 | .build Docker image:
15 | stage: docker
16 | extends: .docker in docker
17 | script:
18 | - export IMAGE_NAME="$(echo $CI_JOB_NAME | sed 's/build Docker image \(.*\)/\1/')"
19 | - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
20 | - docker build --build-arg BUILDBOT_VERSION=$BUILDBOT_VERSION --build-arg OPENWRT_VERSION=$OPENWRT_VERSION -t "$IMAGE_NAME" -f "docker/$IMAGE_NAME/Dockerfile" .
21 | - docker tag "$IMAGE_NAME" "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_REF_SLUG"
22 | - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_REF_SLUG"
23 |
24 | .deploy Docker image:
25 | extends: .docker in docker
26 | rules:
27 | - if: $CI_COMMIT_TAG
28 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
29 | script:
30 | - export IMAGE_NAME="$(echo $CI_JOB_NAME | sed 's/deploy Docker image \(.*\)/\1/')"
31 | - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
32 | - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_REF_SLUG"
33 |
34 | - docker tag "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_REF_SLUG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:latest"
35 | - docker tag "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:latest" "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_SHORT_SHA"
36 | - docker tag "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:latest" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:latest"
37 |
38 | - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:latest"
39 | - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_SHORT_SHA"
40 | - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:latest"
41 |
--------------------------------------------------------------------------------
/.gitlab/docker/README.md:
--------------------------------------------------------------------------------
1 | # Content
2 |
3 | This directory contains bits for Docker images used on the GitLab CI.
4 |
5 | ## buildmaster
6 | ## buildworker
7 |
--------------------------------------------------------------------------------
/.gitlab/docker/buildmaster/gitlab.yml:
--------------------------------------------------------------------------------
1 | build Docker image buildmaster:
2 | stage: docker
3 | extends: .build Docker image
4 |
5 | test Docker image buildmaster:
6 | stage: docker test
7 | extends: .docker in docker
8 | needs: ["build Docker image buildmaster"]
9 | script:
10 | - export IMAGE_NAME="$(echo $CI_JOB_NAME | sed 's/test Docker image \(.*\)/\1/')"
11 | - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
12 | - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_REF_SLUG"
13 | - >
14 | docker run --rm "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_REF_SLUG" |
15 | grep "buildmaster configured in /master"
16 |
17 | deploy Docker image buildmaster:
18 | stage: docker deploy
19 | extends: .deploy Docker image
20 | needs: ["test Docker image buildmaster"]
21 |
--------------------------------------------------------------------------------
/.gitlab/docker/buildworker/gitlab.yml:
--------------------------------------------------------------------------------
1 | build Docker image buildworker:
2 | stage: docker
3 | extends: .build Docker image
4 |
5 | .test Docker image buildworker:
6 | stage: docker test
7 | extends: .docker in docker
8 | needs: ["build Docker image buildworker"]
9 | script:
10 | - export IMAGE_NAME="$(echo $CI_JOB_NAME | sed 's/test Docker image \(.*\)/\1/')"
11 | - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
12 | - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_REF_SLUG"
13 | - >
14 | docker run --rm --env BUILDWORKER_NAME=foo --env BUILDWORKER_PASSWORD=XXX
15 | "$CI_REGISTRY_IMAGE/$IMAGE_NAME-$BUILDBOT_VERSION:$CI_COMMIT_REF_SLUG" |
16 | grep "worker configured in /builder"
17 |
18 | deploy Docker image buildworker:
19 | stage: docker deploy
20 | extends: .deploy Docker image
21 | #needs: ["test Docker image buildworker"]
22 |
--------------------------------------------------------------------------------
/.ruff.toml:
--------------------------------------------------------------------------------
1 | ignore = ["E501"]
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # OpenWrt buildbot configuration
2 |
3 | This repository provides containers used for building OpenWrt at https://buildbot.openwrt.org
4 |
--------------------------------------------------------------------------------
/build-docker-images.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker build -t openwrtorg/buildmaster -f docker/buildmaster/Dockerfile .
4 | docker push openwrtorg/buildmaster
5 |
6 | docker build -t openwrtorg/buildworker -f docker/buildworker/Dockerfile .
7 | docker push openwrtorg/buildworker
8 |
--------------------------------------------------------------------------------
/docker/buildmaster/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM alpinelinux/build-base:latest-x86_64 AS apk-builder
2 |
3 | RUN abuild-apk add -u \
4 | gcc \
5 | git \
6 | linux-headers \
7 | lua5.3-dev \
8 | lua5.3-lzlib \
9 | make \
10 | musl-dev \
11 | openssl-dev \
12 | openssl-libs-static \
13 | zlib-dev \
14 | zlib-static \
15 | zstd-dev \
16 | zstd-static
17 |
18 | ARG APK_TOOLS_COMMIT=74de0e9bd73d1af8720df40aa68d472943909804
19 |
20 | RUN git clone https://gitlab.alpinelinux.org/alpine/apk-tools.git /tmp/apk-tools
21 | WORKDIR /tmp/apk-tools
22 | RUN git checkout $APK_TOOLS_COMMIT
23 | RUN make -j$(nproc) static
24 |
25 |
26 | FROM debian:11
27 | LABEL maintainer="OpenWrt Maintainers"
28 |
29 | ARG DEBIAN_FRONTEND=noninteractive
30 | ARG BUILDBOT_VERSION=2.10.1
31 | ARG OPENWRT_VERSION=unknown
32 |
33 | ENV BUILDMASTER_CONFIG=config.ini
34 | ENV BUILDMASTER_PHASE=1
35 |
36 | USER root
37 |
38 | RUN \
39 | apt-get update && \
40 | apt-get install -y \
41 | build-essential \
42 | gawk \
43 | git-core \
44 | gosu \
45 | libncurses5-dev \
46 | locales \
47 | pv \
48 | pwgen \
49 | python3-venv \
50 | python3-pip \
51 | signify-openbsd \
52 | wget && \
53 | apt-get clean && \
54 | localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
55 |
56 | RUN python3 -m venv /opt/venv
57 | ENV PATH="/opt/venv/bin:$PATH"
58 | RUN pip3 install -U pip
59 | RUN \
60 | pip3 install \
61 | "buildbot==$BUILDBOT_VERSION" \
62 | "buildbot-www==$BUILDBOT_VERSION" \
63 | "buildbot-waterfall-view==$BUILDBOT_VERSION" \
64 | "buildbot-console-view==$BUILDBOT_VERSION" \
65 | "buildbot-grid-view==$BUILDBOT_VERSION" \
66 | "buildbot-worker==$BUILDBOT_VERSION" \
67 | pyOpenSSL \
68 | service_identity
69 |
70 | RUN \
71 | sed -i \
72 | 's/Welcome to buildbot/Welcome to OpenWrt buildbot/' \
73 | /opt/venv/lib/python*/site-packages/buildbot_www/static/scripts.js
74 |
75 | RUN \
76 | sed -i \
77 | "s;\"col-sm-12\">
- OpenWrt version: $OPENWRT_VERSION
- &2
14 | exit 1
15 | ;;
16 | esac
17 |
18 | /opt/venv/bin/buildbot create-master --config=/phase${BUILDMASTER_PHASE:-1}/master.cfg /master
19 |
20 | unset BUILDMASTER_PHASE
21 |
22 | rm -f /master/twistd.pid
23 | exec /opt/venv/bin/buildbot start --nodaemon /master
24 | ;;
25 | /*)
26 | exec "$@"
27 | ;;
28 | *)
29 | echo "Unknown command given. Must be either 'start' or 'reconfig'" >&2
30 | exit 1
31 | ;;
32 | esac
33 |
--------------------------------------------------------------------------------
/docker/buildworker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM debian:11
2 | LABEL maintainer="OpenWrt Maintainers"
3 |
4 | ARG DEBIAN_FRONTEND=noninteractive
5 | ARG BUILDBOT_VERSION=2.10.1
6 | ARG OPENWRT_VERSION=unknown
7 |
8 | ENV BUILDWORKER_MASTER=builds.openwrt.org:9990
9 | ENV BUILDWORKER_ADMIN=contact@openwrt.org
10 | ENV BUILDWORKER_DESCRIPTION="Docker Container https://git.openwrt.org/$OPENWRT_VERSION"
11 |
12 | USER root
13 |
14 | RUN \
15 | apt-get update && \
16 | apt-get install -y \
17 | build-essential \
18 | ccache \
19 | curl \
20 | file \
21 | gawk \
22 | g++-multilib \
23 | gcc-multilib \
24 | genisoimage \
25 | git-core \
26 | gosu \
27 | libdw-dev \
28 | libelf-dev \
29 | libncurses5-dev \
30 | locales \
31 | pv \
32 | pwgen \
33 | python3 \
34 | python3-venv \
35 | python3-pip \
36 | python3-pyelftools \
37 | python3-cryptography \
38 | qemu-utils \
39 | rsync \
40 | signify-openbsd \
41 | subversion \
42 | swig \
43 | unzip \
44 | wget \
45 | zstd && \
46 | apt-get clean && \
47 | localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
48 |
49 | RUN python3 -m venv /opt/venv
50 | ENV PATH="/opt/venv/bin:$PATH"
51 | RUN pip3 install -U pip
52 | RUN pip3 install \
53 | "buildbot-worker==$BUILDBOT_VERSION" \
54 | pyelftools \
55 | pyOpenSSL \
56 | service_identity
57 |
58 | ENV LANG=en_US.utf8
59 |
60 | COPY docker/buildworker/files/entry.sh /entry.sh
61 | COPY docker/buildworker/files/start.sh /start.sh
62 |
63 | RUN \
64 | groupadd buildbot && \
65 | useradd \
66 | --create-home --home-dir /builder \
67 | --comment "OpenWrt buildbot" \
68 | --gid buildbot --shell /bin/bash buildbot && \
69 | chown buildbot:buildbot /builder && \
70 | chmod 0755 /entry.sh /start.sh
71 |
72 | VOLUME [ "/builder" ]
73 | ENTRYPOINT [ "/entry.sh" ]
74 | CMD [ "/start.sh" ]
75 |
--------------------------------------------------------------------------------
/docker/buildworker/files/entry.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Ensure the /builder volume (possibly bind-mounted from the host) is
4 | # owned by the unprivileged buildbot user before dropping privileges.
5 | chown buildbot:buildbot /builder
6 |
7 | # exec so gosu (and ultimately the worker process) replaces this shell as
8 | # PID 1 and directly receives container signals such as the SIGTERM that
9 | # `docker stop` sends; without exec, bash would not forward the signal.
10 | exec /usr/sbin/gosu buildbot "$@"
6 |
--------------------------------------------------------------------------------
/docker/buildworker/files/start.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | cleanup_buildworker_env_variables() {
4 | for var in "${!BUILDWORKER_@}"; do
5 | unset "$var"
6 | done
7 | }
8 |
9 | [ -n "$BUILDWORKER_NAME" ] || {
10 | echo "Please supply a name via --env BUILDWORKER_NAME=XXX" >&2
11 | exit 1
12 | }
13 |
14 | [ -n "$BUILDWORKER_PASSWORD" ] || {
15 | echo "Please supply a password via --env BUILDWORKER_PASSWORD=XXX" >&2
16 | exit 2
17 | }
18 |
19 | rm -f /builder/buildbot.tac
20 |
21 | /opt/venv/bin/buildbot-worker create-worker \
22 | --force \
23 | --umask="0o22" \
24 | ${BUILDWORKER_TLS:+--connection-string="SSL:$BUILDWORKER_MASTER"} \
25 | /builder \
26 | "$BUILDWORKER_MASTER" \
27 | "$BUILDWORKER_NAME" \
28 | "$BUILDWORKER_PASSWORD"
29 |
30 | echo "$BUILDWORKER_ADMIN" > /builder/info/admin
31 | echo "$BUILDWORKER_DESCRIPTION" > /builder/info/host
32 |
33 | cleanup_buildworker_env_variables
34 | rm -f /builder/twistd.pid
35 | exec /opt/venv/bin/buildbot-worker start --nodaemon /builder
36 |
--------------------------------------------------------------------------------
/docker/certs/buildmaster-phase1.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIIDbjCCAlYCFBczjElNehQ1sSWjdvGyPBET3j3eMA0GCSqGSIb3DQEBCwUAMGgx
3 | CzAJBgNVBAYTAlhYMQ4wDAYDVQQIDAVXb3JsZDEiMCAGA1UECgwZQnVpbGRib3Qg
4 | Q29tcG9zZXIgVGVzdGluZzElMCMGA1UECwwcQnVpbGRib3QgQ29tcG9zZXIgVGVz
5 | dGluZyBDQTAeFw0yMDAyMDMxNjUwMjlaFw0zMDAxMzExNjUwMjlaMH8xCzAJBgNV
6 | BAYTAlhYMQ4wDAYDVQQIDAVXb3JsZDEfMB0GA1UECgwWQnVpbGRib3QgQ29tcG9z
7 | ZXIgVGVzdDEiMCAGA1UECwwZQnVpbGRib3QgQ29tcG9zZXIgVGVzdCBDQTEbMBkG
8 | A1UEAwwSYnVpbGRtYXN0ZXItcGhhc2UxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
9 | MIIBCgKCAQEAq1b4I44iYmhue1+IPSdeqcK7IL+INl/l2EE1Yw51Z+fPazkZdpiw
10 | +ZkB+dOdhiOL4+LW9SnrCUQ5CBOW8unG+nG7mvi63Pv61G5WLGlqW1pl6+Rt2Cpc
11 | CIhDcxRB+xzt1Yiu+BT+EIZO8NCLl+NHPgNrB1b2B8WmIEfWp/zEtaCTROWsHnax
12 | 1LUaP/mis1exRA1ITCNMwb1drP1eWRw99zGP2ax7nnju4zRK4QiQj3V/SNXqprvS
13 | kAAEogMtnsC6LIqPDABz/lcrfXhbbi4+MGmcy1Wz6ukDkusDf0jhssjVl29d9kL2
14 | grLohPHbmsQqEsumd6/6KUkybwHkdPQc0QIDAQABMA0GCSqGSIb3DQEBCwUAA4IB
15 | AQCd89o+MOX3//XtgsreITq8PVbY8MEATEbd1Q5F0VgMvzVMh0Agzy5zfvNtPfUK
16 | Grj2kTuL9H1TlPvIJ9x4+n6mqbXp46ajIWWlJ22lCV1vgtvA245YnmZKPlFw1FtS
17 | GU0AaIU76VLmlb6NH6sMdRX3/11WpdUJd46kyf/hkXlwk/sG7XR+IVB7cyE/iufq
18 | TTyGU9cUSEq6baYUOW2mVl91XCY2l9s7ZCIAG0MKw7aA0nOBfIutAhnXhhcWjUvp
19 | 9KOkUx14REM7x5mTtiAUwjs7d/6JDT+k2giWm01ca9Wyf2x2kND4fJOepAHIzXBI
20 | fRN/b8wrmWeAyFMBg6v/HG3u
21 | -----END CERTIFICATE-----
22 |
--------------------------------------------------------------------------------
/docker/certs/buildmaster-phase1.key:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIIEpQIBAAKCAQEAq1b4I44iYmhue1+IPSdeqcK7IL+INl/l2EE1Yw51Z+fPazkZ
3 | dpiw+ZkB+dOdhiOL4+LW9SnrCUQ5CBOW8unG+nG7mvi63Pv61G5WLGlqW1pl6+Rt
4 | 2CpcCIhDcxRB+xzt1Yiu+BT+EIZO8NCLl+NHPgNrB1b2B8WmIEfWp/zEtaCTROWs
5 | Hnax1LUaP/mis1exRA1ITCNMwb1drP1eWRw99zGP2ax7nnju4zRK4QiQj3V/SNXq
6 | prvSkAAEogMtnsC6LIqPDABz/lcrfXhbbi4+MGmcy1Wz6ukDkusDf0jhssjVl29d
7 | 9kL2grLohPHbmsQqEsumd6/6KUkybwHkdPQc0QIDAQABAoIBACuC64MD/mFlSeFU
8 | 0mFNVTHhPOpFGY9NbS4s6STdsB0R9k+xcXD3t8G7BJBwGAnPjx/xJEugOIri42TU
9 | rLnFen1xUoY5ciLabslxt7qRjqviWg28tdR2dWbJS++2jkb6Ar/+dQiVNCv4H6Xy
10 | H/ETi60I6y4y/Ene/KAn/jAs9bt1wD13wPFNHHHJes46719cqSuVMWBMW0qFJdBW
11 | xWxfeDNEKhRmby43gakzs3BNYtS5eHpmWXqYMhP34H+JURNa1tW1UWD1swqvyZKF
12 | WxXgU+ckvBJG2BLrF6LLLwDYtkwAJwPtA0arpaTs+9QBSvv1hC2G2rpPOrxKz/he
13 | c3ksIcECgYEA3rmkRXLZwTQKTZ1X90upgVJsHEQEuWmb2ETd/ED4rWfyfYaaFk7i
14 | 8bg7g8og9fUsso1gJOOXVgmwhCilKZrwau1xRDkmNOKYirh/35Rf8pEQKF7v3Njd
15 | htMN+OGuLUzqhbm9oH2d6B8VrJwAsgagpCURlCZWpvU5tmpKLaMrjDkCgYEAxPAJ
16 | kvQvCmTNXl4IUZX6sDmEywqpW1szV/ImdmVnUBkrdFQzw/IB8Mo61TW/fGv6+tlZ
17 | gKcLfvfEiP1BBuGHZY6e3ep6U0iN+KQhqsMIy8LnBqSep76E121bpwxrY08sW/R1
18 | gHDku3EAiwDns+dYydLK8QdouLcquL4/NxgtRVkCgYEAtem6ml6sLHls0OfqqTlL
19 | QS0o0A1eLnJrr8DP7vDCLB/yl0QRJpdYFUX8li/JrBqMX2nJVVfui39uhQMNo2XB
20 | aBR3ptlms1tAbE01ZE+z7lM9dzvTffZDOZO4ncb2lFl2U7LZhvQSvTxmX40rpDx1
21 | iJweN3r423T/plY6L/xN3UECgYEAsX4Ngu6kvt9e4KArKyzbBZyxfDWdsYKe4bKS
22 | y75UwLIOogeJqB4jdZsuLx8D9+VyDEd8DSGAkOxJq5Vk2AjHeNZzhFph6VHI8sEq
23 | vraM5OhCQ5B2lYR2QZqEMihVWOnwduf6Rsp4vB76eE1WhqQJN8zNrzIYRfJ/hIWi
24 | f8tAcfECgYEAqpryfEa4W7BXXxFusMCSgNpP7yBJ6x5p/UT1cZ8behVFJb78P4K5
25 | CPK2Pl+LGywEl+CHsbuPAyPzEU66/kwVN66F2W4Jz+n3QWuM1ZK8VWwF2piQL4lz
26 | wDTlzlYLBy3RLLOR4Zgs7wS8emkRhkGH7gZh0QL0//3PmH1gzzVQPSY=
27 | -----END RSA PRIVATE KEY-----
28 |
--------------------------------------------------------------------------------
/docker/certs/buildmaster-phase2.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIIDbjCCAlYCFBczjElNehQ1sSWjdvGyPBET3j3fMA0GCSqGSIb3DQEBCwUAMGgx
3 | CzAJBgNVBAYTAlhYMQ4wDAYDVQQIDAVXb3JsZDEiMCAGA1UECgwZQnVpbGRib3Qg
4 | Q29tcG9zZXIgVGVzdGluZzElMCMGA1UECwwcQnVpbGRib3QgQ29tcG9zZXIgVGVz
5 | dGluZyBDQTAeFw0yMDAyMDMxNjUwMzdaFw0zMDAxMzExNjUwMzdaMH8xCzAJBgNV
6 | BAYTAlhYMQ4wDAYDVQQIDAVXb3JsZDEfMB0GA1UECgwWQnVpbGRib3QgQ29tcG9z
7 | ZXIgVGVzdDEiMCAGA1UECwwZQnVpbGRib3QgQ29tcG9zZXIgVGVzdCBDQTEbMBkG
8 | A1UEAwwSYnVpbGRtYXN0ZXItcGhhc2UyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
9 | MIIBCgKCAQEAzuwsg3UgB1/xlhuOPF1uXkmsooEyof+/x0rZhDDURCPr8bjGUvzr
10 | WsHcKqrQCujVslVFDSqusmrhM2ONFaIbr3dGSPXBbCRfT6pi6f6XcvPtaeWcOA/g
11 | rXqb7wVUQf8t31lQhWexqmc2lietF6wC2iBOvi7/G352NkYOwy+cyodq9VUQkvT2
12 | BmL/B9On4SQkhblhovN4TtcvyRNuT87J+4yn8GmAu586Gss5xhvFIxnr016vxA/M
13 | NShKLGjsgWzn6THaJTTtAQYlBpRSkoFo+mOXHXqB/WSU0MNDybxu3Puqha8vRFMT
14 | crynEuGya9bpUMdvBedDDmH7aCmUJp0lIQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB
15 | AQC/DWvwWNUBAD8znaIcNPYO3Ua4UL/ozERuij5VvZjf9ic0Pzq92qLQkTyDIlGP
16 | 7hL4Tp3g/PaGTg8A4LZfTrt1sPOHXHYR0Ysiw5iYgcljsmUoKWsh6SFaPDS4JnFG
17 | tKE4FJ4YtZOgB2fo2yRuJTGQUDyEYCfpCvhUKlMpofFM2Y/Fu1xZR/c+dxsazNHe
18 | GOqPIRPcbXGcK97H3EasBJ7oHrGV3HqOjkT+Y936Esgr6Y/jJi9NVNeV6Fwu8nWj
19 | O16Q3Yxj1G28uMcbv54SbJ0DzdXfrVs8ylFYd4becVEx2h03qthBM5j70epaHvrP
20 | HFA34mxzOcCsZ3Gc+4qsAOQp
21 | -----END CERTIFICATE-----
22 |
--------------------------------------------------------------------------------
/docker/certs/buildmaster-phase2.key:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIIEpQIBAAKCAQEAzuwsg3UgB1/xlhuOPF1uXkmsooEyof+/x0rZhDDURCPr8bjG
3 | UvzrWsHcKqrQCujVslVFDSqusmrhM2ONFaIbr3dGSPXBbCRfT6pi6f6XcvPtaeWc
4 | OA/grXqb7wVUQf8t31lQhWexqmc2lietF6wC2iBOvi7/G352NkYOwy+cyodq9VUQ
5 | kvT2BmL/B9On4SQkhblhovN4TtcvyRNuT87J+4yn8GmAu586Gss5xhvFIxnr016v
6 | xA/MNShKLGjsgWzn6THaJTTtAQYlBpRSkoFo+mOXHXqB/WSU0MNDybxu3Puqha8v
7 | RFMTcrynEuGya9bpUMdvBedDDmH7aCmUJp0lIQIDAQABAoIBAQCiad3GkPhC0Zve
8 | JmLOHseyWkGWi7IiVYtB8g4pI+p4UXmwkUeZe8t/wQn+s3P4HCkfgK5iV04n3JxG
9 | qazjfgoc/5G7UqovJ51n+mUJJTvajnua0w57Mns3rzUVtykbZB+tX31EOM/K4JiH
10 | 4zyJiAn1C8bCoEC8Y9aFYxfTPFvk2lxYUh4mPe+rENRTQz6Ms4uVpXnOelcydLnA
11 | O3BpYE4GBZOT8+qWhiXI6ofuMGGno/h1s4gBfDpWpEDS/k00UHsArmur/+Wl7Ii3
12 | Mhd0+YwzBJu3hut9l5kXeS+vHAcdtjI6rN1tGIhC/bI5h1kUFuGWPyj8poZ6dS1a
13 | YKk1zCUBAoGBAOgCosDLzZfVBPjyD3S/c0rQzCSv/TCDS4jWYg4W8bcWTX5BnGHq
14 | d6Tq/7DqF7F444+GTX62kAcjEmKj0/ZV9sRI2rJ0cZVjJZLWf9PeJnn2sKRZnMKm
15 | Co1jWPx6j3ei1eszt+rHkNnSWSBvpvV00gnvVz/KeKXqrrKjRsXZ4BqNAoGBAORR
16 | dcNauLyqPPtQpwwdYWvbEAmmVyhwJ4AlJoDsvKD/bQ9HaI8eTyz1vt6nV13/EQPe
17 | Af6LXjAngx18AVUmxmgFGdxPzLrh3hwAzr+lD9b7+KDEzwBfsnYImKpS3B0UvHsn
18 | xKoKn7hVv7Uu7T+TU5heK3huE75/iBZye263LDnlAoGAWuSldFijq+cFcq4KRhe3
19 | CElyoLDdxUK6tdofQhvVCIBRTbRQmr+rrc1u7JVZQOr2Y8Ue2RSWqmUgD8rVAH2/
20 | FjjgjOc3lGnqT3N/UraGZmOx4kFFO7CnE6snMoqlaB5AJkv1sqIKanuQ9TcotqLU
21 | opXEnbYpu/6MAGodEOgBdwUCgYEAlHSXIuf5WMtEjZCanZj5AH1XBz8+Ss/qDzuY
22 | tpm36ONMkXO3+98UUkKT0ghzGW5BLQeMumr6WktGyMQZxblJptcyQnssvpMgrYqk
23 | 5B26l+oMdtShWDfhPThHs+/eqZYTG+z2xi3UPrKazX9uyjjMUTViBJNmiSY5YYcc
24 | 1ReLTTUCgYEAyXL3k1vpoq2nKWk7SJ8/6C4YrsAbeeqQ2cJiUWPvO81Gxrgb4Tm1
25 | rNOKpXTrRj+4HQ5ovx8Oou4FyUFVL01p5bdKIMtZAXKqgPlqve1kB4rSAjhajiud
26 | J0+atj4hTgqQc8ZEOX6PVhLt35v9TQgiM3AV+e6Q6FmbooDj71DJ6x8=
27 | -----END RSA PRIVATE KEY-----
28 |
--------------------------------------------------------------------------------
/docker/certs/ca.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIIDsTCCApmgAwIBAgIUUmlVvmL3Dz698ZIH9IZ95NfWyW8wDQYJKoZIhvcNAQEL
3 | BQAwaDELMAkGA1UEBhMCWFgxDjAMBgNVBAgMBVdvcmxkMSIwIAYDVQQKDBlCdWls
4 | ZGJvdCBDb21wb3NlciBUZXN0aW5nMSUwIwYDVQQLDBxCdWlsZGJvdCBDb21wb3Nl
5 | ciBUZXN0aW5nIENBMB4XDTIwMDIwMzE2NTAxNVoXDTI5MTEwMjE2NTAxNVowaDEL
6 | MAkGA1UEBhMCWFgxDjAMBgNVBAgMBVdvcmxkMSIwIAYDVQQKDBlCdWlsZGJvdCBD
7 | b21wb3NlciBUZXN0aW5nMSUwIwYDVQQLDBxCdWlsZGJvdCBDb21wb3NlciBUZXN0
8 | aW5nIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyZNSkk631K+0
9 | A+R8HJszw0hqWQo46E3SfYpc746JejXhhrASpKiOHZo2TLuQ4g1XBHsUbY3yt3kp
10 | wW3IR/WomxecfadM+g9JPWNzmAvrhqvE9SFrp6uC0q70Kz9huYq8mu+/n6uds+Bt
11 | RJfdGXxzUMCpUmtlybTqT11nkkRYtWyH/l+PHDpephrqCXd7Zx47q9d4ypavo584
12 | nQ68bV8xosSM/SL1Yo7tkP64WQEsvuA6nhpXIjnlpHVsKhnxxuFGgDGqAkiogdjY
13 | hHIqSRngIlG1yQSATMgyiINYqvloFR0/q1CoU15021l5OnA3ddshYivCPWZIFvNN
14 | eTkF5dxpQQIDAQABo1MwUTAdBgNVHQ4EFgQUe9rUZI62smrqO3x4BOxRJDjuYP8w
15 | HwYDVR0jBBgwFoAUe9rUZI62smrqO3x4BOxRJDjuYP8wDwYDVR0TAQH/BAUwAwEB
16 | /zANBgkqhkiG9w0BAQsFAAOCAQEAPzvuWMXNuP9mTHQ9Jni2ytMXjtvW70G6QUVM
17 | 36JssU7S2kyGGtXepgtuqQ1qwCcU+IZUhbGjr8uoD6hnD/GFxHO1SAJFM1E1qw+U
18 | 2aUreE/DjSLJPj3aHorw19tpnEhzGhd3sK4IruI2iyzm7bBrHjT27u7Vhb5T9O1Q
19 | HWjcM80ntuBZBWZoIb8SCQSXJF0bVSwXEPtr1l6t5iSbe9GDS4BrrRedCIEOlCk4
20 | 2WiKi1gACmxmI9Tz8alZPpzuxWs10Ft6FVHjLsH6ovDjMCBwxie2HxUaIzUK9Huq
21 | X4j0ifxFUcknI5YTWM/zUSWqFJo2aBgsgq7Q9VVr+D2UiBjSVQ==
22 | -----END CERTIFICATE-----
23 |
--------------------------------------------------------------------------------
/docker/certs/ca.key:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIIEowIBAAKCAQEAyZNSkk631K+0A+R8HJszw0hqWQo46E3SfYpc746JejXhhrAS
3 | pKiOHZo2TLuQ4g1XBHsUbY3yt3kpwW3IR/WomxecfadM+g9JPWNzmAvrhqvE9SFr
4 | p6uC0q70Kz9huYq8mu+/n6uds+BtRJfdGXxzUMCpUmtlybTqT11nkkRYtWyH/l+P
5 | HDpephrqCXd7Zx47q9d4ypavo584nQ68bV8xosSM/SL1Yo7tkP64WQEsvuA6nhpX
6 | IjnlpHVsKhnxxuFGgDGqAkiogdjYhHIqSRngIlG1yQSATMgyiINYqvloFR0/q1Co
7 | U15021l5OnA3ddshYivCPWZIFvNNeTkF5dxpQQIDAQABAoIBAAxvbTmfZ5HUYQuY
8 | bdup63CRqBXkHoyeOG0MOx2AMpT6e/Y+KHhF+bZzzAPB0ndPkV2yZhk4F5AgBmZr
9 | al6eFg+zTjEmQAZxKPiDiR8JJTeCozzoGecXGpP6vQ9p8pJqr6XzQmQMR0dQgg0o
10 | PTiUR9zkdc2i2H2wDoBk573LP5m5gzRzGSJe4WvhFKKEjjae3CrHdMUYjOdu85D7
11 | qExvo45GaWN9uo0Ylgmt1jNkjO411cv9vAB6WTg1Lw93pV9blih4aoII5KXhk/3V
12 | gHhLElrxrLs4ckeG19fUHEs/xuFaFPPMWhPf5crGvtOP1UyzlNMvYsME7xQfEjEV
13 | VzLEDq0CgYEA9B8zq9BC5YIPtEpiuwgNYmIYx5KY5BMfwuE3jW/BG5E6T+wOAT9a
14 | Ohw5iOKetUYmYyPQn1KWHzumXG/lMkrdsRCffZVpdMJSl+/E0F8l6M6spwpbtCD7
15 | 724pWusWP+gZEXyeo2Sos32euzJs8b5NrMUL4XPbXdoZVGIXfoeFNp8CgYEA02Io
16 | 8C8nzIwsGnsw0aV45VdyGrywOqCnSarrjfa4PNNMljn6i/0D//4N8mh4Ws2WsvQU
17 | qChkKITlpkJsq0kLZxUdWcvzvNlH9EFBHoiImUvq+5DOD5ucr5b3hZwrtnI6C76J
18 | oSB7BCAV8oDpMYwvtahRlkxQ+Lv8typIc8vNtB8CgYEAkblCbfB+zmPKHhQ9RWo0
19 | GyN1qURQMe1ci0dHkw0/18Xkair93S7FHGUWzyiAFrOOXKXXdhOle3VvBETjKxdX
20 | qMfEfQHAlqsdBIdjFAOILKWamIftX+REn5NB6nzkpjdVJ4Qdamm+7o4xP4uFTvUL
21 | hvOE+QrgyvBTKHT5k1UwZv8CgYBDg6KXtDf9+PdNLfOVwkgu2BM1vvZ6gz1rJhA+
22 | M4L8ynA3uyTu+U0Bwl8qAXwPZIKxfYvreZsj5e6Df8u9mYLu1aueNqoOs0dsGDt4
23 | SRt3+ut+le16xrTw0EMWhZ7gkvM/NQg4Umt4EddwsQPKM4A5gR0t6Aokp8Y7qmGh
24 | uoJk+QKBgCMEeqEfsuQ8iYfeVNjxoZnTJsWN1cb/ZZzCfHx/Fbo1XXDchcB7MNNM
25 | 56Y6eVRs0tz9f+UNNnKaRNIjJUfqnibsInfBYf7wYJbG4azyD1jORCpf53srOAue
26 | 3gLVBrzUbaul5nRNHLVsTB7Io+vlNTlQ5PJ6OEblIQcZmukeQ9Fo
27 | -----END RSA PRIVATE KEY-----
28 |
--------------------------------------------------------------------------------
/docker/config.ini:
--------------------------------------------------------------------------------
1 | [general]
2 | title = OpenWrt Project
3 | title_url = http://openwrt.org/
4 | workdir = /master
5 |
6 | [phase1]
7 | status_bind = tcp:8010:interface=0.0.0.0
8 | status_user = admin
9 | status_password = admin
10 | buildbot_url = http://buildmaster-phase1:8010/
11 | expire = 1209600
12 | port = ssl:9989:privateKey=/certs/master.key:certKey=/certs/master.crt
13 | config_seed = # Seed configuration
14 | CONFIG_BUILDBOT=y
15 | CONFIG_DEVEL=y
16 | CONFIG_CCACHE=n
17 | CONFIG_KERNEL_KALLSYMS=y
18 | CONFIG_AUTOREMOVE=y
19 |
20 | [phase2]
21 | status_bind = tcp:8011:interface=0.0.0.0
22 | status_user = admin
23 | status_password = admin
24 | buildbot_url = http://buildmaster-phase2:8011/
25 | port = ssl:9990:privateKey=/certs/master.key:certKey=/certs/master.crt
26 | persistent = false
27 |
28 | [repo]
29 | url = https://git.openwrt.org/openwrt/openwrt.git
30 | branch = main
31 |
32 | [rsync]
33 | binary_url = upload@rsync-server::data/bin
34 | binary_password = secret
35 | source_url = upload@rsync-server::data/src
36 | source_password = secret
37 | sdk_url = upload@rsync-server::data/bin/targets
38 | sdk_password = secret
39 | sdk_pattern = openwrt-sdk-*.tar.*
40 |
41 | [gpg]
42 | key = -----BEGIN PGP PRIVATE KEY BLOCK-----
43 | Comment: Example secret GPG key
44 |
45 | lQWGBF0ZCWsBDADXslVt7Rk/bPIduao3exEqGhzgR+Wv7i8H/gxZdxGbe+LpX04h
46 | D60LOpCKf1T1MV0lPNk4FXhoj7I3qa1VQxDAg/6teBWIC4bKKj44pq09sljPVxRx
47 | LJARWjBTM7GgGnu+8/UWTMDoDRLxSabDlFU3sWo4Xh6iCom8IjiZaOcmtDUlOjJs
48 | 0jwhelcmULPFBRVhIglNaHEaC06r/4jhpgzyEITQkQsh6QVYbMRtEi2bXoqfxu9f
49 | /1CvYjO/4A6F9G3aG0ubu7SyggQ5lcObVr9poDPZ79x3e+1wGILUfUImE4MxVmN/
50 | WNcPhJ13J6FvmLH2cj8oJhl4U5oWyxMRZg6CDpEO5UlP8wwB25DJ+5d/qsX3gEC5
51 | chJQhwHgthHBGJMbbvCPXuSDW4s4TYbyVuG8IntLuzaowmjyiKH/BVUubFpMoM5p
52 | 69hwpjI30T3Dy5hhrgclIrbgeAInZ7y03VTjLbb9FuYSfGGi9FTcW+FJAdw3lWzC
53 | vARoxtbiJ+J2QWkAEQEAAf4HAwKFxhkeBLI6K/CJR8c3UgEPSwvUq5B1KTtBUsBq
54 | wODF1O/tZnZG0YREkqt2HTFU9nGYKyCHiKVqksBLDJx2aL++MwZIyelWTsa1juFV
55 | 1VFVp0ggHjHoVBGk37vpoHgqq6kATEKx92Hgd0AMsyvplQrxCtKu3NZu1V+O1wP4
56 | 1bgMqJy3uSVXzko6n+DixQOZCNf+6/3r+2pR3XK5aPc2hhMa+CYbNcqWfiVEnTIJ
57 | tskohTsosnUUtPeq2B7c/dTdhGkhW1GiZLlAgOrpYqFqZgUUFwvnSeX3Gz73re6C
58 | tfcWDVZau6dAm9j7z/fmXEmWwgmgPOWTxxbcSoKscFZL4e2rVxghtCsDTHH1PO5O
59 | CJwPvpYty7CmkassnKzYFZ14krqAvCGE3xT6NzesdSyEcvu1RiZfO/7iWWHwNJbk
60 | k0NEgxV+AB8zmg3hUd4D+XlMQLrkZKGGc22BOux3lS16oRvKoKtsjj0OrQ0j0bYY
61 | 2YS3Q0Bvbf9YH3h2HXfrjI3JE55MpC5s8bnXRl7ioH+qWh/E2KaZWQPmx566x+hf
62 | hof89ODYc72LKuEOtzSbKGX7O1oivw0IPGm73yTHj6RYhJ16HmNvSmyMvuZCfwpo
63 | cVM2muVnhJj4syrenvr5GeltYlLRrrxj0WWZnCqHo1MyFz7Ax9+knFqrTFiZ6wzU
64 | vVI+/DWX/peMRlFtkxSP3KgFvx/TUu6j1rTsbuH+QauI4AX3RSEqpma2dP4yN3Tn
65 | JDVanvpYqqJzHpmhIDDBDDRAEvLNf7bsWSctaOe4CLjGZC/4AHL2Z55QsZ627p0G
66 | 8TwV7lcsWU4q5zdZ8NDyfXuLTNv5A8/zrMy9NeqCitA7eOe+tBS/N7MBTRzZ7mKK
67 | H4AXRpzrwBb0T6F6pjtrYp7cUTu6r1aBP2W61VzWWZChFM1AWfk3qgX4TvGikpe9
68 | RfyUfPdALboZ4ZI2opoU9TQcjyhdLyfojMofoOdYZl68tImdd44+8sidj9ZW7JB3
69 | +rrEBm4yWNOhTJnVnYaau+gY65WGYOb2rnSPlNa81aC2j141U2bxyigqTENH6W6N
70 | xCAdIxWvXIYIRDSfrLFryGHdw/+O0SerK7NdCB3Yhb+YrzGMB1BOLFhbuXPnDMeB
71 | JMN4WGCrxBl9a7Mtck/VSTXYPGqDA33AgOAgiJk/hVCc7Uq3TElro2FlvlOMUUzU
72 | vAcM3ZMVDTa9FG9BGzpusf6P7JHfLJ/pIk6M0AfgWLnFSyR7OQZ26ImfWkOsRnqS
73 | LzuQC4rpPQ8JViOwNdMsJ9Lh9ZIxSmeKdYAIdj0Rjv8xzE+QYVxVlCNzqA5XQSnW
74 | ExyJIb5ZWWgfh7x60OF+7FcmXl7Z8zorxbQnRXhhbXBsZSBHUEcgS2V5IDxkZXZl
75 | bG9wZXJAZXhhbXBsZS5vcmc+iQHUBBMBCAA+FiEE8tv6Y8FEu+A6mMiU4agkU1Bn
76 | 2NgFAl0ZCWsCGwMFCQPCZwAFCwkIBwIGFQgJCgsCBBYCAwECHgECF4AACgkQ4agk
77 | U1Bn2NirAwv/dNjiPyvLH57N9d0DTH+8G53JMaNNZiIeW4cbE1lTaVL1nL1cwEkp
78 | enpCWg1S6OKxRQ+gDrPP94dwJFCdEgPdRV7+I4OU6fiPaHKAO7xZQM4MtlWpQnro
79 | qXg7oC5LEj9gGNOE6mmtNEPd5xuIyfWXSbVd2222EbpdmSTrP5L52gehlG4o0GRc
80 | GT3Y7o5LZ6VcJiZJWbjpzicLcJ7NJNIJ8ektYcSugKOEheXW1/ys0oEFlQ2SHuvZ
81 | kjKbfYrCqj6pPd3oJIb4EDc5RK9ADGAsOKnHxcIMy/UyielushtG6E0x6OmONJTm
82 | VNOz1i09nVaIHc4lM5Dq7ktrZQw//JcIJNH7OcnpcvFnnAsnE+UZEJNRIruPMJul
83 | 6FYrAGv+buuJ1BhcoCZeJ6ETzj0obrDtMhgpmbfM3HUhqUBJUZumx0zvMUEz+Mtx
84 | +YiN6ECSFad0uLQPp4Qv+atBDvVIK8uSevJIZlxXgYSYpcPH2l4ZlANhF3Z/MWjc
85 | 0aK6p/W4Rha1nQWGBF0ZCWsBDAD8WYToIsYRAilX19cZ7V6RG0lGY6CpF8aOKRyY
86 | LcH6T0lva0GrEXVwo2ms3bNSUnLzE8SKOPPam8FMT7xzEZ3caLnI6LB/n9DWPFI0
87 | BVNOEBwFf7aadTBAZIRarO7fqIey/QnkBdw2UQYFTZBQMs7ov9LvZqmu0Ya0b/xs
88 | NfhwWpb2QUGeeZThJlEWsatELphE81BdC4FvpPiAXN+JgL5prGQz2p87VDo6uu+9
89 | W8Si81WQLvVXXEG0175UorQ6CwBiQVLjiaCGQ+Yn0ZkJVpwReKsSdwP7LBsVGuA+
90 | YNoJW0+2oX+suFHrSbGdUGaSKEiR4AnHoSuFu4eK8FZNXyg9zoBv6M5QKU2lGFT6
91 | V8TwbEIhLIIAwil+T+ZUu4tKbuDDFKh4CPalx5GehNN7ZRFOZgNz/rf7aOtA4s3P
92 | kbB5fjR3+Z9ns70xej9z57kIIB1KgcX0Ov9kp5zNtZdpPRzRURPAEmhNveAHtEbR
93 | aqBH64vxilDUXvcs1QlP4066TcsAEQEAAf4HAwJI+fXlrlUZcvBCEaouhaNghWDA
94 | 2dSvGyzrp32wmLXoRwC0wUiy4hUduxrEYECd1wjPM9t22j7++7SAfqdfto6RNXYT
95 | AramO2Aj8ldqhJoCfkIh64pU0/hUmQnJ56KTdXLNuoDbF43usnDO0KvfSmvHMyBs
96 | tADZ67fgit3043LZFEPbAov33b37qoLNvrqdQHes2olE32mpNOJoVyG3P5/3YixT
97 | TalKWMhXiZ8xePzyrcw85hj/SBNIeVeJ4QRotlQJA7dvVxbEtATzN7wCv6B9BrE+
98 | Lm3Es5zDrVX6n2zcnwRWh40lADZvEzKSXEI2priFEpdjU4HmMP9BZ2jnMMYB8wkP
99 | IMSvlgWuXSt9t27lnpE1G/Wpk+Q02zTOTDsLRk9crUFTw3YoyPsp7QdQCtew0z3S
100 | yIn/+Wer+p++d2/qgiZxlh15V8msj5qX6ALTZpZ0hg1+dpvvQMqTRJNiX81oenmF
101 | FEWmDA6Q4cRjYhub2nDUdmxU5z3dlxr2F2Lrxuheg3zQyt6P0FD7yzL6aDsU7Eok
102 | rvZrYcShWBPW7LigjHNKrdUoTLHAqj5asuPgqw+hQJ2x7e0VC3bAl/e8M1/vQ0Uw
103 | UAD8q6wbE7+/C419K7JZchZeqfARl7qC29xsh22yLU33II5yzEwshaQCSF8RWBWi
104 | D5Dvdkw9ZV94gz+8ZnAezFpkd1igA7Qcnfcsy/JwzsaOHkZN2nwfTeK2WW+NwMOu
105 | PkJG8JuFpQ5QwcL5axion7QrvhrfdmV/o//IaAFyxnhOo3IddLNm8S+qpiU6HIj3
106 | pyr6PJFlUq9gxpuCrRW+yXcvlqEdQygmDDr1UWlrblVoKgbIkZAJPvlCD29jhbaM
107 | UryrhhOSURmf158FU5wA9DzlcsCwqdCPTnbeqfnxLRS/+hX3zB7T4ezIBya09o0s
108 | iCEHP+8cOVsf2metZJ2ONrXnBp+LGoMqD8I4vGNIamvIZivPwq7LefUk1XhBd7Ed
109 | aP50tYZuEQnoNpa4S6sR2qgxxCRLmqYRJvaabYdEUBdV8F/0K4XxJfMLPLZfhRO0
110 | wdL1fRx2cxoeZH0BsEntQVHdhJJqRmPJNt8SyahwRrpoJ1Lb5iZm9yj0ZU4dP7h/
111 | qbc66QCg/eqA+0r66xd6bzxORM/HjM0itNPkVp/4PdSKKQ0JnYRwtPNE3pBvuDCK
112 | EZ5GPuCM0szInM11sLTf96HBL0G4VWspKqZIDS7ezZ7zj5TBooLTzglmy2rA0UAE
113 | SrxvD0sI3ULEiNIEjogJJtj9MSATjPLXGzt+ekrlb+i6c1A2IrrPOUkEhAa6F4HL
114 | 0HcJn3/sWfOVH+mj5AinIsiosHvZiYOGoMBGHAmxUfi/74kBvAQYAQgAJhYhBPLb
115 | +mPBRLvgOpjIlOGoJFNQZ9jYBQJdGQlrAhsMBQkDwmcAAAoJEOGoJFNQZ9jYMusL
116 | +gLg/aXZj8w72Zdabvsuh+FsP1dRAvEpF+RFfmUTyCdxOA7xY9DScCaa5gBN35KI
117 | EsKE3EQJr8W+iX0+jIrsyn00Tb4D0MA7oVHDYfDOlxXTk+NHs1GW6CN5aXSCkulT
118 | 7dP+09oqPXS4z/bcX2UTbhh01VgYAWrMOWj5ycza+OrF2+hK7U7sVQoTN7kMLvQs
119 | C2CVJm4wppT6CYMhKOxVEkvL3fcK7ZYXRhBGij97vR7kCbXMoSDPKT7b0Ulm+pOj
120 | k69gOBkKw95S2sGXifyp2t6tWrcfNf0K7cnp0yS0OYT64t670x9g/6qxduT4VueJ
121 | gR8gW5jTxhyqiIKQ2t7rdlsOQHUAHDolwC/d+BoVC7kUj+450gzuIt9TF0yYRpiz
122 | PPjCSsIQfwMGOGj8P5X06tjIAiJy79J1QoM6NaRaT3AX39edzUS5SsX2HaHPvk4D
123 | UmP52XKFIRFi1EeDPLt1/JMzHc8+5fVxtXO+mFHKxD3Q3q9bAK7qcNsj0jtY7UzD
124 | 3w==
125 | =zJBW
126 | -----END PGP PRIVATE KEY BLOCK-----
127 | passphrase = secret
128 | comment = Example GPG key
129 |
130 | [usign]
131 | key = RWRCSwAAAADUvtjCkFEF4bWWxpPBo9o8R5FK6Rz5aPUsaZONLu8kxIjud9Fd+Mgu7J2fFJDVyKFAXNH6pKS+AuBW3v+TQT5m1J0W/JYTjqzIrgAZhRtm5v3vSKRl3HUD2zEEbG5j3tg=
132 | comment = Example usign key
133 |
134 | [apk]
135 | key = -----BEGIN EC PRIVATE KEY-----
136 | MHcCAQEEIIP54p1G0UgCleLObh07Gxq0S0Iz22OQpkUj8S1AzXB9oAoGCCqGSM49
137 | ...
138 | -----END EC PRIVATE KEY-----
139 |
140 | [worker 1]
141 | phase = 1
142 | name = buildworker-phase1
143 | password = secret
144 | builds = 1
145 | cleanup = 1
146 |
147 | [worker 2]
148 | phase = 2
149 | name = buildworker-phase2
150 | password = secret
151 | builds = 1
152 | cleanup = 1
153 |
--------------------------------------------------------------------------------
/docker/docker-compose.yml:
--------------------------------------------------------------------------------
# Development/test stack for the OpenWrt buildbot: one rsync upload target,
# plus a buildmaster and a buildworker for each build phase.
# NOTE: compose file format '2' is kept deliberately — 'cpuset' and 'links'
# below are v2 options.
version: '2'

services:
  # Upload target for built artifacts; SHARE_USER/SHARE_PASSWORD must match
  # the [rsync] credentials in config.ini.
  rsync-server:
    build:
      context: ../
      dockerfile: docker/rsync/Dockerfile
    image: rsync:latest
    restart: always
    cpuset: '0'
    environment:
      SHARE_USER: upload
      SHARE_PASSWORD: secret
    volumes:
      - './build/output:/data'

  # Phase-1 master: web UI on 8010, worker protocol (TLS) on 9989.
  buildmaster-phase1:
    build:
      context: ../
      dockerfile: docker/buildmaster/Dockerfile
    image: buildmaster:latest
    restart: always
    cpuset: '0'
    environment:
      BUILDMASTER_PHASE: 1
      BUILDMASTER_CONFIG: /config.ini
    ports:
      - '8010:8010'
      - '9989:9989'
    volumes:
      - './config.ini:/config.ini'
      - './certs/buildmaster-phase1.crt:/certs/master.crt'
      - './certs/buildmaster-phase1.key:/certs/master.key'
      - './build/master-phase1:/master'

  # Phase-2 master: web UI on 8011, worker protocol (TLS) on 9990.
  buildmaster-phase2:
    build:
      context: ../
      dockerfile: docker/buildmaster/Dockerfile
    image: buildmaster:latest
    restart: always
    cpuset: '0'
    environment:
      BUILDMASTER_PHASE: 2
      BUILDMASTER_CONFIG: /config.ini
    ports:
      - '8011:8011'
      - '9990:9990'
    volumes:
      - './config.ini:/config.ini'
      - './certs/buildmaster-phase2.crt:/certs/master.crt'
      - './certs/buildmaster-phase2.key:/certs/master.key'
      - './build/master-phase2:/master'

  # Workers get the remaining CPUs; name/password must match the
  # [worker N] sections in config.ini.
  buildworker-phase1:
    build:
      context: ../
      dockerfile: docker/buildworker/Dockerfile
    image: buildworker:latest
    restart: always
    cpuset: 1-3
    environment:
      BUILDWORKER_MASTER: buildmaster-phase1:9989
      BUILDWORKER_NAME: buildworker-phase1
      BUILDWORKER_PASSWORD: secret
      BUILDWORKER_TLS: '1'
    links:
      - 'rsync-server'
      - 'buildmaster-phase1'
    volumes:
      - './certs/ca.crt:/certs/ca.pem'
      - './build/worker-phase1:/builder'

  buildworker-phase2:
    build:
      context: ../
      dockerfile: docker/buildworker/Dockerfile
    image: buildworker:latest
    restart: always
    cpuset: 1-3
    environment:
      BUILDWORKER_MASTER: buildmaster-phase2:9990
      BUILDWORKER_NAME: buildworker-phase2
      BUILDWORKER_PASSWORD: secret
      BUILDWORKER_TLS: '1'
    links:
      - 'rsync-server'
      - 'buildmaster-phase2'
    volumes:
      - './certs/ca.crt:/certs/ca.pem'
      - './build/worker-phase2:/builder'
92 |
--------------------------------------------------------------------------------
/docker/rsync/Dockerfile:
--------------------------------------------------------------------------------
FROM debian:12

COPY docker/rsync/files/entry.sh /entry.sh

# Install rsync in a single layer; skip recommended packages and drop the
# apt package lists afterwards to keep the image small.
RUN apt-get update && \
    apt-get -y install --no-install-recommends rsync && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* && \
    mkdir -p /data && \
    chmod 0755 /entry.sh

EXPOSE 873
VOLUME [ "/data" ]
ENTRYPOINT [ "/entry.sh" ]
# Healthy while the daemon whose pid is recorded by entry.sh is still alive.
HEALTHCHECK CMD xargs kill -0 < /tmp/rsyncd.pid
15 |
--------------------------------------------------------------------------------
/docker/rsync/files/entry.sh:
--------------------------------------------------------------------------------
#!/bin/sh
# Entrypoint for the rsync share container: renders /rsyncd.conf from the
# SHARE_* environment variables, optionally enables authentication, then
# execs the rsync daemon in the foreground.

(
	echo "use chroot = yes"
	echo "[${SHARE_NAME:-data}]"
	echo "log file = /dev/null"
	echo "uid = ${SHARE_UID:-1000}"
	echo "gid = ${SHARE_GID:-1000}"
	echo "path = /data"
	echo "read only = false"
	echo "write only = false"
	echo "comment = ${SHARE_COMMENT:-Rsync data share}"

	# Require authentication only when both a user and a password are set.
	# Note: separate tests instead of the obsolescent '[ a -a b ]' form.
	if [ -n "$SHARE_USER" ] && [ -n "$SHARE_PASSWORD" ]; then
		echo "auth users = $SHARE_USER"
		echo "secrets file = /rsyncd.secrets"
	fi
) > /rsyncd.conf

if [ -n "$SHARE_USER" ] && [ -n "$SHARE_PASSWORD" ]; then
	echo "$SHARE_USER:$SHARE_PASSWORD" > /rsyncd.secrets
	# rsyncd refuses world-readable secrets files
	chmod 0600 /rsyncd.secrets
fi

chown "${SHARE_UID:-1000}:${SHARE_GID:-1000}" /data

# A stale pid file from a previous run would block daemon startup.
rm -f /tmp/rsyncd.pid

exec /usr/bin/rsync --daemon --no-detach --config=/rsyncd.conf --log-file=/dev/stdout --dparam=pidfile=/tmp/rsyncd.pid "$@"
30 |
--------------------------------------------------------------------------------
/phase1/buildbot.tac:
--------------------------------------------------------------------------------
import os

from twisted.application import service
from buildbot.master import BuildMaster

basedir = '.'
rotateLength = 10000000   # rotate twistd.log after ~10 MB
maxRotatedFiles = 10      # keep at most 10 rotated log files
configfile = 'master.cfg'

# Default umask for server
umask = None

# if this is a relocatable tac file, get the directory containing the TAC
# (note: `os` is already imported at the top of this file)
if basedir == '.':
    basedir = os.path.abspath(os.path.dirname(__file__))

# note: this line is matched against to check that this is a buildmaster
# directory; do not edit it.
application = service.Application('buildmaster')
from twisted.python.logfile import LogFile
from twisted.python.log import ILogObserver, FileLogObserver
logfile = LogFile.fromFullPath(os.path.join(basedir, "twistd.log"), rotateLength=rotateLength,
                               maxRotatedFiles=maxRotatedFiles)
application.setComponent(ILogObserver, FileLogObserver(logfile).emit)

m = BuildMaster(basedir, configfile, umask)
m.setServiceParent(application)
m.log_rotation.rotateLength = rotateLength
m.log_rotation.maxRotatedFiles = maxRotatedFiles
32 |
--------------------------------------------------------------------------------
/phase1/config.ini.example:
--------------------------------------------------------------------------------
1 | [general]
2 | title = OpenWrt Project
3 | title_url = http://openwrt.org/
4 | workdir = /buildbot
5 |
6 | [phase1]
7 | buildbot_url = http://phase1.builds.openwrt.org/
8 | status_bind = tcp:8010:interface=127.0.0.1
9 | status_user = example
10 | status_password = example
11 | port = 9989
12 |
13 | [irc]
14 | host = irc.freenode.net
15 | port = 6667
16 | channel = #example-channel
17 | nickname = example-builder
18 | password = example
19 |
20 | [repo]
21 | url = https://git.openwrt.org/openwrt/openwrt.git
22 | tree_stable_timer = 900
23 |
24 | # branches should be listed by decreasing build priority order, typically oldest branch first (less build intensive)
25 | # branch section name should match branch "name" option until signall.sh is reworked
26 | [branch openwrt-21.02]
27 | name = openwrt-21.02
28 | gpg_key = -----BEGIN PGP PRIVATE KEY BLOCK-----
29 | Version: GnuPG v2
30 |
31 | mQGNBFX4kxkBDACcTUVUl6zbn4r9tDap0/aCpcK9MO+HPatS7p2aBGY51kh78Ixr
32 | ...
33 | HwHLaFTMvYFY7WJDwA==
34 | -----END PGP PRIVATE KEY BLOCK-----
35 | gpg_passphrase = secret password
36 | gpg_comment = Unattended build signature
37 | usign_key = RWRCSwAAA...OihABfuLvGRVfVaJ6wLf0=
38 | usign_comment = Unattended build signature
39 | apk_key = -----BEGIN EC PRIVATE KEY-----
40 | MHcCAQEEIIP54p1G0UgCleLObh07Gxq0S0Iz22OQpkUj8S1AzXB9oAoGCCqGSM49
41 | ...
42 | -----END EC PRIVATE KEY-----
43 | binary_url = user@example.org::upload-binary
44 | binary_password = example
45 | source_url = user@example.org::upload-sources
46 | source_password = example2
47 | config_seed = # Seed configuration
48 | CONFIG_BUILDBOT=y
49 | CONFIG_DEVEL=y
50 | CONFIG_CCACHE=n
51 | CONFIG_KERNEL_KALLSYMS=y
52 | CONFIG_AUTOREMOVE=y
53 | build_targets = armsr/armv8
54 | malta/be
55 | mediatek/filogic
56 | qualcommax/ipq807x
57 | x86/64
58 |
59 | [branch openwrt-22.03]
60 | name = openwrt-22.03
61 |
62 | [branch main]
63 | name = main
64 |
65 | [worker 1]
66 | phase = 1
67 | name = example-worker-1
68 | password = example
69 |
70 | [worker 2]
71 | phase = 1
72 | name = example-worker-2
73 | password = example2
74 | # for workers which share a common pipe, ul/dl resource-access locks can be defined.
75 | # if the identifier is the same for both ul/dl, then all ul/dl operations will be serialized between workers having the same id.
76 | # if the identifier differs for ul and dl, then dl operations will be serialized independently of ul operations.
77 | ul_lock = host1
78 | dl_lock = host1
79 | # tag_only workers will only build forced tag buildrequests (i.e. release builds)
80 | tag_only = yes
81 | # if rsync operations must prefer ipv4 ('rsync -4'), set the following
82 | rsync_ipv4 = yes
83 |
--------------------------------------------------------------------------------
/phase1/master.cfg:
--------------------------------------------------------------------------------
1 | # -*- python -*-
2 | # ex: set syntax=python:
3 |
4 | import os
5 | import re
6 | import base64
7 | import subprocess
8 | import configparser
9 |
10 | from dateutil.tz import tzutc
11 | from datetime import datetime, timedelta
12 |
13 | from twisted.internet import defer
14 | from twisted.python import log
15 |
16 | from buildbot import locks
17 | from buildbot.data import resultspec
18 | from buildbot.changes.gitpoller import GitPoller
19 | from buildbot.config import BuilderConfig
20 | from buildbot.process import buildstep
21 | from buildbot.plugins import reporters
22 | from buildbot.plugins import schedulers
23 | from buildbot.plugins import steps
24 | from buildbot.plugins import util
25 | from buildbot.process import properties
26 | from buildbot.process import results
27 | from buildbot.process.factory import BuildFactory
28 | from buildbot.process.properties import Interpolate
29 | from buildbot.process.properties import Property
30 | from buildbot.schedulers.basic import AnyBranchScheduler
31 | from buildbot.schedulers.forcesched import BaseParameter
32 | from buildbot.schedulers.forcesched import ForceScheduler
33 | from buildbot.schedulers.forcesched import ValidationError
34 | from buildbot.steps.master import MasterShellCommand
35 | from buildbot.steps.shell import SetPropertyFromCommand
36 | from buildbot.steps.shell import ShellCommand
37 | from buildbot.steps.source.git import Git
38 | from buildbot.steps.transfer import FileDownload
39 | from buildbot.steps.transfer import FileUpload
40 | from buildbot.steps.transfer import StringDownload
41 | from buildbot.worker import Worker
42 | from buildbot.worker.local import LocalWorker
43 |
44 |
# Record this process's pid once at startup; presumably consumed by external
# liveness checks -- TODO(review): confirm against deployment/test scripts.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Configuration is read from an ini file; path can be overridden via env.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

if "general" not in ini or "phase1" not in ini:
    raise ValueError("Fix your configuration")

inip1 = ini["phase1"]

# Globals
work_dir = os.path.abspath(ini["general"].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

repo_url = ini["repo"].get("url")
tree_stable_timer = ini["repo"].getint("tree_stable_timer", 15 * 60)

# Base options for rsync invocations: verbose, 2-minute I/O timeout.
rsync_defopts = ["-v", "--timeout=120"]

# if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
#	rsync_bin_defopts += ["--contimeout=20"]

# branch name -> settings dict; filled by ini_parse_branch() below.
branches = {}
73 |
74 |
def ini_parse_branch(section):
    """Register one '[branch ...]' ini section in the global `branches` map.

    Raises ValueError when the section has no name or duplicates an
    already-registered branch.
    """
    name = section.get("name")

    if not name:
        raise ValueError("missing 'name' in " + repr(section))
    if name in branches:
        raise ValueError("duplicate branch name in " + repr(section))

    # Default usign comment derived from the branch name, e.g.
    # "untrusted comment: Openwrt 21.02 key".
    default_usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"

    branches[name] = {
        "name": name,
        "bin_url": section.get("binary_url"),
        "bin_key": section.get("binary_password"),
        "src_url": section.get("source_url"),
        "src_key": section.get("source_password"),
        "gpg_key": section.get("gpg_key"),
        "usign_key": section.get("usign_key"),
        "usign_comment": section.get("usign_comment", default_usign_comment),
        "config_seed": section.get("config_seed"),
        "build_targets": section.get("build_targets"),
        "kmod_archive": section.getboolean("kmod_archive", False),
    }
    log.msg("Configured branch: {}".format(name))
104 |
105 |
106 | # PB port can be either a numeric port or a connection string
107 | pb_port = inip1.get("port") or 9989
108 |
109 | # This is the dictionary that the buildmaster pays attention to. We also use
110 | # a shorter alias to save typing.
111 | c = BuildmasterConfig = {}
112 |
113 | # PROJECT IDENTITY
114 |
115 | # the 'title' string will appear at the top of this buildbot
116 | # installation's html.WebStatus home page (linked to the
117 | # 'titleURL') and is embedded in the title of the waterfall HTML page.
118 |
119 | c["title"] = ini["general"].get("title")
120 | c["titleURL"] = ini["general"].get("title_url")
121 |
122 | # the 'buildbotURL' string should point to the location where the buildbot's
123 | # internal web server (usually the html.WebStatus page) is visible. This
124 | # typically uses the port number set in the Waterfall 'status' entry, but
125 | # with an externally-visible host name which the buildbot cannot figure out
126 | # without some help.
127 |
128 | c["buildbotURL"] = inip1.get("buildbot_url")
129 |
130 | # BUILDWORKERS
131 |
132 | # The 'workers' list defines the set of recognized buildworkers. Each element is
133 | # a Worker object, specifying a unique worker name and password. The same
134 | # worker name and password must be configured on the worker.
135 |
136 | c["workers"] = []
137 | NetLocks = dict()
138 |
139 |
def ini_parse_workers(section):
    """Register one '[worker N]' ini section as a phase-1 Worker.

    Sections without a name/password or with a phase other than 1 are
    logged and ignored.
    """
    name = section.get("name")
    password = section.get("password")
    phase = section.getint("phase")

    if not name or not password or not phase == 1:
        log.msg("invalid worker configuration ignored: {}".format(repr(section)))
        return

    worker_props = {"tag_only": section.getboolean("tag_only")}

    def add_net_lock(option):
        # ul/dl locks serialize rsync traffic between workers sharing a
        # pipe; identical ids map to one shared master-side lock.
        if option not in section:
            return
        lock_id = section.get(option)
        worker_props[option] = lock_id
        if lock_id not in NetLocks:
            NetLocks[lock_id] = locks.MasterLock(lock_id)

    add_net_lock("dl_lock")
    add_net_lock("ul_lock")

    if section.getboolean("rsync_ipv4"):
        # only set prop if required, we use '+' Interpolate substitution
        worker_props["rsync_ipv4"] = True

    log.msg("Configured worker: {}".format(name))
    # NB: phase1 build factory requires workers to be single-build only
    c["workers"].append(Worker(name, password, max_builds=1, properties=worker_props))
170 |
171 |
# Register every [branch ...] and [worker ...] section from the ini.
for section in ini.sections():
    if section.startswith("branch "):
        ini_parse_branch(ini[section])

    if section.startswith("worker "):
        ini_parse_workers(ini[section])

# list of branches in build-priority order
# (dict preserves insertion order, so this follows the ini file order)
branchNames = [branches[b]["name"] for b in branches]

c["protocols"] = {"pb": {"port": pb_port}}

# coalesce builds
c["collapseRequests"] = True

# Reduce amount of backlog data
c["configurators"] = [
    util.JanitorConfigurator(
        logHorizon=timedelta(days=3),
        hour=6,
    )
]
194 |
195 |
@defer.inlineCallbacks
def getNewestCompleteTimePrio(bldr):
    """Returns the priority and the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: (priority, datetime instance or None), via Deferred
    """

    # Highest pending-request priority; treat "nothing pending" as 0.
    prio = yield bldr.get_highest_priority()
    if prio is None:
        prio = 0

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ("builders", bldrid, "buildrequests"),
        [
            resultspec.Filter("complete", "eq", [True]),
            resultspec.Filter("results", "ne", [results.SKIPPED]),
        ],
        order=["-complete_at"],
        limit=1,
    )
    if not completed:
        return (prio, None)

    complete_at = completed[0]["complete_at"]

    # The most recently started build may have completed after the newest
    # request's complete_at; if so, prefer the later timestamp.
    last_build = yield bldr.master.data.get(
        ("builds",),
        [
            resultspec.Filter("builderid", "eq", [bldrid]),
        ],
        order=["-started_at"],
        limit=1,
    )

    if last_build and last_build[0]:
        last_complete_at = last_build[0]["complete_at"]
        if last_complete_at and (last_complete_at > complete_at):
            return (prio, last_complete_at)

    return (prio, complete_at)
241 |
242 |
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build, ordered first by branch name.

    @returns: list of sorted builders
    """

    # Static per-branch priority: branchNames is in build-priority order, so
    # earlier branches must get a smaller (= more urgent) position number.
    # BUG FIX: 'i' was never incremented, which gave every branch the same
    # priority 1 and defeated the per-branch ordering.
    bldrNamePrio = {"__Janitor": 0, "00_force_build": 0}
    i = 1
    for bname in branchNames:
        bldrNamePrio[bname] = i
        i += 1

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair each builder with its (priority, complete_at) tuple.
        d = defer.maybeDeferred(getNewestCompleteTimePrio, bldr)
        d.addCallback(lambda retval: (retval, bldr))
        return d

    def bldr_sort(item):
        ((hiprio, complete_at), bldr) = item

        # check if we have some high prio build requests pending (i.e. tag builds),
        # if so, front-run these builders, while preserving the per-branch static priority
        pos = 99
        for name, prio in bldrNamePrio.items():
            if bldr.name.startswith(name):
                # higher priority (larger positive number) raises position
                pos = prio + 50 - min(hiprio, 50)
                break

        # pos order: janitor/local (0), tag builds if any [1..50], !tag builds [51...]

        if not complete_at:
            # Never-completed builders sort first within their position.
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        if is_building(bldr):
            # Busy builders sort last within their position.
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (pos, complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    # for r in results:
    #     log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]
295 |
296 |
c["prioritizeBuilders"] = prioritizeBuilders

# CHANGESOURCES

# find targets
# branch name -> set of "target/subtarget" strings to build for that branch
targets = dict()
303 |
304 |
def populateTargets():
    """Fill the global `targets` map for every configured branch.

    Branches with an explicit build_targets list in the ini use that list;
    all others fall back to scanning the repository.
    """

    def configured_targets(branch):
        # Explicit per-branch target list from the ini, if any.
        raw = branches[branch].get("build_targets")
        if not raw:
            return raw
        return {t.strip() for t in raw.split("\n") if t.strip()}

    for branch in branchNames:
        targets[branch] = configured_targets(branch)
        if not targets[branch]:
            populateTargetsForBranch(branch)
314 |
315 |
def populateTargetsForBranch(branch):
    """fetches a shallow clone for passed `branch` and then
    executes dump-target-info.pl and collates the results to ensure
    targets that only exist in specific branches get built.
    This takes a while during master startup but is executed only once.
    """
    targets[branch] = set()
    sourcegit = work_dir + "/source.git"

    log.msg(f"Populating targets for {branch}, this will take time")

    # start from a pristine clone in case a previous run left one behind
    if os.path.isdir(sourcegit):
        subprocess.call(["rm", "-rf", sourcegit])

    subprocess.call(
        [
            "git",
            "clone",
            "-q",
            "--depth=1",
            "--branch=" + branch,
            repo_url,
            sourcegit,
        ]
    )

    os.makedirs(sourcegit + "/tmp", exist_ok=True)
    findtargets = subprocess.Popen(
        ["./scripts/dump-target-info.pl", "targets"],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
        cwd=sourcegit,
    )

    # each output line starts with the "<target>/<subtarget>" field
    for line in findtargets.stdout:
        ta = line.decode().strip().split(" ")
        targets[branch].add(ta[0])

    # close the pipe and reap the child so no zombie process is left behind
    findtargets.stdout.close()
    findtargets.wait()

    subprocess.call(["rm", "-rf", sourcegit])
358 |
359 |
populateTargets()

# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes.

# Poll the upstream repository every 5 minutes on all configured branches.
c["change_source"] = []
c["change_source"].append(
    GitPoller(
        repo_url,
        workdir=work_dir + "/work.git",
        branches=branchNames,
        pollAtLaunch=True,
        pollInterval=300,
    )
)
375 |
376 | # SCHEDULERS
377 |
378 | # Configure the Schedulers, which decide how to react to incoming changes.
379 |
380 |
# Selector for known valid tags
class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter offering the valid release tags of the
    configured branches as choices."""

    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    def getRevTags(self, findtag=None):
        """Return [{'rev': sha, 'tag': 'vX.Y.Z[-rcN]'}, ...] for remote tags
        belonging to a configured branch; with `findtag`, only that tag."""
        taglist = []
        branchvers = []

        # we will filter out tags that do not match the configured branches
        for b in branchNames:
            basever = re.search(r"-([0-9]+\.[0-9]+)$", b)
            if basever:
                branchvers.append(basever[1])

        # grab tags from remote repository
        alltags = subprocess.Popen(
            ["git", "ls-remote", "--tags", repo_url], stdout=subprocess.PIPE
        )

        while True:
            line = alltags.stdout.readline()

            if not line:
                break

            (rev, tag) = line.split()

            # does it match known format? ('vNN.NN.NN(-rcN)')
            tagver = re.search(
                r"\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$",
                tag.decode().strip(),
            )

            # only list valid tags matching configured branches
            if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
                # if we want a specific tag, ignore all that don't match
                if findtag and findtag != tagver[1]:
                    continue
                taglist.append({"rev": rev.decode().strip(), "tag": tagver[1]})

        # close the pipe and reap the git child so it doesn't linger
        alltags.stdout.close()
        alltags.wait()

        return taglist

    @property
    def choices(self):
        """Tag list for the UI, newest first; an empty entry is prepended."""
        taglist = [rt["tag"] for rt in self.getRevTags()]
        # the '-z' suffix sorts final releases above their -rc candidates
        taglist.sort(
            reverse=True,
            key=lambda tag: tag if re.search(r"-rc[0-9]+$", tag) else tag + "-z",
        )
        taglist.insert(0, "")

        self._choice_list = taglist

        return self._choice_list

    def updateFromKwargs(self, properties, kwargs, **unused):
        """Validate the submitted tag; derive force_revision/force_branch."""
        tag = self.getFromKwargs(kwargs)
        properties[self.name] = tag

        # find the commit matching the tag
        findtag = self.getRevTags(tag)

        if not findtag:
            raise ValidationError("Couldn't find tag")

        properties["force_revision"] = findtag[0]["rev"]

        # find the branch matching the tag; guard against a tag that does
        # not contain a vNN.NN version (raise ValidationError, not TypeError)
        branch = None
        branchver = re.search(r"v([0-9]+\.[0-9]+)", tag)
        if branchver:
            for b in branchNames:
                if b.endswith(branchver[1]):
                    branch = b

        if not branch:
            raise ValidationError("Couldn't find branch")

        properties["force_branch"] = branch

    def parse_from_arg(self, s):
        if self.strict and s not in self._choice_list:
            raise ValidationError(
                "'%s' does not belong to list of available choices '%s'"
                % (s, self._choice_list)
            )
        return s
473 |
474 |
@util.renderer
@defer.inlineCallbacks
def builderNames(props):
    """since we have per branch and per target builders,
    address the relevant builder for each new buildrequest
    based on the request's desired branch and target.
    """
    branch = props.getProperty("branch")
    target = props.getProperty("target", "")

    # "all" means no target restriction at all
    if target == "all":
        target = ""

    # if that didn't work, try sourcestamp to find a branch
    if not branch:
        ss = props.sourcestamps[0]
        if ss:
            branch = ss["branch"]
        else:
            log.msg("couldn't find builder")
            return []  # nothing works

    # builders are named "<branch>_<target>"
    prefix = branch + "_" + target
    all_builders = yield props.master.data.get(("builders",))
    return [b["name"] for b in all_builders if b["name"].startswith(prefix)]
507 |
508 |
c["schedulers"] = []
# Build on every change to a watched branch, once it has been quiet for
# tree_stable_timer seconds.
c["schedulers"].append(
    AnyBranchScheduler(
        name="all",
        change_filter=util.ChangeFilter(branch=branchNames),
        treeStableTimer=tree_stable_timer,
        builderNames=builderNames,
    )
)

# Manual "force build" UI; the tag choice drives force_revision/force_branch
# via TagChoiceParameter above.
c["schedulers"].append(
    ForceScheduler(
        name="force",
        buttonName="Force builds",
        label="Force build details",
        builderNames=["00_force_build"],
        codebases=[
            util.CodebaseParameter(
                "",
                label="Repository",
                branch=util.FixedParameter(name="branch", default=""),
                revision=util.FixedParameter(name="revision", default=""),
                repository=util.FixedParameter(name="repository", default=""),
                project=util.FixedParameter(name="project", default=""),
            )
        ],
        reason=util.StringParameter(
            name="reason",
            label="Reason",
            default="Trigger build",
            required=True,
            size=80,
        ),
        properties=[
            # NB: avoid nesting to simplify processing of properties
            util.ChoiceStringParameter(
                name="target",
                label="Build target",
                default="all",
                choices=["all"] + [t for b in branchNames for t in targets[b]],
            ),
            TagChoiceParameter(name="tag", label="Build tag", default=""),
        ],
    )
)

# Triggerable scheduler used to fan builds out to per-target builders.
c["schedulers"].append(
    schedulers.Triggerable(name="trigger", builderNames=builderNames, priority=20)
)
558 |
559 | # BUILDERS
560 |
561 | # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
562 | # what steps, and which workers can execute them. Note that any particular build will
563 | # only take place on one worker.
564 |
565 |
def IsNoMasterBuild(step):
    """doStepIf helper: true for every branch except the development branch."""
    return step.getProperty("branch") not in ("main", "master")
569 |
570 |
def IsUsignEnabled(step):
    """doStepIf helper: truthy when the branch has a usign key configured."""
    branch = step.getProperty("branch")
    if not branch:
        return branch
    return branches[branch].get("usign_key")
574 |
575 |
def IsSignEnabled(step):
    """doStepIf helper: truthy when usign or GPG signing is configured."""
    usign = IsUsignEnabled(step)
    if usign:
        return usign
    branch = step.getProperty("branch")
    return branch and branches[branch].get("gpg_key")
579 |
580 |
def IsKmodArchiveEnabled(step):
    """doStepIf helper: truthy when the branch enables the kmod archive."""
    branch = step.getProperty("branch")
    if not branch:
        return branch
    return branches[branch].get("kmod_archive")
584 |
585 |
def IsKmodArchiveAndRsyncEnabled(step):
    """doStepIf helper: kmod archiving is on AND a bin rsync URL is configured."""
    if not IsKmodArchiveEnabled(step):
        return False
    branch = step.getProperty("branch")
    return bool(branches[branch].get("bin_url"))
589 |
590 |
def IsRemoteShaSumsAvailable(step):
    """doStepIf helper: outcome of the target-sha256sums download step."""
    have = step.getProperty("have_remote_shasums")
    return have
593 |
594 |
def GetBaseVersion(branch):
    """Map a release branch name ("name-NN.MM") to its version string.

    Anything that does not look like a release branch (e.g. "main",
    "master", or names with extra suffixes) maps to "main".
    """
    matched = re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch)
    if matched is None:
        return "main"
    return branch.split("-", 1)[1]
600 |
601 |
@properties.renderer
def GetVersionPrefix(props):
    """Renderer: the upload directory prefix for this build.

    Release tags "vX.Y.Z[-rcN]" map to "X.Y.Z[-rcN]/", stable branches to
    "<basever>-SNAPSHOT/", and main/master builds to "" (flat tree).
    """
    branch = props.getProperty("branch")
    basever = GetBaseVersion(branch)
    if props.hasProperty("tag") and re.match(
        r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]
    ):
        # strip the leading "v" from the tag
        return props["tag"][1:] + "/"
    if basever != "main":
        return basever + "-SNAPSHOT/"
    return ""
614 |
615 |
@util.renderer
def GetConfigSeed(props):
    """Renderer: the branch's config_seed snippet, or "" when unset."""
    branch = props.getProperty("branch")
    if not branch:
        return ""
    return branches[branch].get("config_seed") or ""
620 |
621 |
@util.renderer
def GetRsyncParams(props, srcorbin, urlorkey):
    """Renderer: look up the branch's "<srcorbin>_<urlorkey>" rsync setting.

    srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'. Falsy when the
    branch or the setting is unset.
    """
    branch = props.getProperty("branch")
    if not branch:
        return branch
    return branches[branch].get("%s_%s" % (srcorbin, urlorkey))
628 |
629 |
@util.renderer
def GetUsignKey(props):
    """Renderer: the branch's base64-encoded usign secret key, falsy when unset."""
    branch = props.getProperty("branch")
    if not branch:
        return branch
    return branches[branch].get("usign_key")
634 |
635 |
def GetNextBuild(builder, requests):
    """nextBuild hook: prefer tagged (release) build requests over others.

    Falls back to the oldest pending request when no tagged one exists.
    """
    for request in requests:
        if request.properties and request.properties.hasProperty("tag"):
            return request
    # no tagged request pending; take the first (oldest) one
    return requests[0]
646 |
647 |
def MakeEnv(overrides=None, tryccache=False):
    """Assemble the environment dict for make/feeds build steps.

    CCC/CCXX always carry the detected host compilers (defaulting to
    gcc/g++). With tryccache=True, CC/CXX point at the ccache wrapper
    scripts in the build directory and CCACHE at the detected ccache
    binary; otherwise CC/CXX alias CCC/CCXX and CCACHE is empty.
    Entries in `overrides` win over everything.
    """
    base = {
        "CCC": Interpolate("%(prop:cc_command:-gcc)s"),
        "CCXX": Interpolate("%(prop:cxx_command:-g++)s"),
    }
    if tryccache:
        compilers = {
            "CC": Interpolate("%(prop:builddir)s/ccache_cc.sh"),
            "CXX": Interpolate("%(prop:builddir)s/ccache_cxx.sh"),
            "CCACHE": Interpolate("%(prop:ccache_command:-)s"),
        }
    else:
        compilers = {"CC": base["CCC"], "CXX": base["CCXX"], "CCACHE": ""}
    env = dict(base)
    env.update(compilers)
    if overrides is not None:
        env.update(overrides)
    return env
664 |
665 |
@properties.renderer
def NetLockDl(props, extralock=None):
    """Renderer: exclusive download-lock access list for this worker.

    Returns [] when the worker defines no `dl_lock` property, so the step
    runs unlocked. `extralock` is accepted for call compatibility but is
    currently unused.
    """
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
        if lock is not None:
            return [lock.access("exclusive")]
    return []
675 |
676 |
@properties.renderer
def NetLockUl(props):
    """Renderer: exclusive upload-lock access list for this worker.

    Returns [] when the worker defines no `ul_lock` property, so the step
    runs unlocked.
    """
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
        if lock is not None:
            return [lock.access("exclusive")]
    return []
686 |
687 |
def IsTargetSelected(target):
    """Return a doStepIf predicate that passes when `target` (or "all") is the selected target."""

    def CheckTargetProperty(step):
        chosen = step.getProperty("target", "all")
        return chosen in ("all", target)

    return CheckTargetProperty
696 |
697 |
@util.renderer
def UsignSec2Pub(props):
    """Renderer: derive the usign *public* key file from the branch's secret key.

    Returns the two-line public key text ("untrusted comment: ...\n<base64>"),
    or None when the branch has no usable usign_key configured.
    """
    branch = props.getProperty("branch")
    try:
        comment = (
            branches[branch].get("usign_comment") or "untrusted comment: secret key"
        )
        seckey = branches[branch].get("usign_key")
        seckey = base64.b64decode(seckey)
    except Exception:
        # missing branch, missing key or malformed base64 — disable signing
        return None

    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        # b64encode() returns bytes; decode so the formatted line is the
        # plain base64 text rather than the repr "b'...'".
        # The slices presumably pick the algorithm tag, key id and public
        # part out of the secret blob (cf. scripts/sec2pubkey.pl) — TODO confirm.
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode("ascii"),
    )
714 |
715 |
def canStartBuild(builder, wfb, request):
    """Filter out non-tag build requests for workers marked tag_only.

    Workers whose `tag_only` property is set only run tagged (release)
    builds; every other worker accepts any request.
    """
    tag_only = wfb.worker.properties.getProperty("tag_only")
    tag = request.properties.getProperty("tag")
    return bool(tag) or not tag_only
725 |
726 |
c["builders"] = []

# collect the names of all real (remote) workers configured above
workerNames = [worker.workername for worker in c["workers"]]

# add a single LocalWorker to handle the forcebuild builder
c["workers"].append(LocalWorker("__local_force_build", max_builds=1))
736 |
# Factory for the virtual 00_force_build builder: its single step fires the
# "trigger" Triggerable scheduler, forwarding the forced source stamp and
# the user-supplied force parameters to the real target builders.
force_factory = BuildFactory()
force_factory.addStep(
    steps.Trigger(
        name="trigger_build",
        schedulerNames=["trigger"],
        sourceStamps=[
            {
                "codebase": "",
                # force_branch/force_revision are presumably set by the
                # force dialog — TODO confirm against the scheduler config
                "branch": Property("force_branch"),
                "revision": Property("force_revision"),
                "repository": repo_url,
                "project": "",
            }
        ],
        # propagate the force parameters to the triggered builds
        set_properties={
            "reason": Property("reason"),
            "tag": Property("tag"),
            "target": Property("target"),
        },
    )
)
758 |
# virtual builder backing the ForceScheduler; it only runs on the local worker
c["builders"].append(
    BuilderConfig(
        name="00_force_build", workername="__local_force_build", factory=force_factory
    )
)
764 |
765 |
766 | # CUSTOM CLASS
767 |
# Extension of ShellCommand that additionally records the outcome of the
# command in a build property:
# - True: the command succeeded
# - False: the command failed
class ShellCommandAndSetProperty(buildstep.ShellMixin, buildstep.BuildStep):
    """Shell-command step that stores its success/failure in a build property.

    After the remote command finishes, the property named by `property` is
    set to True when the command succeeded and False when it failed; the
    step itself still reports the command's own result.
    """

    name = "shellandsetproperty"
    # 'property' may itself be a renderable (e.g. Interpolate), so let
    # buildbot render it before run() uses it
    renderables = ['property']

    def __init__(
        self,
        property=None,
        **kwargs,
    ):
        # split the ShellMixin kwargs (command, env, workdir, ...) from
        # the generic BuildStep kwargs
        kwargs = self.setupShellMixin(kwargs)

        # name of the build property that will receive the boolean outcome
        self.property = property

        super().__init__(**kwargs)

    @defer.inlineCallbacks
    def run(self):
        """Run the shell command remotely and record its outcome."""
        cmd = yield self.makeRemoteShellCommand()

        yield self.runCommand(cmd)

        # True iff the remote command did not fail
        self.setProperty(self.property, not cmd.didFail(), "ShellCommandAndSetProperty Step")

        return cmd.results()
795 |
796 |
797 | # NB the phase1 build factory assumes workers are single-build only
798 | def prepareFactory(target):
799 | (target, subtarget) = target.split("/")
800 |
801 | factory = BuildFactory()
802 |
803 | # setup shared work directory if required
804 | factory.addStep(
805 | ShellCommand(
806 | name="sharedwd",
807 | descriptionDone="Shared work directory set up",
808 | command='test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
809 | workdir=".",
810 | haltOnFailure=True,
811 | )
812 | )
813 |
814 | # find number of cores
815 | factory.addStep(
816 | SetPropertyFromCommand(
817 | name="nproc",
818 | property="nproc",
819 | description="Finding number of CPUs",
820 | command=["nproc"],
821 | )
822 | )
823 |
824 | # find gcc and g++ compilers
825 | factory.addStep(
826 | FileDownload(
827 | name="dlfindbinpl",
828 | mastersrc=scripts_dir + "/findbin.pl",
829 | workerdest="../findbin.pl",
830 | mode=0o755,
831 | )
832 | )
833 |
834 | factory.addStep(
835 | SetPropertyFromCommand(
836 | name="gcc",
837 | property="cc_command",
838 | description="Finding gcc command",
839 | command=["../findbin.pl", "gcc", "", ""],
840 | haltOnFailure=True,
841 | )
842 | )
843 |
844 | factory.addStep(
845 | SetPropertyFromCommand(
846 | name="g++",
847 | property="cxx_command",
848 | description="Finding g++ command",
849 | command=["../findbin.pl", "g++", "", ""],
850 | haltOnFailure=True,
851 | )
852 | )
853 |
854 | # see if ccache is available
855 | factory.addStep(
856 | SetPropertyFromCommand(
857 | name="ccache",
858 | property="ccache_command",
859 | description="Testing for ccache command",
860 | command=["which", "ccache"],
861 | haltOnFailure=False,
862 | flunkOnFailure=False,
863 | warnOnFailure=False,
864 | hideStepIf=lambda r, s: r == results.FAILURE,
865 | )
866 | )
867 |
868 | # check out the source
869 | # Git() runs:
870 | # if repo doesn't exist: 'git clone repourl'
871 | # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
872 | # git cat-file -e
873 | # git checkout -f
874 | # git checkout -B
875 | # git rev-parse HEAD
876 | factory.addStep(
877 | Git(
878 | name="git",
879 | repourl=repo_url,
880 | mode="full",
881 | method="fresh",
882 | locks=NetLockDl,
883 | haltOnFailure=True,
884 | )
885 | )
886 |
887 | # workaround for https://github.com/openwrt/buildbot/issues/5
888 | factory.addStep(
889 | Git(
890 | name="git me once more please",
891 | repourl=repo_url,
892 | mode="full",
893 | method="fresh",
894 | locks=NetLockDl,
895 | haltOnFailure=True,
896 | )
897 | )
898 |
899 | # update remote refs
900 | factory.addStep(
901 | ShellCommand(
902 | name="fetchrefs",
903 | description="Fetching Git remote refs",
904 | descriptionDone="Git remote refs fetched",
905 | command=[
906 | "git",
907 | "fetch",
908 | "origin",
909 | Interpolate(
910 | "+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s"
911 | ),
912 | ],
913 | haltOnFailure=True,
914 | )
915 | )
916 |
917 | # getver.sh requires local branches to track upstream otherwise version computation fails.
918 | # Git() does not set tracking branches when cloning or switching, so work around this here
919 | factory.addStep(
920 | ShellCommand(
921 | name="trackupstream",
922 | description="Setting upstream branch",
923 | descriptionDone="getver.sh is happy now",
924 | command=["git", "branch", "-u", Interpolate("origin/%(prop:branch)s")],
925 | haltOnFailure=True,
926 | )
927 | )
928 |
929 | # Verify that Git HEAD points to a tag or branch
930 | # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
931 | factory.addStep(
932 | ShellCommand(
933 | name="gitverify",
934 | description="Ensuring that Git HEAD is pointing to a branch or tag",
935 | descriptionDone="Git HEAD is sane",
936 | command=(
937 | "git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || "
938 | "git show-ref --tags --dereference 2>/dev/null | sed -ne "
939 | '"/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."'
940 | ),
941 | haltOnFailure=True,
942 | )
943 | )
944 |
945 | factory.addStep(
946 | StringDownload(
947 | name="ccachecc",
948 | s='#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
949 | workerdest="../ccache_cc.sh",
950 | mode=0o755,
951 | )
952 | )
953 |
954 | factory.addStep(
955 | StringDownload(
956 | name="ccachecxx",
957 | s='#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
958 | workerdest="../ccache_cxx.sh",
959 | mode=0o755,
960 | )
961 | )
962 |
963 | # feed
964 | factory.addStep(
965 | ShellCommand(
966 | name="updatefeeds",
967 | description="Updating feeds",
968 | command=["./scripts/feeds", "update"],
969 | env=MakeEnv(tryccache=True),
970 | haltOnFailure=True,
971 | locks=NetLockDl,
972 | )
973 | )
974 |
975 | # feed
976 | factory.addStep(
977 | ShellCommand(
978 | name="installfeeds",
979 | description="Installing feeds",
980 | command=["./scripts/feeds", "install", "-a"],
981 | env=MakeEnv(tryccache=True),
982 | haltOnFailure=True,
983 | )
984 | )
985 |
986 | # seed config
987 | factory.addStep(
988 | StringDownload(
989 | name="dlconfigseed",
990 | s=Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
991 | workerdest=".config",
992 | mode=0o644,
993 | )
994 | )
995 |
996 | # configure
997 | factory.addStep(
998 | ShellCommand(
999 | name="newconfig",
1000 | descriptionDone=".config seeded",
1001 | command=Interpolate(
1002 | "printf 'CONFIG_TARGET_%(kw:target)s=y\\n"
1003 | "CONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\n"
1004 | "CONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config",
1005 | target=target,
1006 | subtarget=subtarget,
1007 | usign=GetUsignKey,
1008 | ),
1009 | )
1010 | )
1011 |
1012 | factory.addStep(
1013 | ShellCommand(
1014 | name="defconfig",
1015 | description="Populating .config",
1016 | command=["make", "defconfig"],
1017 | env=MakeEnv(),
1018 | )
1019 | )
1020 |
1021 | # check arch - exit early if does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
1022 | factory.addStep(
1023 | ShellCommand(
1024 | name="checkarch",
1025 | description="Checking architecture",
1026 | descriptionDone="Architecture validated",
1027 | command='grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config'
1028 | % (target, subtarget),
1029 | logEnviron=False,
1030 | want_stdout=False,
1031 | want_stderr=False,
1032 | haltOnFailure=True,
1033 | flunkOnFailure=False, # this is not a build FAILURE - TODO mark build as SKIPPED
1034 | )
1035 | )
1036 |
1037 | # find libc suffix
1038 | factory.addStep(
1039 | SetPropertyFromCommand(
1040 | name="libc",
1041 | property="libc",
1042 | description="Finding libc suffix",
1043 | command=[
1044 | "sed",
1045 | "-ne",
1046 | '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }',
1047 | ".config",
1048 | ],
1049 | )
1050 | )
1051 |
1052 | # install build key
1053 | factory.addStep(
1054 | StringDownload(
1055 | name="dlkeybuildpub",
1056 | s=Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
1057 | workerdest="key-build.pub",
1058 | mode=0o600,
1059 | doStepIf=IsUsignEnabled,
1060 | )
1061 | )
1062 |
1063 | factory.addStep(
1064 | StringDownload(
1065 | name="dlkeybuild",
1066 | s="# fake private key",
1067 | workerdest="key-build",
1068 | mode=0o600,
1069 | doStepIf=IsUsignEnabled,
1070 | )
1071 | )
1072 |
1073 | factory.addStep(
1074 | StringDownload(
1075 | name="dlkeybuilducert",
1076 | s="# fake certificate",
1077 | workerdest="key-build.ucert",
1078 | mode=0o600,
1079 | doStepIf=IsUsignEnabled,
1080 | )
1081 | )
1082 |
1083 | # prepare dl
1084 | factory.addStep(
1085 | ShellCommand(
1086 | name="dldir",
1087 | description="Preparing dl/",
1088 | descriptionDone="dl/ prepared",
1089 | command='mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
1090 | workdir=Property("builddir"),
1091 | logEnviron=False,
1092 | want_stdout=False,
1093 | )
1094 | )
1095 |
1096 | # cleanup dl
1097 | factory.addStep(
1098 | ShellCommand(
1099 | name="dlprune",
1100 | description="Pruning dl/",
1101 | descriptionDone="dl/ pruned",
1102 | command="find dl/ -mindepth 1 -atime +15 -delete -print",
1103 | logEnviron=False,
1104 | haltOnFailure=False,
1105 | flunkOnFailure=False,
1106 | warnOnFailure=False,
1107 | )
1108 | )
1109 |
1110 | # prepare tar
1111 | factory.addStep(
1112 | ShellCommand(
1113 | name="dltar",
1114 | description="Building and installing GNU tar",
1115 | descriptionDone="GNU tar built and installed",
1116 | command=[
1117 | "make",
1118 | Interpolate("-j%(prop:nproc:-1)s"),
1119 | "tools/tar/compile",
1120 | "V=s",
1121 | ],
1122 | env=MakeEnv(tryccache=True),
1123 | haltOnFailure=True,
1124 | )
1125 | )
1126 |
1127 | # populate dl
1128 | factory.addStep(
1129 | ShellCommand(
1130 | name="dlrun",
1131 | description="Populating dl/",
1132 | descriptionDone="dl/ populated",
1133 | command=["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
1134 | env=MakeEnv(),
1135 | logEnviron=False,
1136 | locks=NetLockDl,
1137 | )
1138 | )
1139 |
1140 | factory.addStep(
1141 | ShellCommand(
1142 | name="cleanbase",
1143 | description="Cleaning base-files",
1144 | command=["make", "package/base-files/clean", "V=s"],
1145 | )
1146 | )
1147 |
1148 | # build
1149 | factory.addStep(
1150 | ShellCommand(
1151 | name="tools",
1152 | description="Building and installing tools",
1153 | descriptionDone="Tools built and installed",
1154 | command=[
1155 | "make",
1156 | Interpolate("-j%(prop:nproc:-1)s"),
1157 | "tools/install",
1158 | "V=s",
1159 | ],
1160 | env=MakeEnv(tryccache=True),
1161 | haltOnFailure=True,
1162 | )
1163 | )
1164 |
1165 | factory.addStep(
1166 | ShellCommand(
1167 | name="toolchain",
1168 | description="Building and installing toolchain",
1169 | descriptionDone="Toolchain built and installed",
1170 | command=[
1171 | "make",
1172 | Interpolate("-j%(prop:nproc:-1)s"),
1173 | "toolchain/install",
1174 | "V=s",
1175 | ],
1176 | env=MakeEnv(),
1177 | haltOnFailure=True,
1178 | )
1179 | )
1180 |
1181 | factory.addStep(
1182 | ShellCommand(
1183 | name="kmods",
1184 | description="Building kmods",
1185 | descriptionDone="Kmods built",
1186 | command=[
1187 | "make",
1188 | Interpolate("-j%(prop:nproc:-1)s"),
1189 | "target/compile",
1190 | "V=s",
1191 | "IGNORE_ERRORS=n m",
1192 | "BUILD_LOG=1",
1193 | ],
1194 | env=MakeEnv(),
1195 | haltOnFailure=True,
1196 | )
1197 | )
1198 |
1199 | # find kernel version
1200 | factory.addStep(
1201 | SetPropertyFromCommand(
1202 | name="kernelversion",
1203 | property="kernelversion",
1204 | description="Finding the effective Kernel version",
1205 | command=(
1206 | "make --no-print-directory -C target/linux/ "
1207 | "val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | "
1208 | "xargs printf '%s-%s-%s\\n'"
1209 | ),
1210 | env={"TOPDIR": Interpolate("%(prop:builddir)s/build")},
1211 | )
1212 | )
1213 |
1214 | factory.addStep(
1215 | ShellCommand(
1216 | name="pkgclean",
1217 | description="Cleaning up package build",
1218 | descriptionDone="Package build cleaned up",
1219 | command=["make", "package/cleanup", "V=s"],
1220 | )
1221 | )
1222 |
1223 | factory.addStep(
1224 | ShellCommand(
1225 | name="pkgbuild",
1226 | description="Building packages",
1227 | descriptionDone="Packages built",
1228 | command=[
1229 | "make",
1230 | Interpolate("-j%(prop:nproc:-1)s"),
1231 | "package/compile",
1232 | "V=s",
1233 | "IGNORE_ERRORS=n m",
1234 | "BUILD_LOG=1",
1235 | ],
1236 | env=MakeEnv(),
1237 | haltOnFailure=True,
1238 | )
1239 | )
1240 |
1241 | factory.addStep(
1242 | ShellCommand(
1243 | name="pkginstall",
1244 | description="Installing packages",
1245 | descriptionDone="Packages installed",
1246 | command=[
1247 | "make",
1248 | Interpolate("-j%(prop:nproc:-1)s"),
1249 | "package/install",
1250 | "V=s",
1251 | ],
1252 | env=MakeEnv(),
1253 | haltOnFailure=True,
1254 | )
1255 | )
1256 |
1257 | factory.addStep(
1258 | ShellCommand(
1259 | name="images",
1260 | description="Building and installing images",
1261 | descriptionDone="Images built and installed",
1262 | command=[
1263 | "make",
1264 | Interpolate("-j%(prop:nproc:-1)s"),
1265 | "target/install",
1266 | "V=s",
1267 | ],
1268 | env=MakeEnv(),
1269 | haltOnFailure=True,
1270 | )
1271 | )
1272 |
1273 | factory.addStep(
1274 | ShellCommand(
1275 | name="buildinfo",
1276 | description="Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1277 | command="make -j1 buildinfo V=s || true",
1278 | env=MakeEnv(),
1279 | haltOnFailure=True,
1280 | )
1281 | )
1282 |
1283 | factory.addStep(
1284 | ShellCommand(
1285 | name="json_overview_image_info",
1286 | description="Generating profiles.json in target folder",
1287 | command="make -j1 json_overview_image_info V=s || true",
1288 | env=MakeEnv(),
1289 | haltOnFailure=True,
1290 | )
1291 | )
1292 |
1293 | factory.addStep(
1294 | ShellCommand(
1295 | name="kmoddir",
1296 | descriptionDone="Kmod directory created",
1297 | command=[
1298 | "mkdir",
1299 | "-p",
1300 | Interpolate(
1301 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s",
1302 | target=target,
1303 | subtarget=subtarget,
1304 | ),
1305 | ],
1306 | haltOnFailure=True,
1307 | doStepIf=IsKmodArchiveEnabled,
1308 | )
1309 | )
1310 |
1311 | factory.addStep(
1312 | ShellCommand(
1313 | name="kmodprepare",
1314 | description="Preparing kmod archive",
1315 | descriptionDone="Kmod archive prepared",
1316 | command=[
1317 | "rsync",
1318 | "--remove-source-files",
1319 | "--include=/kmod-*.ipk",
1320 | "--include=/kmod-*.apk",
1321 | "--exclude=*",
1322 | "-va",
1323 | Interpolate(
1324 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/",
1325 | target=target,
1326 | subtarget=subtarget,
1327 | ),
1328 | Interpolate(
1329 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1330 | target=target,
1331 | subtarget=subtarget,
1332 | ),
1333 | ],
1334 | haltOnFailure=True,
1335 | doStepIf=IsKmodArchiveEnabled,
1336 | )
1337 | )
1338 |
1339 | factory.addStep(
1340 | ShellCommand(
1341 | name="pkgindex",
1342 | description="Indexing packages",
1343 | descriptionDone="Packages indexed",
1344 | command=[
1345 | "make",
1346 | Interpolate("-j%(prop:nproc:-1)s"),
1347 | "package/index",
1348 | "V=s",
1349 | "CONFIG_SIGNED_PACKAGES=",
1350 | ],
1351 | env=MakeEnv(),
1352 | haltOnFailure=True,
1353 | )
1354 | )
1355 |
1356 | factory.addStep(
1357 | ShellCommand(
1358 | name="kmodindex",
1359 | description="Indexing kmod archive",
1360 | descriptionDone="Kmod archive indexed",
1361 | command=[
1362 | "make",
1363 | Interpolate("-j%(prop:nproc:-1)s"),
1364 | "package/index",
1365 | "V=s",
1366 | "CONFIG_SIGNED_PACKAGES=",
1367 | Interpolate(
1368 | "PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1369 | target=target,
1370 | subtarget=subtarget,
1371 | ),
1372 | ],
1373 | env=MakeEnv(),
1374 | haltOnFailure=True,
1375 | doStepIf=IsKmodArchiveEnabled,
1376 | )
1377 | )
1378 |
1379 | factory.addStep(
1380 | ShellCommand(
1381 | name="checksums",
1382 | description="Calculating checksums",
1383 | descriptionDone="Checksums calculated",
1384 | command=["make", "-j1", "checksum", "V=s"],
1385 | env=MakeEnv(),
1386 | haltOnFailure=True,
1387 | )
1388 | )
1389 |
1390 | # download remote sha256sums to 'target-sha256sums'
1391 | factory.addStep(
1392 | ShellCommandAndSetProperty(
1393 | name="target-sha256sums",
1394 | description="Fetching remote sha256sums for target",
1395 | descriptionDone="Remote sha256sums for target fetched",
1396 | command=["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")]
1397 | + rsync_defopts
1398 | + [
1399 | Interpolate(
1400 | "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums",
1401 | url=GetRsyncParams.withArgs("bin", "url"),
1402 | target=target,
1403 | subtarget=subtarget,
1404 | prefix=GetVersionPrefix,
1405 | ),
1406 | "target-sha256sums",
1407 | ],
1408 | env={
1409 | "RSYNC_PASSWORD": Interpolate(
1410 | "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1411 | )
1412 | },
1413 | property="have_remote_shasums",
1414 | logEnviron=False,
1415 | haltOnFailure=False,
1416 | flunkOnFailure=False,
1417 | warnOnFailure=False,
1418 | doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1419 | )
1420 | )
1421 |
1422 | factory.addStep(
1423 | ShellCommand(
1424 | name="target-sha256sums_kmodsparse",
1425 | description="Extract kmods from remote sha256sums",
1426 | descriptionDone="Kmods extracted",
1427 | command="sed \"/ \\*kmods\\//! d\" target-sha256sums | tee target-sha256sums-kmods",
1428 | haltOnFailure=False,
1429 | doStepIf=IsRemoteShaSumsAvailable,
1430 | )
1431 | )
1432 |
1433 | factory.addStep(
1434 | ShellCommand(
1435 | name="mergesha256sum",
1436 | description="Merge sha256sums kmods with sha256sums",
1437 | descriptionDone="Sha256sums merged",
1438 | command=[
1439 | "sort",
1440 | "-t", " ",
1441 | "-k", 2,
1442 | "-u",
1443 | Interpolate(
1444 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums",
1445 | target=target,
1446 | subtarget=subtarget,
1447 | ),
1448 | "target-sha256sums-kmods",
1449 | "-o",
1450 | Interpolate(
1451 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums",
1452 | target=target,
1453 | subtarget=subtarget,
1454 | ),
1455 | ],
1456 | env={"LC_ALL": "C"},
1457 | haltOnFailure=False,
1458 | doStepIf=IsRemoteShaSumsAvailable,
1459 | )
1460 | )
1461 |
1462 | # sign
1463 | factory.addStep(
1464 | MasterShellCommand(
1465 | name="signprepare",
1466 | descriptionDone="Temporary signing directory prepared",
1467 | command=["mkdir", "-p", "%s/signing" % (work_dir)],
1468 | haltOnFailure=True,
1469 | doStepIf=IsSignEnabled,
1470 | )
1471 | )
1472 |
1473 | factory.addStep(
1474 | ShellCommand(
1475 | name="signpack",
1476 | description="Packing files to sign",
1477 | descriptionDone="Files to sign packed",
1478 | command=Interpolate(
1479 | "find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ "
1480 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ "
1481 | "-mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or "
1482 | "-name Packages -print0 -or -name packages.adb -print0 "
1483 | "| xargs -0 tar -czf sign.tar.gz",
1484 | target=target,
1485 | subtarget=subtarget,
1486 | ),
1487 | haltOnFailure=True,
1488 | doStepIf=IsSignEnabled,
1489 | )
1490 | )
1491 |
1492 | factory.addStep(
1493 | FileUpload(
1494 | workersrc="sign.tar.gz",
1495 | masterdest="%s/signing/%s.%s.tar.gz" % (work_dir, target, subtarget),
1496 | haltOnFailure=True,
1497 | doStepIf=IsSignEnabled,
1498 | )
1499 | )
1500 |
1501 | factory.addStep(
1502 | MasterShellCommand(
1503 | name="signfiles",
1504 | description="Signing files",
1505 | descriptionDone="Files signed",
1506 | command=[
1507 | "%s/signall.sh" % (scripts_dir),
1508 | "%s/signing/%s.%s.tar.gz" % (work_dir, target, subtarget),
1509 | Interpolate("%(prop:branch)s"),
1510 | ],
1511 | env={"CONFIG_INI": os.getenv("BUILDMASTER_CONFIG", "./config.ini")},
1512 | haltOnFailure=True,
1513 | doStepIf=IsSignEnabled,
1514 | )
1515 | )
1516 |
1517 | factory.addStep(
1518 | FileDownload(
1519 | name="dlsigntargz",
1520 | mastersrc="%s/signing/%s.%s.tar.gz" % (work_dir, target, subtarget),
1521 | workerdest="sign.tar.gz",
1522 | haltOnFailure=True,
1523 | doStepIf=IsSignEnabled,
1524 | )
1525 | )
1526 |
1527 | factory.addStep(
1528 | ShellCommand(
1529 | name="signunpack",
1530 | description="Unpacking signed files",
1531 | descriptionDone="Signed files unpacked",
1532 | command=["tar", "-xzf", "sign.tar.gz"],
1533 | haltOnFailure=True,
1534 | doStepIf=IsSignEnabled,
1535 | )
1536 | )
1537 |
1538 | # upload
1539 | factory.addStep(
1540 | ShellCommand(
1541 | name="dirprepare",
1542 | descriptionDone="Upload directory structure prepared",
1543 | command=[
1544 | "mkdir",
1545 | "-p",
1546 | Interpolate(
1547 | "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s",
1548 | target=target,
1549 | subtarget=subtarget,
1550 | prefix=GetVersionPrefix,
1551 | ),
1552 | ],
1553 | haltOnFailure=True,
1554 | )
1555 | )
1556 |
1557 | factory.addStep(
1558 | ShellCommand(
1559 | name="linkprepare",
1560 | descriptionDone="Repository symlink prepared",
1561 | command=[
1562 | "ln",
1563 | "-s",
1564 | "-f",
1565 | Interpolate(
1566 | "../packages-%(kw:basever)s",
1567 | basever=util.Transform(GetBaseVersion, Property("branch")),
1568 | ),
1569 | Interpolate(
1570 | "tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix
1571 | ),
1572 | ],
1573 | doStepIf=IsNoMasterBuild,
1574 | haltOnFailure=True,
1575 | )
1576 | )
1577 |
1578 | factory.addStep(
1579 | ShellCommand(
1580 | name="kmoddirprepare",
1581 | descriptionDone="Kmod archive upload directory prepared",
1582 | command=[
1583 | "mkdir",
1584 | "-p",
1585 | Interpolate(
1586 | "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s",
1587 | target=target,
1588 | subtarget=subtarget,
1589 | prefix=GetVersionPrefix,
1590 | ),
1591 | ],
1592 | haltOnFailure=True,
1593 | doStepIf=IsKmodArchiveEnabled,
1594 | )
1595 | )
1596 |
1597 | factory.addStep(
1598 | ShellCommand(
1599 | name="dirupload",
1600 | description="Uploading directory structure",
1601 | descriptionDone="Directory structure uploaded",
1602 | command=["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")]
1603 | + rsync_defopts
1604 | + [
1605 | "tmp/upload/",
1606 | Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url")),
1607 | ],
1608 | env={
1609 | "RSYNC_PASSWORD": Interpolate(
1610 | "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1611 | )
1612 | },
1613 | haltOnFailure=True,
1614 | logEnviron=False,
1615 | locks=NetLockUl,
1616 | doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1617 | )
1618 | )
1619 |
1620 | # build list of files to upload
1621 | factory.addStep(
1622 | FileDownload(
1623 | name="dlsha2rsyncpl",
1624 | mastersrc=scripts_dir + "/sha2rsync.pl",
1625 | workerdest="../sha2rsync.pl",
1626 | mode=0o755,
1627 | )
1628 | )
1629 |
1630 | factory.addStep(
1631 | ShellCommand(
1632 | name="buildlist",
1633 | description="Building list of files to upload",
1634 | descriptionDone="List of files to upload built",
1635 | command=[
1636 | "../sha2rsync.pl",
1637 | "target-sha256sums",
1638 | Interpolate(
1639 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums",
1640 | target=target,
1641 | subtarget=subtarget,
1642 | ),
1643 | "rsynclist",
1644 | ],
1645 | haltOnFailure=True,
1646 | )
1647 | )
1648 |
1649 | factory.addStep(
1650 | FileDownload(
1651 | name="dlrsync.sh",
1652 | mastersrc=scripts_dir + "/rsync.sh",
1653 | workerdest="../rsync.sh",
1654 | mode=0o755,
1655 | )
1656 | )
1657 |
1658 | # upload new files and update existing ones
1659 | factory.addStep(
1660 | ShellCommand(
1661 | name="targetupload",
1662 | description="Uploading target files",
1663 | descriptionDone="Target files uploaded",
1664 | command=[
1665 | "../rsync.sh",
1666 | "--exclude=/kmods/",
1667 | "--exclude=/kmods/**",
1668 | "--files-from=rsynclist",
1669 | "--delay-updates",
1670 | "--partial-dir=.~tmp~%s~%s" % (target, subtarget),
1671 | ]
1672 | + rsync_defopts
1673 | + [
1674 | Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1675 | Interpolate(
1676 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/",
1677 | target=target,
1678 | subtarget=subtarget,
1679 | ),
1680 | Interpolate(
1681 | "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/",
1682 | url=GetRsyncParams.withArgs("bin", "url"),
1683 | target=target,
1684 | subtarget=subtarget,
1685 | prefix=GetVersionPrefix,
1686 | ),
1687 | ],
1688 | env={
1689 | "RSYNC_PASSWORD": Interpolate(
1690 | "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1691 | )
1692 | },
1693 | haltOnFailure=True,
1694 | logEnviron=False,
1695 | doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1696 | )
1697 | )
1698 |
1699 | # delete files which don't exist locally
1700 | factory.addStep(
1701 | ShellCommand(
1702 | name="targetprune",
1703 | description="Pruning target files",
1704 | descriptionDone="Target files pruned",
1705 | command=[
1706 | "../rsync.sh",
1707 | "--exclude=/kmods/",
1708 | "--delete",
1709 | "--existing",
1710 | "--ignore-existing",
1711 | "--delay-updates",
1712 | "--partial-dir=.~tmp~%s~%s" % (target, subtarget),
1713 | ]
1714 | + rsync_defopts
1715 | + [
1716 | Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1717 | Interpolate(
1718 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/",
1719 | target=target,
1720 | subtarget=subtarget,
1721 | ),
1722 | Interpolate(
1723 | "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/",
1724 | url=GetRsyncParams.withArgs("bin", "url"),
1725 | target=target,
1726 | subtarget=subtarget,
1727 | prefix=GetVersionPrefix,
1728 | ),
1729 | ],
1730 | env={
1731 | "RSYNC_PASSWORD": Interpolate(
1732 | "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1733 | )
1734 | },
1735 | haltOnFailure=True,
1736 | logEnviron=False,
1737 | locks=NetLockUl,
1738 | doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1739 | )
1740 | )
1741 |
1742 | factory.addStep(
1743 | ShellCommand(
1744 | name="kmodupload",
1745 | description="Uploading kmod archive",
1746 | descriptionDone="Kmod archive uploaded",
1747 | command=[
1748 | "../rsync.sh",
1749 | "--delete",
1750 | "--delay-updates",
1751 | "--partial-dir=.~tmp~%s~%s" % (target, subtarget),
1752 | ]
1753 | + rsync_defopts
1754 | + [
1755 | Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1756 | Interpolate(
1757 | "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1758 | target=target,
1759 | subtarget=subtarget,
1760 | ),
1761 | Interpolate(
1762 | "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/",
1763 | url=GetRsyncParams.withArgs("bin", "url"),
1764 | target=target,
1765 | subtarget=subtarget,
1766 | prefix=GetVersionPrefix,
1767 | ),
1768 | ],
1769 | env={
1770 | "RSYNC_PASSWORD": Interpolate(
1771 | "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1772 | )
1773 | },
1774 | haltOnFailure=True,
1775 | logEnviron=False,
1776 | locks=NetLockUl,
1777 | doStepIf=IsKmodArchiveAndRsyncEnabled,
1778 | )
1779 | )
1780 |
1781 | factory.addStep(
1782 | ShellCommand(
1783 | name="sourcelist",
1784 | description="Finding source archives to upload",
1785 | descriptionDone="Source archives to upload found",
1786 | command=(
1787 | "find dl/ -maxdepth 1 -type f -not -size 0 "
1788 | "-not -name '.*' -not -name '*.hash' -not -name "
1789 | "'*.dl' -newer .config -printf '%f\\n' > sourcelist"
1790 | ),
1791 | haltOnFailure=True,
1792 | )
1793 | )
1794 |
1795 | factory.addStep(
1796 | ShellCommand(
1797 | name="sourceupload",
1798 | description="Uploading source archives",
1799 | descriptionDone="Source archives uploaded",
1800 | command=[
1801 | "../rsync.sh",
1802 | "--files-from=sourcelist",
1803 | "--size-only",
1804 | "--delay-updates",
1805 | ]
1806 | + rsync_defopts
1807 | + [
1808 | Interpolate(
1809 | "--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s",
1810 | target=target,
1811 | subtarget=subtarget,
1812 | ),
1813 | Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1814 | "dl/",
1815 | Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url")),
1816 | ],
1817 | env={
1818 | "RSYNC_PASSWORD": Interpolate(
1819 | "%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")
1820 | )
1821 | },
1822 | haltOnFailure=True,
1823 | logEnviron=False,
1824 | locks=NetLockUl,
1825 | doStepIf=util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1826 | )
1827 | )
1828 |
1829 | factory.addStep(
1830 | ShellCommand(
1831 | name="df",
1832 | description="Reporting disk usage",
1833 | command=["df", "-h", "."],
1834 | env={"LC_ALL": "C"},
1835 | logEnviron=False,
1836 | haltOnFailure=False,
1837 | flunkOnFailure=False,
1838 | warnOnFailure=False,
1839 | alwaysRun=True,
1840 | )
1841 | )
1842 |
1843 | factory.addStep(
1844 | ShellCommand(
1845 | name="du",
1846 | description="Reporting estimated file space usage",
1847 | command=["du", "-sh", "."],
1848 | env={"LC_ALL": "C"},
1849 | logEnviron=False,
1850 | haltOnFailure=False,
1851 | flunkOnFailure=False,
1852 | warnOnFailure=False,
1853 | alwaysRun=True,
1854 | )
1855 | )
1856 |
1857 | factory.addStep(
1858 | ShellCommand(
1859 | name="ccachestat",
1860 | description="Reporting ccache stats",
1861 | command=["ccache", "-s"],
1862 | logEnviron=False,
1863 | want_stderr=False,
1864 | haltOnFailure=False,
1865 | flunkOnFailure=False,
1866 | warnOnFailure=False,
1867 | doStepIf=util.Transform(bool, Property("ccache_command")),
1868 | )
1869 | )
1870 |
1871 | return factory
1872 |
1873 |
# Register one builder per (branch, target) pair. All builders share the
# same worker pool; build ordering is delegated to GetNextBuild and
# admission control to canStartBuild (both defined earlier in this file).
for brname in branchNames:
    for target in targets[brname]:
        bldrname = brname + "_" + target
        c["builders"].append(
            BuilderConfig(
                name=bldrname,
                workernames=workerNames,
                factory=prepareFactory(target),
                # Tag by branch so the web UI can filter builders per branch.
                tags=[
                    brname,
                ],
                nextBuild=GetNextBuild,
                canStartBuild=canStartBuild,
            )
        )
1889 |
1890 |
# STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# Web UI: only enabled when the config provides a bind address.
if "status_bind" in inip1:
    c["www"] = {
        "port": inip1.get("status_bind"),
        "plugins": {"waterfall_view": True, "console_view": True, "grid_view": True},
    }

    # Optional basic auth; the configured user gets the "admins" role and
    # thereby access to all control endpoints (force builds, stop, etc.).
    if "status_user" in inip1 and "status_password" in inip1:
        c["www"]["auth"] = util.UserPasswordAuth(
            [(inip1.get("status_user"), inip1.get("status_password"))]
        )
        c["www"]["authz"] = util.Authz(
            allowRules=[util.AnyControlEndpointMatcher(role="admins")],
            roleMatchers=[
                util.RolesFromUsername(
                    roles=["admins"], usernames=[inip1.get("status_user")]
                )
            ],
        )

c["services"] = []
# IRC notifications: only configured when host, nickname and channel are all
# present in the [irc] section of the config file.
if ini.has_section("irc"):
    iniirc = ini["irc"]
    irc_host = iniirc.get("host", None)
    irc_port = iniirc.getint("port", 6667)
    irc_chan = iniirc.get("channel", None)
    irc_nick = iniirc.get("nickname", None)
    irc_pass = iniirc.get("password", None)

    if irc_host and irc_nick and irc_chan:
        irc = reporters.IRC(
            irc_host,
            irc_nick,
            port=irc_port,
            password=irc_pass,
            channels=[irc_chan],
            notify_events=["exception", "problem", "recovery"],
        )

        c["services"].append(irc)

# Turn openwrt git URLs in change records into clickable gitweb commit links.
c["revlink"] = util.RevlinkMatch(
    [r"https://git.openwrt.org/openwrt/(.*).git"],
    r"https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s",
)

# DB URL

c["db"] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    "db_url": "sqlite:///state.sqlite",
}

# Opt out of sending anonymized usage data to the buildbot project.
c["buildbotNetUsageData"] = None
1951 |
--------------------------------------------------------------------------------
/phase2/buildbot.tac:
--------------------------------------------------------------------------------
# Twisted application configuration (.tac) for the phase2 buildmaster.
# Loaded by twistd; sets up log rotation and starts the BuildMaster service.
import os

from twisted.application import service
from buildbot.master import BuildMaster

basedir = '.'
rotateLength = 10000000
maxRotatedFiles = 10
configfile = 'master.cfg'

# Default umask for server
umask = None

# if this is a relocatable tac file, get the directory containing the TAC
# (note: 'os' is already imported at the top; the duplicate import that used
# to live here was redundant)
if basedir == '.':
    basedir = os.path.abspath(os.path.dirname(__file__))

# note: this line is matched against to check that this is a buildmaster
# directory; do not edit it.
application = service.Application('buildmaster')
from twisted.python.logfile import LogFile
from twisted.python.log import ILogObserver, FileLogObserver
# Rotate twistd.log in-process instead of relying on external logrotate.
logfile = LogFile.fromFullPath(os.path.join(basedir, "twistd.log"), rotateLength=rotateLength,
                               maxRotatedFiles=maxRotatedFiles)
application.setComponent(ILogObserver, FileLogObserver(logfile).emit)

m = BuildMaster(basedir, configfile, umask)
m.setServiceParent(application)
m.log_rotation.rotateLength = rotateLength
m.log_rotation.maxRotatedFiles = maxRotatedFiles
32 |
--------------------------------------------------------------------------------
/phase2/config.ini.example:
--------------------------------------------------------------------------------
1 | [general]
2 | title = OpenWrt Project
3 | title_url = http://openwrt.org/
4 | workdir = /buildbot
5 |
6 | [phase2]
7 | buildbot_url = http://phase2.builds.openwrt.org/
8 | status_bind = tcp:8011:interface=127.0.0.1
9 | status_user = example
10 | status_password = example
11 | port = 9990
12 | persistent = false
13 | git_ssh = true
14 | git_ssh_key = -----BEGIN RSA PRIVATE KEY-----
15 | MIIEpAIBAAKCAQEAuCJwo6OmrRDxcGfsMgBhq0vdzp2ZIdqnedFH8u6tVYLt9WDU
16 | ...
17 | mHzkh8Uv4OAWTjiLGycbXa0/31hu9PCeNzYmjjrp8tcGjsiJJFxydgS+wc0i2UPV
18 | nSI+JbmAAF9vw6gj2i+Hqx7UloRd0tEv/leX354T5lO06LMiNhvN9g==
19 | -----END RSA PRIVATE KEY-----
20 |
21 | [repo]
22 | url = https://git.openwrt.org/openwrt/openwrt.git
23 | branch = main
24 |
25 | [rsync]
26 | binary_url = user@example.org::upload-packages
27 | binary_password = example
28 | source_url = user@example.org::upload-sources
29 | source_password = example2
30 | sdk_url = user@example.org::download-binary
31 | sdk_password = example3
32 | sdk_pattern = openwrt-sdk-*.tar.*
33 |
34 | [gpg]
35 | key = -----BEGIN PGP PRIVATE KEY BLOCK-----
36 | Version: GnuPG v2
37 |
38 | mQGNBFX4kxkBDACcTUVUl6zbn4r9tDap0/aCpcK9MO+HPatS7p2aBGY51kh78Ixr
39 | ...
40 | HwHLaFTMvYFY7WJDwA==
41 | -----END PGP PRIVATE KEY BLOCK-----
42 | passphrase = secret password
43 | comment = Unattended build signature
44 |
45 | [usign]
46 | key = RWRCSwAAA...OihABfuLvGRVfVaJ6wLf0=
47 | comment = Unattended build signature
48 |
49 | [apk]
50 | key = -----BEGIN EC PRIVATE KEY-----
51 | MHcCAQEEIIP54p1G0UgCleLObh07Gxq0S0Iz22OQpkUj8S1AzXB9oAoGCCqGSM49
52 | ...
53 | -----END EC PRIVATE KEY-----
54 |
55 | [worker 1]
56 | phase = 2
57 | name = worker-example-1
58 | password = example
59 | builds = 1
60 |
61 | [worker 2]
62 | phase = 2
63 | name = worker-example-2
64 | password = example2
65 | builds = 3
66 |
--------------------------------------------------------------------------------
/phase2/master.cfg:
--------------------------------------------------------------------------------
1 | # -*- python -*-
2 | # ex: set syntax=python:
3 |
4 | import os
5 | import re
6 | import sys
7 | import base64
8 | import subprocess
9 | import configparser
10 |
11 | from dateutil.tz import tzutc
12 | from datetime import datetime, timedelta
13 |
14 | from twisted.internet import defer
15 | from twisted.python import log
16 |
17 | from buildbot import locks
18 | from buildbot.data import resultspec
19 | from buildbot.changes import filter
20 | from buildbot.changes.gitpoller import GitPoller
21 | from buildbot.config import BuilderConfig
22 | from buildbot.plugins import schedulers
23 | from buildbot.plugins import steps
24 | from buildbot.plugins import util
25 | from buildbot.process import results
26 | from buildbot.process.factory import BuildFactory
27 | from buildbot.process.properties import Property
28 | from buildbot.process.properties import Interpolate
29 | from buildbot.process import properties
30 | from buildbot.schedulers.basic import SingleBranchScheduler
31 | from buildbot.schedulers.forcesched import ForceScheduler
32 | from buildbot.steps.master import MasterShellCommand
33 | from buildbot.steps.shell import SetPropertyFromCommand
34 | from buildbot.steps.shell import ShellCommand
35 | from buildbot.steps.transfer import FileDownload
36 | from buildbot.steps.transfer import FileUpload
37 | from buildbot.steps.transfer import StringDownload
38 | from buildbot.worker import Worker
39 |
40 |
# Record our PID (if not already present) so helper scripts can find the
# running master process.
if not os.path.exists("twistd.pid"):
	with open("twistd.pid", "w") as pidfile:
		pidfile.write("{}".format(os.getpid()))

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9990
persistent = False

if ini.has_option("phase2", "port"):
	worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
	persistent = ini.getboolean("phase2", "persistent")

c['workers'] = []

# Pick up every "[worker N]" section with phase = 2; workers default to a
# shared work directory unless the section says otherwise.
for section in ini.sections():
	if section.startswith("worker "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			sl_props = { 'shared_wd': True }

			if ini.has_option(section, "shared_wd"):
				sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")

			c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))

# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
	logHorizon=timedelta(days=3),
	hour=6,
)]

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync endpoints/credentials for uploading binaries, sources and for
# downloading the SDK used to build packages.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.*"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

rsync_defopts = ["-4", "-v", "--timeout=120"]

repo_url = ini.get("repo", "url")
repo_branch = "main"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")


# find arches
# Each entry of 'arches' is the whitespace-split output line of
# dump-target-info.pl; at[0] is the architecture name.
arches = [ ]
archnames = [ ]

# Keep a local clone of the source tree up to date; it is only used to
# enumerate architectures and feeds, not to build.
if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
	line = findarches.stdout.readline()
	if not line:
		break
	at = line.decode().strip().split()
	arches.append(at)
	archnames.append(at[0])


# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []
174 |
def parse_feed_entry(line):
	# Register one feeds.conf entry ("src-git[-full] <name> <url>[;branch]")
	# in the global feed tables and attach a GitPoller change source for it.
	# Non-git entries are ignored.
	fields = line.strip().split()
	if not fields[0].startswith("src-git"):
		return
	feeds.append(fields)
	spec = fields[2].strip().split(';')
	if len(spec) > 1:
		watched = [spec[1]]
	else:
		watched = ['main', 'master']
	feedbranches[spec[0]] = watched
	poller_workdir = '%s/%s.git' %(os.getcwd(), fields[1])
	c['change_source'].append(GitPoller(spec[0], branches=watched, workdir=poller_workdir, pollInterval=300))
183 |
# Ask the SDK makefiles for the base feed of the tree; its first output line
# is treated as an additional feed entry.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
	env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
	parse_feed_entry(str(line, 'utf-8'))

# Every entry of feeds.conf.default becomes a candidate change source too.
with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
	for line in f:
		parse_feed_entry(line)

# Without at least one change source the master cannot do anything useful.
if len(c['change_source']) == 0:
	log.err("FATAL ERROR: no change_sources defined, aborting!")
	sys.exit(-1)
198 |
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []
# Trigger all architecture builders on changes, but only for branches that a
# registered feed actually tracks (see feedbranches above).
c['schedulers'].append(SingleBranchScheduler(
	name = "all",
	change_filter = filter.ChangeFilter(
		filter_fn = lambda change: change.branch in feedbranches[change.repository]
	),
	treeStableTimer = 60,
	builderNames = archnames))

# Manual trigger from the web UI; the "architecture" option limits the forced
# build to a single arch (default: all).
c['schedulers'].append(ForceScheduler(
	name = "force",
	buttonName = "Force builds",
	label = "Force build details",
	builderNames = [ "00_force_build" ],

	codebases = [
		util.CodebaseParameter(
			"",
			label = "Repository",
			branch = util.FixedParameter(name = "branch", default = ""),
			revision = util.FixedParameter(name = "revision", default = ""),
			repository = util.FixedParameter(name = "repository", default = ""),
			project = util.FixedParameter(name = "project", default = "")
		)
	],

	reason = util.StringParameter(
		name = "reason",
		label = "Reason",
		default = "Trigger build",
		required = True,
		size = 80
	),

	properties = [
		util.NestedParameter(
			name="options",
			label="Build Options",
			layout="vertical",
			fields=[
				util.ChoiceStringParameter(
					name = "architecture",
					label = "Build architecture",
					default = "all",
					choices = [ "all" ] + archnames
				)
			]
		)
	]
))
254 |
255 | ####### BUILDERS
256 |
257 | # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
258 | # what steps, and which workers can execute them. Note that any particular build will
259 | # only take place on one worker.
260 |
@properties.renderer
def GetDirectorySuffix(props):
	# Render a "-MM.mm" directory suffix (e.g. "-23.05") from the
	# "release_version" build property; returns "" when the property is
	# missing or does not match. Accepted forms: "MM.mm.patch",
	# "MM.mm.patch-rcN" and "MM.mm-SNAPSHOT"; only major/minor are used.
	verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
	if props.hasProperty("release_version"):
		m = verpat.match(props["release_version"])
		if m is not None:
			return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
	return ""
269 |
@properties.renderer
def GetCwd(props):
	# Best available working-directory path for the build: prefer the
	# builder directory, then the step workdir, falling back to "/".
	for prop_name in ("builddir", "workdir"):
		if props.hasProperty(prop_name):
			return props[prop_name]
	return "/"
278 |
def IsArchitectureSelected(target):
	# Build a doStepIf predicate for force builds: it returns False only
	# when the "options" property is a dict that explicitly selects a
	# different architecture than `target`; in every other case (property
	# missing, not a dict, or "all") the step runs.
	def CheckArchitectureProperty(step):
		try:
			opts = step.getProperty("options")
		except KeyError:
			return True
		if isinstance(opts, dict):
			chosen = opts.get("architecture", "all")
			if chosen not in ("all", target):
				return False
		return True

	return CheckArchitectureProperty
293 |
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	# Derive the usign public-key text from a base64-encoded secret key.
	#
	# seckey:  base64 string as stored in config.ini ("[usign] key").
	# comment: the secret key's untrusted-comment line; a trailing
	#          "secret key" is rewritten to "public key" for the output.
	# Returns the two-line public-key text, or None if seckey is not
	# valid base64.
	try:
		seckey = base64.b64decode(seckey)
	except Exception:
		return None

	# The byte slices follow the usign secret-key layout (assumed: 2-byte
	# type tag, key id at offset 32, public material from offset 72 —
	# TODO confirm against the usign format documentation).
	# Bug fix: b64encode() returns bytes; decode to str so the formatted
	# result does not embed a bytes repr ("b'...'") in the key file.
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode('ascii'))
302 |
def IsSharedWorkdir(step):
	# doStepIf helper: True when the worker's "shared_wd" property is truthy.
	return True if step.getProperty("shared_wd") else False
305 |
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
	"""Returns the complete_at of the latest completed and not SKIPPED
	build request for this builder, or None if there are no such build
	requests. We need to filter out SKIPPED requests because we're
	using collapseRequests=True which is unfortunately marking all
	previous requests as complete when new buildset is created.

	@returns: datetime instance or None, via Deferred
	"""

	bldrid = yield bldr.getBuilderId()
	# Newest completed, non-skipped build request for this builder.
	completed = yield bldr.master.data.get(
		('builders', bldrid, 'buildrequests'),
		[
			resultspec.Filter('complete', 'eq', [True]),
			resultspec.Filter('results', 'ne', [results.SKIPPED]),
		],
		order=['-complete_at'], limit=1)
	if not completed:
		return

	complete_at = completed[0]['complete_at']

	# Also look at the most recently *started* build for this builder; its
	# completion time may be newer than the request's.
	last_build = yield bldr.master.data.get(
		('builds', ),
		[
			resultspec.Filter('builderid', 'eq', [bldrid]),
		],
		order=['-started_at'], limit=1)

	if last_build and last_build[0]:
		last_complete_at = last_build[0]['complete_at']
		# Return whichever completion timestamp is newer.
		if last_complete_at and (last_complete_at > complete_at):
			return last_complete_at

	return complete_at
343 |
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
	"""Returns sorted list of builders by their last timestamp of completed and
	not skipped build.

	@returns: list of sorted builders
	"""

	def is_building(bldr):
		# True while the builder has any in-progress (or stale) builds.
		return bool(bldr.building) or bool(bldr.old_building)

	def bldr_info(bldr):
		# Pair each builder with its newest completion timestamp.
		d = defer.maybeDeferred(getNewestCompleteTime, bldr)
		d.addCallback(lambda complete_at: (complete_at, bldr))
		return d

	def bldr_sort(item):
		(complete_at, bldr) = item

		# Never-built builders get the oldest possible timestamp so they
		# sort first ...
		if not complete_at:
			date = datetime.min
			complete_at = date.replace(tzinfo=tzutc())

		# ... while currently-building ones get the newest so they sort
		# last; idle, longest-waiting builders win.
		if is_building(bldr):
			date = datetime.max
			complete_at = date.replace(tzinfo=tzutc())

		return (complete_at, bldr.name)

	# NOTE(review): this local "results" shadows the module-level
	# buildbot.process.results import; harmless within this function but
	# worth renaming.
	results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
	results.sort(key=bldr_sort)

	for r in results:
		log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

	return [r[1] for r in results]
380 |
c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

# Per-worker lock named "worker_dl"; steps elsewhere in this file may take it
# to serialize work on a given worker.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
	workerNames.append(worker.workername)

# Dedicated no-op builder that the ForceScheduler targets; the forced
# properties fan out to the per-arch builders.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
	name = "00_force_build",
	workernames = workerNames,
	factory = force_factory))
397 |
398 | for arch in arches:
399 | ts = arch[1].split('/')
400 |
401 | factory = BuildFactory()
402 |
403 | # setup shared work directory if required
404 | factory.addStep(ShellCommand(
405 | name = "sharedwd",
406 | description = "Setting up shared work directory",
407 | command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
408 | workdir = ".",
409 | haltOnFailure = True,
410 | doStepIf = IsSharedWorkdir))
411 |
412 | # find number of cores
413 | factory.addStep(SetPropertyFromCommand(
414 | name = "nproc",
415 | property = "nproc",
416 | description = "Finding number of CPUs",
417 | command = ["nproc"]))
418 |
419 | # prepare workspace
420 | factory.addStep(FileDownload(
421 | mastersrc = scripts_dir + '/cleanup.sh',
422 | workerdest = "../cleanup.sh",
423 | mode = 0o755))
424 |
425 | if not persistent:
426 | factory.addStep(ShellCommand(
427 | name = "cleanold",
428 | description = "Cleaning previous builds",
429 | command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
430 | workdir = ".",
431 | haltOnFailure = True,
432 | timeout = 2400))
433 |
434 | factory.addStep(ShellCommand(
435 | name = "cleanup",
436 | description = "Cleaning work area",
437 | command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
438 | workdir = ".",
439 | haltOnFailure = True,
440 | timeout = 2400))
441 |
442 | factory.addStep(ShellCommand(
443 | name = "mksdkdir",
444 | description = "Preparing SDK directory",
445 | command = ["mkdir", "-p", "sdk"],
446 | haltOnFailure = True))
447 |
448 | factory.addStep(ShellCommand(
449 | name = "downloadsdk",
450 | description = "Downloading SDK archive",
451 | command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
452 | env={'RSYNC_PASSWORD': rsync_sdk_key},
453 | haltOnFailure = True,
454 | logEnviron = False))
455 |
456 | factory.addStep(ShellCommand(
457 | name = "unpacksdk",
458 | description = "Unpacking SDK archive",
459 | command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
460 | haltOnFailure = True))
461 |
462 | factory.addStep(ShellCommand(
463 | name = "updatesdk",
464 | description = "Updating SDK",
465 | command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
466 | haltOnFailure = True))
467 |
468 | factory.addStep(ShellCommand(
469 | name = "cleancmdlinks",
470 | description = "Sanitizing host command symlinks",
471 | command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
472 | haltOnFailure = True))
473 |
474 | factory.addStep(StringDownload(
475 | name = "writeversionmk",
476 | s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
477 | workerdest = "sdk/getversion.mk",
478 | mode = 0o755))
479 |
480 | factory.addStep(SetPropertyFromCommand(
481 | name = "getversion",
482 | property = "release_version",
483 | description = "Finding SDK release version",
484 | workdir = "build/sdk",
485 | command = ["make", "-f", "getversion.mk"]))
486 |
487 | # install build key
488 | if usign_key is not None:
489 | factory.addStep(StringDownload(
490 | name = "dlkeybuildpub",
491 | s = UsignSec2Pub(usign_key, usign_comment),
492 | workerdest = "sdk/key-build.pub",
493 | mode = 0o600))
494 |
495 | factory.addStep(StringDownload(
496 | name = "dlkeybuild",
497 | s = "# fake private key",
498 | workerdest = "sdk/key-build",
499 | mode = 0o600))
500 |
501 | factory.addStep(StringDownload(
502 | name = "dlkeybuilducert",
503 | s = "# fake certificate",
504 | workerdest = "sdk/key-build.ucert",
505 | mode = 0o600))
506 |
507 | factory.addStep(ShellCommand(
508 | name = "mkdldir",
509 | description = "Preparing download directory",
510 | command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
511 | haltOnFailure = True))
512 |
513 | factory.addStep(ShellCommand(
514 | name = "mkconf",
515 | description = "Preparing SDK configuration",
516 | workdir = "build/sdk",
517 | command = ["sh", "-c", "rm -f .config && make defconfig"]))
518 |
519 | factory.addStep(FileDownload(
520 | mastersrc = scripts_dir + '/ccache.sh',
521 | workerdest = 'sdk/ccache.sh',
522 | mode = 0o755))
523 |
524 | factory.addStep(ShellCommand(
525 | name = "prepccache",
526 | description = "Preparing ccache",
527 | workdir = "build/sdk",
528 | command = ["./ccache.sh"],
529 | haltOnFailure = True))
530 |
531 | factory.addStep(ShellCommand(
532 | name = "updatefeeds",
533 | description = "Updating feeds",
534 | workdir = "build/sdk",
535 | command = ["./scripts/feeds", "update", "-f"],
536 | haltOnFailure = True))
537 |
538 | factory.addStep(ShellCommand(
539 | name = "installfeeds",
540 | description = "Installing feeds",
541 | workdir = "build/sdk",
542 | command = ["./scripts/feeds", "install", "-a"],
543 | haltOnFailure = True))
544 |
545 | factory.addStep(ShellCommand(
546 | name = "logclear",
547 | description = "Clearing failure logs",
548 | workdir = "build/sdk",
549 | command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
550 | haltOnFailure = False,
551 | flunkOnFailure = False,
552 | warnOnFailure = True,
553 | ))
554 |
555 | factory.addStep(ShellCommand(
556 | name = "compile",
557 | description = "Building packages",
558 | workdir = "build/sdk",
559 | timeout = 3600,
560 | command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
561 | env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
562 | haltOnFailure = True))
563 |
564 | factory.addStep(ShellCommand(
565 | name = "mkfeedsconf",
566 | description = "Generating pinned feeds.conf",
567 | workdir = "build/sdk",
568 | command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
569 |
570 | factory.addStep(ShellCommand(
571 | name = "checksums",
572 | description = "Calculating checksums",
573 | descriptionDone="Checksums calculated",
574 | workdir = "build/sdk",
575 | command = "cd bin/packages/%s; " %(arch[0])
576 | + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | "
577 | + "sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | "
578 | + r"sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums",
579 | haltOnFailure = True
580 | ))
581 |
582 | if ini.has_option("gpg", "key") or usign_key is not None:
583 | factory.addStep(MasterShellCommand(
584 | name = "signprepare",
585 | description = "Preparing temporary signing directory",
586 | command = ["mkdir", "-p", "%s/signing" %(work_dir)],
587 | haltOnFailure = True
588 | ))
589 |
590 | factory.addStep(ShellCommand(
591 | name = "signpack",
592 | description = "Packing files to sign",
593 | workdir = "build/sdk",
594 | command = "find bin/packages/%s/ -mindepth 1 -maxdepth 2 -type f " %(arch[0])
595 | + "-name sha256sums -print0 -or "
596 | + "-name Packages -print0 -or "
597 | + "-name packages.adb -print0 | "
598 | + "xargs -0 tar -czf sign.tar.gz",
599 | haltOnFailure = True
600 | ))
601 |
602 | factory.addStep(FileUpload(
603 | workersrc = "sdk/sign.tar.gz",
604 | masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
605 | haltOnFailure = True
606 | ))
607 |
608 | factory.addStep(MasterShellCommand(
609 | name = "signfiles",
610 | description = "Signing files",
611 | command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
612 | env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
613 | haltOnFailure = True
614 | ))
615 |
616 | factory.addStep(FileDownload(
617 | mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
618 | workerdest = "sdk/sign.tar.gz",
619 | haltOnFailure = True
620 | ))
621 |
622 | factory.addStep(ShellCommand(
623 | name = "signunpack",
624 | description = "Unpacking signed files",
625 | workdir = "build/sdk",
626 | command = ["tar", "-xzf", "sign.tar.gz"],
627 | haltOnFailure = True
628 | ))
629 |
630 | # download remote sha256sums to 'target-sha256sums'
631 | factory.addStep(ShellCommand(
632 | name = "target-sha256sums",
633 | description = "Fetching remote sha256sums for arch",
634 | command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
635 | env={'RSYNC_PASSWORD': rsync_bin_key},
636 | logEnviron = False,
637 | haltOnFailure = False,
638 | flunkOnFailure = False,
639 | warnOnFailure = False,
640 | ))
641 |
642 | factory.addStep(FileDownload(
643 | name="dlrsync.sh",
644 | mastersrc = scripts_dir + "/rsync.sh",
645 | workerdest = "../rsync.sh",
646 | mode = 0o755
647 | ))
648 |
649 | factory.addStep(FileDownload(
650 | name = "dlsha2rsyncpl",
651 | mastersrc = scripts_dir + "/sha2rsync.pl",
652 | workerdest = "../sha2rsync.pl",
653 | mode = 0o755,
654 | ))
655 |
656 | factory.addStep(ShellCommand(
657 | name = "buildlist",
658 | description = "Building list of files to upload",
659 | workdir = "build/sdk",
660 | command = ["../../sha2rsync.pl", "../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
661 | haltOnFailure = True,
662 | ))
663 |
664 | factory.addStep(ShellCommand(
665 | name = "uploadprepare",
666 | description = "Preparing package directory",
667 | workdir = "build/sdk",
668 | command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
669 | env={'RSYNC_PASSWORD': rsync_bin_key},
670 | haltOnFailure = True,
671 | logEnviron = False
672 | ))
673 |
674 | factory.addStep(ShellCommand(
675 | name = "packageupload",
676 | description = "Uploading package files",
677 | workdir = "build/sdk",
678 | command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
679 | env={'RSYNC_PASSWORD': rsync_bin_key},
680 | haltOnFailure = True,
681 | logEnviron = False
682 | ))
683 |
684 | factory.addStep(ShellCommand(
685 | name = "packageprune",
686 | description = "Pruning package files",
687 | workdir = "build/sdk",
688 | command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
689 | env={'RSYNC_PASSWORD': rsync_bin_key},
690 | haltOnFailure = True,
691 | logEnviron = False
692 | ))
693 |
694 | factory.addStep(ShellCommand(
695 | name = "logprepare",
696 | description = "Preparing log directory",
697 | workdir = "build/sdk",
698 | command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
699 | env={'RSYNC_PASSWORD': rsync_bin_key},
700 | haltOnFailure = True,
701 | logEnviron = False
702 | ))
703 |
704 | factory.addStep(ShellCommand(
705 | name = "logfind",
706 | description = "Finding failure logs",
707 | workdir = "build/sdk/logs/package/feeds",
708 | command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
709 | haltOnFailure = False,
710 | flunkOnFailure = False,
711 | warnOnFailure = True,
712 | ))
713 |
714 | factory.addStep(ShellCommand(
715 | name = "logcollect",
716 | description = "Collecting failure logs",
717 | workdir = "build/sdk",
718 | command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
719 | haltOnFailure = False,
720 | flunkOnFailure = False,
721 | warnOnFailure = True,
722 | ))
723 |
724 | factory.addStep(ShellCommand(
725 | name = "logupload",
726 | description = "Uploading failure logs",
727 | workdir = "build/sdk",
728 | command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
729 | env={'RSYNC_PASSWORD': rsync_bin_key},
730 | haltOnFailure = False,
731 | flunkOnFailure = False,
732 | warnOnFailure = True,
733 | logEnviron = False
734 | ))
735 |
736 | if rsync_src_url is not None:
737 | factory.addStep(ShellCommand(
738 | name = "sourcelist",
739 | description = "Finding source archives to upload",
740 | workdir = "build/sdk",
741 | command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
742 | haltOnFailure = True
743 | ))
744 |
745 | factory.addStep(ShellCommand(
746 | name = "sourceupload",
747 | description = "Uploading source archives",
748 | workdir = "build/sdk",
749 | command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
750 | Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
751 | env={'RSYNC_PASSWORD': rsync_src_key},
752 | haltOnFailure = False,
753 | flunkOnFailure = False,
754 | warnOnFailure = True,
755 | logEnviron = False
756 | ))
757 |
758 | factory.addStep(ShellCommand(
759 | name = "df",
760 | description = "Reporting disk usage",
761 | command=["df", "-h", "."],
762 | env={'LC_ALL': 'C'},
763 | haltOnFailure = False,
764 | flunkOnFailure = False,
765 | warnOnFailure = False,
766 | alwaysRun = True
767 | ))
768 |
769 | factory.addStep(ShellCommand(
770 | name = "du",
771 | description = "Reporting estimated file space usage",
772 | command=["du", "-sh", "."],
773 | env={'LC_ALL': 'C'},
774 | haltOnFailure = False,
775 | flunkOnFailure = False,
776 | warnOnFailure = False,
777 | alwaysRun = True
778 | ))
779 |
780 | factory.addStep(ShellCommand(
781 | name = "ccachestat",
782 | description = "Reporting ccache stats",
783 | command=["ccache", "-s"],
784 | want_stderr = False,
785 | haltOnFailure = False,
786 | flunkOnFailure = False,
787 | warnOnFailure = False,
788 | alwaysRun = True,
789 | ))
790 |
791 | c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
792 |
793 | c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
794 | force_factory.addStep(steps.Trigger(
795 | name = "trigger_%s" % arch[0],
796 | description = "Triggering %s build" % arch[0],
797 | schedulerNames = [ "trigger_%s" % arch[0] ],
798 | set_properties = { "reason": Property("reason") },
799 | doStepIf = IsArchitectureSelected(arch[0])
800 | ))
801 |
802 | ####### STATUS arches
803 |
804 | # 'status' is a list of Status arches. The results of each build will be
805 | # pushed to these arches. buildbot/status/*.py has a variety to choose from,
806 | # including web pages, email senders, and IRC bots.
807 |
808 | if ini.has_option("phase2", "status_bind"):
809 | c['www'] = {
810 | 'port': ini.get("phase2", "status_bind"),
811 | 'plugins': {
812 | 'waterfall_view': True,
813 | 'console_view': True,
814 | 'grid_view': True
815 | }
816 | }
817 |
818 | if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
819 | c['www']['auth'] = util.UserPasswordAuth([
820 | (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
821 | ])
822 | c['www']['authz'] = util.Authz(
823 | allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
824 | roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
825 | )
826 |
827 | ####### PROJECT IDENTITY
828 |
829 | # the 'title' string will appear at the top of this buildbot
830 | # installation's html.WebStatus home page (linked to the
831 | # 'titleURL') and is embedded in the title of the waterfall HTML page.
832 |
833 | c['title'] = ini.get("general", "title")
834 | c['titleURL'] = ini.get("general", "title_url")
835 |
836 | # the 'buildbotURL' string should point to the location where the buildbot's
837 | # internal web server (usually the html.WebStatus page) is visible. This
838 | # typically uses the port number set in the Waterfall 'status' entry, but
839 | # with an externally-visible host name which the buildbot cannot figure out
840 | # without some help.
841 |
842 | c['buildbotURL'] = buildbot_url
843 |
844 | ####### DB URL
845 |
846 | c['db'] = {
847 | # This specifies what database buildbot uses to store its state. You can leave
848 | # this at its default for all but the largest installations.
849 | 'db_url' : "sqlite:///state.sqlite",
850 | }
851 |
852 | c['buildbotNetUsageData'] = None
853 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | cram==0.7
2 | black==23.12.1
3 | ruff==0.1.9
4 | flake8==6.1.0
5 |
--------------------------------------------------------------------------------
/scripts/ccache.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Prepare a shared ccache for builds:
#  - ensure ~/.ccache exists with default size / compiler_check settings
#  - symlink every staging_dir ccache path to the shared cache

export LC_ALL=C

conf="$HOME/.ccache/ccache.conf"

mkdir -p "$HOME/.ccache" || exit 1

# set_default <option> <line>: append <line> to ccache.conf unless a line
# mentioning <option> already exists ("grep -s" stays quiet when the file
# does not exist yet).
set_default() {
	grep -sq "$1" "$conf" || echo "$2" >> "$conf" || exit 1
}

set_default max_size "max_size = 10.0G"
set_default compiler_check "compiler_check = %compiler% -dumpmachine; %compiler% -dumpversion"

# Resolve the staging directories from the build system and point each
# "ccache" entry at the shared cache; replace anything that is not a
# symlink to a directory. ($(make ...) intentionally unquoted: one path
# per word, paths are assumed whitespace-free.)
for dir in $(make --no-print-directory val.TOOLCHAIN_DIR val.STAGING_DIR val.STAGING_DIR_HOST V=s | grep staging_dir/); do
	if [ ! -L "$dir/ccache" ] || { [ -L "$dir/ccache" ] && [ ! -d "$dir/ccache" ]; }; then
		mkdir -vp "$dir" || exit 1
		rm -vrf "$dir/ccache" || exit 1
		ln -vs "$HOME/.ccache" "$dir/ccache" || exit 1
	fi
done

exit 0
--------------------------------------------------------------------------------
/scripts/cleanup.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Buildbot worker cleanup helper.
#
# Usage: cleanup.sh <master_url> <worker_name> <builder_name> <mode>
#
# Queries the buildmaster REST API to learn which builders are currently
# building on this worker, then cleans the work directory of the current
# build, or of every non-active builder when <mode> is "full".

export LC_ALL=C

master_url="$1"
current_worker="$2"
current_builder="$3"
current_mode="$4"

# Scrape the numeric worker id, then the ids of builders whose state is
# "building", from the master's JSON API (parsed with sed rather than a
# JSON tool to keep worker-side dependencies minimal).
worker_id="$(wget -qO- "${master_url%/}/api/v2/workers/$current_worker" | sed -rne 's#^ +"workerid": ([0-9]+),?$#\1#p')"
active_builder_ids="$(wget -qO- "${master_url%/}/api/v2/workers/$worker_id/builds" | sed -rne '/"builderid"/ { s/^.+: ([0-9]+),$/\1/; h }; /"state_string"/ { s/^.+: "([^"]*)".*$/\1/; H; x; s/\n/ /; p }' | sed -ne 's/ building$//p')"

# Drop stale day-old npm/jsmake temp directories.
find /tmp/ -maxdepth 1 -mtime +1 '(' -name 'npm-*' -or -name 'jsmake-*' ')' -print0 | xargs -0 -r rm -vr

# is_running <builder_name>: succeed if the named builder is currently
# building on this worker ("/" in builder names maps to "_" in API paths).
is_running() {
	local id="$(wget -qO- "${master_url%/}/api/v2/builders/${1//\//_}" | sed -rne 's#^ +"builderid": ([0-9]+),$#\1#p')"
	local running_builder_id
	for running_builder_id in $active_builder_ids; do
		if [ "$running_builder_id" = "$id" ]; then
			return 0
		fi
	done
	return 1
}

# Wipe the current directory: via git when it is a checkout, otherwise by
# removing every entry, printing one dot per removed directory.
do_cleanup() {
	# %s placeholder: a "%" in the builder name must not be taken as a
	# printf format directive.
	printf "Cleaning up '%s' work directory" "$current_builder"

	if [ -d .git ]; then
		echo " using git"
		git reset --hard HEAD
		git clean -f -d -x
	else
		find . -mindepth 1 -maxdepth 1 | while read -r entry; do
			rm -vrf "$entry" | while read -r entry2; do
				case "$entry2" in *directory[:\ ]*)
					printf "."
				esac
			done
		done
	fi

	echo ""
}

#
# Sanity check, current builder should be in running builders list
#

if ! is_running "$current_builder"; then
	echo "Current builder '$current_builder' not found in current builders list, aborting cleanup."
	exit 1
fi


#
# Clean up leftovers
#

if [ "$current_mode" = full ]; then
	(
		# Serialize whole-worker cleanups with an exclusive lock
		# (waits up to 45 minutes).
		if ! flock -x -w 2700 200; then
			echo "Unable to obtain exclusive lock, aborting cleanup."
			exit 1
		fi

		for build_dir in ../*; do

			current_builder="${build_dir##*/}"
			build_dir="$(readlink -f "$build_dir")"

			if [ -z "$build_dir" ] || [ -L "$build_dir" ] || [ ! -d "$build_dir/build" ]; then
				continue
			fi

			if is_running "$current_builder"; then
				echo "Skipping currently active '$current_builder' work directory."
				continue
			fi

			(
				cd "$build_dir/build"
				do_cleanup
			)
		done

	) 200>../cleanup.lock

#
# Clean up current build
#

else
	if [ -d build ]; then (
		cd build
		do_cleanup
	); fi
fi

exit 0
--------------------------------------------------------------------------------
/scripts/findbin.pl:
--------------------------------------------------------------------------------
#!/usr/bin/env perl
#
# findbin.pl <basename> <op> <version>
#
# Locate the newest executable in $PATH named "<basename>" or
# "<basename><suffix>" whose "--version" output satisfies the constraint
# "<op> <version>" (op: lt/le/gt/ge/eq). Prints the path and exits 0, or
# warns and exits 1 when no matching binary is found.

use strict;
use warnings;

# Convert a dotted version string ("1.2", "1.2.3", ...) into a single
# comparable integer (major * 256 + minor; further components are
# ignored). Returns 0 when the token does not look like a version.
sub vernum($) {
	if ($_[0] =~ m!^((?:\d+\.)+\d+)$!) {
		my ($maj, $min) = split /\./, $1;
		return int($maj) * 256 + int($min);
	}

	return 0;
}

# Compare two encoded versions with a named operator; unknown operators
# compare false.
sub vercmp($$$) {
	my ($op, $v1, $v2) = @_;

	if ($op eq 'lt') { return $v1 < $v2 }
	elsif ($op eq 'le') { return $v1 <= $v2 }
	elsif ($op eq 'gt') { return $v1 > $v2 }
	elsif ($op eq 'ge') { return $v1 >= $v2 }
	elsif ($op eq 'eq') { return $v1 == $v2 }

	return 0;
}

# Scan $PATH and return the path of the best matching binary, or undef.
sub findbin($$$) {
	my ($basename, $compare, $maxvstr) = @_;

	my $lastversion = 0;
	my $cmpversion = vernum($maxvstr);
	my $prog = undef;

	foreach my $dir (split /:/, $ENV{'PATH'}) {
		# Suffixed variants (e.g. "gcc-12") first, then the bare name.
		foreach my $bin (glob("$dir/$basename?*"), "$dir/$basename") {
			# List-form pipe open: no shell interprets $bin.
			if (-x $bin && open(my $fh, '-|', $bin, '--version')) {
				my $vers = 0;
				my $line = readline($fh) || '';

				# First whitespace-separated token that parses as a
				# version wins.
				foreach my $token (split /\s+/, $line) {
					$vers = vernum($token);
					last if $vers > 0;
				}

				# Keep the highest version satisfying the constraint
				# (no constraint when $cmpversion is 0).
				if ($vers > 0 && (!$cmpversion || vercmp($compare, $vers, $cmpversion))) {
					if ($vers > $lastversion) {
						$lastversion = $vers;
						$prog = $bin;
					}
				}

				close($fh);
			}
		}
	}

	return $prog;
}

my $bin = findbin($ARGV[0], $ARGV[1], $ARGV[2]);

if (defined $bin) {
	printf "%s\n", $bin;
	exit 0;
}
else {
	warn "Cannot find a $ARGV[0] command with version $ARGV[1] $ARGV[2]\n";
	exit 1;
}
--------------------------------------------------------------------------------
/scripts/makebranch.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Create a release branch "lede-<version>" from main: pin each src-git
# feed to its matching branch where available, point version defaults at
# the branch snapshot and push the new branch. Must be run from the
# checkout root (next to feeds.conf.default).

git_author="Release Management"
git_email="lede-dev@lists.infradead.org"

base_url="http://downloads.lede-project.org/releases"

[ -f "./feeds.conf.default" ] || {
	echo "Please execute as ./${0##*/}" >&2
	exit 1
}

usage() {
	{
		echo ""
		echo "Usage: $0 [-i] [-a <author>] [-e <email>] \\"
		echo "          [-u <baseurl>] -n <codename> -v <version>"
		echo ""
		echo "-i"
		echo "Exit successfully if branch already exists"
		echo ""
		echo "-a <author> Git author [$git_author]"
		echo "Override the author name used for automated Git commits"
		echo ""
		echo "-e <email> Git email [$git_email]"
		echo "Override the email used for automated Git commits"
		echo ""
		echo "-u <baseurl> Download base url [$base_url]"
		echo "Use the given URL as base for download repositories"
		echo ""
		exit 1
	} >&2
}

while getopts "a:e:iu:n:v:" opt; do
	case "$opt" in
		a) git_author="$OPTARG" ;;
		e) git_email="$OPTARG" ;;
		i) ignore_existing=1 ;;
		u) base_url="${OPTARG%/}" ;;
		n) codename="$OPTARG" ;;
		v)
			# Accept "maj.min" or longer; keep only major.minor.
			case "$OPTARG" in
				[0-9]*.[0-9]*)
					version="$(echo "$OPTARG" | cut -d. -f1-2)"
				;;
				*)
					echo "Unexpected version format: $OPTARG" >&2
					exit 1
				;;
			esac
		;;
		\?)
			echo "Unexpected option: -$OPTARG" >&2
			usage
		;;
		:)
			echo "Missing argument for option: -$OPTARG" >&2
			usage
		;;
	esac
done

[ -n "$codename" ] && [ -n "$version" ] || usage

# Branch already exists: error out unless -i was given.
if git rev-parse "lede-${version}^{tree}" >/dev/null 2>/dev/null; then
	if [ -z "$ignore_existing" ]; then
		echo "Branch lede-${version} already exists!" >&2
		exit 1
	fi

	exit 0
fi

# NOTE(review): revnum/githash are computed but not referenced below -
# presumably kept for logging or historical reasons; confirm before
# removing.
revnum="$(./scripts/getver.sh)"
githash="$(git log --format=%h -1)"

prev_branch="$(git symbolic-ref -q HEAD)"

if [ "$prev_branch" != "refs/heads/main" ]; then
	echo "Expecting current branch name to be \"main\"," \
		"but it is \"${prev_branch#refs/heads/}\" - aborting."

	exit 1
fi

export GIT_AUTHOR_NAME="$git_author"
export GIT_AUTHOR_EMAIL="$git_email"
export GIT_COMMITTER_NAME="$git_author"
export GIT_COMMITTER_EMAIL="$git_email"

git checkout -b "lede-$version"

# Rewrite feeds.conf.default: pin every plain src-git feed to its
# "lede-<version>" branch when the remote provides one (URLs already
# carrying a ^commit or ;branch suffix are left alone).
while read -r type name url; do
	case "$type" in
		src-git)
			case "$url" in
				*^*|*\;*) : ;;
				*)
					ref="$(git ls-remote "$url" "lede-$version")"

					if [ -z "$ref" ]; then
						echo "WARNING: Feed \"$name\" provides no" \
							"\"lede-$version\" branch - using main!" >&2
					else
						url="$url;lede-$version"
					fi
				;;
			esac
			echo "$type $name $url"
		;;
		src-*)
			echo "$type $name $url"
		;;
	esac
done < feeds.conf.default > feeds.conf.branch && \
	mv feeds.conf.branch feeds.conf.default

# Point release name, version number and download repo at the branch
# snapshot.
sed -e 's!^RELEASE:=.*!RELEASE:='"$codename"'!g' \
	-e 's!\(VERSION_NUMBER:=\$(if .*\),[^,]*)!\1,'"$version-SNAPSHOT"')!g' \
	-e 's!\(VERSION_REPO:=\$(if .*\),[^,]*)!\1,'"$base_url/$version-SNAPSHOT"')!g' \
	include/version.mk > include/version.branch && \
	mv include/version.branch include/version.mk

sed -e 's!http://downloads.lede-project.org/[^"]*!'"$base_url/$version-SNAPSHOT"'!g' \
	package/base-files/image-config.in > package/base-files/image-config.branch && \
	mv package/base-files/image-config.branch package/base-files/image-config.in

git commit -sm "LEDE v$version: set branch defaults" \
	feeds.conf.default \
	include/version.mk \
	package/base-files/image-config.in

git --no-pager log -p -1
git push origin "refs/heads/lede-$version:refs/heads/lede-$version"
git checkout "${prev_branch#refs/heads/}"
--------------------------------------------------------------------------------
/scripts/rsync.sh:
--------------------------------------------------------------------------------
#!/bin/bash -x
# Wrapper around rsync that throttles log output: when pv(1) is
# available, rsync's stdout is piped through it so long transfers emit
# at most one line per 60 seconds instead of flooding the build log.

export LC_ALL=C

# Propagate rsync's exit status through the pv pipeline.
set -o pipefail

# command -v (POSIX) instead of backticked `which`; PV stays empty when
# pv is not installed and the -x test below fails cleanly.
PV="$(command -v pv)"
RSYNC=rsync

if [[ -x $PV ]]; then
	$RSYNC "$@" | "$PV" -t -i 60 -f
else
	$RSYNC "$@"
fi
--------------------------------------------------------------------------------
/scripts/sec2pubkey.pl:
--------------------------------------------------------------------------------
#!/usr/bin/env perl
#
# Derive a usign/signify public key from a secret key read on stdin:
#   sec2pubkey.pl < key.sec > key.pub

use strict;
use warnings;
use MIME::Base64;

# Refuse to read from an interactive terminal so that running the script
# without redirection prints the usage hint instead of blocking.
my @lines = (-t STDIN) ? () : <>;

if (@lines == 0) {
	die "Usage: $0 < key.sec > key.pub\n";
}

# Last line is the base64-encoded secret key blob, first line the
# "untrusted comment: ..." header (a default is supplied if missing).
my $seckey = decode_base64(pop @lines);
my $comment = shift(@lines) || "untrusted comment: secret key";

chomp($comment);

$comment =~ s/\bsecret key$/public key/;

# A decoded usign/signify secret key blob is exactly 104 bytes.
if (length($seckey) != 104) {
	die "Unexpected secret key length\n";
}

# Per the signify/usign key layout: public key = 2-byte algorithm tag
# (offset 0) + 8-byte key id (offset 32) + 32-byte public key material
# (offset 72 onward). "" suppresses line wrapping in the base64 output.
my $pubkey = encode_base64(substr($seckey, 0, 2) . substr($seckey, 32, 8) . substr($seckey, 72), "");

printf "%s\n%s\n", $comment, $pubkey;
--------------------------------------------------------------------------------
/scripts/sha2rsync.pl:
--------------------------------------------------------------------------------
#!/usr/bin/perl -w

# ./sha2rsync.pl <rlist> <llist> <torsync>

# <rlist> is the filename of sha256sums fetched from server
# <llist> is the filename of sha256sums generated locally
# <torsync> is the filename of the list of files to upload

# <rlist> and <llist> are files formatted as follows:
# <checksum> *<pathtofile>

# both files must be sorted based on pathtofile: the script performs
# in-place merge (O(n+m)) of both lists based on that assumption.
# <rlist> and <llist> are parsed only once.

# the script cannot currently handle any other type of input

# the script will generate <torsync>, a list of files suitable for
# using with "rsync --files-from=<torsync>"

# if <rlist> doesn't exist, all files in <llist> are added to the
# upload list.
# if <rlist> exists, the files are added if:
# - they're not present in <rlist>
# - they're present in <rlist> AND their checksums differ

# the script will clobber <torsync>

use strict;
use warnings;
use integer;

die ("wrong number of arguments!") if ($#ARGV+1 != 3);

# "<hex checksum> *<path>" as emitted by sha256sum in binary mode.
my $shapat = qr/^(\w+) \*(.+)$/;

my $rlist = $ARGV[0];
my $llist = $ARGV[1];
my $torsync = $ARGV[2];

my $rlist_fh = undef;
my $llist_fh = undef;
my $torsync_fh = undef;

open($torsync_fh, ">", $torsync) or die("can't create output file!");
open($llist_fh, "<", $llist) or die("can't read local list!");
# A missing remote list is not fatal: $rlist_fh stays undef and every
# local file ends up on the upload list.
open($rlist_fh, "<", $rlist) or $rlist_fh = undef;

my $lline = readline($llist_fh);
my $rline = defined($rlist_fh) ? readline($rlist_fh) : undef;


MAINLOOP: while () {
	# run this loop as long as we have content from both rlist and llist
	last (MAINLOOP) unless (defined($lline) && defined($rline));

	chomp($lline);
	my ($lcsum, $lfname) = $lline =~ $shapat;

	chomp($rline);
	my ($rcsum, $rfname) = $rline =~ $shapat;

	# compare current remote and local filenames
	my $rtlcmp = ($rfname cmp $lfname);

	# NOTE: the two "while" blocks below never iterate - the trailing
	# "next (MAINLOOP)" restarts the outer loop, so each acts as a plain
	# conditional advancing only one of the two streams.
	while ($rtlcmp < 0) { # remote fname is before current local fname: remote file doesn't exist localy
		$rline = readline($rlist_fh);
		next (MAINLOOP);
	}

	while ($rtlcmp > 0) { # remote fname is after current local fname: local file doesn't exist remotely
		add_file($lfname); # add lfname to upload list
		$lline = readline($llist_fh);
		next (MAINLOOP);
	}

	# if we end here, rtlcmp == 0: fnames matched, the file exist localy and remotely

	# fetch next line of both streams for the next iteration
	$lline = readline($llist_fh);
	$rline = readline($rlist_fh);

	# and skip if csums match
	next (MAINLOOP) if ($lcsum eq $rcsum);

	# otherwise add the file as it's different
	add_file($lfname);
}

# deal with remainder of llist if any
while (defined($lline)) {
	chomp($lline);
	my ($lcsum, $lfname) = $lline =~ $shapat;
	add_file($lfname);
	$lline = readline($llist_fh);
}

# unconditionally add some mandatory files to rsynclist
# add them last so they're transferred last: if everything else transferred
# correctly, the signed index files only go live after their contents.
my @feeds = qw(base luci packages routing telephony);
my @additional_files;

for my $feed (@feeds) {
	push @additional_files, (
		"$feed/packages.adb.asc",
		"$feed/packages.adb.sig",
		"$feed/Packages.asc",
		"$feed/Packages.sig",
	);
}

push @additional_files, qw(
	sha256sums.asc
	sha256sums.sig
	sha256sums
);

# only add the signature/index files that actually exist next to <llist>
(my $basedir = $llist) =~ s!/[^/]+$!!;

foreach my $file (@additional_files) {
	if (-f "$basedir/$file") {
		add_file($file);
	}
}

exit (0);

# append one filename to the rsync upload list
sub add_file {
	my $fname = shift;
	print $torsync_fh "$fname\n";
}
--------------------------------------------------------------------------------
/scripts/signall.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Sign every artifact inside a signing tarball using the keys configured
# in config.ini (or the per-branch section when <branch> is given):
#  - apk package indexes (packages.adb) via "apk adbsign", with the
#    sha256sums entries updated to match the re-signed indexes
#  - detached ascii-armored GPG signatures (*.asc)
#  - usign/signify signatures (*.sig)
# The tarball is unpacked into a private tmpdir, signed in place and
# repacked over the original file.

tarball="$1"
branch="$2"

tmpdir="signall.$$"
tarball="$(readlink -f "$tarball")"

# Remove the workdir and exit with the given status.
finish() { rm -rf "$tmpdir"; exit $1; }

# iniget <file> <section> <option>: print the value of an ini option,
# honouring indented continuation lines (multi-line values such as key
# blocks) and trimming trailing blank lines from the result.
iniget() {
	local file="$1" section="$2" option="$3"

	sed -rne '
		/\['"$section"'\]/,$ {
			/^[ \t]*'"$option"'[ \t]*=[ \t]*/ {
				s/^[^=]+=[ \t]*//; h;
				:c; n;
				/^([ \t]|$)/ {
					s/^[ \t]+//; H;
					b c
				};
				x; p; q
			}
		}
	' "$file" | sed -e :a -e '/^\n*$/{$d;N;ba' -e '}'
}

# Always clean up the tmpdir, even on interrupt.
trap "finish 255" HUP INT TERM

if [ ! -f "$tarball" ] || [ ! -f "${CONFIG_INI:-config.ini}" ]; then
	echo "Usage: [CONFIG_INI=...] $0 <tarball> [<branch>]" >&2
	finish 1
fi

[ ! -e "$tmpdir" ] || {
	echo "Temporary directory $tmpdir already exists!" >&2
	finish 2
}

# Key material and the GPG home must not be world-readable.
umask 077
mkdir "$tmpdir" "$tmpdir/tar" "$tmpdir/gpg" "$tmpdir/gpg/private-keys-v1.d" || finish 2

umask 022
chmod 0755 "$tmpdir/tar"
tar -C "$tmpdir/tar/" -xzf "$tarball" || finish 3

# GnuPG 2.x needs loopback pinentry for unattended passphrase entry.
loopback=""

case "$(gpg --version | head -n1)" in
	*\ 2.*) loopback=1 ;;
esac

# Pick global or per-branch signing configuration.
if [ -z "$branch" ]; then
	GPGKEY="$(iniget "${CONFIG_INI:-config.ini}" gpg key)"
	GPGKEYID="$(iniget "${CONFIG_INI:-config.ini}" gpg keyid)"
	GPGPASS="$(iniget "${CONFIG_INI:-config.ini}" gpg passphrase)"
	GPGCOMMENT="$(iniget "${CONFIG_INI:-config.ini}" gpg comment)"

	USIGNKEY="$(iniget "${CONFIG_INI:-config.ini}" usign key)"
	USIGNCOMMENT="$(iniget "${CONFIG_INI:-config.ini}" usign comment)"

	APKSIGNKEY="$(iniget "${CONFIG_INI:-config.ini}" apk key)"
else
	GPGKEY="$(iniget "${CONFIG_INI:-config.ini}" "branch $branch" "gpg_key")"
	GPGKEYID="$(iniget "${CONFIG_INI:-config.ini}" "branch $branch" "gpg_keyid")"
	GPGPASS="$(iniget "${CONFIG_INI:-config.ini}" "branch $branch" "gpg_passphrase")"
	GPGCOMMENT="$(iniget "${CONFIG_INI:-config.ini}" "branch $branch" "gpg_comment")"

	USIGNKEY="$(iniget "${CONFIG_INI:-config.ini}" "branch $branch" "usign_key")"
	USIGNCOMMENT="$(iniget "${CONFIG_INI:-config.ini}" "branch $branch" "usign_comment")"

	APKSIGNKEY="$(iniget "${CONFIG_INI:-config.ini}" "branch $branch" "apk_key")"
fi

if [ -n "$APKSIGNKEY" ]; then
	umask 077
	echo "$APKSIGNKEY" > "$tmpdir/apk.pem"

	# Re-sign every apk index in place.
	umask 022
	find "$tmpdir/tar/" -type f -name "packages.adb" -print0 | while IFS= read -r -d '' file; do
		if ! "${APK_BIN:-apk}" adbsign --allow-untrusted --sign-key "$(readlink -f "$tmpdir/apk.pem")" "$file"; then
			finish 3
		fi
	done

	# adbsign changed the index contents, so refresh their checksums in
	# every sha256sums file that mentions a packages.adb.
	find "$tmpdir/tar/" -type f -name sha256sums | while read -r file; do
		dir=$(dirname "$file")
		pushd "$dir" > /dev/null || finish 3

		grep 'packages\.adb' sha256sums | while IFS= read -r line; do
			filename="${line#*' *'}"
			# Skip updating hash of previous kmods/ if not found in sign tar (already signed)
			[ ! -f "$filename" ] && [[ "$filename" == kmods/* ]] && continue
			escaped_filename="${filename//\//\\\/}"
			escaped_filename="${escaped_filename//&/\\&}"
			checksum_output=$(sha256sum --binary -- "$filename")
			new_checksum_line="${checksum_output%% *} *${checksum_output#*' *'}"
			sed -i "s#.*[[:space:]]\*$escaped_filename\$#$new_checksum_line#" sha256sums
		done

		popd > /dev/null || finish 3
	done
fi

# Inline private key block (no key id): import into a throwaway GPG home
# and sign everything with it.
if echo "$GPGKEY" | grep -q "BEGIN PGP PRIVATE KEY BLOCK" && [ -z "$GPGKEYID" ]; then
	umask 077
	echo "$GPGPASS" > "$tmpdir/gpg.pass"
	echo "$GPGKEY" | gpg --batch --homedir "$tmpdir/gpg" \
		${loopback:+--pinentry-mode loopback --no-tty --passphrase-fd 0} \
		${GPGPASS:+--passphrase-file "$tmpdir/gpg.pass"} \
		--import - || finish 4

	umask 022
	find "$tmpdir/tar/" -type f -not -name "*.asc" -and -not -name "*.sig" -exec \
		gpg --no-version --batch --yes -a -b \
			--homedir "$(readlink -f "$tmpdir/gpg")" \
			${loopback:+--pinentry-mode loopback --no-tty --passphrase-fd 0} \
			${GPGPASS:+--passphrase-file "$(readlink -f "$tmpdir/gpg.pass")"} \
			${GPGCOMMENT:+--comment="$GPGCOMMENT"} \
			-o "{}.asc" "{}" \; || finish 4
fi

# Key id given: sign with the key from the resident keyring instead.
if [ -n "$GPGKEYID" ]; then
	find "$tmpdir/tar/" -type f -not -name "*.asc" -and -not -name "*.sig" -print0 | while IFS= read -r -d '' file; do
		if ! gpg --no-version --batch --detach-sign --armor \
			--local-user "${GPGKEYID}" \
			${GPGCOMMENT:+--comment="$GPGCOMMENT"} \
			--homedir /home/buildbot/.gnupg "${file}.asc" "$file"; then
			finish 4
		fi
	done
fi

if [ -n "$USIGNKEY" ]; then
	# The 8-byte key id sits at offset 32 of the decoded secret key blob.
	USIGNID="$(echo "$USIGNKEY" | base64 -d -i | dd bs=1 skip=32 count=8 2>/dev/null | od -v -t x1 | sed -rne 's/^0+ //p' | tr -d ' ')"

	if ! echo "$USIGNID" | grep -qxE "[0-9a-f]{16}"; then
		echo "Invalid usign key specified" >&2
		finish 5
	fi

	umask 077
	printf "untrusted comment: %s\n%s\n" "${USIGNCOMMENT:-key ID $USIGNID}" "$USIGNKEY" > "$tmpdir/usign.sec"

	umask 022
	find "$tmpdir/tar/" -type f -not -name "*.asc" -and -not -name "*.sig" -exec \
		signify-openbsd -S -s "$(readlink -f "$tmpdir/usign.sec")" -m "{}" \; || finish 5
fi

# Repack the signed tree over the original tarball.
tar -C "$tmpdir/tar/" -czf "$tarball" . || finish 6

finish 0
--------------------------------------------------------------------------------
/tests/cram/master/01-logs.t:
--------------------------------------------------------------------------------
1 | Check that logs have expected content after container startup:
2 |
3 | $ docker logs test-master
4 | updating existing installation
5 | creating /master/master.cfg.sample
6 | creating database (sqlite:///state.sqlite)
7 | buildmaster configured in /master
8 |
--------------------------------------------------------------------------------
/tests/cram/master/02-apk.t:
--------------------------------------------------------------------------------
1 | Check that apk is available and usable in master container:
2 |
3 | $ docker run --entrypoint apk local/master | grep usage
4 | usage: apk [...] COMMAND [...]
5 |
--------------------------------------------------------------------------------
/tests/cram/worker/01-logs.t:
--------------------------------------------------------------------------------
1 | Check that logs have expected content after container startup:
2 |
3 | $ docker logs test-worker
4 | updating existing installation
5 | mkdir /builder/info
6 | Creating info/admin, you need to edit it appropriately.
7 | Creating info/host, you need to edit it appropriately.
8 | Not creating info/access_uri - add it if you wish
9 | Please edit the files in /builder/info appropriately.
10 | worker configured in /builder
11 |
--------------------------------------------------------------------------------