├── .github ├── ISSUE_TEMPLATE │ ├── config.yml │ └── new_worker.yml └── workflows │ └── build.yml ├── .gitignore ├── LICENSE.txt ├── Makefile ├── README.md ├── buildbot.sh ├── master ├── .gitignore ├── buildbot.tac ├── custom │ ├── __init__.py │ ├── auth.py │ ├── builders.py │ ├── discord_reporter.py │ ├── email_formatter.py │ ├── factories.py │ ├── pr_reporter.py │ ├── pr_testing.py │ ├── release_dashboard.py │ ├── schedulers.py │ ├── settings.py │ ├── static │ │ └── dashboard.css │ ├── steps.py │ ├── templates │ │ └── releasedashboard.html │ ├── testsuite_utils.py │ └── workers.py └── master.cfg ├── requirements.in ├── requirements.txt └── worker_example.Dockerfile /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/new_worker.yml: -------------------------------------------------------------------------------- 1 | name: Add a new worker 2 | description: A request to add a new worker 3 | title: "New worker request: " 4 | labels: ["add worker"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: I would like to add a new worker to the fleet 9 | - type: markdown 10 | attributes: 11 | value: | 12 | ## Owner information: 13 | 14 | Any fields that can be filled in with public information from the given GitHub profile can be left blank. 15 | - type: input 16 | id: owner-username 17 | attributes: 18 | label: Username 19 | description: Often the owner's last name, but not required to be. Should be the same for all workers owned by the owner. 
20 | validations: 21 | required: true 22 | - type: input 23 | id: owner-github 24 | attributes: 25 | label: GitHub username 26 | description: If the username of the person opening the issue, can be left blank 27 | placeholder: "@username" 28 | - type: input 29 | id: owner-name 30 | attributes: 31 | label: Name 32 | - type: input 33 | id: owner-email 34 | attributes: 35 | label: Email address 36 | description: If you're requesting a new password, we'll send it to you at this address 37 | placeholder: user at domain dot tld 38 | - type: dropdown 39 | id: owner-password 40 | attributes: 41 | label: Password status 42 | description: Each worker needs a password to connect to the master, but they're not required to be unique if you own multiple workers. 43 | options: 44 | - I need a new owner password 45 | - I need a new worker password 46 | - I will use an existing owner password 47 | validations: 48 | required: true 49 | - type: markdown 50 | attributes: 51 | value: "## Worker information:" 52 | - type: input 53 | id: worker-arch 54 | attributes: 55 | label: Processor architecture 56 | value: amd64 57 | validations: 58 | required: true 59 | - type: input 60 | id: worker-os 61 | attributes: 62 | label: Operating System 63 | placeholder: Windows 11, Arch Linux, macOS 11.3, etc. 64 | validations: 65 | required: true 66 | - type: textarea 67 | id: worker-special 68 | attributes: 69 | label: Anything special about the worker? 70 | description: Details that would impact what kind of builds should be scheduled on this worker, existing PR number adding configuration, etc. 
71 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: [push, pull_request, workflow_dispatch] 4 | 5 | permissions: {} 6 | 7 | env: 8 | FORCE_COLOR: 1 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v4 15 | with: 16 | persist-credentials: false 17 | - name: Setup Python 18 | uses: actions/setup-python@v5 19 | with: 20 | python-version: 3.9 21 | - name: Check configuration 22 | run: make check PIP=pip BUILDBOT=buildbot 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | !.gitignore 3 | backup/ 4 | bin/ 5 | buildbot/ 6 | lib/ 7 | venv/ 8 | worker/ 9 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017 Zachary Ware, Victor Stinner, Pablo Galindo Salgado, and contributors 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so, 8 | subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 
12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 15 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 16 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 17 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 18 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 19 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PYTHON_VERSION=3.9 2 | SYSTEM_PYTHON=python$(PYTHON_VERSION) 3 | VENV_DIR=./venv 4 | PIP=$(VENV_DIR)/bin/pip 5 | # make stop-server kills all processes named "python" 6 | PKILL_NAME="python" 7 | BUILDBOT=$(VENV_DIR)/bin/buildbot 8 | VENV_CHECK=$(VENV_DIR)/lib/python$(PYTHON_VERSION)/site-packages/buildbot/master.py 9 | USER=buildbot 10 | LOGLINES=50 11 | 12 | # Setup targets 13 | 14 | .PHONY: venv regen-requirements 15 | 16 | venv: $(VENV_CHECK) 17 | 18 | clean: 19 | rm -rf venv 20 | 21 | $(VENV_CHECK): requirements.txt 22 | $(SYSTEM_PYTHON) -m venv --clear venv 23 | $(PIP) install -U pip 24 | $(PIP) install -r requirements.txt 25 | 26 | regen-requirements: 27 | $(SYSTEM_PYTHON) -m venv --clear venv 28 | $(PIP) install -U pip 29 | $(PIP) install -U -r requirements.in 30 | $(PIP) freeze > requirements.txt 31 | 32 | # Test targets 33 | 34 | .PHONY: check 35 | 36 | check: $(VENV_CHECK) 37 | $(BUILDBOT) checkconfig master 38 | 39 | # Management targets 40 | 41 | .PHONY: update-master start-master restart-master stop-master 42 | 43 | update-master: stop-master 44 | @if [ `git rev-parse --symbolic-full-name HEAD` = "refs/heads/main" ]; \ 45 | then \ 46 | git pull; \ 47 | else \ 48 | echo "Not on main, not pulling updates"; \ 49 | fi 50 | $(MAKE) run-target TARGET=upgrade-master LOGLINES=0 51 | $(MAKE) 
# Create a branch off up-to-date main, regenerate pinned requirements,
# and commit the result ready for a PR.
git-update-requirements:
	git switch main
	git pull
	git switch -c reqs main
	$(MAKE) regen-requirements
	git commit -a -m "run make regen-requirements"
The new venv will be recreated automatically on the server. 21 | 22 | Upgrading buildbot sometimes requires to run the command: 23 | 24 | ./venv/bin/buildbot upgrade-master /data/buildbot/master 25 | 26 | Make sure that the server is running, and then remove the old virtual environment: 27 | 28 | rm -rf old-venv 29 | 30 | ## Hosting 31 | 32 | The buildbot master is hosted on the PSF Infrastructure and is managed via 33 | [salt](https://github.com/python/psf-salt/blob/master/salt/buildbot/init.sls). 34 | 35 | psycopg2 also requires libpq-dev: 36 | 37 | sudo apt-get install libpq-dev 38 | 39 | - Backend host address is `buildbot.nyc1.psf.io`. 40 | - The host is behind the PSF HaProxy cluster which is CNAMEd by `buildbot.python.org`. 41 | - Database is hosted on a managed Postgres cluster, including backups. 42 | - Remote backups of `/etc/buildbot/settings.yaml` are taken hourly and retained for 90 days. 43 | - No other state for the buildbot host is backed up! 44 | 45 | Configurations from this repository are applied from the `master` branch on 46 | a `*/15` cron interval using the `update-master` target in `Makefile`. 47 | 48 | Python 3.9 is installed manually using ``pyenv`` (which was also installed 49 | manually). Commands to install Python 3.9: 50 | 51 | pyenv update 52 | pyenv install 3.9.1 53 | pyenv global 3.8.1 3.9.1 54 | 55 | 56 | ## Add a worker 57 | 58 | The list of workers is stored in `/etc/buildbot/settings.yaml` on the server. 59 | A worker password should be made of 14 characters (a-z, A-Z, 0-9 and special 60 | characters), for example using KeePassX. 
#!/bin/sh
# Run the buildbot CLI from the server's virtualenv, forwarding all
# arguments. "$@" (quoted) preserves arguments that contain whitespace;
# the previous unquoted $@ would word-split them.
exec /srv/buildbot/venv/bin/buildbot "$@"
22 | application = service.Application('buildmaster') 23 | from twisted.python.logfile import LogFile 24 | from twisted.python.log import ILogObserver, FileLogObserver 25 | logfile = LogFile.fromFullPath(os.path.join(basedir, "twistd.log"), 26 | rotateLength=rotateLength, 27 | maxRotatedFiles=maxRotatedFiles) 28 | application.setComponent(ILogObserver, FileLogObserver(logfile).emit) 29 | 30 | m = BuildMaster(basedir, configfile, umask) 31 | m.setServiceParent(application) 32 | m.log_rotation.rotateLength = rotateLength 33 | m.log_rotation.maxRotatedFiles = maxRotatedFiles 34 | -------------------------------------------------------------------------------- /master/custom/__init__.py: -------------------------------------------------------------------------------- 1 | MAIN_BRANCH_VERSION = "3.15" 2 | # The Git branch is called "main", but we give it a different name in buildbot. 3 | # See git_branches in master/master.cfg. 4 | MAIN_BRANCH_NAME = "3.x" 5 | JUNIT_FILENAME = "test-results.xml" 6 | -------------------------------------------------------------------------------- /master/custom/auth.py: -------------------------------------------------------------------------------- 1 | from buildbot.www.auth import NoAuth 2 | from buildbot.plugins import util 3 | 4 | from twisted.python import log 5 | 6 | 7 | def set_up_authorization(settings): 8 | if bool(settings.do_auth): 9 | auth = util.GitHubAuth( 10 | clientId=str(settings.github_auth_id), 11 | clientSecret=str(settings.github_auth_secret), 12 | apiVersion=4, 13 | getTeamsMembership=True, 14 | ) 15 | authz = util.Authz( 16 | allowRules=[ 17 | # Admins can do anything. 18 | util.AnyEndpointMatcher(role="admins", defaultDeny=False), 19 | # Allow authors to stop, force or rebuild their own builds, 20 | # allow core devs to stop, force or rebuild any build. 
21 | util.StopBuildEndpointMatcher(role="owner", defaultDeny=False), 22 | util.StopBuildEndpointMatcher( 23 | role="buildbot-owners", defaultDeny=False 24 | ), 25 | util.StopBuildEndpointMatcher(role="python-triage", defaultDeny=False), 26 | util.StopBuildEndpointMatcher(role="python-core"), 27 | util.RebuildBuildEndpointMatcher(role="owner", defaultDeny=False), 28 | util.RebuildBuildEndpointMatcher( 29 | role="python-triage", defaultDeny=False 30 | ), 31 | util.RebuildBuildEndpointMatcher( 32 | role="buildbot-owners", defaultDeny=False 33 | ), 34 | util.RebuildBuildEndpointMatcher(role="python-core"), 35 | util.ForceBuildEndpointMatcher(role="owner", defaultDeny=False), 36 | util.ForceBuildEndpointMatcher(role="python-triage", defaultDeny=False), 37 | util.ForceBuildEndpointMatcher(role="python-core"), 38 | # Allow release managers to enable/disable schedulers. 39 | util.EnableSchedulerEndpointMatcher(role="python-release-managers"), 40 | # Future-proof control endpoints. 41 | util.AnyControlEndpointMatcher(role="admins"), 42 | ], 43 | roleMatchers=[ 44 | util.RolesFromGroups(groupPrefix="python/"), 45 | util.RolesFromOwner(role="owner"), 46 | util.RolesFromUsername( 47 | roles=["admins"], 48 | usernames=[ 49 | "zware", 50 | "vstinner", 51 | "bitdancer", 52 | "pitrou", 53 | "pablogsal", 54 | ], 55 | ), 56 | ], 57 | ) 58 | else: 59 | log.err("WARNING: Web UI is completely open") 60 | # Completely open 61 | auth = NoAuth() 62 | authz = util.Authz() 63 | 64 | return auth, authz 65 | -------------------------------------------------------------------------------- /master/custom/builders.py: -------------------------------------------------------------------------------- 1 | from custom.factories import ( 2 | UnixBuild, 3 | UnixPerfBuild, 4 | RHEL8Build, 5 | CentOS9Build, 6 | FedoraStableBuild, 7 | FedoraRawhideBuild, 8 | FedoraRawhideFreedthreadingBuild, 9 | UnixAsanBuild, 10 | UnixAsanDebugBuild, 11 | UnixBigmemBuild, 12 | UnixTraceRefsBuild, 13 | 
UnixVintageParserBuild, 14 | UnixRefleakBuild, 15 | UnixNoGilBuild, 16 | UnixNoGilRefleakBuild, 17 | MacOSAsanNoGilBuild, 18 | AIXBuild, 19 | AIXBuildWithXLC, 20 | ClangUnixBuild, 21 | ClangUbsanLinuxBuild, 22 | ClangUbsanFunctionLinuxBuild, 23 | ClangUnixInstalledBuild, 24 | SharedUnixBuild, 25 | SlowNonDebugUnixBuild, 26 | SlowUnixInstalledBuild, 27 | NonDebugUnixBuild, 28 | UnixInstalledBuild, 29 | LTONonDebugUnixBuild, 30 | LTOPGONonDebugBuild, 31 | RHEL8NoBuiltinHashesUnixBuild, 32 | RHEL8NoBuiltinHashesUnixBuildExceptBlake2, 33 | CentOS9NoBuiltinHashesUnixBuild, 34 | CentOS9NoBuiltinHashesUnixBuildExceptBlake2, 35 | Windows64Build, 36 | Windows64BigmemBuild, 37 | Windows64NoGilBuild, 38 | Windows64PGOBuild, 39 | Windows64PGONoGilBuild, 40 | Windows64RefleakBuild, 41 | Windows64ReleaseBuild, 42 | MacOSArmWithBrewBuild, 43 | MacOSArmWithBrewNoGilBuild, 44 | MacOSArmWithBrewNoGilRefleakBuild, 45 | WindowsARM64Build, 46 | WindowsARM64ReleaseBuild, 47 | Wasm32WasiCrossBuild, 48 | Wasm32WasiPreview1DebugBuild, 49 | IOSARM64SimulatorBuild, 50 | AndroidBuild, 51 | ValgrindBuild, 52 | ) 53 | 54 | # A builder can be marked as stable when at least the 10 latest builds are 55 | # successful, but it's way better to wait at least for at least one week of 56 | # successful builds before considering to mark a builder as stable. 57 | STABLE = "stable" 58 | 59 | # New builders should always be marked as unstable. If a stable builder starts 60 | # to fail randomly, it can be downgraded to unstable if it is not a Tier-1 or 61 | # Tier-2 builder. 
62 | UNSTABLE = "unstable" 63 | 64 | # https://peps.python.org/pep-0011/ defines Platform Support Tiers 65 | TIER_1 = "tier-1" 66 | TIER_2 = "tier-2" 67 | TIER_3 = "tier-3" 68 | NO_TIER = None 69 | 70 | 71 | # -- Stable Tier-1 builder ---------------------------------------------- 72 | STABLE_BUILDERS_TIER_1 = [ 73 | # Linux x86-64 GCC 74 | ("AMD64 Debian root", "angelico-debian-amd64", UnixBuild), 75 | ("AMD64 Ubuntu Shared", "bolen-ubuntu", SharedUnixBuild), 76 | ("AMD64 Fedora Stable", "cstratak-fedora-stable-x86_64", FedoraStableBuild), 77 | ("AMD64 Fedora Stable Refleaks", "cstratak-fedora-stable-x86_64", UnixRefleakBuild), 78 | ("AMD64 Fedora Stable LTO", "cstratak-fedora-stable-x86_64", LTONonDebugUnixBuild), 79 | ("AMD64 Fedora Stable LTO + PGO", "cstratak-fedora-stable-x86_64", LTOPGONonDebugBuild), 80 | ("AMD64 RHEL8", "cstratak-RHEL8-x86_64", RHEL8Build), 81 | ("AMD64 RHEL8 Refleaks", "cstratak-RHEL8-x86_64", UnixRefleakBuild), 82 | ("AMD64 RHEL8 LTO", "cstratak-RHEL8-x86_64", LTONonDebugUnixBuild), 83 | ("AMD64 RHEL8 LTO + PGO", "cstratak-RHEL8-x86_64", LTOPGONonDebugBuild), 84 | ("AMD64 CentOS9 NoGIL", "itamaro-centos-aws", UnixNoGilBuild), 85 | ("AMD64 CentOS9 NoGIL Refleaks", "itamaro-centos-aws", UnixNoGilRefleakBuild), 86 | 87 | # Windows x86-64 MSVC 88 | ("AMD64 Windows10", "bolen-windows10", Windows64Build), 89 | ("AMD64 Windows11 Bigmem", "ambv-bb-win11", Windows64BigmemBuild), 90 | ("AMD64 Windows11 Non-Debug", "ware-win11", Windows64ReleaseBuild), 91 | ("AMD64 Windows11 Refleaks", "ware-win11", Windows64RefleakBuild), 92 | ("AMD64 Windows Server 2022 NoGIL", "itamaro-win64-srv-22-aws", Windows64NoGilBuild), 93 | ("AMD64 Windows PGO NoGIL", "itamaro-win64-srv-22-aws", Windows64PGONoGilBuild), 94 | 95 | # macOS x86-64 clang 96 | ("x86-64 macOS", "billenstein-macos", UnixBuild), 97 | ("x86-64 MacOS Intel NoGIL", "itamaro-macos-intel-aws", UnixNoGilBuild), 98 | ("x86-64 MacOS Intel ASAN NoGIL", "itamaro-macos-intel-aws", MacOSAsanNoGilBuild), 99 | 
] 100 | 101 | 102 | # -- Stable Tier-2 builder ---------------------------------------------- 103 | STABLE_BUILDERS_TIER_2 = [ 104 | # Fedora Linux x86-64 Clang 105 | ("AMD64 Fedora Stable Clang", "cstratak-fedora-stable-x86_64", ClangUnixBuild), 106 | ("AMD64 Fedora Stable Clang Installed", "cstratak-fedora-stable-x86_64", ClangUnixInstalledBuild), 107 | 108 | # Fedora Linux ppc64le GCC 109 | ("PPC64LE Fedora Stable", "cstratak-fedora-stable-ppc64le", FedoraStableBuild), 110 | ("PPC64LE Fedora Stable Refleaks", "cstratak-fedora-stable-ppc64le", UnixRefleakBuild), 111 | ("PPC64LE Fedora Stable LTO", "cstratak-fedora-stable-ppc64le", LTONonDebugUnixBuild), 112 | ("PPC64LE Fedora Stable LTO + PGO", "cstratak-fedora-stable-ppc64le", LTOPGONonDebugBuild), 113 | 114 | # RHEL8 ppc64le GCC 115 | ("PPC64LE RHEL8", "cstratak-RHEL8-ppc64le", RHEL8Build), 116 | ("PPC64LE RHEL8 Refleaks", "cstratak-RHEL8-ppc64le", UnixRefleakBuild), 117 | ("PPC64LE RHEL8 LTO", "cstratak-RHEL8-ppc64le", LTONonDebugUnixBuild), 118 | ("PPC64LE RHEL8 LTO + PGO", "cstratak-RHEL8-ppc64le", LTOPGONonDebugBuild), 119 | 120 | # Fedora Linux aarch64 GCC/clang 121 | ("aarch64 Fedora Stable", "cstratak-fedora-stable-aarch64", FedoraStableBuild), 122 | ("aarch64 Fedora Stable Refleaks", "cstratak-fedora-stable-aarch64", UnixRefleakBuild), 123 | ("aarch64 Fedora Stable Clang", "cstratak-fedora-stable-aarch64", ClangUnixBuild), 124 | ("aarch64 Fedora Stable Clang Installed", "cstratak-fedora-stable-aarch64", ClangUnixInstalledBuild), 125 | ("aarch64 Fedora Stable LTO", "cstratak-fedora-stable-aarch64", LTONonDebugUnixBuild), 126 | ("aarch64 Fedora Stable LTO + PGO", "cstratak-fedora-stable-aarch64", LTOPGONonDebugBuild), 127 | 128 | # RHEL8 aarch64 GCC 129 | ("aarch64 RHEL8", "cstratak-RHEL8-aarch64", RHEL8Build), 130 | ("aarch64 RHEL8 Refleaks", "cstratak-RHEL8-aarch64", UnixRefleakBuild), 131 | ("aarch64 RHEL8 LTO", "cstratak-RHEL8-aarch64", LTONonDebugUnixBuild), 132 | ("aarch64 RHEL8 LTO + PGO", 
"cstratak-RHEL8-aarch64", LTOPGONonDebugBuild), 133 | 134 | ("aarch64 Ubuntu 22.04 BigMem", "diegorusso-aarch64-bigmem", UnixBigmemBuild), 135 | 136 | # macOS aarch64 clang 137 | ("ARM64 macOS", "pablogsal-macos-m1", MacOSArmWithBrewBuild), 138 | ("ARM64 MacOS M1 NoGIL", "itamaro-macos-arm64-aws", MacOSArmWithBrewNoGilBuild), 139 | ("ARM64 MacOS M1 Refleaks NoGIL", "itamaro-macos-arm64-aws", MacOSArmWithBrewNoGilRefleakBuild), 140 | 141 | # WASI 142 | ("wasm32-wasi Non-Debug", "bcannon-wasi", Wasm32WasiCrossBuild), 143 | ("wasm32-wasi", "bcannon-wasi", Wasm32WasiPreview1DebugBuild), 144 | ] 145 | 146 | 147 | # -- Stable Tier-3 builder ---------------------------------------------- 148 | STABLE_BUILDERS_TIER_3 = [ 149 | 150 | # Fedora Linux s390x GCC/Clang 151 | ("s390x Fedora Stable", "cstratak-fedora-stable-s390x", UnixBuild), 152 | ("s390x Fedora Stable Refleaks", "cstratak-fedora-stable-s390x", UnixRefleakBuild), 153 | ("s390x Fedora Stable Clang", "cstratak-fedora-stable-s390x", ClangUnixBuild), 154 | ("s390x Fedora Stable Clang Installed", "cstratak-fedora-stable-s390x", ClangUnixInstalledBuild), 155 | ("s390x Fedora Stable LTO", "cstratak-fedora-stable-s390x", LTONonDebugUnixBuild), 156 | ("s390x Fedora Stable LTO + PGO", "cstratak-fedora-stable-s390x", LTOPGONonDebugBuild), 157 | 158 | # RHEL9 GCC 159 | ("s390x RHEL9", "cstratak-rhel9-s390x", UnixBuild), 160 | ("s390x RHEL9 Refleaks", "cstratak-rhel9-s390x", UnixRefleakBuild), 161 | ("s390x RHEL9 LTO", "cstratak-rhel9-s390x", LTONonDebugUnixBuild), 162 | ("s390x RHEL9 LTO + PGO", "cstratak-rhel9-s390x", LTOPGONonDebugBuild), 163 | 164 | # RHEL8 GCC 165 | ("s390x RHEL8", "cstratak-rhel8-s390x", UnixBuild), 166 | ("s390x RHEL8 Refleaks", "cstratak-rhel8-s390x", UnixRefleakBuild), 167 | ("s390x RHEL8 LTO", "cstratak-rhel8-s390x", LTONonDebugUnixBuild), 168 | ("s390x RHEL8 LTO + PGO", "cstratak-rhel8-s390x", LTOPGONonDebugBuild), 169 | 170 | # Fedora Linux ppc64le Clang 171 | ("PPC64LE Fedora Stable Clang", 
"cstratak-fedora-stable-ppc64le", ClangUnixBuild), 172 | ("PPC64LE Fedora Stable Clang Installed", "cstratak-fedora-stable-ppc64le", ClangUnixInstalledBuild), 173 | 174 | # Linux armv7l (32-bit) GCC 175 | ("ARM Raspbian", "gps-raspbian", SlowNonDebugUnixBuild), 176 | ("ARM64 Raspbian", "stan-raspbian", SlowNonDebugUnixBuild), 177 | 178 | # FreBSD x86-64 clang 179 | ("AMD64 FreeBSD", "ware-freebsd", UnixBuild), 180 | ("AMD64 FreeBSD Refleaks", "ware-freebsd", UnixRefleakBuild), 181 | ("AMD64 FreeBSD14", "opsec-fbsd14", UnixBuild), 182 | 183 | # Windows aarch64 MSVC 184 | ("ARM64 Windows", "linaro-win-arm64", WindowsARM64Build), 185 | ("ARM64 Windows Non-Debug", "linaro-win-arm64", WindowsARM64ReleaseBuild), 186 | 187 | # iOS 188 | ("iOS ARM64 Simulator", "rkm-arm64-ios-simulator", IOSARM64SimulatorBuild), 189 | 190 | # Android 191 | ("aarch64 Android", "mhsmith-android-aarch64", AndroidBuild), 192 | ("AMD64 Android", "mhsmith-android-x86_64", AndroidBuild), 193 | ] 194 | 195 | 196 | # -- Stable No Tier builders -------------------------------------------- 197 | STABLE_BUILDERS_NO_TIER = [ 198 | # Linux x86-64 GCC/Clang 199 | # Special builds: FIPS, ASAN, UBSAN, TraceRefs, Perf, etc. 200 | ("AMD64 RHEL8 FIPS Only Blake2 Builtin Hash", "cstratak-RHEL8-fips-x86_64", RHEL8NoBuiltinHashesUnixBuildExceptBlake2), 201 | ("AMD64 Arch Linux Asan", "pablogsal-arch-x86_64", UnixAsanBuild), 202 | ("AMD64 Arch Linux Asan Debug", "pablogsal-arch-x86_64", UnixAsanDebugBuild), 203 | ("AMD64 Arch Linux TraceRefs", "pablogsal-arch-x86_64", UnixTraceRefsBuild), 204 | ("AMD64 Arch Linux Perf", "pablogsal-arch-x86_64", UnixPerfBuild), 205 | # UBSAN with -fno-sanitize=function, without which we currently fail (as 206 | # tracked in gh-111178). 
The full "AMD64 Arch Linux Usan" is unstable, below 207 | ("AMD64 Arch Linux Usan Function", "pablogsal-arch-x86_64", ClangUbsanFunctionLinuxBuild), 208 | 209 | # Linux x86 (32-bit) GCC 210 | ("x86 Debian Non-Debug with X", "ware-debian-x86", NonDebugUnixBuild), 211 | ("x86 Debian Installed with X", "ware-debian-x86", UnixInstalledBuild), 212 | ] 213 | 214 | 215 | # -- Unstable Tier-1 builders ------------------------------------------- 216 | UNSTABLE_BUILDERS_TIER_1 = [ 217 | # Linux x86-64 GCC 218 | # Fedora Rawhide is unstable 219 | ("AMD64 Fedora Rawhide", "cstratak-fedora-rawhide-x86_64", FedoraRawhideBuild), 220 | ("AMD64 Fedora Rawhide Refleaks", "cstratak-fedora-rawhide-x86_64", UnixRefleakBuild), 221 | ("AMD64 Fedora Rawhide LTO", "cstratak-fedora-rawhide-x86_64", LTONonDebugUnixBuild), 222 | ("AMD64 Fedora Rawhide LTO + PGO", "cstratak-fedora-rawhide-x86_64", LTOPGONonDebugBuild), 223 | 224 | ("AMD64 Ubuntu", "skumaran-ubuntu-x86_64", UnixBuild), 225 | 226 | ("AMD64 Arch Linux VintageParser", "pablogsal-arch-x86_64", UnixVintageParserBuild), 227 | 228 | ("AMD64 RHEL8 FIPS No Builtin Hashes", "cstratak-RHEL8-fips-x86_64", RHEL8NoBuiltinHashesUnixBuild), 229 | 230 | ("AMD64 CentOS9", "cstratak-CentOS9-x86_64", CentOS9Build), 231 | ("AMD64 CentOS9 Refleaks", "cstratak-CentOS9-x86_64", UnixRefleakBuild), 232 | ("AMD64 CentOS9 LTO", "cstratak-CentOS9-x86_64", LTONonDebugUnixBuild), 233 | ("AMD64 CentOS9 LTO + PGO", "cstratak-CentOS9-x86_64", LTOPGONonDebugBuild), 234 | ("AMD64 CentOS9 FIPS Only Blake2 Builtin Hash", "cstratak-CentOS9-fips-x86_64", CentOS9NoBuiltinHashesUnixBuildExceptBlake2), 235 | ("AMD64 CentOS9 FIPS No Builtin Hashes", "cstratak-CentOS9-fips-x86_64", CentOS9NoBuiltinHashesUnixBuild), 236 | 237 | ("AMD64 Arch Linux Valgrind", "pablogsal-arch-x86_64", ValgrindBuild), 238 | 239 | # Windows MSVC 240 | ("AMD64 Windows PGO", "bolen-windows10", Windows64PGOBuild), 241 | ] 242 | 243 | 244 | # -- Unstable Tier-2 builders 
------------------------------------------- 245 | UNSTABLE_BUILDERS_TIER_2 = [ 246 | # Linux x86-64 Clang 247 | # Fedora Rawhide is unstable 248 | # UBSan is a special build 249 | ("AMD64 Fedora Rawhide Clang", "cstratak-fedora-rawhide-x86_64", ClangUnixBuild), 250 | ("AMD64 Fedora Rawhide Clang Installed", "cstratak-fedora-rawhide-x86_64", ClangUnixInstalledBuild), 251 | 252 | # Fedora Linux ppc64le GCC 253 | # Fedora Rawhide is unstable 254 | ("PPC64LE Fedora Rawhide", "cstratak-fedora-rawhide-ppc64le", FedoraRawhideBuild), 255 | ("PPC64LE Fedora Rawhide Refleaks", "cstratak-fedora-rawhide-ppc64le", UnixRefleakBuild), 256 | ("PPC64LE Fedora Rawhide LTO", "cstratak-fedora-rawhide-ppc64le", LTONonDebugUnixBuild), 257 | ("PPC64LE Fedora Rawhide LTO + PGO", "cstratak-fedora-rawhide-ppc64le", LTOPGONonDebugBuild), 258 | 259 | # CentOS Stream 9 Linux ppc64le GCC 260 | ("PPC64LE CentOS9", "cstratak-CentOS9-ppc64le", CentOS9Build), 261 | ("PPC64LE CentOS9 Refleaks", "cstratak-CentOS9-ppc64le", UnixRefleakBuild), 262 | ("PPC64LE CentOS9 LTO", "cstratak-CentOS9-ppc64le", LTONonDebugUnixBuild), 263 | ("PPC64LE CentOS9 LTO + PGO", "cstratak-CentOS9-ppc64le", LTOPGONonDebugBuild), 264 | 265 | # Fedora Linux aarch64 GCC/Clang 266 | # Fedora Rawhide is unstable 267 | ("aarch64 Fedora Rawhide", "cstratak-fedora-rawhide-aarch64", FedoraRawhideBuild), 268 | ("aarch64 Fedora Rawhide Refleaks", "cstratak-fedora-rawhide-aarch64", UnixRefleakBuild), 269 | ("aarch64 Fedora Rawhide Clang", "cstratak-fedora-rawhide-aarch64", ClangUnixBuild), 270 | ("aarch64 Fedora Rawhide Clang Installed", "cstratak-fedora-rawhide-aarch64", ClangUnixInstalledBuild), 271 | ("aarch64 Fedora Rawhide LTO", "cstratak-fedora-rawhide-aarch64", LTONonDebugUnixBuild), 272 | ("aarch64 Fedora Rawhide LTO + PGO", "cstratak-fedora-rawhide-aarch64", LTOPGONonDebugBuild), 273 | 274 | # CentOS Stream 9 Linux aarch64 GCC 275 | ("aarch64 CentOS9 Refleaks", "cstratak-CentOS9-aarch64", UnixRefleakBuild), 276 | ("aarch64 
CentOS9 LTO", "cstratak-CentOS9-aarch64", LTONonDebugUnixBuild), 277 | ("aarch64 CentOS9 LTO + PGO", "cstratak-CentOS9-aarch64", LTOPGONonDebugBuild), 278 | 279 | # WebAssembly 280 | ("wasm32 WASI 8Core", "kushaldas-wasi", Wasm32WasiCrossBuild), 281 | ] 282 | 283 | 284 | # -- Unstable Tier-3 builders ------------------------------------------- 285 | UNSTABLE_BUILDERS_TIER_3 = [ 286 | # Linux ppc64le Clang 287 | # Fedora Rawhide is unstable 288 | ("PPC64LE Fedora Rawhide Clang", "cstratak-fedora-rawhide-ppc64le", ClangUnixBuild), 289 | ("PPC64LE Fedora Rawhide Clang Installed", "cstratak-fedora-rawhide-ppc64le", ClangUnixInstalledBuild), 290 | 291 | # Linux s390x GCC/Clang 292 | ("s390x Fedora Rawhide", "cstratak-fedora-rawhide-s390x", UnixBuild), 293 | ("s390x Fedora Rawhide Refleaks", "cstratak-fedora-rawhide-s390x", UnixRefleakBuild), 294 | ("s390x Fedora Rawhide Clang", "cstratak-fedora-rawhide-s390x", ClangUnixBuild), 295 | ("s390x Fedora Rawhide Clang Installed", "cstratak-fedora-rawhide-s390x", ClangUnixInstalledBuild), 296 | ("s390x Fedora Rawhide LTO", "cstratak-fedora-rawhide-s390x", LTONonDebugUnixBuild), 297 | ("s390x Fedora Rawhide LTO + PGO", "cstratak-fedora-rawhide-s390x", LTOPGONonDebugBuild), 298 | 299 | # FreBSD x86-64 clang 300 | # FreeBSD 15 is CURRENT: development branch (at 2023-10-17) 301 | ("AMD64 FreeBSD15", "opsec-fbsd15", UnixBuild), 302 | ] 303 | 304 | 305 | # -- Unstable No Tier builders ------------------------------------------ 306 | UNSTABLE_BUILDERS_NO_TIER = [ 307 | # Linux x86-64 GCC musl 308 | ("AMD64 Alpine Linux", "ware-alpine", UnixBuild), 309 | 310 | # Linux GCC Fedora Rawhide Freethreading builders 311 | ("AMD64 Fedora Rawhide NoGIL", "cstratak-fedora-rawhide-x86_64", FedoraRawhideFreedthreadingBuild), 312 | ("aarch64 Fedora Rawhide NoGIL", "cstratak-fedora-rawhide-aarch64", FedoraRawhideFreedthreadingBuild), 313 | ("PPC64LE Fedora Rawhide NoGIL", "cstratak-fedora-rawhide-ppc64le", FedoraRawhideFreedthreadingBuild), 314 | 
("s390x Fedora Rawhide NoGIL", "cstratak-fedora-rawhide-s390x", FedoraRawhideFreedthreadingBuild), 315 | # Linux GCC Fedora Rawhide Freethreading refleak builders 316 | ("AMD64 Fedora Rawhide NoGIL refleaks", "cstratak-fedora-rawhide-x86_64", UnixNoGilRefleakBuild), 317 | ("aarch64 Fedora Rawhide NoGIL refleaks", "cstratak-fedora-rawhide-aarch64", UnixNoGilRefleakBuild), 318 | ("PPC64LE Fedora Rawhide NoGIL refleaks", "cstratak-fedora-rawhide-ppc64le", UnixNoGilRefleakBuild), 319 | ("s390x Fedora Rawhide NoGIL refleaks", "cstratak-fedora-rawhide-s390x", UnixNoGilRefleakBuild), 320 | 321 | # AIX ppc64 322 | ("PPC64 AIX", "edelsohn-aix-ppc64", AIXBuild), 323 | ("PPC64 AIX XLC", "edelsohn-aix-ppc64", AIXBuildWithXLC), 324 | 325 | # Solaris sparcv9 326 | ("SPARCv9 Oracle Solaris 11.4", "kulikjak-solaris-sparcv9", UnixBuild), 327 | 328 | # riscv64 GCC 329 | ("riscv64 Ubuntu23", "onder-riscv64", SlowUnixInstalledBuild), 330 | 331 | # Arch Usan (see stable "AMD64 Arch Linux Usan Function" above) 332 | ("AMD64 Arch Linux Usan", "pablogsal-arch-x86_64", ClangUbsanLinuxBuild), 333 | ] 334 | 335 | 336 | def get_builders(settings): 337 | # Override with a default simple worker if we are using local workers 338 | if settings.use_local_worker: 339 | local_buildfactory = globals().get(settings.local_worker_buildfactory, UnixBuild) 340 | return [("Test Builder", "local-worker", local_buildfactory, STABLE, NO_TIER)] 341 | 342 | all_builders = [] 343 | for builders, stability, tier in ( 344 | (STABLE_BUILDERS_TIER_1, STABLE, TIER_1), 345 | (STABLE_BUILDERS_TIER_2, STABLE, TIER_2), 346 | (STABLE_BUILDERS_TIER_3, STABLE, TIER_3), 347 | (STABLE_BUILDERS_NO_TIER, STABLE, NO_TIER), 348 | 349 | (UNSTABLE_BUILDERS_TIER_1, UNSTABLE, TIER_1), 350 | (UNSTABLE_BUILDERS_TIER_2, UNSTABLE, TIER_2), 351 | (UNSTABLE_BUILDERS_TIER_3, UNSTABLE, TIER_3), 352 | (UNSTABLE_BUILDERS_NO_TIER, UNSTABLE, NO_TIER), 353 | ): 354 | for name, worker_name, buildfactory in builders: 355 | 
all_builders.append((name, worker_name, buildfactory, stability, tier)) 356 | return all_builders 357 | 358 | 359 | def get_builder_tier(builder: str) -> str: 360 | # Strip trailing branch name 361 | import re 362 | builder = re.sub(r" 3\.[x\d]+$", "", builder) 363 | 364 | for builders, tier in ( 365 | (STABLE_BUILDERS_TIER_1, TIER_1), 366 | (STABLE_BUILDERS_TIER_2,TIER_2), 367 | (STABLE_BUILDERS_TIER_3, TIER_3), 368 | (STABLE_BUILDERS_NO_TIER, NO_TIER), 369 | (UNSTABLE_BUILDERS_TIER_1, TIER_1), 370 | (UNSTABLE_BUILDERS_TIER_2, TIER_2), 371 | (UNSTABLE_BUILDERS_TIER_3, TIER_3), 372 | (UNSTABLE_BUILDERS_NO_TIER, NO_TIER), 373 | ): 374 | for name, _, _ in builders: 375 | if name == builder: 376 | if tier == NO_TIER: 377 | return "no tier" 378 | else: 379 | return tier 380 | 381 | return "unknown tier" 382 | 383 | 384 | # Match builder name (excluding the branch name) of builders that should only 385 | # run on the main and PR branches. 386 | ONLY_MAIN_BRANCH = ( 387 | "Alpine Linux", 388 | "ARM64 Windows", 389 | "Windows PGO", 390 | "AMD64 Arch Linux Perf", 391 | "AMD64 Arch Linux Valgrind", 392 | ) 393 | -------------------------------------------------------------------------------- /master/custom/discord_reporter.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from twisted.internet import defer 4 | from twisted.python import log 5 | 6 | from buildbot.process.properties import Properties 7 | from buildbot.process.results import ( 8 | CANCELLED, 9 | EXCEPTION, 10 | FAILURE, 11 | RETRY, 12 | SKIPPED, 13 | SUCCESS, 14 | WARNINGS, 15 | ) 16 | from buildbot.util.giturlparse import giturlparse 17 | from buildbot.plugins import reporters 18 | from buildbot.reporters.utils import getDetailsForBuild 19 | 20 | from custom.builders import get_builder_tier 21 | from custom.testsuite_utils import get_logs_and_tracebacks_from_build 22 | 23 | MESSAGE = """\ 24 | :warning: **Buildbot failure** :warning: 25 | 26 | The 
buildbot **{buildername}** ({tier}) has failed when building commit {sha}(https://github.com/python/cpython/commit/{sha}). 27 | 28 | You can take a look at the buildbot page here: 29 | 30 | {build_url} 31 | 32 | ``` 33 | {failed_test_text} 34 | ``` 35 | """ 36 | 37 | 38 | class DiscordReporter(reporters.HttpStatusPush): 39 | name = "DiscordReporter" 40 | 41 | def __init__(self, *args, verbose=True, **kwargs): 42 | self.verbose = True 43 | super().__init__(*args, **kwargs) 44 | 45 | @defer.inlineCallbacks 46 | def sendMessage(self, reports): 47 | build = reports[0]["builds"][0] 48 | 49 | props = Properties.fromDict(build["properties"]) 50 | props.master = self.master 51 | 52 | if build["complete"]: 53 | state = { 54 | SUCCESS: "success", 55 | WARNINGS: "success", 56 | FAILURE: "failure", 57 | SKIPPED: "success", 58 | EXCEPTION: "error", 59 | RETRY: "pending", 60 | CANCELLED: "error", 61 | }.get(build["results"], "error") 62 | else: 63 | return 64 | 65 | if state != "failure": 66 | return 67 | 68 | yield getDetailsForBuild( 69 | self.master, build, want_logs=True, want_logs_content=True, want_steps=True 70 | ) 71 | 72 | logs, _ = get_logs_and_tracebacks_from_build(build) 73 | 74 | sourcestamps = build["buildset"].get("sourcestamps") 75 | 76 | if not (sourcestamps and sourcestamps[0]): 77 | return 78 | 79 | changes = yield self.master.data.get(("builds", build["buildid"], "changes")) 80 | 81 | if len(changes) != 1: 82 | return 83 | 84 | change = changes[0] 85 | change_comments = change["comments"] 86 | 87 | if not change_comments: 88 | return 89 | 90 | # GH-42, gh-42, or #42 91 | m = re.search(r"\((?:GH-|#)(\d+)\)", change_comments, flags=re.IGNORECASE) 92 | 93 | if m is None: 94 | return 95 | 96 | issue = m.groups()[-1] 97 | 98 | project = sourcestamps[0]["project"] 99 | 100 | if "/" in project: 101 | repoOwner, repoName = project.split("/") 102 | else: 103 | giturl = giturlparse(sourcestamps[0]["repository"]) 104 | repoOwner = giturl.owner 105 | repoName = 
giturl.repo 106 | 107 | if self.verbose: 108 | log.msg( 109 | "Updating github status: repoOwner={repoOwner}, repoName={repoName}".format( 110 | repoOwner=repoOwner, repoName=repoName 111 | ) 112 | ) 113 | 114 | try: 115 | sha = change["revision"] 116 | yield self.createReport( 117 | build=build, 118 | sha=sha, 119 | logs=logs, 120 | ) 121 | if self.verbose: 122 | log.msg( 123 | "Issued a dicord comment for {repoOwner}/{repoName} " 124 | "at {sha}, issue {issue}.".format( 125 | repoOwner=repoOwner, 126 | repoName=repoName, 127 | sha=sha, 128 | issue=issue, 129 | ) 130 | ) 131 | except Exception as e: 132 | log.err( 133 | e, 134 | "Failed to issue a discord comment for {repoOwner}/{repoName} " 135 | "at {sha}, issue {issue}.".format( 136 | repoOwner=repoOwner, 137 | repoName=repoName, 138 | sha=sha, 139 | issue=issue, 140 | ), 141 | ) 142 | 143 | def _getURLForBuild(self, builderid, build_number): 144 | prefix = self.master.config.buildbotURL 145 | return prefix + "#/builders/%d/builds/%d" % (builderid, build_number) 146 | 147 | def createReport( 148 | self, 149 | build, 150 | sha, 151 | logs, 152 | ): 153 | buildername = build["builder"]["name"] 154 | 155 | message = MESSAGE.format( 156 | buildername=buildername, 157 | tier=get_builder_tier(buildername), 158 | build_url=self._getURLForBuild( 159 | build["builder"]["builderid"], build["number"] 160 | ), 161 | sha=sha, 162 | failed_test_text=logs.format_failing_tests(), 163 | ) 164 | 165 | payload = {"content": message, "embeds": []} 166 | 167 | return self._http.post("", json=payload) 168 | -------------------------------------------------------------------------------- /master/custom/email_formatter.py: -------------------------------------------------------------------------------- 1 | from buildbot.plugins import reporters 2 | 3 | from custom.testsuite_utils import get_logs_and_tracebacks_from_build 4 | 5 | MAIL_TEMPLATE = """\ 6 | The Buildbot has detected a {{ status_detected }} on builder {{ buildername }} 
while building {{ projects }}. 7 | Full details are available at: 8 | {{ build_url }} 9 | 10 | Buildbot URL: {{ buildbot_url }} 11 | 12 | Worker for this Build: {{ workername }} 13 | 14 | Build Reason: {{ build['properties'].get('reason', [""])[0] }} 15 | Blamelist: {{ ", ".join(blamelist) }} 16 | 17 | {{ summary }} 18 | 19 | 20 | Summary of the results of the build (if available): 21 | =================================================== 22 | 23 | {{ build['final_log'].test_summary() }} 24 | 25 | 26 | Captured traceback 27 | ================== 28 | 29 | {{ "\n\n".join(build['tracebacks']) }} 30 | 31 | 32 | Test report 33 | =========== 34 | 35 | {{ build['final_log'].format_failing_tests() }} 36 | 37 | 38 | 39 | Sincerely, 40 | -The Buildbot 41 | """ 42 | 43 | TESTS_STEP = "test" 44 | 45 | 46 | class CustomMessageFormatter(reporters.MessageFormatter): 47 | def buildAdditionalContext(self, master, ctx): 48 | ctx.update(self.context) 49 | build = ctx["build"] 50 | 51 | logs, tracebacks = get_logs_and_tracebacks_from_build(build) 52 | 53 | ctx["build"]["tracebacks"] = tracebacks 54 | ctx["build"]["final_log"] = logs 55 | 56 | 57 | MESSAGE_FORMATTER = CustomMessageFormatter( 58 | template=MAIL_TEMPLATE, 59 | template_type="plain", 60 | want_logs=True, 61 | want_logs_content=True, 62 | want_properties=True, 63 | want_steps=True, 64 | ) 65 | -------------------------------------------------------------------------------- /master/custom/factories.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | from buildbot.process import factory 3 | from buildbot.steps.shell import ( 4 | Configure, 5 | Compile, 6 | ShellCommand, 7 | SetPropertyFromCommand, 8 | ) 9 | 10 | from buildbot.plugins import util 11 | 12 | from . 
import (MAIN_BRANCH_VERSION, MAIN_BRANCH_NAME, 13 | JUNIT_FILENAME) 14 | from .steps import ( 15 | Test, 16 | Clean, 17 | Install, 18 | LockInstall, 19 | Uninstall, 20 | UploadTestResults, 21 | ) 22 | 23 | # This (default) timeout is for each individual test file. 24 | # It is a bit more than the default faulthandler timeout in regrtest.py 25 | # (the latter isn't easily changed under Windows). 26 | TEST_TIMEOUT = 20 * 60 # 20 minutes 27 | 28 | # Refleak timeout (-R 3:3) for each individual test file. 29 | REFLEAK_TIMEOUT = 45 * 60 # 45 minutes 30 | 31 | 32 | def step_timeout(timeout): 33 | # timeout is the regrtest timeout in seconds. If a test runs longer than 34 | # the timeout, it should be killed by faulthandler. Give 10 minutes to 35 | # faulthandler to kill the process. Tests should always be shorter than the 36 | # buildbot step timeout, unless faulthandler fails to kill the process. 37 | return timeout + 10 * 60 38 | 39 | 40 | class BaseBuild(factory.BuildFactory): 41 | factory_tags = [] 42 | test_timeout = TEST_TIMEOUT 43 | 44 | def __init__(self, source, *, extra_tags=[], **kwargs): 45 | super().__init__([source]) 46 | self.setup(**kwargs) 47 | self.tags = self.factory_tags + extra_tags 48 | 49 | 50 | ############################################################################## 51 | ############################### UNIX BUILDS ################################ 52 | ############################################################################## 53 | 54 | 55 | def has_option(option, test_options): 56 | # return True for option='-j' and test_options=['-uall', '-j2'] 57 | return option in ' '.join(test_options) 58 | 59 | 60 | class UnixBuild(BaseBuild): 61 | configureFlags = ["--with-pydebug"] 62 | compile_environ = {} 63 | interpreterFlags = "" 64 | testFlags = ["-j2"] 65 | makeTarget = "all" 66 | test_environ = {} 67 | build_out_of_tree = False 68 | 69 | def setup(self, parallel, branch, test_with_PTY=False, **kwargs): 70 | out_of_tree_dir = "build_oot" 71 | 
72 | # Adjust the timeout for this worker 73 | self.test_timeout *= kwargs.get("timeout_factor", 1) 74 | 75 | # In 3.9 and 3.10, test_asyncio wasn't split out, and refleaks tests 76 | # need more time. 77 | if branch in ("3.9", "3.10") and has_option("-R", self.testFlags): 78 | self.test_timeout *= 2 79 | 80 | if self.build_out_of_tree: 81 | self.addStep( 82 | ShellCommand( 83 | name="mkdir out-of-tree directory", 84 | description="Create out-of-tree directory", 85 | command=["mkdir", "-p", out_of_tree_dir], 86 | warnOnFailure=True, 87 | ) 88 | ) 89 | 90 | if self.build_out_of_tree: 91 | configure_cmd = "../configure" 92 | oot_kwargs = {'workdir': os.path.join("build", out_of_tree_dir)} 93 | else: 94 | configure_cmd = "./configure" 95 | oot_kwargs = {} 96 | configure_cmd = [configure_cmd, "--prefix", "$(PWD)/target"] 97 | configure_cmd += self.configureFlags 98 | self.addStep( 99 | Configure(command=configure_cmd, **oot_kwargs) 100 | ) 101 | compile = ["make", self.makeTarget] 102 | testopts = list(self.testFlags) 103 | if not has_option("-R", self.testFlags): 104 | testopts.extend(("--junit-xml", JUNIT_FILENAME)) 105 | if parallel: 106 | compile = ["make", parallel, self.makeTarget] 107 | testopts.append(parallel) 108 | if not has_option("-j", testopts): 109 | testopts.append("-j2") 110 | test = [ 111 | "make", 112 | "buildbottest", 113 | "TESTOPTS=" + " ".join(testopts) + " ${BUILDBOT_TESTOPTS}", 114 | f"TESTPYTHONOPTS={self.interpreterFlags}", 115 | f"TESTTIMEOUT={self.test_timeout}", 116 | ] 117 | 118 | self.addStep(Compile(command=compile, 119 | env=self.compile_environ, 120 | **oot_kwargs)) 121 | self.addStep( 122 | ShellCommand( 123 | name="pythoninfo", 124 | description="pythoninfo", 125 | command=["make", "pythoninfo"], 126 | warnOnFailure=True, 127 | env=self.test_environ, 128 | **oot_kwargs 129 | ) 130 | ) 131 | self.addStep(Test( 132 | command=test, 133 | timeout=step_timeout(self.test_timeout), 134 | usePTY=test_with_PTY, 135 | env=self.test_environ, 
136 | **oot_kwargs 137 | )) 138 | if branch not in ("3",) and not has_option("-R", self.testFlags): 139 | filename = JUNIT_FILENAME 140 | if self.build_out_of_tree: 141 | filename = os.path.join(out_of_tree_dir, filename) 142 | self.addStep(UploadTestResults(branch, filename=filename)) 143 | self.addStep(Clean(**oot_kwargs)) 144 | 145 | 146 | class UnixPerfBuild(UnixBuild): 147 | buildersuffix = ".perfbuild" 148 | configureFlags = ["CFLAGS=-fno-omit-frame-pointer -mno-omit-leaf-frame-pointer"] 149 | 150 | 151 | class UnixTraceRefsBuild(UnixBuild): 152 | def setup(self, parallel, branch, test_with_PTY=False, **kwargs): 153 | self.configureFlags = ["--with-pydebug", "--with-trace-refs"] 154 | return super().setup(parallel, branch, test_with_PTY=test_with_PTY, **kwargs) 155 | 156 | 157 | class UnixVintageParserBuild(UnixBuild): 158 | buildersuffix = ".oldparser" # to get unique directory names on master 159 | test_environ = {'PYTHONOLDPARSER': 'old'} 160 | 161 | 162 | class UnixRefleakBuild(UnixBuild): 163 | buildersuffix = ".refleak" 164 | testFlags = ["-R", "3:3", "-u-cpu"] 165 | test_timeout = REFLEAK_TIMEOUT 166 | factory_tags = ["refleak"] 167 | 168 | 169 | class UnixNoGilBuild(UnixBuild): 170 | buildersuffix = ".nogil" 171 | configureFlags = ["--with-pydebug", "--disable-gil"] 172 | factory_tags = ["nogil"] 173 | # 2024-04-11: Free-threading can still be slower than regular build in some 174 | # code paths, so tolerate longer timeout. 
175 | test_timeout = int(TEST_TIMEOUT * 1.5) 176 | 177 | 178 | class UnixNoGilRefleakBuild(UnixBuild): 179 | buildersuffix = ".refleak.nogil" 180 | configureFlags = ["--with-pydebug", "--disable-gil"] 181 | testFlags = ["-R", "3:3", "-u-cpu"] 182 | test_timeout = REFLEAK_TIMEOUT 183 | factory_tags = ["nogil", "refleak"] 184 | 185 | 186 | class UnixInstalledBuild(BaseBuild): 187 | buildersuffix = ".installed" 188 | configureFlags = [] 189 | interpreterFlags = ["-Wdefault", "-bb", "-E"] 190 | defaultTestOpts = ["-rwW", "-uall", "-j2"] 191 | makeTarget = "all" 192 | installTarget = "install" 193 | factory_tags = ["installed"] 194 | 195 | def setup(self, parallel, branch, test_with_PTY=False, **kwargs): 196 | if branch == MAIN_BRANCH_NAME: 197 | branch = MAIN_BRANCH_VERSION 198 | elif branch == "custom": 199 | branch = "3" 200 | installed_python = f"./target/bin/python{branch}" 201 | self.addStep( 202 | Configure( 203 | command=["./configure", "--prefix", "$(PWD)/target"] 204 | + self.configureFlags 205 | ) 206 | ) 207 | 208 | compile = ["make", self.makeTarget] 209 | install = ["make", self.installTarget] 210 | testopts = list(self.defaultTestOpts) 211 | testopts.append(f"--timeout={self.test_timeout}") 212 | if parallel: 213 | compile = ["make", parallel, self.makeTarget] 214 | install = ["make", parallel, self.installTarget] 215 | testopts.append(parallel) 216 | 217 | test = [installed_python, 218 | *self.interpreterFlags, 219 | "-m", "test", 220 | *testopts] 221 | 222 | self.addStep(Compile(command=compile)) 223 | self.addStep(Install(command=install)) 224 | self.addStep(LockInstall()) 225 | self.addStep( 226 | ShellCommand( 227 | name="pythoninfo", 228 | description="pythoninfo", 229 | command=[installed_python, "-m", "test.pythoninfo"], 230 | warnOnFailure=True, 231 | ) 232 | ) 233 | self.addStep(Test( 234 | command=test, 235 | timeout=step_timeout(self.test_timeout), 236 | usePTY=test_with_PTY, 237 | )) 238 | self.addStep(Uninstall()) 239 | self.addStep(Clean()) 
240 | 241 | 242 | class UnixAsanBuild(UnixBuild): 243 | buildersuffix = ".asan" 244 | configureFlags = ["--without-pymalloc", "--with-address-sanitizer"] 245 | factory_tags = ["asan", "sanitizer"] 246 | # See https://bugs.python.org/issue42985 for more context on why 247 | # SIGSEGV is ignored on purpose. 248 | compile_environ = {'ASAN_OPTIONS': 'detect_leaks=0:allocator_may_return_null=1:handle_segv=0'} 249 | test_environ = {'ASAN_OPTIONS': 'detect_leaks=0:allocator_may_return_null=1:handle_segv=0'} 250 | test_timeout = 35 * 60 # 35 minutes 251 | 252 | 253 | class UnixAsanDebugBuild(UnixAsanBuild): 254 | buildersuffix = ".asan_debug" 255 | configureFlags = UnixAsanBuild.configureFlags + ["--with-pydebug"] 256 | 257 | 258 | class UnixAsanNoGilBuild(UnixAsanBuild): 259 | buildersuffix = ".asan.nogil" 260 | configureFlags = UnixAsanBuild.configureFlags + ["--disable-gil"] 261 | factory_tags = UnixAsanBuild.factory_tags + ["nogil"] 262 | 263 | 264 | class UnixBuildWithoutDocStrings(UnixBuild): 265 | configureFlags = ["--with-pydebug", "--without-doc-strings"] 266 | 267 | 268 | class UnixBigmemBuild(UnixBuild): 269 | buildersuffix = ".bigmem" 270 | testFlags = [ 271 | "-M60g", "-j4", "-uall,extralargefile", 272 | "--prioritize=test_bigmem,test_lzma,test_bz2,test_re,test_array" 273 | ] 274 | test_timeout = TEST_TIMEOUT * 4 275 | factory_tags = ["bigmem"] 276 | 277 | 278 | class AIXBuild(UnixBuild): 279 | configureFlags = [ 280 | "--with-pydebug", 281 | "--with-openssl=/opt/aixtools", 282 | ] 283 | 284 | 285 | class AIXBuildWithXLC(UnixBuild): 286 | buildersuffix = ".xlc" 287 | configureFlags = [ 288 | "--with-pydebug", 289 | "--with-openssl=/opt/aixtools", 290 | "CC=xlc_r", 291 | "LD=xlc_r", 292 | ] 293 | factory_tags = ["xlc"] 294 | 295 | 296 | class NonDebugUnixBuild(UnixBuild): 297 | buildersuffix = ".nondebug" 298 | # Enable assertions regardless. Some children will override this, 299 | # that is fine. 
300 | configureFlags = ["CFLAGS=-UNDEBUG"] 301 | factory_tags = ["nondebug"] 302 | 303 | 304 | class PGOUnixBuild(NonDebugUnixBuild): 305 | buildersuffix = ".pgo" 306 | configureFlags = ["--enable-optimizations"] 307 | factory_tags = ["pgo"] 308 | 309 | def setup(self, parallel, branch, *args, **kwargs): 310 | # Only Python >3.10 has --with-readline=edit 311 | if branch != '3.9': 312 | # Use libedit instead of libreadline on this buildbot for 313 | # some libedit Linux compilation coverage. 314 | self.configureFlags = self.configureFlags + ["--with-readline=edit"] 315 | return super().setup(parallel, branch, *args, **kwargs) 316 | 317 | 318 | class ClangUnixBuild(UnixBuild): 319 | buildersuffix = ".clang" 320 | configureFlags = [ 321 | "CC=clang", 322 | "LD=clang", 323 | "--with-pydebug", 324 | ] 325 | factory_tags = ["clang"] 326 | 327 | 328 | class ClangUbsanLinuxBuild(UnixBuild): 329 | buildersuffix = ".clang-ubsan" 330 | configureFlags = [ 331 | "CC=clang", 332 | "LD=clang", 333 | "CFLAGS=-fno-sanitize-recover", 334 | "--with-undefined-behavior-sanitizer", 335 | ] 336 | factory_tags = ["clang", "ubsan", "sanitizer"] 337 | 338 | 339 | class ClangUbsanFunctionLinuxBuild(UnixBuild): 340 | buildersuffix = ".clang-ubsan-function" 341 | configureFlags = [ 342 | "CC=clang", 343 | "LD=clang", 344 | "CFLAGS=-fsanitize=undefined -fno-sanitize=function -fsanitize-recover", 345 | "--with-undefined-behavior-sanitizer", 346 | ] 347 | factory_tags = ["clang", "ubsan", "sanitizer"] 348 | 349 | 350 | class ClangUnixInstalledBuild(UnixInstalledBuild): 351 | buildersuffix = ".clang-installed" 352 | configureFlags = [ 353 | "CC=clang", 354 | "LD=clang", 355 | ] 356 | factory_tags = ["clang", "installed"] 357 | 358 | 359 | class SharedUnixBuild(UnixBuild): 360 | configureFlags = ["--with-pydebug", "--enable-shared"] 361 | factory_tags = ["shared"] 362 | 363 | 364 | # faulthandler uses a timeout 5 minutes smaller: it should be enough for the 365 | # slowest test. 
366 | SLOW_TIMEOUT = 40 * 60 367 | 368 | 369 | # These use a longer timeout for very slow buildbots. 370 | class SlowNonDebugUnixBuild(NonDebugUnixBuild): 371 | test_timeout = SLOW_TIMEOUT 372 | testFlags = [*NonDebugUnixBuild.testFlags, "-u-cpu"] 373 | 374 | 375 | class SlowUnixInstalledBuild(UnixInstalledBuild): 376 | test_timeout = SLOW_TIMEOUT 377 | 378 | 379 | class LTONonDebugUnixBuild(NonDebugUnixBuild): 380 | buildersuffix = ".lto" 381 | configureFlags = [ 382 | "--with-lto", 383 | ] 384 | factory_tags = ["lto", "nondebug"] 385 | 386 | 387 | class LTOPGONonDebugBuild(NonDebugUnixBuild): 388 | buildersuffix = ".lto-pgo" 389 | configureFlags = [ 390 | "--with-lto", 391 | "--enable-optimizations", 392 | ] 393 | factory_tags = ["lto", "pgo", "nondebug"] 394 | 395 | 396 | class ClangLTOPGONonDebugBuild(NonDebugUnixBuild): 397 | buildersuffix = ".clang.lto-pgo" 398 | configureFlags = [ 399 | "CC=clang", 400 | "LD=clang", 401 | "--with-lto", 402 | "--enable-optimizations", 403 | ] 404 | factory_tags = ["lto", "pgo", "nondebug", "clang"] 405 | 406 | 407 | class RHEL8Build(UnixBuild): 408 | # Build Python on 64-bit RHEL8. 409 | configureFlags = [ 410 | "--with-pydebug", 411 | "--with-platlibdir=lib64", 412 | "--enable-ipv6", 413 | "--enable-shared", 414 | "--with-computed-gotos=yes", 415 | "--with-dbmliborder=gdbm:ndbm:bdb", 416 | # FIXME: enable these flags 417 | # "--with-system-expat", 418 | # "--with-system-ffi", 419 | "--enable-loadable-sqlite-extensions", 420 | "--with-ssl-default-suites=openssl", 421 | "--without-static-libpython", 422 | "--with-lto", 423 | # Not all workers have dtrace installed 424 | # "--with-dtrace", 425 | # Not all workers have Valgrind headers installed 426 | # "--with-valgrind", 427 | ] 428 | 429 | # Building Python out of tree: similar to what the specfile does, but 430 | # buildbot uses a single subdirectory, and the specfile uses two 431 | # sub-directories. 
432 | # 433 | # On Fedora/RHEL specfile, the following directories are used: 434 | # /builddir/build/BUILD/Python-3.11: source code 435 | # /builddir/build/BUILD/Python-3.11/build/optimized: configure, make, tests 436 | build_out_of_tree = True 437 | 438 | 439 | class CentOS9Build(RHEL8Build): 440 | # Build on 64-bit CentOS Stream 9. 441 | # For now, it's the same as RHEL8, but later it may get different 442 | # options. 443 | pass 444 | 445 | 446 | class FedoraStableBuild(RHEL8Build): 447 | # Build Python on 64-bit Fedora Stable. 448 | # 449 | # Try to be as close as possible to the Fedora specfile used to build 450 | # the RPM package: 451 | # https://src.fedoraproject.org/rpms/python3.11/blob/rawhide/f/python3.11.spec 452 | configureFlags = RHEL8Build.configureFlags + [ 453 | # Options specific to Fedora 454 | # FIXME: enable this flag 455 | # "--with-system-libmpdec", 456 | # Don't make a buildbot fail when pip/setuptools is updated in Python, 457 | # whereas the buildbot uses older versions. 458 | # "--with-wheel-pkg-dir=/usr/share/python-wheels/", 459 | ] 460 | 461 | 462 | class FedoraRawhideBuild(FedoraStableBuild): 463 | # Build on 64-bit Fedora Rawhide. 464 | # For now, it's the same than Fedora Stable, but later it may get different 465 | # options. 466 | pass 467 | 468 | 469 | class FedoraRawhideFreedthreadingBuild(FedoraRawhideBuild): 470 | # Build on 64-bit Fedora Rawhide. 
471 | buildersuffix = ".nogil" 472 | configureFlags = FedoraRawhideBuild.configureFlags + [ 473 | "--disable-gil", 474 | ] 475 | factory_tags = ["nogil"] 476 | 477 | 478 | class RHEL8NoBuiltinHashesUnixBuildExceptBlake2(RHEL8Build): 479 | # Build on 64-bit RHEL8 using: --with-builtin-hashlib-hashes=blake2 480 | buildersuffix = ".no-builtin-hashes-except-blake2" 481 | configureFlags = RHEL8Build.configureFlags + [ 482 | "--with-builtin-hashlib-hashes=blake2" 483 | ] 484 | factory_tags = ["no-builtin-hashes-except-blake2"] 485 | 486 | 487 | class RHEL8NoBuiltinHashesUnixBuild(RHEL8Build): 488 | # Build on 64-bit RHEL8 using: --without-builtin-hashlib-hashes 489 | buildersuffix = ".no-builtin-hashes" 490 | configureFlags = RHEL8Build.configureFlags + [ 491 | "--without-builtin-hashlib-hashes" 492 | ] 493 | factory_tags = ["no-builtin-hashes"] 494 | 495 | 496 | class CentOS9NoBuiltinHashesUnixBuildExceptBlake2(CentOS9Build): 497 | # Build on 64-bit CentOS Stream 9 using: --with-builtin-hashlib-hashes=blake2 498 | buildersuffix = ".no-builtin-hashes-except-blake2" 499 | configureFlags = CentOS9Build.configureFlags + [ 500 | "--with-builtin-hashlib-hashes=blake2" 501 | ] 502 | factory_tags = ["no-builtin-hashes-except-blake2"] 503 | 504 | 505 | class CentOS9NoBuiltinHashesUnixBuild(CentOS9Build): 506 | # Build on 64-bit CentOS Stream 9 using: --without-builtin-hashlib-hashes 507 | buildersuffix = ".no-builtin-hashes" 508 | configureFlags = CentOS9Build.configureFlags + [ 509 | "--without-builtin-hashlib-hashes" 510 | ] 511 | factory_tags = ["no-builtin-hashes"] 512 | 513 | 514 | ############################################################################## 515 | ############################ MACOS BUILDS ################################## 516 | ############################################################################## 517 | 518 | class MacOSArmWithBrewBuild(UnixBuild): 519 | buildersuffix = ".macos-with-brew" 520 | configureFlags = UnixBuild.configureFlags + [ 521 | 
"--with-openssl=/opt/homebrew/opt/openssl@3", 522 | "CPPFLAGS=-I/opt/homebrew/include", 523 | "LDFLAGS=-L/opt/homebrew/lib", 524 | "PKG_CONFIG_PATH=/opt/homebrew/opt/tcl-tk@8/lib/pkgconfig", 525 | ] 526 | # These tests are known to crash on M1 macs (see bpo-45289). 527 | testFlags = [*UnixBuild.testFlags, 528 | "-x", "test_dbm", "test_dbm_ndbm", "test_shelve"] 529 | 530 | 531 | class MacOSArmWithBrewNoGilBuild(UnixNoGilBuild): 532 | buildersuffix = ".macos-with-brew.nogil" 533 | configureFlags = [ 534 | *UnixNoGilBuild.configureFlags, 535 | "--with-openssl=/opt/homebrew/opt/openssl@3", 536 | "CPPFLAGS=-I/opt/homebrew/include", 537 | "LDFLAGS=-L/opt/homebrew/lib", 538 | "PKG_CONFIG_PATH=/opt/homebrew/opt/tcl-tk@8/lib/pkgconfig", 539 | ] 540 | 541 | 542 | class MacOSArmWithBrewRefleakBuild(UnixRefleakBuild): 543 | buildersuffix = ".macos-with-brew.refleak" 544 | configureFlags = [ 545 | *UnixRefleakBuild.configureFlags, 546 | "--with-openssl=/opt/homebrew/opt/openssl@3", 547 | "CPPFLAGS=-I/opt/homebrew/include", 548 | "LDFLAGS=-L/opt/homebrew/lib", 549 | "PKG_CONFIG_PATH=/opt/homebrew/opt/tcl-tk@8/lib/pkgconfig", 550 | ] 551 | 552 | 553 | class MacOSArmWithBrewNoGilRefleakBuild(UnixNoGilRefleakBuild): 554 | buildersuffix = ".macos-with-brew.refleak.nogil" 555 | configureFlags = [ 556 | *UnixNoGilRefleakBuild.configureFlags, 557 | "--with-openssl=/opt/homebrew/opt/openssl@3", 558 | "CPPFLAGS=-I/opt/homebrew/include", 559 | "LDFLAGS=-L/opt/homebrew/lib", 560 | "PKG_CONFIG_PATH=/opt/homebrew/opt/tcl-tk@8/lib/pkgconfig", 561 | ] 562 | 563 | 564 | class MacOSAsanNoGilBuild(UnixAsanNoGilBuild): 565 | buildersuffix = ".macos-with-brew.asan.nogil" 566 | configureFlags = UnixAsanNoGilBuild.configureFlags + [ 567 | "--with-openssl=/opt/homebrew/opt/openssl@3", 568 | "CPPFLAGS=-I/opt/homebrew/include", 569 | "LDFLAGS=-L/opt/homebrew/lib", 570 | "PKG_CONFIG_PATH=/opt/homebrew/opt/tcl-tk@8/lib/pkgconfig", 571 | ] 572 | asan_options = 
'detect_leaks=0:allocator_may_return_null=1:handle_segv=0' 573 | compile_environ = {'ASAN_OPTIONS': asan_options} 574 | test_environ = { 575 | 'ASAN_OPTIONS': asan_options, 576 | # Note: Need to set `MallocNanoZone=0` environment variable to workaround a macOS issue. 577 | # This was needed to workaround an issue with this builder that manifested as failures in 3 tests: 578 | # test_cmd_line, test_posix, test_subprocess 579 | # These failures seem to be related to the occurrence of this warning: 580 | # python.exe(74602,0x7ff84626a700) malloc: nano zone abandoned due to inability to reserve vm space. 581 | # It is unclear why (or if) it's *directly* causing the test failures, but setting `MallocNanoZone=0` 582 | # disables this optimization (and fixes the tests), which appears to be interfering with ASAN. See also: 583 | # https://stackoverflow.com/questions/64126942/malloc-nano-zone-abandoned-due-to-inability-to-preallocate-reserved-vm-space 584 | # https://github.com/python/buildmaster-config/issues/450 (and attached PR) 585 | 'MallocNanoZone': '0', 586 | } 587 | 588 | 589 | ############################################################################## 590 | ############################ WINDOWS BUILDS ################################ 591 | ############################################################################## 592 | 593 | 594 | class BaseWindowsBuild(BaseBuild): 595 | build_command = [r"Tools\buildbot\build.bat"] 596 | test_command = [r"Tools\buildbot\test.bat"] 597 | clean_command = [r"Tools\buildbot\clean.bat"] 598 | python_command = [r"python.bat"] 599 | buildFlags = ["-p", "Win32"] 600 | testFlags = ["-p", "Win32", "-j2"] 601 | cleanFlags = [] 602 | factory_tags = ["win32"] 603 | 604 | def setup(self, parallel, branch, **kwargs): 605 | build_command = self.build_command + self.buildFlags 606 | test_command = [*self.test_command, *self.testFlags] 607 | if not has_option("-R", self.testFlags): 608 | test_command.extend((r"--junit-xml", JUNIT_FILENAME)) 
609 | clean_command = self.clean_command + self.cleanFlags 610 | if parallel: 611 | test_command.append(parallel) 612 | self.addStep(Compile(command=build_command)) 613 | self.addStep( 614 | ShellCommand( 615 | name="pythoninfo", 616 | description="pythoninfo", 617 | command=self.python_command + ["-m", "test.pythoninfo"], 618 | warnOnFailure=True, 619 | ) 620 | ) 621 | test_command.extend(("--timeout", str(self.test_timeout))) 622 | self.addStep(Test( 623 | command=test_command, 624 | timeout=step_timeout(self.test_timeout), 625 | )) 626 | if branch not in ("3",) and not has_option("-R", self.testFlags): 627 | self.addStep(UploadTestResults(branch)) 628 | self.addStep(Clean(command=clean_command)) 629 | 630 | 631 | class WindowsBuild(BaseWindowsBuild): 632 | buildersuffix = '.x32' 633 | 634 | 635 | class WindowsRefleakBuild(BaseWindowsBuild): 636 | buildersuffix = ".x32.refleak" 637 | testFlags = ["-j2", "-R", "3:3", "-u-cpu"] 638 | test_timeout = REFLEAK_TIMEOUT 639 | factory_tags = ["win32", "refleak"] 640 | 641 | 642 | class SlowWindowsBuild(WindowsBuild): 643 | test_timeout = TEST_TIMEOUT * 2 644 | testFlags = ["-j2", "-u-cpu", "-u-largefile"] 645 | 646 | 647 | class Windows64Build(BaseWindowsBuild): 648 | buildFlags = ["-p", "x64"] 649 | testFlags = ["-p", "x64", "-j2"] 650 | cleanFlags = ["-p", "x64"] 651 | factory_tags = ["win64"] 652 | 653 | 654 | class Windows64BigmemBuild(BaseWindowsBuild): 655 | buildersuffix = ".bigmem" 656 | buildFlags = ["-p", "x64"] 657 | testFlags = [ 658 | "-p", "x64", "-M33g", "-uall,extralargefile", 659 | "--prioritize=test_bigmem,test_lzma,test_bz2,test_array,test_hashlib,test_zlib" 660 | ] 661 | test_timeout = TEST_TIMEOUT * 4 662 | cleanFlags = ["-p", "x64"] 663 | factory_tags = ["win64", "bigmem"] 664 | 665 | 666 | class Windows64RefleakBuild(Windows64Build): 667 | buildersuffix = ".refleak" 668 | testFlags = ["-p", "x64", *WindowsRefleakBuild.testFlags] 669 | test_timeout = REFLEAK_TIMEOUT 670 | factory_tags = ["win64", 
"refleak"] 671 | 672 | 673 | class Windows64ReleaseBuild(Windows64Build): 674 | buildersuffix = ".nondebug" 675 | buildFlags = Windows64Build.buildFlags + ["-c", "Release"] 676 | testFlags = [*Windows64Build.testFlags, "+d"] 677 | # keep default cleanFlags, both configurations get cleaned 678 | factory_tags = ["win64", "nondebug"] 679 | 680 | 681 | class Windows64PGOBuild(Windows64ReleaseBuild): 682 | buildersuffix = ".pgo" 683 | buildFlags = Windows64Build.buildFlags + ["--pgo"] 684 | testFlags = [*Windows64Build.testFlags, "+d"] 685 | factory_tags = ["win64", "nondebug", "pgo"] 686 | 687 | 688 | class Windows64NoGilBuild(Windows64Build): 689 | buildersuffix = '.x64.nogil' 690 | buildFlags = Windows64Build.buildFlags + ["--disable-gil"] 691 | testFlags = Windows64Build.testFlags + ["--disable-gil"] 692 | factory_tags = ["win64", "nogil"] 693 | 694 | 695 | class Windows64PGONoGilBuild(Windows64PGOBuild): 696 | buildersuffix = '.nogil.pgo' 697 | buildFlags = Windows64PGOBuild.buildFlags + ["--disable-gil"] 698 | testFlags = Windows64PGOBuild.testFlags + ["--disable-gil"] 699 | factory_tags = ["win64", "nogil", "nondebug", "pgo"] 700 | 701 | 702 | class WindowsARM64Build(BaseWindowsBuild): 703 | buildFlags = ["-p", "ARM64"] 704 | testFlags = ["-p", "ARM64", "-j2"] 705 | cleanFlags = ["-p", "ARM64"] 706 | factory_tags = ["win-arm64"] 707 | 708 | 709 | class WindowsARM64ReleaseBuild(WindowsARM64Build): 710 | buildersuffix = ".nondebug" 711 | buildFlags = WindowsARM64Build.buildFlags + ["-c", "Release"] 712 | testFlags = [*WindowsARM64Build.testFlags, "+d"] 713 | # keep default cleanFlags, both configurations get cleaned 714 | factory_tags = ["win-arm64", "nondebug"] 715 | 716 | ############################################################################## 717 | ############################## WASI BUILDS ################################# 718 | ############################################################################## 719 | 720 | 721 | class 
UnixCrossBuild(UnixBuild): 722 | extra_configure_flags = [] 723 | host_configure_cmd = ["../../configure"] 724 | host = None 725 | host_make_cmd = ["make"] 726 | can_execute_python = True 727 | 728 | def setup(self, parallel, branch, test_with_PTY=False, **kwargs): 729 | assert self.host is not None, "Must set self.host on cross builds" 730 | 731 | out_of_tree_dir = "build_oot" 732 | oot_dir_path = os.path.join("build", out_of_tree_dir) 733 | oot_build_path = os.path.join(oot_dir_path, "build") 734 | oot_host_path = os.path.join(oot_dir_path, "host") 735 | 736 | self.addStep( 737 | SetPropertyFromCommand( 738 | name="Gather build triple from worker", 739 | description="Get the build triple config.guess", 740 | command="./config.guess", 741 | property="build_triple", 742 | warnOnFailure=True, 743 | ) 744 | ) 745 | 746 | # Create out of tree directory for "build", the platform we are 747 | # currently running on 748 | self.addStep( 749 | ShellCommand( 750 | name="mkdir build out-of-tree directory", 751 | description="Create build out-of-tree directory", 752 | command=["mkdir", "-p", oot_build_path], 753 | warnOnFailure=True, 754 | ) 755 | ) 756 | # Create directory for "host", the platform we want to compile *for* 757 | self.addStep( 758 | ShellCommand( 759 | name="mkdir host out-of-tree directory", 760 | description="Create host out-of-tree directory", 761 | command=["mkdir", "-p", oot_host_path], 762 | warnOnFailure=True, 763 | ) 764 | ) 765 | 766 | # First, we build the "build" Python, which we need to cross compile 767 | # the "host" Python 768 | self.addStep( 769 | Configure( 770 | name="Configure build Python", 771 | command=["../../configure"], 772 | workdir=oot_build_path 773 | ) 774 | ) 775 | if parallel: 776 | compile = ["make", parallel] 777 | else: 778 | compile = ["make"] 779 | 780 | self.addStep( 781 | Compile( 782 | name="Compile build Python", 783 | command=compile, 784 | workdir=oot_build_path 785 | ) 786 | ) 787 | 788 | # Now that we have a "build" 
architecture Python, we can use that 789 | # to build a "host" (also known as the target we are cross compiling) 790 | # Take a copy so that the class-level definition isn't tainted 791 | configure_cmd = list(self.host_configure_cmd) 792 | configure_cmd += ["--prefix", "$(PWD)/target/host"] 793 | configure_cmd += self.configureFlags + self.extra_configure_flags 794 | configure_cmd += [util.Interpolate("--build=%(prop:build_triple)s")] 795 | configure_cmd += [f"--host={self.host}"] 796 | configure_cmd += ["--with-build-python=../build/python"] 797 | self.addStep( 798 | Configure( 799 | name="Configure host Python", 800 | command=configure_cmd, 801 | env=self.compile_environ, 802 | workdir=oot_host_path 803 | ) 804 | ) 805 | 806 | testopts = list(self.testFlags) 807 | if not has_option("-R", self.testFlags): 808 | testopts.extend((" --junit-xml", JUNIT_FILENAME)) 809 | if parallel: 810 | testopts.append(parallel) 811 | if not has_option("-j", self.testFlags): 812 | testopts.append("-j2") 813 | 814 | test = [ 815 | "make", 816 | "buildbottest", 817 | "TESTOPTS=" + " ".join(testopts) + " ${BUILDBOT_TESTOPTS}", 818 | f"TESTPYTHONOPTS={self.interpreterFlags}", 819 | f"TESTTIMEOUT={self.test_timeout}", 820 | ] 821 | 822 | if parallel: 823 | compile = self.host_make_cmd + [parallel, self.makeTarget] 824 | else: 825 | compile = self.host_make_cmd + [self.makeTarget] 826 | self.addStep( 827 | Compile( 828 | name="Compile host Python", 829 | command=compile, 830 | env=self.compile_environ, 831 | workdir=oot_host_path, 832 | ) 833 | ) 834 | if self.can_execute_python: 835 | self.addStep( 836 | ShellCommand( 837 | name="pythoninfo", 838 | description="pythoninfo", 839 | command=["make", "pythoninfo"], 840 | warnOnFailure=True, 841 | env=self.test_environ, 842 | workdir=oot_host_path, 843 | ) 844 | ) 845 | self.addStep(Test( 846 | command=test, 847 | timeout=step_timeout(self.test_timeout), 848 | usePTY=test_with_PTY, 849 | env=self.test_environ, 850 | workdir=oot_host_path, 851 
| )) 852 | if branch not in ("3",) and not has_option("-R", self.testFlags): 853 | filename = os.path.join(oot_host_path, JUNIT_FILENAME) 854 | self.addStep(UploadTestResults(branch, filename=filename)) 855 | self.addStep( 856 | Clean( 857 | name="Clean build Python", 858 | workdir=oot_build_path, 859 | ) 860 | ) 861 | self.addStep( 862 | Clean( 863 | name="Clean host Python", 864 | workdir=oot_host_path, 865 | ) 866 | ) 867 | 868 | 869 | # Deprecated since Python 3.13; can be dropped once `wasi.py` is in all versions. 870 | class Wasm32WasiCrossBuild(UnixCrossBuild): 871 | """wasm32-wasi builder 872 | 873 | * WASI SDK >= 16 must be installed to default path /opt/wasi-sdk 874 | * wasmtime must be installed and on PATH 875 | """ 876 | 877 | buildersuffix = ".wasi.nondebug" 878 | factory_tags = ["wasm", "wasi", "nondebug"] 879 | extra_configure_flags = [ 880 | # debug builds exhaust the limited call stack on WASI 881 | "--without-pydebug", 882 | ] 883 | host = "wasm32-unknown-wasi" 884 | host_configure_cmd = ["../../Tools/wasm/wasi-env", "../../configure"] 885 | 886 | def setup(self, parallel, branch, test_with_PTY=False, **kwargs): 887 | self.addStep( 888 | SetPropertyFromCommand( 889 | name="Find config.site-wasm32-wasi", 890 | description="Search Tools/wasm for config.site-wasm32-wasi", 891 | command="find Tools/wasm -name config.site-wasm32-wasi", 892 | property="config_site", 893 | warnOnFailure=True, 894 | ) 895 | ) 896 | self.compile_environ = super().compile_environ.copy() 897 | self.compile_environ.update( 898 | CONFIG_SITE=util.Interpolate("../../%(prop:config_site)s") 899 | ) 900 | self.addStep( 901 | ShellCommand( 902 | name="Touch srcdir Modules/Setup.local", 903 | description="Hack to work around wasmtime mapdir issue", 904 | command=["touch", "Modules/Setup.local"], 905 | haltOnFailure=True, 906 | ) 907 | ) 908 | super().setup(parallel, branch, test_with_PTY=test_with_PTY, **kwargs) 909 | 910 | 911 | class _Wasm32WasiPreview1Build(UnixBuild): 912 | 
"""Build Python for wasm32-wasi using Tools/wasm/wasi.py.""" 913 | buildersuffix = ".wasi" 914 | factory_tags = ["wasm", "wasi"] 915 | # pydebug and append_suffix defined in subclasses. 916 | 917 | def __init__(self, source, *, extra_tags=[], **kwargs): 918 | if not self.pydebug: 919 | extra_tags.append("nondebug") 920 | self.buildersuffix += self.append_suffix 921 | super().__init__(source, extra_tags=extra_tags, **kwargs) 922 | 923 | def setup(self, parallel, branch, test_with_PTY=False, **kwargs): 924 | wasi_py = "Tools/wasm/wasi.py" 925 | host_triple = "wasm32-wasip1" 926 | host_path = f"build/cross-build/{host_triple}" 927 | 928 | # Build Python 929 | build_configure = ["python3", wasi_py, "configure-build-python"] 930 | if self.pydebug: 931 | build_configure.extend(["--", "--with-pydebug"]) 932 | self.addStep( 933 | Configure( 934 | name="Configure build Python", 935 | command=build_configure, 936 | ) 937 | ) 938 | self.addStep( 939 | Compile( 940 | name="Compile build Python", 941 | command=["python3", wasi_py, "make-build-python"], 942 | ) 943 | ) 944 | 945 | # Host/WASI Python 946 | self.addStep( 947 | # Pydebug build automatically inferred from build Python. 948 | Configure( 949 | name="Configure host Python", 950 | command=["python3", wasi_py, "configure-host"], 951 | ) 952 | ) 953 | self.addStep( 954 | Compile( 955 | name="Compile host Python", 956 | command=["python3", wasi_py, "make-host"], 957 | ) 958 | ) 959 | 960 | self.addStep( 961 | ShellCommand( 962 | name="pythoninfo", 963 | description="pythoninfo", 964 | command=["make", "pythoninfo"], 965 | warnOnFailure=True, 966 | workdir=host_path, 967 | ) 968 | ) 969 | 970 | # Copied from UnixBuild. 
971 | testopts = list(self.testFlags) 972 | if not has_option("-R", self.testFlags): 973 | testopts.extend(("--junit-xml", JUNIT_FILENAME)) 974 | if parallel: 975 | testopts.append(parallel) 976 | if not has_option("-j", testopts): 977 | testopts.append("-j2") 978 | test = [ 979 | "make", 980 | "buildbottest", 981 | "TESTOPTS=" + " ".join(testopts) + " ${BUILDBOT_TESTOPTS}", 982 | f"TESTPYTHONOPTS={self.interpreterFlags}", 983 | f"TESTTIMEOUT={self.test_timeout}", 984 | ] 985 | self.addStep( 986 | Test( 987 | command=test, 988 | timeout=step_timeout(self.test_timeout), 989 | usePTY=test_with_PTY, 990 | env=self.test_environ, 991 | workdir=host_path, 992 | ) 993 | ) 994 | if branch not in ("3",) and not has_option("-R", self.testFlags): 995 | filename = os.path.join(host_path, JUNIT_FILENAME) 996 | self.addStep(UploadTestResults(branch, filename=filename)) 997 | 998 | self.addStep( 999 | Clean( 1000 | name="Clean the builds", 1001 | command=["python3", wasi_py, "clean"], 1002 | ) 1003 | ) 1004 | 1005 | 1006 | # Preventing this from running on versions older than 3.13 is managed in 1007 | # master.cfg. 1008 | class Wasm32WasiPreview1DebugBuild(_Wasm32WasiPreview1Build): 1009 | append_suffix = ".debug" 1010 | pydebug = True 1011 | testFlags = ["-u-cpu"] 1012 | 1013 | 1014 | ############################################################################## 1015 | ################################ IOS BUILDS ################################ 1016 | ############################################################################## 1017 | 1018 | class _IOSSimulatorBuild(UnixBuild): 1019 | """iOS Simulator build. 1020 | 1021 | * Xcode must be installed, with all licenses accepted 1022 | * The iOS Simulator must be installed 1023 | * The ~buildbot/support/iphonesimulator.{arch} folder must be populated 1024 | with pre-compiled builds of libFFI, XZ, Bzip2 and OpenSSL. 1025 | 1026 | Subclasses should define `arch`. 

    This workflow is largely the same as the UnixCrossBuild, except that:
    * It has the required iOS configure options baked in
    * It isolates the build path so that Homebrew and other macOS libraries
      can't leak into the build
    * It adds the environment flags and configuration paths for the binary
      dependencies.
    * It installs the host python after build (which finalizes the Framework
      build)
    * It invokes `make testios` as a test target
    """
    buildersuffix = ".iOS-simulator"
    ios_min_version = ""  # use the default from the configure file
    factory_tags = ["iOS"]
    extra_configure_flags = []
    host_configure_cmd = ["../../configure"]

    def __init__(self, source, **kwargs):
        # `arch` must be defined by the subclass; it selects both the builder
        # suffix and the autoconf host triple.
        self.buildersuffix += f".{self.arch}"
        self.host = f"{self.arch}-apple-ios{self.ios_min_version}-simulator"

        super().__init__(source, **kwargs)

    def setup(self, parallel, branch, test_with_PTY=False, **kwargs):
        # Out-of-tree layout: build_oot/build for the native Python,
        # build_oot/host for the cross-compiled iOS Python.
        out_of_tree_dir = "build_oot"
        oot_dir_path = os.path.join("build", out_of_tree_dir)
        oot_build_path = os.path.join(oot_dir_path, "build")
        oot_host_path = os.path.join(oot_dir_path, "host")

        # Create out of tree directory for "build", the platform we are
        # currently running on
        self.addStep(
            ShellCommand(
                name="mkdir build out-of-tree directory",
                description="Create build out-of-tree directory",
                command=["mkdir", "-p", oot_build_path],
                warnOnFailure=True,
            )
        )
        # Create directory for "host", the platform we want to compile *for*
        self.addStep(
            ShellCommand(
                name="mkdir host out-of-tree directory",
                description="Create host out-of-tree directory",
                command=["mkdir", "-p", oot_host_path],
                warnOnFailure=True,
            )
        )

        # First, we build the "build" Python, which we need to cross compile
        # the "host" Python
        self.addStep(
            Configure(
                name="Configure build Python",
                command=["../../configure"],
                workdir=oot_build_path
            )
        )
        if parallel:
            compile = ["make", parallel]
        else:
            compile = ["make"]

        self.addStep(
            Compile(
                name="Compile build Python",
                command=compile,
                workdir=oot_build_path
            )
        )

        # Ensure the host path is isolated from Homebrew et al, but includes
        # the host helper binaries. Also add the configuration paths for
        # library dependencies.
        support_path = f"/Users/buildbot/support/iphonesimulator.{self.arch}"
        compile_environ = dict(self.compile_environ)
        compile_environ.update({
            "PATH": os.pathsep.join([
                # This is intentionally a relative path. Buildbot doesn't expose
                # the absolute working directory where the build is running as
                # something that can be expanded into an environment variable.
                "../../iOS/Resources/bin",
                "/usr/bin",
                "/bin",
                "/usr/sbin",
                "/sbin",
                "/Library/Apple/usr/bin",
            ]),
            "LIBLZMA_CFLAGS": f"-I{support_path}/xz/include",
            "LIBLZMA_LIBS": f"-L{support_path}/xz/lib -llzma",
            "BZIP2_CFLAGS": f"-I{support_path}/bzip2/include",
            "BZIP2_LIBS": f"-L{support_path}/bzip2/lib -lbz2",
            "LIBFFI_CFLAGS": f"-I{support_path}/libffi/include",
            "LIBFFI_LIBS": f"-L{support_path}/libffi/lib -lffi",
        })

        # Now that we have a "build" architecture Python, we can use that
        # to build a "host" (also known as the target we are cross compiling)
        # Take a copy so that the class-level definition isn't tainted
        configure_cmd = list(self.host_configure_cmd)
        configure_cmd += self.configureFlags
        configure_cmd += self.extra_configure_flags
        configure_cmd += [
            f"--with-openssl={support_path}/openssl",
            f"--build={self.arch}-apple-darwin",
            f"--host={self.host}",
            "--with-build-python=../build/python.exe",
            "--enable-framework"
        ]

        self.addStep(
            Configure(
                name="Configure host Python",
                command=configure_cmd,
                env=compile_environ,
                workdir=oot_host_path
            )
        )

        if parallel:
            compile = ["make", parallel, self.makeTarget]
            install = ["make", parallel, "install"]
        else:
            compile = ["make", self.makeTarget]
            install = ["make", "install"]

        self.addStep(
            Compile(
                name="Compile host Python",
                command=compile,
                env=compile_environ,
                workdir=oot_host_path,
            )
        )
        # Install step finalizes the Framework build (see class docstring).
        self.addStep(
            Compile(
                name="Install host Python",
                command=install,
                env=compile_environ,
                workdir=oot_host_path,
            )
        )
        self.addStep(
            Test(
                command=["make", "testios"],
                timeout=step_timeout(self.test_timeout),
                usePTY=test_with_PTY,
                env=self.test_environ,
                workdir=oot_host_path,
            )
        )

        self.addStep(
            Clean(
                name="Clean build Python",
                workdir=oot_build_path,
            )
        )
        self.addStep(
            Clean(
                name="Clean host Python",
                workdir=oot_host_path,
            )
        )


class IOSARM64SimulatorBuild(_IOSSimulatorBuild):
    """An ARM64 iOS simulator build."""
    arch = "arm64"


##############################################################################
############################## ANDROID BUILDS ##############################
##############################################################################

class AndroidBuild(BaseBuild):
    """Build Python for Android on a Linux or Mac machine, and test it using a
    Gradle-managed emulator.

    To set up a worker, see cpython/Android/README.md, especially the following
    sections:

    * Install everything listed under "Prerequisites".
    * Do any OS-specific setup mentioned under "Testing".
    * If the managed emulator appears to be running out of memory, increase
      its RAM size as described under "Testing".
    """

    def setup(self, **kwargs):
        # All heavy lifting is delegated to the in-tree Android/android.py
        # helper script: configure/build for the worker, then for the device.
        android_py = "Android/android.py"
        self.addSteps([
            SetPropertyFromCommand(
                name="Get build triple",
                command=["./config.guess"],
                property="build_triple",
                haltOnFailure=True,
            ),
            Configure(
                name="Configure build Python",
                command=[android_py, "configure-build"],
            ),
            Compile(
                name="Compile build Python",
                command=[android_py, "make-build"],
            ),
            Configure(
                name="Configure host Python",
                command=[android_py, "configure-host", self.host_triple],
            ),
            Compile(
                name="Compile host Python",
                command=[android_py, "make-host", self.host_triple],
            ),
            Test(
                command=[
                    android_py, "test", "--managed", "maxVersion", "-v", "--",
                    "-uall", "--single-process", "--rerun", "-W",
                ],
                timeout=step_timeout(self.test_timeout),
            ),
            ShellCommand(
                name="Clean",
                command=[android_py, "clean"],
            ),
        ])

    @util.renderer
    def host_triple(props):
        # Buildbot renderer (no `self`): derives the Android host triple from
        # the worker's "build_triple" property, keeping only the CPU part
        # (e.g. "x86_64-pc-linux-gnu" -> "x86_64-linux-android").
        build_triple = props.getProperty("build_triple")
        return build_triple.split("-")[0] + "-linux-android"


class ValgrindBuild(UnixBuild):
    """Run a small, leak-sensitive subset of the test suite under Valgrind."""
    buildersuffix = ".valgrind"
    configureFlags = [
        "--with-pydebug",
        "--with-valgrind",
        "--without-pymalloc",
    ]
    # Deliberately a short test list: Valgrind is far too slow for a full run.
    testFlags = [
        "test_grammar",
        "test_syntax",
        "test_tokenize",
        "test_fstring",
        "test_ast",
        "test_exceptions",
    ]
    factory_tags = ["valgrind"]
    test_timeout = TEST_TIMEOUT * 5

    def setup(self, parallel, branch, **kwargs):
        self.addStep(
            Configure(
                command=["./configure", "--prefix", "$(PWD)/target"] + self.configureFlags
            )
        )

        compile = ["make", self.makeTarget]
        if parallel:
            compile = ["make", parallel, self.makeTarget]

        self.addStep(Compile(command=compile, env=self.compile_environ))

        self.addStep(
            ShellCommand(
                name="pythoninfo",
                description="pythoninfo",
                command=["make", "pythoninfo"],
                warnOnFailure=True,
                env=self.test_environ,
            )
        )

        # Invoke the interpreter directly under valgrind rather than through
        # "make buildbottest" so the valgrind options can be controlled here.
        test = [
            "valgrind",
            "--leak-check=full",
            "--show-leak-kinds=definite",
            "--error-exitcode=10",
            "--gen-suppressions=all",
            "--track-origins=yes",
            "--trace-children=no",
            util.Interpolate("--suppressions=%(prop:builddir)s/build/Misc/valgrind-python.supp"),
            "./python",
            "-m", "test",
            *self.testFlags,
            f"--timeout={self.test_timeout}",
        ]

        self.addStep(Test(
            command=test,
            timeout=step_timeout(self.test_timeout),
            env=self.test_environ,
        ))

        self.addStep(Clean())
--------------------------------------------------------------------------------
/master/custom/pr_reporter.py:
--------------------------------------------------------------------------------
import re
import logging

from twisted.internet import defer
from twisted.python import log

from buildbot.process.properties import Properties
from buildbot.process.results import (
    CANCELLED,
    EXCEPTION,
    FAILURE,
    RETRY,
    SKIPPED,
    SUCCESS,
    WARNINGS,
)
from buildbot.util.giturlparse import giturlparse
from buildbot.plugins import reporters
from buildbot.reporters.utils import getDetailsForBuild

from custom.builders import get_builder_tier
from custom.testsuite_utils import get_logs_and_tracebacks_from_build

PR_MESSAGE = """\
:warning::warning::warning: Buildbot failure :warning::warning::warning:
------------------------------------------------------------------------

Hi!
The buildbot **{buildername}** ({tier}) has failed when building commit {sha}. 29 | 30 | What do you need to do: 31 | 32 | 1. Don't panic. 33 | 2. Check [the buildbot page in the devguide](https://devguide.python.org/buildbots/) \ 34 | if you don't know what the buildbots are or how they work. 35 | 3. Go to the page of the buildbot that failed ({build_url}) \ 36 | and take a look at the build logs. 37 | 4. Check if the failure is related to this commit ({sha}) or \ 38 | if it is a false positive. 39 | 5. If the failure is related to this commit, please, reflect \ 40 | that on the issue and make a new Pull Request with a fix. 41 | 42 | You can take a look at the buildbot page here: 43 | 44 | {build_url} 45 | 46 | {failed_test_text} 47 | 48 | Summary of the results of the build (if available): 49 | 50 | {summary_text} 51 | 52 |
53 | Click to see traceback logs 54 | 55 | {tracebacks} 56 | 57 |
58 | """ 59 | 60 | 61 | class PrReporterError(Exception): 62 | pass 63 | 64 | 65 | class GitHubPullRequestReporter(reporters.GitHubStatusPush): 66 | name = "GitHubPullRequestReporter" 67 | 68 | @defer.inlineCallbacks 69 | def sendMessage(self, reports): 70 | build = reports[0]['builds'][0] 71 | props = Properties.fromDict(build["properties"]) 72 | props.master = self.master 73 | 74 | if build["complete"]: 75 | state = { 76 | SUCCESS: "success", 77 | WARNINGS: "success", 78 | FAILURE: "failure", 79 | SKIPPED: "success", 80 | EXCEPTION: "error", 81 | RETRY: "pending", 82 | CANCELLED: "error", 83 | }.get(build["results"], "error") 84 | else: 85 | return 86 | 87 | buildid = build.get('buildid', "???") 88 | log.msg("Considering reporting build :{}".format(buildid), logLevel=logging.INFO) 89 | 90 | if state != "failure": 91 | log.msg( 92 | f"Not reporting build {buildid};" 93 | f" state is {state!r} (from {build['results']!r})," 94 | f" not failure ({FAILURE!r})", 95 | logLevel=logging.INFO, 96 | ) 97 | return 98 | 99 | yield getDetailsForBuild(self.master, build, want_logs=True, want_logs_content=True, want_steps=True) 100 | 101 | logs, tracebacks = get_logs_and_tracebacks_from_build(build) 102 | 103 | context = yield props.render(self.context) 104 | 105 | sourcestamps = build["buildset"].get("sourcestamps") 106 | 107 | if not (sourcestamps and sourcestamps[0]): 108 | log.msg("Build {} not reported as it doesn't have source stamps".format(buildid), logLevel=logging.INFO) 109 | return 110 | 111 | changes = yield self.master.data.get(("builds", build["buildid"], "changes")) 112 | 113 | if len(changes) != 1: 114 | log.msg("Build {} not reported as it has more than one change".format(buildid), logLevel=logging.INFO) 115 | return 116 | 117 | change = changes[0] 118 | change_comments = change["comments"] 119 | 120 | if not change_comments: 121 | log.msg("Build {} not reported as no change comments could be found".format(buildid), logLevel=logging.INFO) 122 | return 123 | 124 
| # GH-42, gh-42, or #42 125 | m = re.search(r"\((?:GH-|#)(\d+)\)", change_comments, flags=re.IGNORECASE) 126 | 127 | if m is None: 128 | log.msg("Build {} not reported as the issue could not be identified from the title".format( 129 | buildid), logLevel=logging.INFO) 130 | return 131 | 132 | issue = m.groups()[-1] 133 | 134 | project = sourcestamps[0]["project"] 135 | 136 | if "/" in project: 137 | repoOwner, repoName = project.split("/") 138 | else: 139 | giturl = giturlparse(sourcestamps[0]["repository"]) 140 | repoOwner = giturl.owner 141 | repoName = giturl.repo 142 | 143 | log.msg( 144 | "Updating github status: repoOwner={repoOwner}, repoName={repoName}".format( 145 | repoOwner=repoOwner, repoName=repoName 146 | ) 147 | ) 148 | 149 | log.msg("Attempting to issue a PR comment for failed build for build {}".format(buildid), logLevel=logging.INFO) 150 | response = None 151 | try: 152 | repo_user = repoOwner 153 | repo_name = repoName 154 | sha = change["revision"] 155 | target_url = build["url"] 156 | context = context 157 | response = yield self.createStatus( 158 | build=build, 159 | repo_user=repo_user, 160 | repo_name=repo_name, 161 | sha=sha, 162 | state=state, 163 | props=props, 164 | target_url=target_url, 165 | context=context, 166 | issue=issue, 167 | tracebacks=tracebacks, 168 | logs=logs, 169 | ) 170 | if not response or not self.is_status_2xx(response.code): 171 | raise PrReporterError() 172 | log.msg( 173 | "Issued a Pull Request comment for {repoOwner}/{repoName} " 174 | 'at {sha}, context "{context}", issue {issue}.'.format( 175 | repoOwner=repoOwner, 176 | repoName=repoName, 177 | sha=sha, 178 | issue=issue, 179 | context=context, 180 | ) 181 | ) 182 | except Exception as e: 183 | if response: 184 | content = yield response.content() 185 | code = response.code 186 | else: 187 | content = code = "n/a" 188 | log.err( 189 | e, 190 | ( 191 | f'Failed to issue a Pull Request comment for {repoOwner}/{repoName} ' 192 | f'at {sha}, context "{context}", 
issue {issue}. ' 193 | f'http {code}, {content}' 194 | ), 195 | ) 196 | 197 | def _getURLForBuild(self, builderid, build_number): 198 | prefix = self.master.config.buildbotURL.rstrip('/') 199 | return f"{prefix}/#/builders/{builderid}/builds/{build_number}" 200 | 201 | @defer.inlineCallbacks 202 | def createStatus( 203 | self, 204 | build, 205 | repo_user, 206 | repo_name, 207 | sha, 208 | state, 209 | props, 210 | target_url=None, 211 | context=None, 212 | issue=None, 213 | tracebacks=None, 214 | logs=None, 215 | ): 216 | buildername = build["builder"]["name"] 217 | 218 | message = PR_MESSAGE.format( 219 | buildername=buildername, 220 | tier=get_builder_tier(buildername), 221 | build_url=self._getURLForBuild( 222 | build["builder"]["builderid"], build["number"] 223 | ), 224 | sha=sha, 225 | tracebacks="```python-traceback\n{}\n```".format("\n\n".join(tracebacks)), 226 | summary_text=logs.test_summary(), 227 | failed_test_text=logs.format_failing_tests(), 228 | ) 229 | 230 | payload = {"body": message} 231 | headers = yield self._get_auth_header(props) 232 | 233 | return self._http.post( 234 | "/".join(["/repos", repo_user, repo_name, "issues", issue, "comments"]), 235 | json=payload, 236 | headers=headers, 237 | ) 238 | -------------------------------------------------------------------------------- /master/custom/pr_testing.py: -------------------------------------------------------------------------------- 1 | # Functions to enable buildbot to test pull requests and report back 2 | import re 3 | import logging 4 | 5 | from dateutil.parser import parse as dateparse 6 | 7 | from twisted.internet import defer 8 | from twisted.python import log 9 | 10 | from buildbot.util import httpclientservice 11 | from buildbot.www.hooks.github import GitHubEventHandler 12 | 13 | TESTING_LABEL = ":hammer: test-with-buildbots" 14 | REFLEAK_TESTING_LABEL = ":hammer: test-with-refleak-buildbots" 15 | 16 | GITHUB_PROPERTIES_WHITELIST = ["*.labels"] 17 | 18 | BUILD_MESSAGE_HEADER = 
"""\
:robot: New build scheduled with the buildbot fleet by @{user} for commit {commit} :robot:

Results will be shown at:

https://buildbot.python.org/all/#/grid?branch=refs%2Fpull%2F{pr_number}%2Fmerge

"""

BUILD_SCHEDULED_MESSAGE_TEMPLATE = BUILD_MESSAGE_HEADER + """\
If you want to schedule another build, you need to add the {label} label again.
"""

BUILD_COMMAND_SCHEDULED_MESSAGE_TEMPLATE = BUILD_MESSAGE_HEADER + """\
The command will test the builders whose names match following regular expression: `{filter}`

The builders matched are:
{builders}
"""

# Matches "!buildbot <regex>" PR comments; group 1 is the builder filter.
BUILDBOT_COMMAND = re.compile(r"!buildbot (.+)")


def should_pr_be_tested(change):
    # Property set on changes created by CustomGitHubEventHandler below.
    return change.properties.getProperty("should_test_pr", False)


class CustomGitHubEventHandler(GitHubEventHandler):
    """GitHub webhook handler that schedules PR builds on label/comment events."""

    def __init__(self, *args, builder_names, **kwargs):
        super().__init__(*args, **kwargs)
        # Known builder names, matched against "!buildbot <regex>" commands.
        self.builder_names = builder_names

    @defer.inlineCallbacks
    def _post_comment(self, comments_url, comment):
        """Post *comment* on the issue/PR at *comments_url*."""
        headers = {"User-Agent": "Buildbot"}
        if self._token:
            headers["Authorization"] = "token " + self._token

        http = yield httpclientservice.HTTPSession(
            self.master.httpservice,
            self.github_api_endpoint,
            headers=headers,
            debug=self.debug,
            verify=self.verify,
        )

        yield http.post(
            comments_url.replace(self.github_api_endpoint, ""),
            json={"body": comment},
        )

    @defer.inlineCallbacks
    def _remove_label_and_comment(self, payload, label):
        """Acknowledge a scheduled build with a comment, then remove *label*."""
        headers = {"User-Agent": "Buildbot"}
        if self._token:
            headers["Authorization"] = "token " + self._token

        http = yield httpclientservice.HTTPSession(
            self.master.httpservice,
            self.github_api_endpoint,
            headers=headers,
            debug=self.debug,
            verify=self.verify,
        )

        # Create the comment
        url = payload["pull_request"]["comments_url"]
        username = payload["sender"]["login"]
        commit = payload["pull_request"]["head"]["sha"]
        pr_number = payload["pull_request"]["number"]
        yield http.post(
            url.replace(self.github_api_endpoint, ""),
            json={
                "body": BUILD_SCHEDULED_MESSAGE_TEMPLATE.format(
                    user=username,
                    commit=commit,
                    label=label,
                    pr_number=pr_number,
                )
            },
        )

        # Remove the label
        url = payload["pull_request"]["issue_url"] + f"/labels/{label}"
        yield http.delete(url.replace(self.github_api_endpoint, ""))

    @defer.inlineCallbacks
    def _get_pull_request(self, url):
        """Fetch full PR data from the GitHub API; return None on failure."""
        headers = {"User-Agent": "Buildbot"}
        if self._token:
            headers["Authorization"] = "token " + self._token

        http = yield httpclientservice.HTTPSession(
            self.master.httpservice,
            self.github_api_endpoint,
            headers=headers,
            debug=self.debug,
            verify=self.verify,
        )
        res = yield http.get(url)
        if 200 <= res.code < 300:
            data = yield res.json()
            return data

        log.msg(f"Failed fetching PR from {url}: response code {res.code}")
        return None

    @defer.inlineCallbacks
    def _user_has_write_permissions(self, payload, user):
        """Check if *user* has write permissions"""

        repo = payload["repository"]["full_name"]
        url = f"https://api.github.com/repos/{repo}/collaborators/{user}/permission"
        headers = {"User-Agent": "Buildbot"}
        if self._token:
            headers["Authorization"] = "token " + self._token
        http = yield httpclientservice.HTTPSession(
            self.master.httpservice,
            self.github_api_endpoint,
            headers=headers,
            debug=self.debug,
            verify=self.verify,
        )
        res = yield http.get(url)
        if 200 <= res.code < 300:
            data = yield res.json()
            log.msg(f"User {user} has permission {data['permission']} on {repo}")
            return data["permission"] in {"admin", "write"}

        log.msg(
            f"Failed fetching user permissions from {url}: response code {res.code}"
        )
        return False

    def _get_changes_from_pull_request(
        self, changes, pr_number, payload, pull_request, event, builder_filter
    ):
        # Build a buildbot change dict from the PR payload and append it to
        # *changes*; "should_test_pr" marks it for the PR schedulers.
        refname = "refs/pull/{}/{}".format(pr_number, self.pullrequest_ref)
        basename = pull_request["base"]["ref"]
        commits = pull_request["commits"]
        title = pull_request["title"]
        comments = pull_request["body"]
        properties = self.extractProperties(pull_request)
        properties.update({"should_test_pr": True})
        properties.update({"event": event})
        properties.update({"basename": basename})
        properties.update({"builderfilter": builder_filter})
        change = {
            "revision": pull_request["head"]["sha"],
            "when_timestamp": dateparse(pull_request["created_at"]),
            "branch": refname,
            "revlink": pull_request["_links"]["html"]["href"],
            "repository": payload["repository"]["html_url"],
            "project": pull_request["base"]["repo"]["full_name"],
            "category": "pull",
            "author": payload["sender"]["login"],
            "comments": "GitHub Pull Request #{0} ({1} commit{2})\n{3}\n{4}".format(
                pr_number, commits, "s" if commits != 1 else "", title, comments
            ),
            "properties": properties,
        }

        if callable(self._codebase):
            change["codebase"] = self._codebase(payload)
        elif self._codebase is not None:
            change["codebase"] = self._codebase

        changes.append(change)

        log.msg(
            "Received {} changes from GitHub PR #{}".format(len(changes), pr_number)
        )
        return (changes, "git")

    @defer.inlineCallbacks
    def handle_issue_comment(self, payload, event):
        """Handle issue_comment webhooks: schedule builds for "!buildbot" commands."""
        changes = []
        number = payload["issue"]["number"]
        action = payload.get("action")

        # We only care about new comments
        if action != "created":
            log.msg(
                "GitHub PR #{} comment action is not 'created', ignoring".format(number)
            )
            return (changes, "git")

        # We only care about comments on PRs, not issues.
        if "pull_request" not in payload["issue"]:
            log.msg("GitHub PR #{} is not a pull request, ignoring".format(number))
            return (changes, "git")

        comment = payload["comment"]["body"].strip()

        match = BUILDBOT_COMMAND.match(comment)
        # If the comment is not a buildbot command, ignore it
        if not match:
            log.msg(
                "GitHub PR #{} comment is not a buildbot command, ignoring".format(
                    number
                )
            )
            return (changes, "git")

        builder_filter = match.group(1).strip()

        # If the command is empty, ignore it
        if not builder_filter:
            log.msg("GitHub PR #{} command is empty, ignoring".format(number))
            return (changes, "git")

        # If the command is not from a user with write permissions, ignore it
        if not (
            yield self._user_has_write_permissions(payload, payload["sender"]["login"])
        ):
            log.msg(
                "GitHub PR #{} user {} has no write permissions, ignoring".format(
                    number, payload["sender"]["login"]
                )
            )
            yield self._post_comment(
                payload["issue"]["comments_url"],
                "You don't have write permissions to trigger a build",
            )
            return (changes, "git")

        pull_url = payload["issue"]["pull_request"]["url"]

        # We need to fetch the PR data from GitHub because the payload doesn't
        # contain a lot of information we need.
        pull_request = yield self._get_pull_request(pull_url)
        if pull_request is None:
            log.msg("Failed to fetch PR #{} from {}".format(number, pull_url))
            return (changes, "git")

        repo_full_name = payload["repository"]["full_name"]
        head_sha = pull_request["head"]["sha"]

        log.msg("Processing GitHub PR #{}".format(number), logLevel=logging.DEBUG)

        head_msg = yield self._get_commit_msg(repo_full_name, head_sha)
        if self._has_skip(head_msg):
            log.msg(
                "GitHub PR #{}, Ignoring: "
                "head commit message contains skip pattern".format(number)
            )
            return ([], "git")

        # This code is related to GitHubPrScheduler
        builder_filter_fn = re.compile(builder_filter, re.IGNORECASE)
        matched_builders = [
            builder_name
            for builder_name in self.builder_names
            if builder_filter_fn.search(builder_name)
        ]
        if not matched_builders:
            log.msg(f"GitHub PR #{number}: regex {builder_filter!r} "
                    f"did not match any builder", logLevel=logging.DEBUG)
            yield self._post_comment(
                payload["issue"]["comments_url"],
                f"The regex {builder_filter!r} did not match any buildbot builder.
" 279 | f"Is the requested builder in the list of stable builders?", 280 | ) 281 | return (changes, "git") 282 | 283 | yield self._post_comment( 284 | payload["issue"]["comments_url"], 285 | BUILD_COMMAND_SCHEDULED_MESSAGE_TEMPLATE.format( 286 | user=payload["sender"]["login"], 287 | commit=head_sha, 288 | filter=builder_filter, 289 | pr_number=number, 290 | builders="\n".join( 291 | { 292 | f"- `{builder}`" 293 | for builder in matched_builders 294 | } 295 | ), 296 | ), 297 | ) 298 | 299 | return self._get_changes_from_pull_request( 300 | changes, number, payload, pull_request, event, builder_filter 301 | ) 302 | 303 | @defer.inlineCallbacks 304 | def handle_pull_request(self, payload, event): 305 | 306 | changes = [] 307 | number = payload["number"] 308 | action = payload.get("action") 309 | 310 | if action != "labeled": 311 | log.msg("GitHub PR #{} {}, ignoring".format(number, action)) 312 | return (changes, "git") 313 | 314 | label = payload.get("label")["name"] 315 | if label not in {TESTING_LABEL, REFLEAK_TESTING_LABEL}: 316 | log.msg("Invalid label in PR #{}, ignoring".format(number)) 317 | return (changes, "git") 318 | 319 | repo_full_name = payload["repository"]["full_name"] 320 | head_sha = payload["pull_request"]["head"]["sha"] 321 | 322 | log.msg("Processing GitHub PR #{}".format(number), logLevel=logging.DEBUG) 323 | 324 | head_msg = yield self._get_commit_msg(repo_full_name, head_sha) 325 | if self._has_skip(head_msg): 326 | log.msg( 327 | "GitHub PR #{}, Ignoring: " 328 | "head commit message contains skip pattern".format(number) 329 | ) 330 | return ([], "git") 331 | 332 | yield self._remove_label_and_comment(payload, label) 333 | 334 | builder_filter = "" 335 | if label == TESTING_LABEL: 336 | builder_filter = ".*" 337 | elif label == REFLEAK_TESTING_LABEL: 338 | builder_filter = ".*Refleaks.*" 339 | 340 | return self._get_changes_from_pull_request( 341 | changes, number, payload, payload["pull_request"], event, builder_filter 342 | ) 343 | 
-------------------------------------------------------------------------------- /master/custom/release_dashboard.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import time 4 | from functools import cached_property, total_ordering 5 | import enum 6 | from dataclasses import dataclass 7 | import itertools 8 | import urllib.request 9 | import urllib.error 10 | import json 11 | from pathlib import Path 12 | from xml.etree import ElementTree 13 | 14 | from flask import Flask 15 | from flask import render_template, request 16 | import jinja2 17 | import humanize 18 | 19 | from buildbot.data.resultspec import Filter 20 | import buildbot.process.results 21 | 22 | N_BUILDS = 200 23 | MAX_CHANGES = 50 24 | 25 | 26 | # Cache result for 6 minutes. Generating the page is slow and a Python build 27 | # takes at least 5 minutes, a common build takes 10 to 30 minutes. There is a 28 | # cronjob that forces a refresh every 5 minutes, so all human requests should 29 | # get a cache hit. 30 | CACHE_DURATION = 6 * 60 31 | 32 | BRANCHES_URL = "https://raw.githubusercontent.com/python/devguide/main/include/release-cycle.json" 33 | 34 | 35 | def _gimme_error(func): 36 | """Debug decorator to turn AttributeError into a different Exception 37 | 38 | jinja2 tends to swallow AttributeError or report it in some place it 39 | didn't happen. When that's a problem, use this decorator to get 40 | a usable traceback. 41 | """ 42 | def decorated(*args, **kwargs): 43 | try: 44 | return func(*args, **kwargs) 45 | except AttributeError as e: 46 | raise _WrappedAttributeError(f'your error: {e!r}') 47 | return decorated 48 | 49 | class _WrappedAttributeError(Exception): pass 50 | 51 | 52 | class DashboardObject: 53 | """Base wrapper for a dashboard object. 54 | 55 | Acts as a dict with the info we get (usually) from JSON API. 56 | 57 | All computed information should be cached using @cached_property. 
58 | For a fresh view, discard all these objects and build them again. 59 | (Computing info on demand means the "for & if" logic in the template 60 | doesn't need to be duplicated in Python code.) 61 | 62 | Objects are arranged in a tree: every one (except the root) has a parent. 63 | (Cross-tree references must go through the root.) 64 | 65 | N.B.: In Jinja, mapping keys and attributes are interchangeable. 66 | Shadow the `info` dict wisely. 67 | """ 68 | def __init__(self, parent, info): 69 | self._parent = parent 70 | self._root = parent._root 71 | self._info = info 72 | 73 | def __getitem__(self, key): 74 | return self._info[key] 75 | 76 | def dataGet(self, *args, **kwargs): 77 | """Call Buildbot API""" 78 | # Buildbot sets `buildbot_api` as an attribute on the WSGI app, 79 | # a bit later than we'd like. Get to it dynamically. 80 | return self._root._app.flask_app.buildbot_api.dataGet(*args, **kwargs) 81 | 82 | def __repr__(self): 83 | return f'<{type(self).__name__} at {id(self)}: {self._info}>' 84 | 85 | 86 | class DashboardState(DashboardObject): 87 | """The root of our abstraction, a bit special. 
88 | """ 89 | def __init__(self, app): 90 | self._root = self 91 | self._app = app 92 | super().__init__(self, {}) 93 | self._tiers = {} 94 | 95 | @cached_property 96 | def builders(self): 97 | active_builderids = set() 98 | for worker in self.workers: 99 | for cnf in worker["configured_on"]: 100 | active_builderids.add(cnf["builderid"]) 101 | return [ 102 | Builder(self, info) 103 | for info in self.dataGet("/builders") 104 | if info["builderid"] in active_builderids 105 | ] 106 | 107 | @cached_property 108 | def workers(self): 109 | return [Worker(self, info) for info in self.dataGet("/workers")] 110 | 111 | @cached_property 112 | def branches(self): 113 | branches = [] 114 | for version, info in self._app.branch_info.items(): 115 | if info['status'] == 'end-of-life': 116 | continue 117 | if info['branch'] == 'main': 118 | tag = '3.x' 119 | else: 120 | tag = version 121 | branches.append(Branch(self, { 122 | **info, 'version': version, 'tag': tag 123 | })) 124 | branches.append(self._no_branch) 125 | return branches 126 | 127 | @cached_property 128 | def _no_branch(self): 129 | return Branch(self, {'tag': 'no-branch'}) 130 | 131 | @cached_property 132 | def tiers(self): 133 | tiers = [Tier(self, {'tag': f'tier-{n}'}) for n in range(1, 4)] 134 | tiers.append(self._no_tier) 135 | return tiers 136 | 137 | @cached_property 138 | def _no_tier(self): 139 | # Hack: 'tierless' sorts after 'tier-#' alphabetically, 140 | # so we don't need to use numeric priority to sort failures by tier 141 | return Tier(self, {'tag': 'tierless'}) 142 | 143 | @cached_property 144 | def now(self): 145 | return datetime.datetime.now(tz=datetime.timezone.utc) 146 | 147 | 148 | def cached_sorted_property(func=None, /, **sort_kwargs): 149 | """Like cached_property, but calls sorted() on the value 150 | 151 | This is sometimes used just to turn a generator into a list, as the 152 | Jinja template generally likes to know if sequences are empty. 
153 | """ 154 | def decorator(func): 155 | def wrapper(*args, **kwargs): 156 | return sorted(func(*args, **kwargs), **sort_kwargs) 157 | return cached_property(wrapper) 158 | if func: 159 | return decorator(func) 160 | return decorator 161 | 162 | 163 | @total_ordering 164 | class Builder(DashboardObject): 165 | @cached_property 166 | def builds(self): 167 | endpoint = ("builders", self["builderid"], "builds") 168 | infos = self.dataGet( 169 | endpoint, 170 | limit=N_BUILDS, 171 | order=["-complete_at"], 172 | filters=[Filter("complete", "eq", ["True"])], 173 | ) 174 | builds = [] 175 | for info in infos: 176 | builds.append(Build(self, info)) 177 | return [Build(self, info) for info in infos] 178 | 179 | @cached_property 180 | def tags(self): 181 | return frozenset(self["tags"]) 182 | 183 | @cached_property 184 | def branch(self): 185 | for branch in self._parent.branches: 186 | if branch.tag in self.tags: 187 | return branch 188 | return self._parent._no_branch 189 | 190 | @cached_property 191 | def tier(self): 192 | for tier in self._parent.tiers: 193 | if tier.tag in self.tags: 194 | return tier 195 | return self._parent._no_tier 196 | 197 | @cached_property 198 | def is_stable(self): 199 | return 'stable' in self.tags 200 | 201 | @cached_property 202 | def is_release_blocking(self): 203 | return self.tier.value in (1, 2) 204 | 205 | def __lt__(self, other): 206 | return self["name"] < other["name"] 207 | 208 | def iter_interesting_builds(self): 209 | """Yield builds except unfinished/skipped/interrupted ones""" 210 | for build in self.builds: 211 | if build["results"] in ( 212 | buildbot.process.results.SUCCESS, 213 | buildbot.process.results.WARNINGS, 214 | buildbot.process.results.FAILURE, 215 | ): 216 | yield build 217 | 218 | @cached_sorted_property() 219 | def problems(self): 220 | latest_build = None 221 | for build in self.iter_interesting_builds(): 222 | latest_build = build 223 | break 224 | 225 | if not latest_build: 226 | yield NoBuilds(self) 227 | 
return 228 | elif latest_build["results"] == buildbot.process.results.WARNINGS: 229 | yield BuildWarning(latest_build) 230 | elif latest_build["results"] == buildbot.process.results.FAILURE: 231 | failing_streak = 0 232 | first_failing_build = None 233 | for build in self.iter_interesting_builds(): 234 | if build["results"] == buildbot.process.results.FAILURE: 235 | first_failing_build = build 236 | continue 237 | elif build["results"] == buildbot.process.results.SUCCESS: 238 | if latest_build != first_failing_build: 239 | yield BuildFailure(latest_build, first_failing_build) 240 | break 241 | else: 242 | yield BuildFailure(latest_build) 243 | 244 | if not self.connected_workers: 245 | yield BuilderDisconnected(self) 246 | 247 | @cached_sorted_property 248 | def connected_workers(self): 249 | for worker in self._root.workers: 250 | if worker["connected_to"]: 251 | for cnf in worker["configured_on"]: 252 | if cnf["builderid"] == self["builderid"]: 253 | yield worker 254 | 255 | class Worker(DashboardObject): 256 | pass # The JSON is fine! 
:) 257 | 258 | @total_ordering 259 | class _BranchTierBase(DashboardObject): 260 | """Base class for Branch and Tag""" 261 | # Branches have several kinds of names: 262 | # 'tag': '3.x' (used as key) 263 | # 'version': '3.14' 264 | # 'branch': 'main' 265 | # To prevent confusion, there's no 'name' 266 | 267 | @cached_property 268 | def tag(self): 269 | return self["tag"] 270 | 271 | def __hash__(self): 272 | return hash(self.tag) 273 | 274 | def __eq__(self, other): 275 | if isinstance(other, str): 276 | return self.tag == other 277 | return self.sort_key == other.sort_key 278 | 279 | def __lt__(self, other): 280 | return self.sort_key < other.sort_key 281 | 282 | def __str__(self): 283 | return self.tag 284 | 285 | @total_ordering 286 | class Branch(_BranchTierBase): 287 | @cached_property 288 | def sort_key(self): 289 | if self.tag.startswith("3."): 290 | try: 291 | return (1, int(self.tag[2:])) 292 | except ValueError: 293 | return (2, 99) 294 | return (0, 0) 295 | 296 | @cached_property 297 | def title(self): 298 | if self.tag == '3.x': 299 | return 'main' 300 | return self.tag 301 | 302 | @cached_sorted_property() 303 | def problems(self): 304 | problems = [] 305 | for builder in self._root.builders: 306 | if builder.branch == self: 307 | problems.extend(builder.problems) 308 | return problems 309 | 310 | @cached_property 311 | def featured_problem(self): 312 | try: 313 | return self.problems[0] 314 | except IndexError: 315 | return NoProblem() 316 | 317 | def get_grouped_problems(self): 318 | def key(problem): 319 | return problem.description 320 | for d, problems in itertools.groupby(self.problems, key): 321 | yield d, list(problems) 322 | 323 | 324 | class Tier(_BranchTierBase): 325 | @cached_property 326 | def value(self): 327 | if self.tag.startswith("tier-"): 328 | try: 329 | return int(self.tag[5:]) 330 | except ValueError: 331 | pass 332 | return 99 333 | 334 | @cached_property 335 | def title(self): 336 | return self.tag.title() 337 | 338 | 
@cached_property 339 | def sort_key(self): 340 | return self.value 341 | 342 | @cached_property 343 | def is_release_blocking(self): 344 | return self.value in {1, 2} 345 | 346 | 347 | class Build(DashboardObject): 348 | @cached_property 349 | def builder(self): 350 | assert self._parent["builderid"] == self["builderid"] 351 | return self._parent 352 | 353 | @cached_property 354 | def changes(self): 355 | infos = self.dataGet( 356 | ("builds", self["buildid"], "changes"), 357 | limit=MAX_CHANGES, 358 | ) 359 | if len(infos) == MAX_CHANGES: 360 | # Buildbot lists changes since the last *successful* build, 361 | # so in a failing streak the list can get very big. 362 | # When this happens, it's probably better to pretend we don't have 363 | # any info (which we'll also get when information is 364 | # scrubbed after some months) 365 | return [] 366 | return [Change(self, info) for info in infos] 367 | 368 | @cached_property 369 | def started_at(self): 370 | started_at = self["started_at"] 371 | if isinstance(started_at, datetime.datetime): 372 | return started_at 373 | if started_at: 374 | return datetime.datetime.fromtimestamp(started_at, 375 | tz=datetime.timezone.utc) 376 | 377 | @cached_property 378 | def age(self): 379 | if self["started_at"]: 380 | return self._root.now - self.started_at 381 | 382 | @cached_property 383 | def results_symbol(self): 384 | if self["results"] == buildbot.process.results.FAILURE: 385 | return '\N{HEAVY BALLOT X}' 386 | if self["results"] == buildbot.process.results.WARNINGS: 387 | return '\N{WARNING SIGN}' 388 | if self["results"] == buildbot.process.results.SUCCESS: 389 | return '\N{HEAVY CHECK MARK}' 390 | if self["results"] == buildbot.process.results.SKIPPED: 391 | return '\N{CIRCLED MINUS}' 392 | if self["results"] == buildbot.process.results.EXCEPTION: 393 | return '\N{CIRCLED DIVISION SLASH}' 394 | if self["results"] == buildbot.process.results.RETRY: 395 | return '\N{ANTICLOCKWISE OPEN CIRCLE ARROW}' 396 | if self["results"] 
== buildbot.process.results.CANCELLED: 397 | return '\N{CIRCLED TIMES}' 398 | return str(self["results"]) 399 | 400 | @cached_property 401 | def results_string(self): 402 | return buildbot.process.results.statusToString(self["results"]) 403 | 404 | @cached_property 405 | def css_color_class(self): 406 | if self["results"] == buildbot.process.results.SUCCESS: 407 | return 'success' 408 | if self["results"] == buildbot.process.results.WARNINGS: 409 | return 'warning' 410 | if self["results"] == buildbot.process.results.FAILURE: 411 | return 'danger' 412 | return 'unknown' 413 | 414 | @cached_property 415 | def junit_results(self): 416 | if not self._root._app.test_result_dir: 417 | return None 418 | 419 | try: 420 | filepath = ( 421 | self._root._app.test_result_dir 422 | / self.builder.branch.tag 423 | / self.builder["name"] 424 | / f'build_{self["number"]}.xml' 425 | ).resolve() 426 | 427 | # Ensure path doesn't escape test_result_dir 428 | if not filepath.is_relative_to(self._root._app.test_result_dir): 429 | return None 430 | 431 | if not filepath.is_file(): 432 | return None 433 | 434 | with filepath.open() as file: 435 | etree = ElementTree.parse(file) 436 | 437 | # We don't have a logger set up, this returns None on common failures 438 | # (meaning failures won't show on the dashboard). 439 | # TODO: set up monitoring and log failures (in the whole method). 
440 | except OSError as e: 441 | return None 442 | except ElementTree.ParseError as e: 443 | return None 444 | 445 | result = JunitResult(self, {}) 446 | for element in etree.iterfind('.//error/..'): 447 | result.add(element) 448 | return result 449 | 450 | @cached_property 451 | def duration(self): 452 | try: 453 | seconds = ( 454 | self["complete_at"] 455 | - self["started_at"] 456 | - self["locks_duration_s"] 457 | ) 458 | except (KeyError, TypeError): 459 | return None 460 | return datetime.timedelta(seconds=seconds) 461 | 462 | 463 | class JunitResult(DashboardObject): 464 | def __init__(self, *args): 465 | super().__init__(*args) 466 | self.contents = {} 467 | self.errors = [] 468 | self.error_types = set() 469 | 470 | def add(self, element): 471 | """Add errors from a XML element. 472 | 473 | JunitResult are arranged in a tree, grouped by test modules, classes 474 | and methods (i.e. dot-separated parts of the test name). 475 | 476 | JunitError instances are added to the lowest level of the tree. 477 | They're deduplicated, because we re-run failing tests and often 478 | get two copies of the same error (with the same traceback). 479 | 480 | Exception type names are added to *all* levels of the tree: 481 | if the details of a test module/class/methods aren't expanded, 482 | the dashboard shows exception types from all the hidden failures. 483 | """ 484 | # Gather all the errors (as dicts), and their exception types 485 | # (as strings), from *element*. 486 | # Usually there's only one error per element. 
487 | errors = [] 488 | error_types = set() 489 | for error_elem in element.iterfind('error'): 490 | new_error = JunitError(self, { 491 | **error_elem.attrib, 492 | 'text': error_elem.text, 493 | }) 494 | errors.append(new_error) 495 | error_types.add(new_error["type"]) 496 | 497 | # Find/add the leaf JunitResult, updating result.error_types for each 498 | # Result along the way 499 | result = self 500 | name_parts = element.attrib.get('name', '??').split('.') 501 | if name_parts[0] == 'test': 502 | name_parts.pop(0) 503 | for part in name_parts: 504 | result.error_types.update(error_types) 505 | result = result.contents.setdefault(part, JunitResult(self, {})) 506 | 507 | # Add error details to the leaf 508 | result.error_types.update(error_types) 509 | for error in errors: 510 | if error not in result.errors: 511 | # De-duplicate, since failing tests are re-run and often fail 512 | # the same way 513 | result.errors.extend(errors) 514 | 515 | 516 | class JunitError(DashboardObject): 517 | def __eq__(self, other): 518 | return self._info == other._info 519 | 520 | 521 | class Change(DashboardObject): 522 | pass 523 | 524 | 525 | class Severity(enum.IntEnum): 526 | # "Headings" and concrete values are all sortable enum items 527 | 528 | NO_PROBLEM = enum.auto() 529 | no_builds_yet = enum.auto() 530 | disconnected_unstable_builder = enum.auto() 531 | unstable_warnings = enum.auto() 532 | unstable_builder_failure = enum.auto() 533 | 534 | TRIVIAL = enum.auto() 535 | stable_warnings = enum.auto() 536 | disconnected_stable_builder = enum.auto() 537 | disconnected_blocking_builder = enum.auto() 538 | 539 | CONCERNING = enum.auto() 540 | nonblocking_failure = enum.auto() 541 | 542 | BLOCKING = enum.auto() 543 | release_blocking_failure = enum.auto() 544 | 545 | @cached_property 546 | def css_color_class(self): 547 | if self >= Severity.BLOCKING: 548 | return 'danger' 549 | if self >= Severity.CONCERNING: 550 | return 'warning' 551 | return 'success' 552 | 553 | 
@cached_property 554 | def symbol(self): 555 | if self >= Severity.BLOCKING: 556 | return '\N{HEAVY BALLOT X}' 557 | if self >= Severity.CONCERNING: 558 | return '\N{WARNING SIGN}' 559 | return '\N{HEAVY CHECK MARK}' 560 | 561 | @cached_property 562 | def releasability(self): 563 | if self >= Severity.BLOCKING: 564 | return 'Unreleasable' 565 | if self >= Severity.CONCERNING: 566 | return 'Concern' 567 | return 'Releasable' 568 | 569 | 570 | class Problem: 571 | def __str__(self): 572 | return self.description 573 | 574 | @cached_property 575 | def order_key(self): 576 | return -self.severity, self.description 577 | 578 | def __eq__(self, other): 579 | return self.order_key == other.order_key 580 | 581 | def __lt__(self, other): 582 | return self.order_key < other.order_key 583 | 584 | @cached_property 585 | def severity(self): 586 | self.severity, self.description = self.get_severity_and_description() 587 | return self.severity 588 | 589 | @cached_property 590 | def description(self): 591 | self.severity, self.description = self.get_severity_and_description() 592 | return self.description 593 | 594 | @property 595 | def affected_builds(self): 596 | return {} 597 | 598 | 599 | @dataclass 600 | class BuildFailure(Problem): 601 | """The most recent build failed""" 602 | latest_build: Build 603 | first_failing_build: 'Build | None' = None 604 | 605 | def get_severity_and_description(self): 606 | if not self.builder.is_stable: 607 | return Severity.unstable_builder_failure, "Unstable build failed" 608 | if self.builder.is_release_blocking: 609 | severity = Severity.release_blocking_failure 610 | else: 611 | severity = Severity.nonblocking_failure 612 | description = f"{self.builder.tier.title} build failed" 613 | return severity, description 614 | 615 | @property 616 | def builder(self): 617 | return self.latest_build.builder 618 | 619 | @cached_property 620 | def affected_builds(self): 621 | result = {"Latest build": self.latest_build} 622 | if 
self.first_failing_build: 623 | result["Breaking build"] = self.first_failing_build 624 | return result 625 | 626 | 627 | @dataclass 628 | class BuildWarning(Problem): 629 | """The most recent build warns""" 630 | build: Build 631 | 632 | def get_severity_and_description(self): 633 | # Description word order is different from BuildFailure, to tell these 634 | # apart at a glance 635 | if not self.builder.is_stable: 636 | return Severity.unstable_warnings, "Warnings from unstable build" 637 | severity = Severity.stable_warnings 638 | description = f"Warnings from {self.builder.tier.title} build" 639 | return severity, description 640 | 641 | @property 642 | def builder(self): 643 | return self.build.builder 644 | 645 | @cached_property 646 | def affected_builds(self): 647 | return {"Warning build": self.build} 648 | 649 | 650 | @dataclass 651 | class NoBuilds(Problem): 652 | """Builder has no finished builds yet""" 653 | builder: Builder 654 | 655 | description = "Builder has no builds" 656 | severity = Severity.no_builds_yet 657 | 658 | 659 | @dataclass 660 | class BuilderDisconnected(Problem): 661 | """Builder has no finished builds yet""" 662 | builder: Builder 663 | 664 | def get_severity_and_description(self): 665 | if not self.builder.is_stable: 666 | severity = Severity.disconnected_unstable_builder 667 | description = "Disconnected unstable builder" 668 | else: 669 | description = f"Disconnected {self.builder.tier.title} builder" 670 | if self.builder.is_release_blocking: 671 | severity = Severity.disconnected_blocking_builder 672 | else: 673 | severity = Severity.disconnected_stable_builder 674 | for build in self.builder.iter_interesting_builds(): 675 | if build.age and build.age < datetime.timedelta(hours=6): 676 | description += ' (with recent build)' 677 | if severity >= Severity.BLOCKING: 678 | severity = Severity.CONCERNING 679 | if severity >= Severity.CONCERNING: 680 | severity = Severity.TRIVIAL 681 | break 682 | return severity, description 683 | 
684 | 685 | class NoProblem(Problem): 686 | """Dummy problem""" 687 | name = "Releasable" 688 | 689 | description = "No problem detected" 690 | severity = Severity.NO_PROBLEM 691 | 692 | 693 | class ReleaseDashboard: 694 | # This doesn't get recreated for every render. 695 | # The Flask app and caches go here. 696 | def __init__(self, test_result_dir=None): 697 | self.flask_app = Flask("test", root_path=os.path.dirname(__file__)) 698 | self.cache = None 699 | 700 | self._refresh_branch_info() 701 | 702 | self.flask_app.jinja_env.add_extension('jinja2.ext.loopcontrols') 703 | self.flask_app.jinja_env.undefined = jinja2.StrictUndefined 704 | 705 | self.test_result_dir = Path(test_result_dir).resolve() 706 | 707 | @self.flask_app.route('/') 708 | @self.flask_app.route("/index.html") 709 | def main(): 710 | force_refresh = request.args.get("refresh", "").lower() in {"1", "yes", "true"} 711 | 712 | if self.cache is not None and not force_refresh: 713 | result, deadline = self.cache 714 | if time.monotonic() <= deadline: 715 | return result 716 | 717 | try: 718 | self._refresh_branch_info() 719 | except urllib.error.HTTPError: 720 | pass 721 | 722 | result = self.get_release_status() 723 | deadline = time.monotonic() + CACHE_DURATION 724 | self.cache = (result, deadline) 725 | return result 726 | 727 | @self.flask_app.template_filter('first_line') 728 | def first_line(text): 729 | return text.partition('\n')[0] 730 | 731 | @self.flask_app.template_filter('committer_name') 732 | def committer_name(text): 733 | return text.partition(' <')[0] 734 | 735 | @self.flask_app.template_filter('format_datetime') 736 | def format_timestamp(dt): 737 | now = datetime.datetime.now(tz=datetime.timezone.utc) 738 | ago = humanize.naturaldelta(now - dt) 739 | return f'{dt:%Y-%m-%d %H:%M:%S}, {ago} ago' 740 | 741 | @self.flask_app.template_filter('format_timedelta') 742 | def format_timedelta(delta): 743 | return humanize.naturaldelta(delta) 744 | 745 | 
@self.flask_app.template_filter('short_rm_name') 746 | def short_rm_name(full_name): 747 | # DEBT: this assumes the first word of a release manager's name 748 | # is a good way to call them. 749 | # When that's no longer true we should put a name in the data. 750 | return full_name.split()[0] 751 | 752 | def _refresh_branch_info(self): 753 | with urllib.request.urlopen(BRANCHES_URL) as file: 754 | self.branch_info = json.load(file) 755 | 756 | def get_release_status(self): 757 | state = DashboardState(self) 758 | 759 | return render_template( 760 | "releasedashboard.html", 761 | state=state, 762 | Severity=Severity, 763 | generated_at=state.now, 764 | ) 765 | 766 | def get_release_status_app(buildernames=None, **kwargs): 767 | return ReleaseDashboard(**kwargs).flask_app 768 | -------------------------------------------------------------------------------- /master/custom/schedulers.py: -------------------------------------------------------------------------------- 1 | import re 2 | from buildbot.schedulers.basic import AnyBranchScheduler 3 | from twisted.internet import defer 4 | from twisted.python import log 5 | 6 | 7 | class GitHubPrScheduler(AnyBranchScheduler): 8 | def __init__(self, *args, stable_builder_names, **kwargs): 9 | super().__init__(*args, **kwargs) 10 | self.stable_builder_names = stable_builder_names 11 | 12 | @defer.inlineCallbacks 13 | def addBuildsetForChanges(self, **kwargs): 14 | log.msg("Preparing buildset for PR changes") 15 | changeids = kwargs.get("changeids") 16 | if changeids is None or len(changeids) == 0: 17 | log.msg("No changeids found") 18 | yield super().addBuildsetForChanges(**kwargs) 19 | return 20 | 21 | # It is possible that we get multiple changeids if there are multiple 22 | # requests being made in quick succession. All these changeids will 23 | # have the same properties, so we can just pick the first one. 
24 | changeid = changeids[0] 25 | change = yield self.master.db.changes.getChange(changeid) 26 | 27 | builder_filter = change.properties.get("builderfilter", None) 28 | event = change.properties.get("event", None) 29 | if event: 30 | # looks like `("issue_comment", "Change")` for a comment 31 | event, _ = event 32 | builder_names = kwargs.get("builderNames", self.builderNames) 33 | if builder_filter and builder_names: 34 | # allow unstable builders only for comment-based trigger 35 | if event != "issue_comment": 36 | builder_names = [ 37 | builder_name 38 | for builder_name in builder_names 39 | if builder_name in self.stable_builder_names 40 | ] 41 | log.msg(f"Considering only stable builders: {builder_names}") 42 | # looks like `("", "Change")` 43 | builder_filter, _ = builder_filter 44 | log.msg(f"Found builder filter: {builder_filter}") 45 | matcher = re.compile(builder_filter, re.IGNORECASE) 46 | builder_names = [ 47 | builder_name 48 | for builder_name in builder_names 49 | if matcher.search(builder_name) 50 | ] 51 | if builder_names: 52 | log.msg(f"Builder names filtered: {builder_names}") 53 | kwargs.update(builderNames=builder_names) 54 | yield super().addBuildsetForChanges(**kwargs) 55 | else: 56 | log.msg("No matching builders after filtering - breaking out") 57 | return 58 | 59 | log.msg("Scheduling regular non-filtered buildset") 60 | yield super().addBuildsetForChanges(**kwargs) 61 | return 62 | -------------------------------------------------------------------------------- /master/custom/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | import secrets 3 | 4 | import yaml 5 | from twisted.python import log 6 | 7 | 8 | DEFAULTS = dict( 9 | web_port=9011, 10 | worker_port=9021, 11 | irc_notice=False, 12 | irc_host="irc.libera.chat", 13 | irc_channel="#python-dev-notifs", 14 | irc_nick="py-bb-test", 15 | buildbot_url="http://localhost:9011/", 16 | db_url="sqlite:///state.sqlite", 17 | 
do_auth=False, 18 | send_mail=False, 19 | status_email="example@example.org", 20 | email_relay_host="mail.example.org", 21 | from_email="buildbot@example.org", 22 | verbosity=1, 23 | git_url="https://github.com/python/cpython", 24 | use_local_worker=False, 25 | discord_webook="https://discordapp.com/api/webhooks/", 26 | ) 27 | 28 | 29 | class Settings: 30 | 31 | value = ... 32 | path = None 33 | 34 | def __init__(self, value=..., path=None): 35 | if value is not ...: 36 | self.value = value 37 | self.path = path or [] 38 | 39 | @classmethod 40 | def from_file(cls, filename): 41 | with open(filename) as f: 42 | return cls(yaml.full_load(f), path=[]) 43 | 44 | def __getitem__(self, key): 45 | path_key = real_key = key 46 | if isinstance(key, type(self)): 47 | path_key = real_key = key.value 48 | if real_key is ...: 49 | real_key = "unknown" 50 | path_key = key.path 51 | new_path = self.path + [path_key] 52 | if self.value is not ... and real_key in self.value: 53 | value = self.value[real_key] 54 | if isinstance(value, (list, dict)): 55 | return type(self)(value, path=new_path) 56 | return value 57 | return type(self)(path=new_path) 58 | 59 | __getattr__ = __getitem__ 60 | 61 | def get(self, key, default=...): 62 | if self.value is not ...: 63 | value = self.value.get(key, default) 64 | if value is not ...: 65 | return value 66 | if default is not ...: 67 | return default 68 | return type(self)(path=self.path + [key]) 69 | 70 | def _convert(self, func, default): 71 | if self.value is not ...: 72 | return func(self.value) 73 | default = DEFAULTS.get(".".join(map(str, self.path)), default) 74 | if not os.getenv("CI"): 75 | # Note: We use log.err to make this show up during `checkconfig` 76 | log.err(f"WARNING: No setting at {self.path}, returning {default}") 77 | return func(default) 78 | 79 | def __int__(self): 80 | return self._convert(int, 1) 81 | 82 | def __str__(self): 83 | return self._convert(str, secrets.token_urlsafe(8)) 84 | 85 | def __bool__(self): 86 | 
return self._convert(bool, False)
--------------------------------------------------------------------------------
/master/custom/static/dashboard.css:
--------------------------------------------------------------------------------
.release_status {
    background-color: white;
    border-radius: 10px;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
    padding: 20px;
    margin-top: 20px;

    width: 90%;
    max-width: 1200px;
    margin: 20px auto 0;
    font-family: 'Arial', sans-serif;

    .header {
        text-align: center;
        margin-bottom: 30px;
    }

    .header img {
        max-width: 100%;
        height: auto;
        animation: fadeIn 1s ease-out;
    }

    h1, h2 {
        color: var(--primary-color);
    }

    h1 {
        font-size: 2.5em;
        margin-top: 20px;
        margin-bottom: 2rem;
    }

    h2 {
        font-size: 2em;
        padding-bottom: 10px;
        margin-top: 40px;
    }

    section {
        padding-left: 1.5rem;
    }
    h3, h4, h5, h6 {
        section > & {
            margin-left: -1.5rem;
        }
    }

    .branch-panels {
        display: grid;
        gap: 1rem;
        grid-template-columns: repeat(auto-fit, minmax(9rem, 1fr));
        align-items: stretch;
        justify-content: center;
    }
    .branch-panel {
        cursor: pointer;
        border-radius: 8px;
        overflow: hidden;
        box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
        transition: transform 0.3s ease, box-shadow 0.3s ease;
        height: 100%;
        min-width: 9rem;
        max-width: 13rem;

        display: flex;
        flex-direction: column;
        * { flex-grow: 0; }

        &:hover {
            transform: translateY(-5px);
            box-shadow: 0 4px 8px rgba(0, 0, 0, 0.2);
        }
        .releasability {
            font-weight: bold;
            /* FIX: was `-color: var(--text-color);` — an invalid property
               name that browsers silently ignored. */
            color: var(--text-color);
            .panel-success & {
                color: var(--success-color);
            }
            .panel-warning & {
                color: var(--warning-color);
            }
            .panel-danger & {
                color: var(--danger-color);
            }
        }

        .panel-heading {
            padding: 15px 15px 15px;
            font-weight: bold;
            text-align: center;
            box-shadow: rgba(0, 0, 0, 0.15) 0px -5px 3px -2px inset;
            color: white;

            h3 {
                font-size: 3rem;
                padding: 0;
                margin: 0;
            }

            .panel-success & {
                background-color: var(--success-color);
            }
            .panel-warning & {
                background-color: var(--warning-color-bg);
            }
            .panel-danger & {
                background-color: var(--danger-color);
            }
        }

        .panel-body {
            padding: 10px;
            background-color: white;
            text-align: center;
            flex-grow: 2;
        }

        .panel-footer {
            background-color: var(--pill-bg-color);
            padding: 10px 15px 7px;
            text-align: center;
            font-size: 75%;
            box-shadow: rgba(0, 0, 0, 0.15) 0px 5px 3px -2px inset;
            line-height: 1.5;
        }
    }

    section.branch-status {
        --status-color: var(--background-color);
        border-radius: 20px;
        /* Plain shadow kept first as a fallback for browsers without color-mix(). */
        box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
        box-shadow: 0 2px 4px color-mix(in srgb, var(--status-color) 20%, color-mix(in srgb, var(--background-color) 50%, transparent));
        border-left: 2px solid var(--status-color);
        overflow: clip;
        h3 {
            padding-left: 1rem;
            background-color: var(--status-color);
            color: var(--background-color);
            position: sticky;
            top: 0;
            z-index: 9;
        }
        margin-bottom: 1rem;
        h4.tier-name {
            font-weight: normal;
            font-size: 100%;
        }
        & > section {
            padding-bottom: 0.5rem;
        }
        h5 {
            margin-bottom: 0;
        }
        &:empty {
            padding-bottom: 0;
            border-color: transparent;
            h3 {
                border-radius: 20px;
            }
        }

        &.status-success { --status-color: var(--success-color); }
        &.status-warning { --status-color: var(--warning-color); }
        &.status-danger { --status-color: var(--danger-color); }
        .build-dots {
            white-space: nowrap;
            overflow: hidden;
        }
        .build-dot {
            display: inline-block;
            margin: 0px;
            width: 1rem;
            height: 1rem;
            background-color: var(--pill-bg-color);
            color: var(--pill-text-color);
            font-size: .75rem;
            line-height: 1rem;
            border-radius: .5rem;
            padding: 0 0 .4rem;
            text-align: center;
            vertical-align: .125rem;
            &.build-results-success { background-color: var(--success-color); color: white; }
            &.build-results-warning { background-color: var(--warning-color-bg); }
            &.build-results-danger { background-color: var(--danger-color); color: white; }
        }
        .tag {
            display: inline-block;
            background-color: var(--pill-bg-color);
            color: var(--pill-text-color);
            font-size: .7rem;
            border-radius: .5rem;
            padding: .2em .5em .3em;
        }
        .junit-result {
            > .junit-result {
                margin-left: 2rem;
            }
            &[open] > summary .exception-summary {
                display: none;
            }
            .exception-summary code:before {
                content: ' 🞬 ';
                color: var(--danger-color);
                font-family: var(--font-family-sans-serif);
            }
        }
        .junit-error {
            margin-left: 2rem;
            summary:before {
                content: '🞬 ';
                color: var(--danger-color);
            }
            pre {
                padding: .5rem;
                border-radius: .25rem;
                margin-right: 1rem;
                background-color: var(--pill-bg-color);
                border: 1px solid rgba(0, 0, 0, .2);
                box-shadow: rgba(0, 0, 0, .2) 0px 1px 2px -1px inset;
            }
        }
    }
}

:root {
    --primary-color: #306998;
    --secondary-color: #FFD43B;
    --background-color: #f4f4f4;
    --text-color: #333;
    --success-color: #4CAF60;
    --warning-color: #C2870F;
    --warning-color-bg: #E4B615; /* nicer "traffic light" amber , but not enough contrast for text */
    --danger-color: #f44336;
    --pill-bg-color: #e0e0e0;
--pill-text-color: #333; 237 | --pill-hover-bg-color: #306998; 238 | --pill-hover-text-color: #ffffff; 239 | } 240 | 241 | body { 242 | font-family: 'Arial', sans-serif; 243 | line-height: 1.6; 244 | color: var(--text-color); 245 | background-color: var(--background-color); 246 | margin: 0; 247 | padding: 0; 248 | transition: all 0.3s ease; 249 | scroll-behavior: smooth; 250 | } 251 | 252 | code { 253 | color: inherit; 254 | } 255 | -------------------------------------------------------------------------------- /master/custom/steps.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from buildbot.plugins import steps, util 4 | from buildbot.process.results import SUCCESS, WARNINGS 5 | from buildbot.steps.shell import ShellCommand, Test as BaseTest 6 | from buildbot.steps.source.git import Git as _Git 7 | from buildbot.steps.source.github import GitHub as _GitHub 8 | 9 | from . import JUNIT_FILENAME 10 | 11 | 12 | class Git(_Git): 13 | # GH-68: If "git clone" fails, mark the whole build as WARNING 14 | # (warnOnFailure), not as "FAILURE" (flunkOnFailure) 15 | haltOnFailure = True 16 | flunkOnFailure = False 17 | warnOnFailure = True 18 | 19 | 20 | class GitHub(_GitHub): 21 | # GH-68: If "git clone" fails, mark the whole build as WARNING 22 | # (warnOnFailure), not as "FAILURE" (flunkOnFailure) 23 | haltOnFailure = True 24 | flunkOnFailure = False 25 | warnOnFailure = True 26 | 27 | 28 | class Test(BaseTest): 29 | # Regular expression used to catch warnings, errors and bugs 30 | warningPattern = ( 31 | # regrtest saved_test_environment warning: 32 | # Warning -- files was modified by test_distutils 33 | # test.support @reap_threads: 34 | # Warning -- threading_cleanup() failed to cleanup ... 
35 | r"Warning -- ", 36 | 37 | # Py_FatalError() call 38 | r"Fatal Python error:", 39 | 40 | # PyErr_WriteUnraisable() exception: usually, error in 41 | # garbage collector or destructor 42 | r"Exception ignored in:", 43 | 44 | # faulthandler_exc_handler(): Windows exception handler installed with 45 | # AddVectoredExceptionHandler() by faulthandler.enable() 46 | r"Windows fatal exception:", 47 | 48 | # Resource warning: unclosed file, socket, etc. 49 | # NOTE: match the "ResourceWarning" anywhere, not only at the start 50 | r"ResourceWarning", 51 | 52 | # regrtest: At least one test failed. Log a warning even if the test 53 | # passed on the second try, to notify that a test is unstable. 54 | r"Re-running failed tests in verbose mode", 55 | 56 | # Re-running test 'test_multiprocessing_fork' in verbose mode 57 | r"Re-running test.* in verbose mode", 58 | 59 | # Thread last resort exception handler in t_bootstrap() 60 | r"Unhandled exception in thread started by ", 61 | 62 | # test_os leaked [6, 6, 6] memory blocks, sum=18, 63 | r"test_[^ ]+ leaked ", 64 | 65 | # FAIL: test_stdin_broken_pipe (test.test_asyncio...) 66 | r"FAIL: ", 67 | 68 | # ERROR: test_pipe_handle (test.test_asyncio...) 69 | r"ERROR: ", 70 | 71 | # test.* ... unexpected success 72 | r"unexpected success", 73 | 74 | # Kill worker process 15215 running for 1350.1 sec 75 | r"Kill worker process ", 76 | 77 | # test test_ssl failed -- multiple errors occurred; run in verbose mode for details 78 | r"test .* failed -- multiple errors occurred; run in verbose mode for details", 79 | 80 | # OSError: [Errno 28] No space left on device: ... 81 | r'No space left on device', 82 | ) 83 | # Use ".*" prefix to search the regex anywhere since stdout is mixed 84 | # with stderr, so warnings are not always written at the start 85 | # of a line. 
The log consumer calls warningPattern.match(line) 86 | warningPattern = r".*(?:%s)" % "|".join(warningPattern) 87 | warningPattern = re.compile(warningPattern) 88 | 89 | # if tests have warnings, mark the overall build as WARNINGS (orange) 90 | warnOnWarnings = True 91 | 92 | # 4 hours should be enough even for refleak builds. In practice, 93 | # faulthandler kills worker processes with a way shorter timeout 94 | # (regrtest --timeout parameter). 95 | maxTime = 4 * 60 * 60 96 | # Give SIGTERM 30 seconds to shut things down before SIGKILL. 97 | sigtermTime = 30 98 | 99 | # Treat "regrtest --fail-rerun" exit code (5) as WARNINGS 100 | # https://github.com/python/cpython/issues/108834 101 | decodeRC = { 102 | 0: SUCCESS, 103 | 104 | # Treat --fail-rerun exit code (5) to WARNINGS, when a test failed but 105 | # passed when run again in verbose mode in a fresh process (unstable 106 | # test). 107 | 5: WARNINGS, # EXITCODE_RERUN_FAIL 108 | 109 | # Any exit code not present in the dictionary is treated as FAILURE. 110 | # So there is no need to map each regrtest exit code to FAILURE. 
111 | # 112 | # 2: FAILURE, # EXITCODE_BAD_TEST 113 | # 3: FAILURE, # EXITCODE_ENV_CHANGED 114 | # 4: FAILURE, # EXITCODE_NO_TESTS_RAN 115 | # 130: FAILURE, # EXITCODE_INTERRUPTED 116 | } 117 | 118 | def evaluateCommand(self, cmd): 119 | if cmd.didFail(): 120 | self.setProperty("test_failed_to_build", True) 121 | return super().evaluateCommand(cmd) 122 | 123 | 124 | class Clean(ShellCommand): 125 | name = "clean" 126 | flunkOnFailure = False 127 | warnOnFailure = True 128 | description = ["cleaning"] 129 | descriptionDone = ["clean"] 130 | command = ["make", "distclean"] 131 | alwaysRun = True 132 | 133 | 134 | class CleanupTest(ShellCommand): 135 | name = "cleantest" 136 | description = ["cleaning previous tests"] 137 | descriptionDone = ["clean previous tests"] 138 | flunkOnFailure = False 139 | warnOnFailure = True 140 | 141 | 142 | class Install(ShellCommand): 143 | name = "install" 144 | description = ["installing"] 145 | descriptionDone = ["Installed"] 146 | command = ["make", "install"] 147 | haltOnFailure = True 148 | 149 | 150 | class LockInstall(ShellCommand): 151 | name = "chmod" 152 | description = ["changing permissions"] 153 | descriptionDone = ["made install dir unwritable"] 154 | command = ["chmod", "-R", "-w", "target/"] 155 | 156 | 157 | class Uninstall(ShellCommand): 158 | name = "uninstall" 159 | description = ["uninstalling"] 160 | descriptionDone = ["Uninstalled"] 161 | command = "chmod -R +w target/ && rm -rf target/" 162 | alwaysRun = True 163 | usePTY = False 164 | # GH-68: For "Install" builbot workers, when "git clone" fails, the 165 | # uninstall step fails since the target/ directory doesn't exist. In this 166 | # case, only mark the build as WARNING (warnOnFailure), instead of 167 | # "FAILURE" (flunkOnFailure). 
168 | warnOnFailure = True 169 | 170 | 171 | class UploadTestResults(steps.FileUpload): 172 | warnOnFailure = True 173 | haltOnFailure = False 174 | flunkOnFailure = False 175 | alwaysRun = True 176 | 177 | def _has_the_build_failed(self, build): 178 | return self.getProperty("test_failed_to_build") 179 | 180 | def __init__(self, branch, filename=JUNIT_FILENAME): 181 | super().__init__( 182 | doStepIf=self._has_the_build_failed, 183 | workersrc=filename, 184 | masterdest=util.Interpolate( 185 | f"/data/www/buildbot/test-results/{branch}/%(prop:buildername)s/build_%(prop:buildnumber)s.xml" 186 | ), 187 | mode=0o755, 188 | ) 189 | -------------------------------------------------------------------------------- /master/custom/templates/releasedashboard.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Python Branch Release Status Dashboard 7 | 8 | 9 | 10 | 11 | {% macro build_dot(build) -%} 12 | 17 | {{ build.results_symbol }} 18 | 19 | {% endmacro -%} 20 | {%- macro build_summary(build) -%} 21 | #{{- build.number -}} 22 | {{- ' ' -}} 23 | ( 24 | {{- build.results_string -}}, 25 | {{- ' ' -}} 26 | {{- build.started_at | format_datetime -}} 27 | {%- if build.duration -%} 28 | ; took {{ build.duration | format_timedelta -}} 29 | {%- endif -%} 30 | ) 31 | {%- endmacro -%} 32 | {% macro build_info(build) -%} 33 | {{ build_dot(build) }} 34 | {{ build_summary(build) }} 35 | {% if build.builder.is_stable %} 36 | {% if build.junit_results %} 37 | {% for name, result in build.junit_results.contents.items() %} 38 | {{ junit_result(build.junit_results, name, toplevel=True) }} 39 | {% endfor %} 40 | {% endif %} 41 | {% if build.changes %} 42 |
43 | 44 | {{ build.changes|length }} 45 | change{% if build.changes|length != 1 %}s{% endif %} 46 | 47 |
    48 | {% for change in build.changes %} 49 |
  • 50 | 51 | {{ change.comments | first_line }} 52 | 53 | by {{ change.author | committer_name }} 54 | {% if change.files %} 55 |
    56 | 57 | {{ change.files|length }} 58 | file{% if change.files|length != 1 %}s{% endif %} 59 | changed 60 | 61 |
      62 | {% for file in change.files %} 63 |
    • 64 | {{ file }} 65 |
    • 66 | {% endfor %} 67 |
    68 |
    69 | {% endif %} 70 |
  • 71 | {% endfor -%} 72 |
73 |
74 | {% endif -%} 75 | {% endif -%} 76 | {% endmacro -%} 77 | {% macro junit_result(result, name, toplevel=False) %} 78 | {% if (result.contents | length == 1) and (not result.errors) %} 79 | {% for cname, child in result.contents.items() %} 80 | {{ junit_result( 81 | child, 82 | name + ('.' if name else '') + cname, 83 | toplevel=toplevel, 84 | ) }} 85 | {% endfor %} 86 | {% else %} 87 |
88 | 89 | {{ name }} 90 | 91 | ( 92 | {%- for type in result.error_types | sort -%} 93 | {%- if loop.index > 3 -%} 94 | ... 95 | {% break %} 96 | {%- endif -%} 97 | {{ type }} 98 | {%- if not loop.last %}, {% endif -%} 99 | {%- endfor -%} 100 | ) 101 | 102 | 103 | {% for error in result.errors %} 104 |
105 | {{ error.type }} 106 |
{{ error.text }}
107 |
108 | {% endfor %} 109 | {% for cname, child in result.contents.items() %} 110 | {{ junit_result(child, cname) }} 111 | {% endfor %} 112 |
113 | {% endif %} 114 | {% endmacro %} 115 | 116 |
117 |
118 |

Python Release Status Dashboard

119 |
120 | 121 |
122 | {% for branch in state.branches %} 123 | {% if branch.tag == 'no-branch' %} 124 | {% continue %} 125 | {% endif %} 126 |
131 |
132 |

133 | {{ branch.version }} 134 |

135 |
136 |
137 |
138 | 139 | {{ branch.featured_problem.severity.symbol }} 140 | 141 | {{ branch.featured_problem.severity.releasability }} 142 |
143 | {{ branch.featured_problem }} 144 |
145 | 165 |
166 | {% endfor %} 167 |
168 | 169 |

Problems by Branch

170 | 171 | {% for branch in state.branches %} 172 |
173 |

174 | {{ branch.title }} 175 | {% if branch.version is defined and branch.version != branch.title %} 176 | ({{ branch.version }}) 177 | {% endif %} 178 |

179 | {% for description, problems in branch.get_grouped_problems() %} 180 |
Severity.TRIVIAL %}open{% endif %}> 181 | 182 | {{ description }} ({{ problems|length }}) 183 | 184 |
185 | {% for problem in problems %} 186 |
187 | {% if problem.builder is defined %} 188 | {% set builder = problem.builder %} 189 |
190 | 191 | {{ builder.name -}} 192 | 193 | {% for tag in builder.tags %} 194 | 195 | {{ tag }} 196 | 197 | {% endfor %} 198 |
199 | {% if not builder.connected_workers %} 200 |
201 | Disconnected! 🔌 202 | {% if builder.builds %} 203 | Last build 204 | {{ builder.builds[0].started_at | format_datetime }} 205 | {% endif %} 206 |
207 | {% endif %} 208 |
209 | {% for build in builder.builds[:70] %} 210 | {{ build_dot(build) }} 211 | {% endfor %} 212 |
213 | {% for label, build in problem.affected_builds.items() %} 214 |
215 | {{ label }}: {{ build_info(build) }} 216 |
217 | {% endfor %} 218 | {% else %} 219 | {{ problem }} 220 | {% endif %} 221 |
222 | {% endfor %} 223 |
224 |
225 | {% endfor %} 226 |
227 | {% endfor %} 228 | 229 |
230 | Generated at 231 |
232 |
233 | 238 | 239 | 240 | 241 | -------------------------------------------------------------------------------- /master/custom/testsuite_utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | TESTS_STEP = "test" 4 | 5 | TRACEBACK_REGEX = re.compile( 6 | r""" 7 | Traceback # Lines containing "Traceback" 8 | [\s\S]+? # Match greedy any text (preserving ASCII flags). 9 | (?=^(?:\d|test|\Z|\n|ok)) # Stop matching in lines starting with 10 | # a number (log time), "test" or the end 11 | # of the string. 12 | """, 13 | re.MULTILINE | re.VERBOSE, 14 | ) 15 | 16 | LEAKS_REGEX = re.compile(r"(test_\w+) leaked \[.*] (.*),.*", re.MULTILINE) 17 | 18 | 19 | class Logs: 20 | def __init__(self, raw_logs): 21 | self._logs = raw_logs 22 | 23 | @property 24 | def raw_logs(self): 25 | return self._logs 26 | 27 | def _get_test_results(self, header): 28 | test_regexp = re.compile( 29 | rf""" 30 | ^\d+\s{header}: # Lines starting with "header" 31 | [\s\S]+? # Match greedy any text (preserving ASCII flags). 32 | (?=^(?:\d|test|\Z|Total)) # Stop matching in lines starting with 33 | # a number (log time), "test" or the end 34 | # of the string. 
35 | """, 36 | re.MULTILINE | re.VERBOSE, 37 | ) 38 | 39 | failed_blocks = list(set(test_regexp.findall(self._logs))) 40 | if not failed_blocks: 41 | return set() 42 | # Pick the last re-run of the test 43 | block = failed_blocks[-1] 44 | tests = [] 45 | for line in block.split("\n")[1:]: 46 | if not line: 47 | continue 48 | test_names = line.split(" ") 49 | tests.extend(test for test in test_names if test) 50 | return set(tests) 51 | 52 | def get_tracebacks(self): 53 | yield from set(TRACEBACK_REGEX.findall(self._logs)) 54 | 55 | def get_leaks(self): 56 | for test_name, resource in set(LEAKS_REGEX.findall(self._logs)): 57 | yield test_name, resource 58 | 59 | def get_failed_tests(self): 60 | yield from set(self._get_test_results(r"tests?\sfailed")) 61 | 62 | def get_rerun_tests(self): 63 | yield from set(self._get_test_results(r"re-run\stests?")) 64 | 65 | def get_failed_subtests(self): 66 | failed_subtest_regexp = re.compile( 67 | r"=+" # Decoration prefix 68 | r"\n[A-Z]+:" # Test result (e.g. FAIL:) 69 | r"\s(\w+)\s" # test name (e.g. test_tools) 70 | r"\((.*?)\)" # subtest name (e.g. test.test_tools.test_unparse.DirectoryTestCase) 71 | r".*" # Trailing text (e.g. 
filename) 72 | r"\n*" # End of the line 73 | r".*" # Maybe some test description 74 | r"-+", # Trailing decoration 75 | re.MULTILINE | re.VERBOSE, 76 | ) 77 | for test, subtest in set(failed_subtest_regexp.findall(self._logs)): 78 | yield test, subtest 79 | 80 | def test_summary(self): 81 | result_start = [ 82 | match.start() for match in re.finditer("== Tests result", self._logs) 83 | ] 84 | if not result_start: 85 | return "" 86 | result_start = result_start[-1] 87 | result_end = [ 88 | match.start() for match in re.finditer("Tests result:", self._logs) 89 | ] 90 | if not result_end: 91 | return "" 92 | result_end = result_end[-1] 93 | return self._logs[result_start:result_end] 94 | 95 | def format_failing_tests(self): 96 | 97 | text = [] 98 | failed = list(self.get_failed_tests()) 99 | if failed: 100 | text.append("Failed tests:\n") 101 | text.extend([f"- {test_name}" for test_name in failed]) 102 | text.append("") 103 | failed_subtests = list(self.get_failed_subtests()) 104 | if failed_subtests: 105 | text.append("Failed subtests:\n") 106 | text.extend([f"- {test} - {subtest}" for test, subtest in failed_subtests]) 107 | text.append("") 108 | leaked = list(self.get_leaks()) 109 | if leaked: 110 | text.append("Test leaking resources:\n") 111 | text.extend( 112 | [f"- {test_name}: {resource}" for test_name, resource in leaked] 113 | ) 114 | text.append("") 115 | return "\n".join(text) 116 | 117 | 118 | def construct_tracebacks_from_build_stderr(build): 119 | for step in build["steps"]: 120 | try: 121 | test_log = step["logs"][0]["content"]["content"] 122 | except IndexError: 123 | continue 124 | test_log = "\n".join( 125 | [line.lstrip("e") for line in test_log.splitlines() if line.startswith("e")] 126 | ) 127 | if not test_log: 128 | continue 129 | yield test_log 130 | 131 | 132 | def get_logs_and_tracebacks_from_build(build): 133 | test_log = "" 134 | try: 135 | test_step = [step for step in build["steps"] if step["name"] == TESTS_STEP][0] 136 | test_log = 
test_step["logs"][0]["content"]["content"] 137 | test_log = "\n".join([line.lstrip("eo") for line in test_log.splitlines()]) 138 | except IndexError: 139 | pass 140 | logs = Logs(test_log) 141 | tracebacks = list(logs.get_tracebacks()) 142 | if not tracebacks: 143 | tracebacks = list(construct_tracebacks_from_build_stderr(build)) 144 | return logs, tracebacks 145 | -------------------------------------------------------------------------------- /master/custom/workers.py: -------------------------------------------------------------------------------- 1 | # -*- python -*- vi:ft=python: 2 | # kate: indent-mode python; hl python; 3 | # vim:set ts=8 sw=4 sts=4 et: 4 | 5 | from functools import partial 6 | 7 | from buildbot.plugins import worker as _worker 8 | 9 | from custom.factories import MAIN_BRANCH_NAME 10 | 11 | 12 | # By default, the buildmaster sends a simple, non-blocking message to each 13 | # worker every hour. These keepalives ensure that traffic is flowing over the 14 | # underlying TCP connection, allowing the system’s network stack to detect any 15 | # problems before a build is started. 16 | # 17 | # The default is 3600 seconds. Use a shorter interval to avoid 18 | # "lost remote step" on the worker side. 
19 | # https://bugs.python.org/issue41642 20 | KEEPALIVE = 60 21 | 22 | 23 | class CPythonWorker: 24 | def __init__( 25 | self, 26 | settings, 27 | name, 28 | tags=None, 29 | branches=None, 30 | not_branches=None, 31 | parallel_builders=None, 32 | parallel_tests=None, 33 | ): 34 | self.name = name 35 | self.tags = tags or set() 36 | self.branches = branches 37 | self.not_branches = not_branches 38 | self.parallel_builders = parallel_builders 39 | self.parallel_tests = parallel_tests 40 | worker_settings = settings.workers[name] 41 | owner = name.split("-")[0] 42 | owner_settings = settings.owners[owner] 43 | pw = worker_settings.get("password", None) or owner_settings.password 44 | owner_email = owner_settings.get("email", None) 45 | emails = [str(owner_email)] if owner_email else [] 46 | if settings.use_local_worker: 47 | self.bb_worker = _worker.LocalWorker(name) 48 | else: 49 | self.bb_worker = _worker.Worker(name, str(pw), 50 | notify_on_missing=emails, 51 | keepalive_interval=KEEPALIVE) 52 | 53 | 54 | def get_workers(settings): 55 | cpw = partial(CPythonWorker, settings) 56 | if settings.use_local_worker: 57 | return [cpw(name="local-worker")] 58 | return [ 59 | cpw( 60 | name="angelico-debian-amd64", 61 | tags=['linux', 'unix', 'debian', 'amd64', 'x86-64'], 62 | ), 63 | cpw( 64 | name="billenstein-macos", 65 | tags=['macOS', 'unix', 'amd64', 'x86-64'], 66 | ), 67 | cpw( 68 | name="bolen-ubuntu", 69 | tags=['linux', 'unix', 'ubuntu', 'amd64', 'x86-64'], 70 | ), 71 | cpw( 72 | name="bolen-windows10", 73 | tags=['windows', 'win10', 'amd64', 'x86-64'], 74 | ), 75 | cpw( 76 | name="cstratak-fedora-rawhide-x86_64", 77 | tags=['linux', 'unix', 'fedora', 'amd64', 'x86-64'], 78 | parallel_tests=10, 79 | ), 80 | cpw( 81 | name="cstratak-fedora-stable-x86_64", 82 | tags=['linux', 'unix', 'fedora', 'amd64', 'x86-64'], 83 | parallel_tests=10, 84 | ), 85 | cpw( 86 | name="cstratak-RHEL8-x86_64", 87 | tags=['linux', 'unix', 'rhel', 'amd64', 'x86-64'], 88 | 
parallel_tests=10, 89 | ), 90 | cpw( 91 | name="cstratak-RHEL8-fips-x86_64", 92 | tags=['linux', 'unix', 'rhel', 'amd64', 'x86-64', 'fips'], 93 | parallel_tests=6, 94 | ), 95 | cpw( 96 | name="cstratak-CentOS9-x86_64", 97 | tags=['linux', 'unix', 'rhel', 'amd64', 'x86-64'], 98 | parallel_tests=6, 99 | ), 100 | cpw( 101 | name="cstratak-CentOS9-fips-x86_64", 102 | tags=['linux', 'unix', 'rhel', 'amd64', 'x86-64', 'fips'], 103 | parallel_tests=6, 104 | ), 105 | cpw( 106 | name="cstratak-fedora-rawhide-ppc64le", 107 | tags=['linux', 'unix', 'fedora', 'ppc64le'], 108 | parallel_tests=10, 109 | ), 110 | cpw( 111 | name="cstratak-fedora-stable-ppc64le", 112 | tags=['linux', 'unix', 'fedora', 'ppc64le'], 113 | parallel_tests=10, 114 | ), 115 | cpw( 116 | name="cstratak-RHEL8-ppc64le", 117 | tags=['linux', 'unix', 'rhel', 'ppc64le'], 118 | parallel_tests=10, 119 | ), 120 | cpw( 121 | name="cstratak-CentOS9-ppc64le", 122 | tags=['linux', 'unix', 'rhel', 'ppc64le'], 123 | parallel_tests=10, 124 | ), 125 | cpw( 126 | name="cstratak-fedora-rawhide-aarch64", 127 | tags=['linux', 'unix', 'fedora', 'arm', 'arm64', 'aarch64'], 128 | parallel_tests=40, 129 | ), 130 | cpw( 131 | name="cstratak-fedora-stable-aarch64", 132 | tags=['linux', 'unix', 'fedora', 'arm', 'arm64', 'aarch64'], 133 | parallel_tests=40, 134 | ), 135 | cpw( 136 | name="cstratak-RHEL8-aarch64", 137 | tags=['linux', 'unix', 'rhel', 'arm', 'arm64', 'aarch64'], 138 | parallel_tests=40, 139 | ), 140 | cpw( 141 | name="cstratak-CentOS9-aarch64", 142 | tags=['linux', 'unix', 'rhel', 'arm', 'arm64', 'aarch64'], 143 | parallel_tests=40, 144 | ), 145 | cpw( 146 | name="diegorusso-aarch64-bigmem", 147 | tags=['linux', 'unix', 'ubuntu', 'arm', 'arm64', 'aarch64', 'bigmem'], 148 | not_branches=['3.9', '3.10', '3.11', '3.12', '3.13'], 149 | parallel_tests=4, 150 | ), 151 | cpw( 152 | name="cstratak-fedora-rawhide-s390x", 153 | tags=['linux', 'unix', 'fedora', 's390x'], 154 | parallel_tests=10, 155 | ), 156 | cpw( 157 | 
name="cstratak-fedora-stable-s390x", 158 | tags=['linux', 'unix', 'fedora', 's390x'], 159 | parallel_tests=10, 160 | ), 161 | cpw( 162 | name="cstratak-rhel8-s390x", 163 | tags=['linux', 'unix', 'rhel', 's390x'], 164 | parallel_tests=10, 165 | ), 166 | cpw( 167 | name="cstratak-rhel9-s390x", 168 | tags=['linux', 'unix', 'rhel', 's390x'], 169 | parallel_tests=10, 170 | ), 171 | cpw( 172 | name="edelsohn-aix-ppc64", 173 | tags=['aix', 'unix', 'ppc64'], 174 | parallel_tests=10, 175 | ), 176 | cpw( 177 | name="gps-raspbian", 178 | tags=['linux', 'unix', 'raspbian', 'debian', 'armv6', 'armv7l', 179 | 'aarch32', 'arm'], 180 | parallel_tests=4, 181 | # Raspbian Debian bullseye ships with 3.9, bookworm with 3.11. 182 | not_branches=['3.7', '3.8'], 183 | ), 184 | cpw( 185 | name="stan-raspbian", 186 | tags=['linux', 'unix', 'raspbian', 'debian', 'armv8', 187 | 'aarch64', 'arm'], 188 | parallel_tests=4, 189 | # Tests fail with latin1 encoding on 3.12, probably earlier 190 | not_branches=['3.12', '3.11', '3.10', '3.9'] 191 | ), 192 | cpw( 193 | name="kulikjak-solaris-sparcv9", 194 | tags=['solaris', 'unix', 'sparc', 'sparcv9'], 195 | parallel_tests=16, 196 | ), 197 | cpw( 198 | name="pablogsal-arch-x86_64", 199 | tags=['linux', 'unix', 'arch', 'amd64', 'x86-64'], 200 | ), 201 | cpw( 202 | name="pablogsal-macos-m1", 203 | tags=['macOS', 'unix', 'arm', 'arm64'], 204 | parallel_tests=4, 205 | ), 206 | cpw( 207 | name="skumaran-ubuntu-x86_64", 208 | tags=['linux', 'unix', 'ubuntu', 'amd64', 'x86-64'], 209 | ), 210 | cpw( 211 | name="ware-alpine", 212 | tags=['linux', 'unix', 'alpine', 'docker', 'amd64', 'x86-64'], 213 | branches=[MAIN_BRANCH_NAME], 214 | ), 215 | cpw( 216 | name="ware-freebsd", 217 | tags=['freebsd', 'bsd', 'unix', 'amd64', 'x86-64'], 218 | parallel_tests=4, 219 | ), 220 | cpw( 221 | name="opsec-fbsd14", 222 | tags=['freebsd', 'bsd', 'unix', 'amd64', 'x86-64'], 223 | parallel_tests=4, 224 | ), 225 | cpw( 226 | name="opsec-fbsd15", 227 | tags=['freebsd', 'bsd', 
'unix', 'amd64', 'x86-64'], 228 | parallel_tests=4, 229 | ), 230 | cpw( 231 | name="ware-debian-x86", 232 | tags=['linux', 'unix', 'debian', 'x86'], 233 | parallel_tests=4, 234 | ), 235 | cpw( 236 | name="ware-win11", 237 | tags=['windows', 'win11', 'amd64', 'x86-64'], 238 | parallel_tests=2, 239 | ), 240 | cpw( 241 | name="linaro-win-arm64", 242 | tags=['windows', 'arm64'], 243 | parallel_tests=4, 244 | ), 245 | cpw( 246 | name="bcannon-wasi", 247 | tags=['wasm', 'wasi'], 248 | not_branches=['3.9', '3.10'], 249 | parallel_tests=2, 250 | parallel_builders=2, 251 | ), 252 | cpw( 253 | name="ambv-bb-win11", 254 | tags=['windows', 'win11', 'amd64', 'x86-64', 'bigmem'], 255 | not_branches=['3.9', '3.10', '3.11', '3.12', '3.13'], 256 | parallel_tests=4, 257 | ), 258 | cpw( 259 | name="itamaro-centos-aws", 260 | tags=['linux', 'unix', 'rhel', 'amd64', 'x86-64'], 261 | not_branches=['3.9', '3.10', '3.11', '3.12'], 262 | parallel_tests=10, 263 | parallel_builders=2, 264 | ), 265 | cpw( 266 | name="itamaro-win64-srv-22-aws", 267 | tags=['windows', 'win-srv-22', 'amd64', 'x86-64'], 268 | not_branches=['3.9', '3.10', '3.11', '3.12'], 269 | parallel_tests=10, 270 | parallel_builders=2, 271 | ), 272 | cpw( 273 | name="itamaro-macos-intel-aws", 274 | tags=['macOS', 'unix', 'amd64', 'x86-64'], 275 | not_branches=['3.9', '3.10', '3.11', '3.12'], 276 | parallel_tests=10, 277 | ), 278 | cpw( 279 | name="itamaro-macos-arm64-aws", 280 | tags=['macOS', 'unix', 'arm', 'arm64'], 281 | not_branches=['3.9', '3.10', '3.11', '3.12'], 282 | parallel_tests=10, 283 | ), 284 | cpw( 285 | name="kushaldas-wasi", 286 | tags=['wasm', 'wasi'], 287 | not_branches=['3.9', '3.10'], 288 | parallel_tests=4, 289 | parallel_builders=2, 290 | ), 291 | cpw( 292 | name="onder-riscv64", 293 | tags=['linux', 'unix', 'ubuntu', 'riscv64'], 294 | not_branches=['3.9', '3.10'], 295 | parallel_tests=4, 296 | ), 297 | cpw( 298 | name="rkm-arm64-ios-simulator", 299 | tags=['iOS'], 300 | not_branches=['3.9', '3.10', 
'3.11', '3.12'], 301 | parallel_builders=4, 302 | ), 303 | cpw( 304 | name="mhsmith-android-aarch64", 305 | tags=['android'], 306 | not_branches=['3.9', '3.10', '3.11', '3.12'], 307 | parallel_builders=1, # All builds use the same emulator and app ID. 308 | ), 309 | cpw( 310 | name="mhsmith-android-x86_64", 311 | tags=['android'], 312 | not_branches=['3.9', '3.10', '3.11', '3.12'], 313 | parallel_builders=1, # All builds use the same emulator and app ID. 314 | ), 315 | ] 316 | -------------------------------------------------------------------------------- /master/master.cfg: -------------------------------------------------------------------------------- 1 | # -*- python -*- vi:ft=python: 2 | # kate: indent-mode python; hl python; 3 | # vim:set ts=8 sw=4 sts=4 et: 4 | 5 | # This is a sample buildmaster config file. It must be installed as 6 | # 'master.cfg' in your buildmaster's base directory (although the filename 7 | # can be changed with the --basedir option to 'mktap buildbot master'). 8 | 9 | # It has one job: define a dictionary named BuildmasterConfig. This 10 | # dictionary has a variety of keys to control different aspects of the 11 | # buildmaster. They are documented in docs/config.xhtml . 
12 | 13 | import os 14 | import subprocess 15 | import sys 16 | 17 | from datetime import datetime, timedelta 18 | from functools import partial 19 | 20 | from buildbot.plugins import reporters, schedulers, util 21 | from buildbot import locks 22 | from twisted.python import log 23 | from twisted.internet import defer 24 | 25 | import sentry_sdk 26 | from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration 27 | 28 | sys.path.append(os.path.dirname(__file__)) 29 | 30 | # Make sure locals are up to date on reconfig 31 | for k in list(sys.modules): 32 | if k.split(".")[0] in ["custom"]: 33 | sys.modules.pop(k) 34 | 35 | from custom import MAIN_BRANCH_NAME # noqa: E402 36 | from custom.auth import set_up_authorization # noqa: E402 37 | from custom.email_formatter import MESSAGE_FORMATTER # noqa: E402 38 | from custom.pr_reporter import GitHubPullRequestReporter # noqa: E402 39 | from custom.discord_reporter import DiscordReporter # noqa: E402 40 | from custom.pr_testing import ( # noqa: E402 41 | CustomGitHubEventHandler, 42 | should_pr_be_tested, 43 | ) 44 | from custom.settings import Settings # noqa: E402 45 | from custom.steps import Git, GitHub # noqa: E402 46 | from custom.workers import get_workers # noqa: E402 47 | from custom.schedulers import GitHubPrScheduler # noqa: E402 48 | from custom.release_dashboard import get_release_status_app # noqa: E402 49 | from custom.builders import ( # noqa: E402 50 | get_builders, 51 | STABLE, 52 | ONLY_MAIN_BRANCH, 53 | ) 54 | 55 | 56 | def set_up_sentry(): 57 | try: 58 | release_sha = subprocess.check_output(["git", "rev-parse", "HEAD"], text=True) 59 | except subprocess.SubprocessError: 60 | log.err('Could not get current SHA for the release!') 61 | return 62 | sentry_dsn = settings.get('sentry_dsn', None) 63 | if sentry_dsn is None: 64 | log.err('The sentry DSN could not be found in the settings!') 65 | return 66 | sentry_sdk.init(dsn=sentry_dsn, release=release_sha, 67 | 
integrations=[SqlalchemyIntegration()]) 68 | 69 | def logToSentry(event): 70 | if not event.get('isError') or 'failure' not in event: 71 | return 72 | 73 | f = event['failure'] 74 | sentry_sdk.capture_exception((f.type, f.value, f.getTracebackObject())) 75 | 76 | log.addObserver(logToSentry) 77 | 78 | 79 | settings_path = os.path.join('/etc', 'buildbot', 'settings.yaml') 80 | settings_path = os.environ.get('PYBUILDBOT_SETTINGS_PATH', settings_path) 81 | 82 | try: 83 | settings = Settings.from_file(settings_path) 84 | set_up_sentry() 85 | except FileNotFoundError: 86 | log.err(f"WARNING: settings file could not be found at {settings_path}") 87 | settings = Settings() 88 | 89 | 90 | WORKERS = get_workers(settings) 91 | WORKERS_BY_NAME = {w.name: w for w in WORKERS} 92 | 93 | BUILDERS = get_builders(settings) 94 | 95 | AUTH, AUTHZ = set_up_authorization(settings) 96 | 97 | # This is the dictionary that the buildmaster pays attention to. We also use 98 | # a shorter alias to save typing. 99 | c = BuildmasterConfig = {} 100 | 101 | c["db_url"] = str(settings.db_url) 102 | 103 | # configure a janitor which will delete all logs older than 6 months, 104 | # and will run on sundays at noon 105 | c["configurators"] = [ 106 | util.JanitorConfigurator( 107 | logHorizon=timedelta(days=180), 108 | hour=12, 109 | dayOfWeek=6, 110 | ) 111 | ] 112 | 113 | # Note: these cache values are not currently tuned in any meaningful way. 114 | # Some are taken straight from the buildbot docs at 115 | # https://docs.buildbot.net/4.2.1/manual/configuration/global.html#caches 116 | # and others are just guesses. For now, they're mostly meant to see if 117 | # there's any appreciable impact on performance or memory usage. 
118 | c["caches"] = { 119 | "Changes": 100, 120 | "Builds": 500, 121 | "chdicts": 100, 122 | "BuildRequests": 100, 123 | "SourceStamps": 200, 124 | "ssdicts": 200, 125 | "objectids": 10, 126 | "usdicts": 100, 127 | } 128 | 129 | # workers are set up in workers.py 130 | c["workers"] = [w.bb_worker for w in WORKERS] 131 | 132 | # repo url, buildbot category name, git branch name 133 | git_url = str(settings.git_url) 134 | git_branches = [ 135 | (git_url, MAIN_BRANCH_NAME, "main"), 136 | (git_url, "3.14", "3.14"), 137 | (git_url, "3.13", "3.13"), 138 | (git_url, "3.12", "3.12"), 139 | (git_url, "3.11", "3.11"), 140 | (git_url, "3.10", "3.10"), 141 | (git_url, "3.9", "3.9"), 142 | ] 143 | 144 | # common Git() and GitHub() keyword arguments 145 | GIT_KWDS = { 146 | "timeout": 3600, 147 | # "git clean -fdx": remove all files which are not tracked by Git, 148 | # ignoring the .gitignore rules (ex: remove also ".o" files). 149 | "mode": "full", 150 | "method": "fresh", 151 | } 152 | 153 | c["builders"] = [] 154 | c["schedulers"] = [] 155 | 156 | parallel = {w.name: f"-j{w.parallel_tests}" for w in WORKERS if w.parallel_tests} 157 | extra_factory_args = { 158 | "cstratak-RHEL8-ppc64le": { 159 | # Increase the timeout on this slow worker 160 | "timeout_factor": 2, 161 | }, 162 | "bcannon-wasi": { 163 | # Increase the timeout on this slow worker 164 | "timeout_factor": 2, 165 | }, 166 | 167 | } 168 | 169 | # The following with the worker owners' agreement 170 | cpulock = locks.WorkerLock( 171 | "cpu", 172 | maxCountForWorker={ 173 | w.name: w.parallel_builders for w in WORKERS if w.parallel_builders 174 | }, 175 | ) 176 | 177 | 178 | def is_important_file(filename): 179 | unimportant_prefixes = ( 180 | ".github/", 181 | ".gitignore", 182 | ".vsts", 183 | "Misc/", 184 | "Doc/", 185 | "Demo/", 186 | ) 187 | unimportant_suffixes = ( 188 | ".md", 189 | ".rst", 190 | ".yml", 191 | ".yaml", 192 | "README", 193 | ) 194 | if filename.lstrip("\\/").startswith(unimportant_prefixes): 
195 | return False 196 | return not filename.endswith(unimportant_suffixes) 197 | 198 | 199 | def is_important_change(change): 200 | return any(is_important_file(filename) for filename in change.files) 201 | 202 | 203 | def is_within_time_range(now, start, end): 204 | if start <= end: 205 | return start <= now <= end 206 | else: 207 | return now >= start or now <= end 208 | 209 | 210 | def get_delay(now, end): 211 | today = datetime.today() 212 | now = datetime.combine(today, now) 213 | end = datetime.combine(today, end) 214 | 215 | if now > end: 216 | end += timedelta(days=1) 217 | 218 | difference = end - now 219 | return difference.total_seconds() 220 | 221 | 222 | # Avoid a build to be started between start and end time and delay such build 223 | # at end time 224 | def no_builds_between(start, end): 225 | start = datetime.strptime(start, "%H:%M").time() 226 | end = datetime.strptime(end, "%H:%M").time() 227 | def canStartBuild(builder, wfb, request): 228 | now = datetime.now().time() 229 | if is_within_time_range(now, start, end): 230 | delay = get_delay(now, end) 231 | # Adapted from: https://docs.buildbot.net/current/manual/customization.html#canstartbuild-functions 232 | wfb.worker.quarantine_timeout = delay 233 | wfb.worker.putInQuarantine() 234 | # This does not take the worker out of quarantine, it only resets 235 | # the timeout value to default (restarting the default 236 | # exponential backoff) 237 | wfb.worker.resetQuarantine() 238 | return False 239 | # Schedule the build now 240 | return True 241 | return canStartBuild 242 | 243 | 244 | github_status_builders = [] 245 | release_status_builders = [] 246 | mail_status_builders = [] 247 | 248 | # Regular builders 249 | 250 | for branch_num, (git_url, branchname, git_branch) in enumerate(git_branches): 251 | buildernames = [] 252 | refleakbuildernames = [] 253 | for name, worker_name, buildfactory, stability, tier in BUILDERS: 254 | if any( 255 | pattern in name for pattern in ONLY_MAIN_BRANCH 256 | ) 
and branchname != MAIN_BRANCH_NAME: 257 | # Workers known to be broken on older branches: let's focus on 258 | # supporting these platforms in the main branch. 259 | continue 260 | # Only 3.9 has two parsers mode from Python 3.9 and higher 261 | if "VintageParser" in name and branchname != "3.9": 262 | continue 263 | 264 | worker = WORKERS_BY_NAME[worker_name] 265 | if worker.not_branches and branchname in worker.not_branches: 266 | continue 267 | if worker.branches and branchname not in worker.branches: 268 | continue 269 | 270 | buildername = name + " " + branchname 271 | source = Git(repourl=git_url, branch=git_branch, **GIT_KWDS) 272 | f = buildfactory( 273 | source, 274 | parallel=parallel.get(worker_name), 275 | branch=branchname, 276 | **extra_factory_args.get(worker_name, {}), 277 | ) 278 | tags = [branchname, stability, *getattr(f, "tags", [])] 279 | if tier: 280 | tags.append(tier) 281 | 282 | # Only 3.11+ for WebAssembly builds 283 | if "wasm" in tags: 284 | # WASM wasn't a supported platform until 3.11. 285 | if branchname in {"3.9", "3.10"}: 286 | continue 287 | # Tier 3 support is 3.11 & 3.12. 288 | elif "nondebug" in tags and branchname not in {"3.11", "3.12"}: 289 | continue 290 | # Tier 2 support is 3.13+. 291 | elif "nondebug" not in tags and branchname in {"3.11", "3.12"}: 292 | continue 293 | 294 | # Only 3.13+ for NoGIL builds 295 | if 'nogil' in tags and branchname in {"3.9", "3.10", "3.11", "3.12"}: 296 | continue 297 | 298 | # Only 3.12+ for FIPS builders 299 | if 'fips' in tags and branchname in {"3.9", "3.10", "3.11"}: 300 | continue 301 | 302 | if 'refleak' in tags: 303 | refleakbuildernames.append(buildername) 304 | else: 305 | buildernames.append(buildername) 306 | # disable notifications for unstable builders 307 | # (all these lists are the same now, but we might need to 308 | # diverge gain later.) 
309 | if stability == STABLE: 310 | mail_status_builders.append(buildername) 311 | github_status_builders.append(buildername) 312 | release_status_builders.append(buildername) 313 | 314 | builder = util.BuilderConfig( 315 | name=buildername, 316 | workernames=[worker_name], 317 | builddir="%s.%s%s" 318 | % (branchname, worker_name, getattr(f, "buildersuffix", "")), 319 | factory=f, 320 | tags=tags, 321 | locks=[cpulock.access("counting")], 322 | ) 323 | 324 | # This worker runs pyperformance at 12am UTC. If a build is scheduled between 325 | # 10pm UTC and 2am UTC, it will be delayed to 2am UTC. 326 | if worker_name == "diegorusso-aarch64-bigmem": 327 | builder.canStartBuild = no_builds_between("22:00", "2:00") 328 | 329 | # This worker restarts every day at 9am UTC to work around issues stemming from 330 | # failing bigmem tests trashing disk space and fragmenting RAM. Builds scheduled 331 | # between 07:20am - 9:20am UTC will be delayed to 9:20am UTC. 332 | if worker_name == "ambv-bb-win11": 333 | builder.canStartBuild = no_builds_between("7:20", "9:20") 334 | 335 | c["builders"].append(builder) 336 | 337 | c["schedulers"].append( 338 | schedulers.SingleBranchScheduler( 339 | name=branchname, 340 | change_filter=util.ChangeFilter(branch=git_branch), 341 | treeStableTimer=30, # seconds 342 | builderNames=buildernames, 343 | fileIsImportant=is_important_change, 344 | ) 345 | ) 346 | if refleakbuildernames: 347 | c["schedulers"].append( 348 | schedulers.SingleBranchScheduler( 349 | name=branchname + "-refleak", 350 | change_filter=util.ChangeFilter(branch=git_branch), 351 | # Wait this many seconds for no commits before starting a build 352 | # NB: During extremely busy times, this can cause the builders 353 | # to never actually fire. The current expectation is that it 354 | # won't ever actually be that busy, but we need to keep an eye 355 | # on that. 
356 | treeStableTimer=1 * 60 * 60, # h * m * s 357 | builderNames=refleakbuildernames, 358 | ) 359 | ) 360 | 361 | 362 | # Set up Pull Request builders 363 | 364 | stable_pull_request_builders = [] 365 | all_pull_request_builders = [] 366 | 367 | for name, worker_name, buildfactory, stability, tier in BUILDERS: 368 | buildername = f"{name} PR" 369 | all_pull_request_builders.append(buildername) 370 | if stability == STABLE: 371 | stable_pull_request_builders.append(buildername) 372 | 373 | source = GitHub(repourl=git_url, **GIT_KWDS) 374 | 375 | f = buildfactory( 376 | source, 377 | parallel=parallel.get(worker_name), 378 | # Use the same downstream branch names as the "custom" 379 | # builder (check what the factories are doing with this 380 | # parameter for more info). 381 | branch="3", 382 | **extra_factory_args.get(worker_name, {}), 383 | ) 384 | 385 | tags = ["PullRequest", stability, *getattr(f, "tags", [])] 386 | if tier: 387 | tags.append(tier) 388 | 389 | builder = util.BuilderConfig( 390 | name=buildername, 391 | workernames=[worker_name], 392 | builddir="%s.%s%s" 393 | % ("pull_request", worker_name, getattr(f, "buildersuffix", "")), 394 | factory=f, 395 | tags=tags, 396 | locks=[cpulock.access("counting")], 397 | ) 398 | 399 | # This worker runs pyperformance at 12am. If a build is scheduled between 400 | # 10pm and 2am, it will be delayed at 2am. 
401 | if worker_name == "diegorusso-aarch64-bigmem": 402 | builder.canStartBuild = no_builds_between("22:00", "2:00") 403 | 404 | c["builders"].append(builder) 405 | 406 | c["schedulers"].append( 407 | GitHubPrScheduler( 408 | name="pull-request-scheduler", 409 | change_filter=util.ChangeFilter(filter_fn=should_pr_be_tested), 410 | treeStableTimer=30, # seconds 411 | builderNames=all_pull_request_builders, 412 | stable_builder_names=set(stable_pull_request_builders), 413 | ) 414 | ) 415 | 416 | 417 | # Set up aditional schedulers 418 | 419 | c["schedulers"].append( 420 | schedulers.ForceScheduler( 421 | name="force", 422 | builderNames=[builder.name for builder in c["builders"]], 423 | reason=util.FixedParameter(name="reason", label="reason", default=""), 424 | codebases=[ 425 | util.CodebaseParameter( 426 | "", 427 | label="CPython repository", 428 | # will generate nothing in the form, but branch, revision, repository, 429 | # and project are needed by buildbot scheduling system so we 430 | # need to pass a value ("") 431 | branch=util.FixedParameter(name="branch", default=""), 432 | revision=util.FixedParameter(name="revision", default=""), 433 | repository=util.FixedParameter(name="repository", default=""), 434 | project=util.FixedParameter(name="project", default=""), 435 | ), 436 | ], 437 | ) 438 | ) 439 | 440 | # 'workerPortnum' defines the TCP port to listen on. 
This must match the value 441 | # configured into the buildworkers (with their --master option) 442 | 443 | c["protocols"] = {"pb": {"port": "tcp:{}".format(settings.worker_port)}} 444 | 445 | # 'www' is the configuration for everything accessible via 446 | # http[s]://buildbot.python.org/all/ 447 | 448 | c["www"] = dict( 449 | port=f"tcp:{int(settings.web_port)}", 450 | auth=AUTH, 451 | authz=AUTHZ, 452 | change_hook_dialects={ 453 | "github": { 454 | "class": partial( 455 | CustomGitHubEventHandler, 456 | builder_names=all_pull_request_builders, 457 | ), 458 | "secret": str(settings.github_change_hook_secret), 459 | "strict": True, 460 | "token": settings.github_status_token, 461 | }, 462 | }, 463 | plugins=dict(waterfall_view={}, console_view={}, grid_view={}), 464 | avatar_methods=[util.AvatarGitHub(token=settings.github_status_token)], 465 | ws_ping_interval=30, 466 | ) 467 | 468 | # 'services' is a list of Status Targets. The results of each build will be 469 | # pushed to these targets. buildbot/reporters/*.py has a variety to choose from, 470 | # including web pages, email senders, and IRC bots. 
471 | 472 | c["services"] = [] 473 | 474 | 475 | status_email = str(settings.status_email) 476 | if bool(settings.send_mail): 477 | c["services"].append( 478 | reporters.MailNotifier( 479 | generators=[ 480 | reporters.BuildSetStatusGenerator( 481 | mode='problem', 482 | builders=mail_status_builders, 483 | message_formatter=MESSAGE_FORMATTER, 484 | ), 485 | reporters.WorkerMissingGenerator(workers='all'), 486 | ], 487 | fromaddr=str(settings.from_email), 488 | relayhost=str(settings.email_relay_host), 489 | extraRecipients=[status_email], 490 | sendToInterestedUsers=False, 491 | extraHeaders={"Reply-To": status_email}, 492 | ) 493 | ) 494 | 495 | if bool(settings.irc_notice): 496 | irc_args = dict( 497 | host=str(settings.irc_host), 498 | nick=str(settings.irc_nick), 499 | channels=[dict(channel=str(settings.irc_channel))], 500 | notify_events=set( 501 | settings.get( 502 | 'irc_notify_events', 503 | # 'cancelled' is not logged to avoid spaming IRC when 504 | # a "pull-request-scheduler" is cancelled 505 | ['better', 'worse', 'exception'] 506 | ) 507 | ), 508 | useColors=True, 509 | ) 510 | password = settings.get('irc_password', None) 511 | if password: 512 | irc_args['useSASL'] = True 513 | irc_args['password'] = password 514 | c["services"].append(reporters.IRC(**irc_args)) 515 | 516 | c["services"].append( 517 | reporters.GitHubStatusPush( 518 | str(settings.github_status_token), 519 | generators=[ 520 | reporters.BuildStartEndStatusGenerator( 521 | builders=github_status_builders + all_pull_request_builders, 522 | ), 523 | ], 524 | verbose=bool(settings.verbosity), 525 | ) 526 | ) 527 | 528 | start_formatter = reporters.MessageFormatterRenderable('Build started.') 529 | end_formatter = reporters.MessageFormatterRenderable('Build done.') 530 | pending_formatter = reporters.MessageFormatterRenderable('Build pending.') 531 | c["services"].append( 532 | GitHubPullRequestReporter( 533 | str(settings.github_status_token), 534 | generators=[ 535 | 
reporters.BuildRequestGenerator(formatter=pending_formatter), 536 | reporters.BuildStartEndStatusGenerator( 537 | builders=github_status_builders, 538 | start_formatter=start_formatter, 539 | end_formatter=end_formatter, 540 | ), 541 | ], 542 | verbose=bool(settings.verbosity), 543 | ) 544 | ) 545 | 546 | c["services"].append( 547 | DiscordReporter( 548 | str(settings.discord_webhook), 549 | generators=[ 550 | reporters.BuildRequestGenerator(formatter=pending_formatter), 551 | reporters.BuildStartEndStatusGenerator( 552 | builders=github_status_builders, 553 | start_formatter=start_formatter, 554 | end_formatter=end_formatter, 555 | ), 556 | ], 557 | verbose=bool(settings.verbosity), 558 | ) 559 | ) 560 | 561 | 562 | # if you set 'manhole', you can telnet into the buildmaster and get an 563 | # interactive python shell, which may be useful for debugging buildbot 564 | # internals. It is probably only useful for buildbot developers. 565 | # from buildbot.master import Manhole 566 | # c['manhole'] = Manhole(9999, "admin", "oneddens") 567 | 568 | # the 'title' string will be used to describe the project that this 569 | # buildbot is working on. For example, it is used as the title of the 570 | # waterfall HTML page. The 'titleURL' string will be used to provide a link 571 | # from buildbot HTML pages to your project's home page. 572 | 573 | c["title"] = "Python" 574 | c["titleURL"] = "https://www.python.org/" 575 | 576 | # the 'buildbotURL' string should point to the location where the buildbot's 577 | # internal web server (usually the html.Waterfall page) is visible. This 578 | # typically uses the port number set in the Waterfall 'status' entry, but 579 | # with an externally-visible host name which the buildbot cannot figure out 580 | # without some help. 
581 | 582 | c["buildbotURL"] = str(settings.buildbot_url) 583 | 584 | # disable sending of 'buildbotNetUsageData' for now, to improve startup time 585 | c["buildbotNetUsageData"] = None 586 | 587 | c['change_source'] = [] 588 | 589 | c['www']['plugins']['wsgi_dashboards'] = [ 590 | { 591 | 'name': 'release_status', 592 | 'caption': 'Release Status', 593 | 'app': get_release_status_app( 594 | release_status_builders, 595 | test_result_dir='/data/www/buildbot/test-results/'), 596 | 'order': 2, 597 | 'icon': 'rocket' 598 | } 599 | ] 600 | -------------------------------------------------------------------------------- /requirements.in: -------------------------------------------------------------------------------- 1 | buildbot[bundle,tls] 2 | buildbot_wsgi_dashboards 3 | flask 4 | humanize 5 | PyYAML 6 | requests 7 | treq 8 | sentry-sdk 9 | psycopg2 10 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | alembic==1.15.2 2 | attrs==25.3.0 3 | autobahn==24.4.2 4 | Automat==25.4.16 5 | blinker==1.9.0 6 | buildbot==4.3.0 7 | buildbot-console-view==4.3.0 8 | buildbot-grid-view==4.3.0 9 | buildbot-waterfall-view==4.3.0 10 | buildbot-worker==4.3.0 11 | buildbot-wsgi-dashboards==4.3.0 12 | buildbot-www==4.3.0 13 | certifi==2025.4.26 14 | cffi==1.17.1 15 | charset-normalizer==3.4.2 16 | click==8.1.8 17 | constantly==23.10.4 18 | croniter==6.0.0 19 | cryptography==45.0.2 20 | Flask==3.1.1 21 | greenlet==3.2.2 22 | humanize==4.12.3 23 | hyperlink==21.0.0 24 | idna==3.10 25 | importlib_metadata==8.7.0 26 | incremental==24.7.2 27 | itsdangerous==2.2.0 28 | Jinja2==3.1.6 29 | Mako==1.3.10 30 | MarkupSafe==3.0.2 31 | msgpack==1.1.0 32 | packaging==25.0 33 | psycopg2==2.9.10 34 | pyasn1==0.6.1 35 | pyasn1_modules==0.4.2 36 | pycparser==2.22 37 | PyJWT==2.10.1 38 | pyOpenSSL==25.1.0 39 | python-dateutil==2.9.0.post0 40 | pytz==2025.2 41 | PyYAML==6.0.2 
42 | requests==2.32.3 43 | sentry-sdk==2.28.0 44 | service-identity==24.2.0 45 | six==1.17.0 46 | SQLAlchemy==2.0.41 47 | tomli==2.2.1 48 | treq==24.9.1 49 | Twisted==24.11.0 50 | txaio==23.1.1 51 | typing_extensions==4.13.2 52 | unidiff==0.7.5 53 | urllib3==2.4.0 54 | Werkzeug==3.1.3 55 | zipp==3.21.0 56 | zope.interface==7.2 57 | -------------------------------------------------------------------------------- /worker_example.Dockerfile: -------------------------------------------------------------------------------- 1 | # This is an example worker setup, currently used by the ware-alpine worker. 2 | 3 | FROM python:3.6-alpine3.7 4 | 5 | ARG WORKER_VERSION=9999 6 | RUN apk update \ 7 | && apk add --no-cache \ 8 | ca-certificates \ 9 | bzip2-dev \ 10 | coreutils \ 11 | dpkg-dev dpkg \ 12 | expat-dev \ 13 | gcc \ 14 | gdbm-dev \ 15 | git \ 16 | libc-dev \ 17 | libffi-dev \ 18 | libnsl-dev \ 19 | libtirpc-dev \ 20 | linux-headers \ 21 | make \ 22 | ncurses-dev \ 23 | openssl \ 24 | openssl-dev \ 25 | pax-utils \ 26 | readline-dev \ 27 | sqlite-dev \ 28 | tcl-dev \ 29 | tk \ 30 | tk-dev \ 31 | xz-dev \ 32 | zlib-dev \ 33 | xvfb \ 34 | && update-ca-certificates \ 35 | && pip install -U pip setuptools wheel --no-cache-dir \ 36 | && pip install -U "buildbot-worker<=${WORKER_VERSION}" --no-cache-dir \ 37 | && adduser -h /buildbot -S -D buildbot 38 | 39 | USER buildbot 40 | WORKDIR /buildbot 41 | ARG BUILDER_NAME=test-worker 42 | ARG BUILDER_PASS=badsecret 43 | ARG BUILDER_MASTER=localhost:9021 44 | ARG BUILDER_INFO="Alpine Linux 3.7 Docker image" 45 | ARG BUILDER_ADMIN="Somebody who should have set BUILDER_ADMIN" 46 | RUN buildbot-worker create-worker buildarea ${BUILDER_MASTER} ${BUILDER_NAME} ${BUILDER_PASS} \ 47 | && echo "${BUILDER_INFO}" > buildarea/info/host \ 48 | && echo "${BUILDER_ADMIN}" > buildarea/info/admin 49 | 50 | ENV DISPLAY :99 51 | 52 | CMD ["/bin/sh", "-c", "Xvfb $DISPLAY -ac & buildbot-worker start --nodaemon buildarea"] 53 | 
--------------------------------------------------------------------------------