├── .gitignore ├── .travis.yml ├── CONTRIBUTING.md ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── master ├── build-rust-manifest.py ├── buildbot.patch ├── buildbot.tac ├── ec2buildslave-take-subnet_id.patch ├── exponential-retry.patch ├── master.cfg ├── master.cfg.txt.sample ├── passwords.py.sample ├── prune-changes-in-batches-of-100.patch ├── public_html │ ├── bg_gradient.jpg │ ├── default.css │ ├── favicon.ico │ └── robots.txt ├── slave-list.txt.sample └── templates │ ├── about.html │ ├── authfail.html │ ├── box_macros.html │ ├── build.html │ ├── build_line.html │ ├── builder.html │ ├── builders.html │ ├── buildslave.html │ ├── buildslaves.html │ ├── buildstatus.html │ ├── buildstep.html │ ├── change.html │ ├── change_macros.html │ ├── change_sources.html │ ├── console.html │ ├── directory.html │ ├── empty.html │ ├── feed_atom10.xml │ ├── feed_description.html │ ├── feed_rss20.xml │ ├── footer.html │ ├── forms.html │ ├── grid.html │ ├── grid_macros.html │ ├── grid_transposed.html │ ├── jsonhelp.html │ ├── layout.html │ ├── logs.html │ ├── onelineperbuild.html │ ├── onelineperbuildonebuilder.html │ ├── revmacros.html │ ├── root.html │ ├── testresult.html │ ├── user.html │ ├── users.html │ ├── users_table.html │ ├── waterfall.html │ └── waterfallhelp.html ├── osx-adduser.sh ├── rust-bot-cert.pem ├── rust-buildbot-master-stunnel.conf ├── rust-buildbot-slave-stunnel.conf ├── rust-buildbot-win32-slave-stunnel.conf ├── setup-slave.sh └── slaves ├── README.md ├── android ├── Dockerfile ├── accept-licenses.sh ├── install-ndk.sh └── install-sdk.sh ├── dist ├── Dockerfile ├── build_binutils.sh ├── build_cmake.sh ├── build_curl.sh ├── build_gcc.sh ├── build_gdb.sh ├── build_git.sh ├── build_openssl.sh ├── build_pkgconfig.sh ├── build_python.sh └── build_tar.sh ├── linux-cross ├── Dockerfile ├── README.md ├── aarch64-linux-gnu.config ├── arm-linux-gnueabi.config ├── arm-linux-gnueabihf.config ├── arm-linux-musleabi.config ├── arm-linux-musleabihf.config ├── armv7-linux-gnueabihf.config ├── armv7-linux-musleabihf.config ├── build-libunwind.sh ├── build_dragonfly_toolchain.sh ├── build_freebsd_toolchain.sh ├── build_netbsd_toolchain.sh ├── build_powerpc64le_linux_toolchain.sh ├── build_rumprun.sh ├── build_toolchain.sh ├── build_toolchain_root.sh ├── mips-linux-musl.config ├── mipsel-linux-musl.config ├── patches │ └── glibc │ │ └── 2.12.2 │ │ ├── 001-PowerPC-Remove-unnecessary-mnew-mnemonics.patch │ │ ├── 001-Prevent-inlining-in-PPC64-initfini.s.patch │ │ └── 001-Use-.machine-to-prevent-AS-from-complaining-about-z9.patch ├── powerpc-linux-gnu.config ├── powerpc64-linux-gnu.config └── s390x-linux-gnu.config ├── linux ├── Dockerfile └── build-musl.sh └── start-docker-slave.sh /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | master/nightly* 3 | master/auto* 4 | master/dist* 5 | master/try* 6 | master/snap3* 7 | master/gitpoller* 8 | master/beta* 9 | master/stable* 10 | master/cargo* 11 | master/tmp 12 | master/http.log 13 | master/master.cfg.txt 14 | master/master.cfg.sample 15 | master/passwords.py 16 | master/passwords.pyc 17 | master/public_html 18 | master/runtests 19 | master/slave-list.txt 20 | master/state.sqlite 21 | master/twistd.log 22 | master/twistd.pid 23 | rust-bot-privkey.pem 24 | rust-bot-sign-secretkey.asc 25 | rust-bot-sign-passphrase 26 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | 
language: generic 2 | sudo: required 3 | dist: trusty 4 | 5 | install: 6 | - sudo apt-get update 7 | - sudo apt-get install -y libffi-dev libssl-dev python-dev 8 | - sudo pip install buildbot boto pyopenssl 9 | 10 | script: 11 | - cp master/master.cfg.txt.sample master/master.cfg.txt 12 | - cp master/slave-list.txt.sample master/slave-list.txt 13 | - cp master/passwords.py.sample master/passwords.py 14 | - mkdir $HOME/.ec2 15 | - echo "$AWS_ACCESS_KEY" >> $HOME/.ec2/aws_id 16 | - echo "$AWS_SECRET_KEY" >> $HOME/.ec2/aws_id 17 | - (cd master && buildbot checkconfig) 18 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to the Rust-Buildbot infrastructure 2 | 3 | Thank you for your interest in contributing to our infrastructure! The 4 | workflow for this project is similar to, but simpler than, that of the [Rust 5 | language][rust]. 6 | 7 | ## Making Changes 8 | 9 | Submit your changes as a pull request to this repository. If there's a 10 | specific contributor who should review the change, mention them with `r? 11 | @theirname` in the body of your commit message. 12 | 13 | The rust-highfive bot will take a look at your PR and make sure you have a 14 | reviewer. 15 | 16 | After all of the reviewer's concerns have been addressed, anyone with 17 | permissions can merge your PR. 18 | 19 | ## Testing Changes 20 | 21 | If you're an enterprising contributor who'd like to spin up your very own 22 | instance of rust-buildbot for any reason, we need your help! 23 | 24 | It isn't easy to run your own rust-buildbot for testing changes (or any other 25 | reason) right now, but we're working on it! If you have any specific 26 | complaints about why contributing to this project is hard, please file issues 27 | about them or comment about them on [this issue][issue]! 28 | 29 | [rust]: https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.md 30 | [issue]: https://github.com/rust-lang/rust-buildbot/issues/15 31 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 The Rust Project Developers 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # The Rust Project's buildbot config 2 | 3 | This is the code for the buildbot instance used by Rust at 4 | http://buildbot.rust-lang.org/builders. It is currently not in a 5 | condition that allows people to easily set up their own instances. 6 | 7 | # Slave configuration 8 | 9 | Slaves communicate with buildbot through an SSL tunnel for which 10 | you'll need the stunnel tool. Use whichever version of the 11 | buildslave software the other slaves are running. 12 | 13 | This repo includes a setup script for creating the stunnel 14 | configuration. From within the repo, run 'setup-slave.sh' and enter the 15 | name, password, and master address that you were provided. This 16 | creates the stunnel configuration file called 17 | `rust-buildbot-slave-stunnel-final.conf`. 18 | 19 | The first time you run this script it will start stunnel and the 20 | buildslave, but typically you would run stunnel and buildslave at 21 | reboot using cron: 22 | 23 | ``` 24 | > stunnel rust-buildbot-slave-stunnel-final.conf && buildslave restart slave 25 | ``` 26 | 27 | There is more information on slave configuration in the file `slaves/README.md`. 28 | 29 | # Running the Master 30 | 31 | ``` 32 | $ cd rust-buildbot/master 33 | $ buildbot start 34 | ``` 35 | 36 | # Adding a new builder 37 | 38 | This requires some digging in `master.cfg`. If the new builder will be gated, 39 | you'll also have to add it in `/home/rustbuild/homu/cfg.toml`. 40 | 41 | * Choose the new builder's name. 42 | 43 | * If it runs tests with optimizations enabled, the name will contain 44 | `-opt`. 45 | 46 | * If the tests are run without optimizations, the name should contain 47 | `-nopt-t`. 48 | 49 | * If you're toggling a new option, pick the unique string that represents 50 | the option at hand. 51 | 52 | * Add the new builder name to the `auto_platforms_dev` and 53 | `auto_platforms_prod` lists. 54 | 55 | * Add the new builder to `dist_nogate_platforms` (or the alternative for 56 | gated). 
Its name will have acquired an `auto-` prefix at this point. 57 | 58 | * Under `for p in auto_platforms`, add logic to check for the unique string 59 | from the name. Yes, it's terrible. I'm sorry. 60 | 61 | * Under the next `for p in auto_platforms`, set your new flag according to the 62 | value you read from the unique string in the previous step. 63 | 64 | Pull requests to simplify this workflow are welcome. 65 | 66 | # It's broken! 67 | 68 | Sometimes the queue gets stuck. The most obvious symptom is if a PR takes 69 | substantially longer than usual to build. 70 | 71 | 1) Does Homu know about the PR? 72 | 73 | See whether the PR shows up in [Homu's queue](http://buildbot.rust-lang.org/homu/queue/rust). 74 | 75 | If the PR is missing from the queue, one can repeat the `r+`. If that doesn't 76 | make it show up after a few minutes, restart Homu on the buildmaster. 77 | 78 | 2) Did Homu tell Buildbot about the PR? 79 | 80 | If the PR is listed as "pending" in the Homu queue, check for pending jobs on 81 | [the grid](http://buildbot.rust-lang.org/grid?branch=auto&width=10). 82 | 83 | If there are no pending jobs in the grid, have Homu repeat the request to 84 | Buildbot by having someone with the right permissions say `@bors: retry force` 85 | on the PR that's stuck. 86 | 87 | Occasionally, a try build on which `retry force` didn't work will succeed after 88 | [resetting state](https://github.com/rust-lang/rust/pull/30845#issuecomment-171712657) 89 | and bumping the priority with a comment like `@bors r- try- r=nmatsakis 90 | retry force p=1 abc123` 91 | 92 | 3) Is Buildbot running the PR? 93 | 94 | If the grid is aware of the jobs, check for lost buildslaves. When a builder 95 | gets lost, its name will be purple and its status will include "slave lost". 96 | This means that either the host needs to be rebooted or the buildbot process on 97 | it needs to be restarted. 98 | 99 | If the above steps fail, restart the Buildbot process on the production 100 | buildmaster. 101 | 102 | ## Homu is dead! 103 | 104 | If the Homu status page linked above won't load, something is wrong with Homu. 105 | 106 | To start Homu, SSH into the buildmaster as root, then: 107 | 108 | ``` 109 | # screen -R # Or `screen -S homu` if there's no screen session 110 | # su rustbuild 111 | $ cd /home/rustbuild/homu 112 | $ .venv/bin/python homu/main.py 2>&1 | tee -a homu.log 113 | ``` 114 | 115 | Often, attaching to the screen then killing Homu (ctrl+c) and rerunning the 116 | prior command (up-arrow, enter) is all it takes. 117 | 118 | # The Dev Environment 119 | 120 | Consult the dev buildmaster's `master/passwords.py` for credentials, and log 121 | into the web interface (point your browser at the dev buildmaster's public IP, 122 | which is available from the AWS console) as `any-build`. 123 | 124 | To trigger a full nightly build, append `/builders/nightly-dist-rustc-trigger` 125 | to the URL and force a build on the trigger builder. This will spin up the 126 | other builders and create a nightly, whose files will be uploaded to the 127 | `dev-static-rust-lang-org` S3 bucket at the end. 128 | 129 | # Testing Locally 130 | 131 | ## `master.cfg.txt` 132 | 133 | To do things with this Buildbot on your local machine, you'll need to create 134 | the file `master/master.cfg.txt`. `master.cfg` reads secrets out of it at 135 | startup. 
136 | 137 | ``` 138 | env prod 139 | master_addy 11.22.333.4444:5678 140 | git_source git://github.com/rust-lang/rust.git 141 | cargo_source git://github.com/rust-lang/cargo.git 142 | packaging_source git://github.com/rust-lang/rust-packaging.git 143 | buildbot_source https://github.com/rust-lang/rust-buildbot 144 | buildbot_branch master 145 | s3_addy s3://your-bucket-name-here 146 | s3_cargo_addy s3://your-other-bucket-name-here 147 | homu_secret RFqnZtXnRhD66qv11WOGIkuGn2YzvylOcxlqqXZmSq4RaLpXfb 148 | dist_server_addy http://your-bucket-name.here.s3-website-aws-region.amazonaws.com 149 | ``` 150 | 151 | * `master_addy` is the address and port where buildmaster is running 152 | * `s3_addy` and `s3_cargo_addy` are buckets where artefacts will get uploaded 153 | * `homu_secret` is a string that you can get from `pwgen -s 64 -n 1` or so, 154 | which also appears in `~/rustbuild/homu/cfg.toml` under `repo.rust.buildbot` 155 | and `repo.cargo.buildbot`. 156 | * `dist_server_addy` is the url of that bucket where stuff gets uploaded 157 | * `buildbot_source` and `buildbot_branch` are the repo/branch to check out on 158 | docker images when they boot up to start the slave. 159 | 160 | ## `master/slave-list.txt` 161 | 162 | ``` 163 | # This file is a slave-name / password file that should 164 | # not be checked in to the git repository for rust buildbot, 165 | # just held in the master work-dir on the host it's running on 166 | 167 | # these are aws latent 168 | linux1 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 169 | linux2 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 170 | linux3 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 171 | linux4 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 172 | linux5 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 173 | linux6 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 174 | 175 | # AWS 176 | linux-64-x-android-t pA$sw0rd ami=ami-00112233 instance_type=c3.2xlarge max_builds=1 special=true 177 | 178 | # this is an old CentOS 6 EC2 VM used to build snapshots 179 | linux-snap pA$sw0rd ami=ami-00112233 instance_type=c3.2xlarge max_builds=3 snap=true dist=true special=true 180 | 181 | # these two are aws latent 182 | win1 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 snap=true 183 | win2 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 snap=true 184 | win3 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 snap=true 185 | win4 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 snap=true 186 | win5 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 snap=true 187 | win6 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 snap=true 188 | win7 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 snap=true 189 | win8 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=3 snap=true 190 | 191 | # bug #21434 makes max_builds=1 fail for dist builds because DirectoryUpload hits locking exceptions 192 | windist1 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=1 snap=true dist=true special=true 193 | windist2 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=1 snap=true dist=true special=true 194 | windist3 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=1 snap=true dist=true special=true 195 | windist4 pA$sw0rd ami=ami-a1b2c3d4 instance_type=c3.2xlarge max_builds=1 snap=true dist=true special=true 196 | 197 | # 
community-maintained -- these never connect to dev 198 | #bitrig1 pA$sw0rd max_builds=2 snap=true 199 | #freebsd10_32-1 pA$sw0rd max_builds=2 snap=true 200 | #freebsd10_64-1 pA$sw0rd max_builds=2 snap=true 201 | ``` 202 | 203 | The passwords and AMI IDs must, of course, be replaced with usable values. 204 | 205 | ## `master/passwords.py` 206 | 207 | ``` 208 | users = [ 209 | ('username', 'hunter2'), 210 | ('otheruser', 'wordpass') 211 | ] 212 | ``` 213 | These are for logging into the Buildbot web interface. 214 | 215 | # License 216 | 217 | This software is distributed under the terms of both the MIT license 218 | and/or the Apache License (Version 2.0), at your option. 219 | 220 | See [LICENSE-APACHE](LICENSE-APACHE), [LICENSE-MIT](LICENSE-MIT) for details. 221 | -------------------------------------------------------------------------------- /master/buildbot.patch: -------------------------------------------------------------------------------- 1 | --- /usr/local/lib/python2.6/dist-packages/buildbot-0.8.5-py2.6.egg/buildbot/db/changes.py.orig 2012-11-14 10:38:43.000000000 -0800 2 | +++ /usr/local/lib/python2.6/dist-packages/buildbot-0.8.5-py2.6.egg/buildbot/db/changes.py 2012-11-14 12:16:59.000000000 -0800 3 | @@ -124,13 +124,40 @@ 4 | d = self.db.pool.do(thd) 5 | return d 6 | 7 | - def getRecentChanges(self, count): 8 | + def getRecentChanges(self, count, revFilter=None, debugInfo=None): 9 | def thd(conn): 10 | - # get the changeids from the 'changes' table 11 | + 12 | changes_tbl = self.db.model.changes 13 | + wh = None 14 | + if revFilter != None: 15 | + n_wheres = 0 16 | + for k in ["branch", "author", "repository", "project"]: 17 | + if k in revFilter: 18 | + # FIXME: sanitize v here? How careful is SQLAlchemy? 19 | + v = revFilter[k] 20 | + if debugInfo: 21 | + debugInfo[k] = v 22 | + if type(v) == list: 23 | + clause = (changes_tbl.c[k] == v[0]) 24 | + for vv in v[1:]: 25 | + clause = clause | (changes_tbl.c[k] == vv) 26 | + else: 27 | + clause = (changes_tbl.c[k] == v) 28 | + 29 | + if n_wheres > 0: 30 | + wh = wh & clause 31 | + else: 32 | + wh = clause 33 | + n_wheres += 1 34 | + 35 | + if debugInfo: 36 | + debugInfo["whereclause"] = str(wh) 37 | + 38 | + # get the changeids from the 'changes' table 39 | q = sa.select([changes_tbl.c.changeid], 40 | - order_by=[sa.desc(changes_tbl.c.changeid)], 41 | - limit=count) 42 | + whereclause=wh, 43 | + order_by=[sa.desc(changes_tbl.c.changeid)], 44 | + limit=count) 45 | rp = conn.execute(q) 46 | changeids = [ row.changeid for row in rp ] 47 | rp.close() 48 | --- /usr/local/lib/python2.6/dist-packages/buildbot-0.8.5-py2.6.egg/buildbot/status/web/console.py.orig 2012-11-14 10:01:32.000000000 -0800 49 | +++ /usr/local/lib/python2.6/dist-packages/buildbot-0.8.5-py2.6.egg/buildbot/status/web/console.py 2012-11-14 13:10:00.000000000 -0800 50 | @@ -163,11 +163,11 @@ 51 | return allChanges 52 | 53 | @defer.deferredGenerator 54 | - def getAllChanges(self, request, status, debugInfo): 55 | + def getAllChanges(self, request, status, revFilter, debugInfo): 56 | master = request.site.buildbot_service.master 57 | 58 | wfd = defer.waitForDeferred( 59 | - master.db.changes.getRecentChanges(25)) 60 | + master.db.changes.getRecentChanges(25, revFilter, debugInfo)) 61 | yield wfd 62 | chdicts = wfd.getResult() 63 | 64 | @@ -441,7 +441,7 @@ 65 | if self.comparator.isRevisionEarlier(build, revision): 66 | firstNotIn = build 67 | break 68 | - else: 69 | + elif build.revision == revision.revision: 70 | introducedIn = build 71 | 72 | # Get the results of the 
first build with the revision, and the 73 | @@ -637,19 +637,27 @@ 74 | 75 | # Get all changes we can find. This is a DB operation, so it must use 76 | # a deferred. 77 | - d = self.getAllChanges(request, status, debugInfo) 78 | + 79 | + revFilter = {} 80 | + if branch != ANYBRANCH: 81 | + revFilter['branch'] = branch 82 | + if devName: 83 | + revFilter['author'] = devName 84 | + if repository: 85 | + revFilter['repository'] = repository 86 | + if project: 87 | + revFilter['project'] = project 88 | + 89 | + d = self.getAllChanges(request, status, revFilter, debugInfo) 90 | def got_changes(allChanges): 91 | debugInfo["source_all"] = len(allChanges) 92 | 93 | - revFilter = {} 94 | - if branch != ANYBRANCH: 95 | - revFilter['branch'] = branch 96 | - if devName: 97 | - revFilter['who'] = devName 98 | - if repository: 99 | - revFilter['repository'] = repository 100 | - if project: 101 | - revFilter['project'] = project 102 | + # FIXME: called 'who' in console and Change.py, 'author' in db model, 103 | + # 'name' in query string. Pointless difference, reconcile. 104 | + if "author" in revFilter: 105 | + revFilter['who'] = revFilter['author'] 106 | + del revFilter['author'] 107 | + 108 | revisions = list(self.filterRevisions(allChanges, max_revs=numRevs, 109 | filter=revFilter)) 110 | debugInfo["revision_final"] = len(revisions) 111 | -------------------------------------------------------------------------------- /master/buildbot.tac: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from twisted.application import service 4 | from buildbot.master import BuildMaster 5 | 6 | basedir = '/home/rustbuild/rust-buildbot/master' 7 | rotateLength = 10000000 8 | maxRotatedFiles = 10 9 | configfile = 'master.cfg' 10 | 11 | # Default umask for server 12 | umask = None 13 | 14 | # if this is a relocatable tac file, get the directory containing the TAC 15 | if basedir == '.': 16 | import os.path 17 | basedir = os.path.abspath(os.path.dirname(__file__)) 18 | 19 | # note: this line is matched against to check that this is a buildmaster 20 | # directory; do not edit it. 
21 | application = service.Application('buildmaster') 22 | from twisted.python.logfile import LogFile 23 | from twisted.python.log import ILogObserver, FileLogObserver 24 | logfile = LogFile.fromFullPath(os.path.join(basedir, "twistd.log"), 25 | rotateLength=rotateLength, 26 | maxRotatedFiles=maxRotatedFiles) 27 | application.setComponent(ILogObserver, FileLogObserver(logfile).emit) 28 | 29 | m = BuildMaster(basedir, configfile, umask) 30 | m.setServiceParent(application) 31 | m.log_rotation.rotateLength = rotateLength 32 | m.log_rotation.maxRotatedFiles = maxRotatedFiles 33 | 34 | -------------------------------------------------------------------------------- /master/ec2buildslave-take-subnet_id.patch: -------------------------------------------------------------------------------- 1 | --- ec2buildslave.py 2011-09-03 12:59:09.000000000 -0700 2 | +++ /usr/local/lib/python2.6/dist-packages/buildbot-0.8.5-py2.6.egg/buildbot/ec2buildslave.py 2013-07-19 22:49:28.000000000 -0700 3 | @@ -48,6 +48,7 @@ 4 | aws_id_file_path=None, user_data=None, region=None, 5 | keypair_name='latent_buildbot_slave', 6 | security_name='latent_buildbot_slave', 7 | + subnet_id=None, 8 | max_builds=None, notify_on_missing=[], missing_timeout=60*20, 9 | build_wait_timeout=60*10, properties={}, locks=None): 10 | 11 | @@ -82,6 +83,7 @@ 12 | self.instance_type = instance_type 13 | self.keypair_name = keypair_name 14 | self.security_name = security_name 15 | + self.subnet_id = subnet_id 16 | self.user_data = user_data 17 | if identifier is None: 18 | assert secret_identifier is None, ( 19 | @@ -242,9 +244,14 @@ 20 | 21 | def _start_instance(self): 22 | image = self.get_image() 23 | - reservation = image.run( 24 | - key_name=self.keypair_name, security_groups=[self.security_name], 25 | - instance_type=self.instance_type, user_data=self.user_data) 26 | + if self.subnet_id: 27 | + reservation = image.run( 28 | + key_name=self.keypair_name, subnet_id=self.subnet_id, 29 | + instance_type=self.instance_type, user_data=self.user_data) 30 | + else: 31 | + reservation = image.run( 32 | + key_name=self.keypair_name, security_groups=[self.security_name], 33 | + instance_type=self.instance_type, user_data=self.user_data) 34 | self.instance = reservation.instances[0] 35 | log.msg('%s %s starting instance %s' % 36 | (self.__class__.__name__, self.slavename, self.instance.id)) 37 | @@ -263,10 +270,9 @@ 38 | minutes = duration//60 39 | seconds = duration%60 40 | log.msg('%s %s instance %s started on %s ' 41 | - 'in about %d minutes %d seconds (%s)' % 42 | + 'in about %d minutes %d seconds' % 43 | (self.__class__.__name__, self.slavename, 44 | - self.instance.id, self.dns, minutes, seconds, 45 | - self.output.output)) 46 | + self.instance.id, self.dns, minutes, seconds)) 47 | if self.elastic_ip is not None: 48 | self.instance.use_ip(self.elastic_ip) 49 | return [self.instance.id, 50 | @@ -295,7 +301,12 @@ 51 | self.conn.disassociate_address(self.elastic_ip.public_ip) 52 | instance.update() 53 | if instance.state not in (SHUTTINGDOWN, TERMINATED): 54 | - instance.terminate() 55 | + if hasattr(instance, "terminate"): 56 | + instance.terminate() 57 | + elif hasattr(instance, "stop"): 58 | + instance.stop() 59 | + else: 60 | + raise ValueError('instance has no method for stopping') 61 | log.msg('%s %s terminating instance %s' % 62 | (self.__class__.__name__, self.slavename, instance.id)) 63 | duration = 0 64 | -------------------------------------------------------------------------------- /master/exponential-retry.patch: 
-------------------------------------------------------------------------------- 1 | commit 95deef27d7c531ead19e0ac86a9aa1546d4ee7f9 2 | Author: Dustin J. Mitchell 3 | Date: Mon Jan 23 00:07:26 2012 -0600 4 | 5 | Re-run queries after certain OperationalErrors 6 | 7 | Sometimes malformed SQL can generate an OperationalError, so this looks 8 | at the message itself. It's unclear how this will work with 9 | localization. 10 | 11 | The code uses an exponential backoff algorithm (with a relatively small 12 | multiplier), and will retry for a day, which seems a reasonable 13 | get-the-db-server-fixed time. 14 | 15 | Fixes #2005. Hoepfully for good! 16 | 17 | diff --git a/master/buildbot/db/pool.py b/master/buildbot/db/pool.py 18 | index 2c878bc..56ebfb7 100644 19 | --- a/master/buildbot/db/pool.py 20 | +++ b/master/buildbot/db/pool.py 21 | @@ -21,6 +21,7 @@ import os 22 | import sqlalchemy as sa 23 | import twisted 24 | import tempfile 25 | +from buildbot.process import metrics 26 | from twisted.internet import reactor, threads, defer 27 | from twisted.python import threadpool, failure, versions, log 28 | 29 | @@ -150,71 +151,92 @@ class DBThreadPool(threadpool.ThreadPool): 30 | reactor.removeSystemEventTrigger(self._stop_evt) 31 | self._stop() 32 | 33 | - def do(self, callable, *args, **kwargs): 34 | - def thd(): 35 | - conn = self.engine.contextual_connect() 36 | + # Try about 170 times over the space of a day, with the last few tries 37 | + # being about an hour apart. This is designed to span a reasonable amount 38 | + # of time for repairing a broken database server, while still failing 39 | + # actual problematic queries eventually 40 | + BACKOFF_START = 1.0 41 | + BACKOFF_MULT = 1.05 42 | + MAX_OPERATIONALERROR_TIME = 3600*24 # one day 43 | + def __thd(self, with_engine, callable, args, kwargs): 44 | + # try to call callable(arg, *args, **kwargs) repeatedly until no 45 | + # OperationalErrors occur, where arg is either the engine (with_engine) 46 | + # or a connection (not with_engine) 47 | + backoff = self.BACKOFF_START 48 | + start = time.time() 49 | + while True: 50 | + if with_engine: 51 | + arg = self.engine 52 | + else: 53 | + arg = self.engine.contextual_connect() 54 | + 55 | if self.__broken_sqlite: # see bug #1810 56 | - conn.execute("select * from sqlite_master") 57 | + arg.execute("select * from sqlite_master") 58 | try: 59 | - rv = callable(conn, *args, **kwargs) 60 | + rv = callable(arg, *args, **kwargs) 61 | assert not isinstance(rv, sa.engine.ResultProxy), \ 62 | "do not return ResultProxy objects!" 63 | + except sa.exc.OperationalError, e: 64 | + text = e.orig.args[0] 65 | + if "Lost connection" in text \ 66 | + or "database is locked" in text: 67 | + 68 | + # see if we've retried too much 69 | + elapsed = time.time() - start 70 | + if elapsed > self.MAX_OPERATIONALERROR_TIME: 71 | + raise 72 | + 73 | + metrics.MetricCountEvent.log( 74 | + "DBThreadPool.retry-on-OperationalError") 75 | + log.msg("automatically retrying query after " 76 | + "OperationalError (%ss sleep)" % backoff) 77 | + 78 | + # sleep (remember, we're in a thread..) 
79 | + time.sleep(backoff) 80 | + backoff *= self.BACKOFF_MULT 81 | + 82 | + # and re-try 83 | + continue 84 | + else: 85 | + raise 86 | finally: 87 | - conn.close() 88 | - return rv 89 | - return threads.deferToThreadPool(reactor, self, thd) 90 | + if not with_engine: 91 | + arg.close() 92 | + break 93 | + return rv 94 | + 95 | + def do(self, callable, *args, **kwargs): 96 | + return threads.deferToThreadPool(reactor, self, 97 | + self.__thd, False, callable, args, kwargs) 98 | 99 | def do_with_engine(self, callable, *args, **kwargs): 100 | - def thd(): 101 | - if self.__broken_sqlite: # see bug #1810 102 | - self.engine.execute("select * from sqlite_master") 103 | - rv = callable(self.engine, *args, **kwargs) 104 | - assert not isinstance(rv, sa.engine.ResultProxy), \ 105 | - "do not return ResultProxy objects!" 106 | - return rv 107 | - return threads.deferToThreadPool(reactor, self, thd) 108 | + return threads.deferToThreadPool(reactor, self, 109 | + self.__thd, True, callable, args, kwargs) 110 | 111 | # older implementations for twisted < 0.8.2, which does not have 112 | # deferToThreadPool; this basically re-implements it, although it gets some 113 | # of the synchronization wrong - the thread may still be "in use" when the 114 | # deferred fires in the parent, which can lead to database accesses hopping 115 | # between threads. In practice, this should not cause any difficulty. 116 | - def do_081(self, callable, *args, **kwargs): # pragma: no cover 117 | - d = defer.Deferred() 118 | - def thd(): 119 | - try: 120 | - conn = self.engine.contextual_connect() 121 | - if self.__broken_sqlite: # see bug #1810 122 | - conn.execute("select * from sqlite_master") 123 | + if twisted.version < versions.Version('twisted', 8, 2, 0): 124 | + def __081_wrap(self, with_engine, callable, args, kwargs): # pragma: no cover 125 | + d = defer.Deferred() 126 | + def thd(): 127 | try: 128 | - rv = callable(conn, *args, **kwargs) 129 | - assert not isinstance(rv, sa.engine.ResultProxy), \ 130 | - "do not return ResultProxy objects!" 131 | - finally: 132 | - conn.close() 133 | - reactor.callFromThread(d.callback, rv) 134 | - except: 135 | - reactor.callFromThread(d.errback, failure.Failure()) 136 | - self.callInThread(thd) 137 | - return d 138 | - def do_with_engine_081(self, callable, *args, **kwargs): # pragma: no cover 139 | - d = defer.Deferred() 140 | - def thd(): 141 | - try: 142 | - conn = self.engine 143 | - if self.__broken_sqlite: # see bug #1810 144 | - conn.execute("select * from sqlite_master") 145 | - rv = callable(conn, *args, **kwargs) 146 | - assert not isinstance(rv, sa.engine.ResultProxy), \ 147 | - "do not return ResultProxy objects!" 
148 | - reactor.callFromThread(d.callback, rv) 149 | - except: 150 | - reactor.callFromThread(d.errback, failure.Failure()) 151 | - self.callInThread(thd) 152 | - return d 153 | + reactor.callFromThread(d.callback, 154 | + self.__thd(with_engine, callable, args, kwargs)) 155 | + except: 156 | + reactor.callFromThread(d.errback, 157 | + failure.Failure()) 158 | + self.callInThread(thd) 159 | + return d 160 | + 161 | + def do_081(self, callable, *args, **kwargs): # pragma: no cover 162 | + return self.__081_wrap(False, callable, args, kwargs) 163 | + 164 | + def do_with_engine_081(self, callable, *args, **kwargs): # pragma: no cover 165 | + return self.__081_wrap(True, callable, args, kwargs) 166 | 167 | - # use the 0.8.1 versions on old Twisteds 168 | - if twisted.version < versions.Version('twisted', 8, 2, 0): 169 | do = do_081 170 | do_with_engine = do_with_engine_081 171 | 172 | diff --git a/master/buildbot/test/unit/test_db_pool.py b/master/buildbot/test/unit/test_db_pool.py 173 | index 1f8ab0f..ca68369 100644 174 | --- a/master/buildbot/test/unit/test_db_pool.py 175 | +++ b/master/buildbot/test/unit/test_db_pool.py 176 | @@ -13,9 +13,11 @@ 177 | # 178 | # Copyright Buildbot Team Members 179 | 180 | +import os 181 | +import time 182 | import sqlalchemy as sa 183 | from twisted.trial import unittest 184 | -from twisted.internet import defer 185 | +from twisted.internet import defer, reactor 186 | from buildbot.db import pool 187 | from buildbot.test.util import db 188 | 189 | @@ -104,6 +106,48 @@ class Basic(unittest.TestCase): 190 | return d 191 | 192 | 193 | +class Stress(unittest.TestCase): 194 | + 195 | + def setUp(self): 196 | + setup_engine = sa.create_engine('sqlite:///test.sqlite') 197 | + setup_engine.execute("pragma journal_mode = wal") 198 | + setup_engine.execute("CREATE TABLE test (a integer, b integer)") 199 | + 200 | + self.engine = sa.create_engine('sqlite:///test.sqlite') 201 | + self.engine.optimal_thread_pool_size = 2 202 | + self.pool = pool.DBThreadPool(self.engine) 203 | + 204 | + def tearDown(self): 205 | + self.pool.shutdown() 206 | + os.unlink("test.sqlite") 207 | + 208 | + @defer.deferredGenerator 209 | + def test_inserts(self): 210 | + def write(conn): 211 | + trans = conn.begin() 212 | + conn.execute("INSERT INTO test VALUES (1, 1)") 213 | + time.sleep(31) 214 | + trans.commit() 215 | + d1 = self.pool.do(write) 216 | + 217 | + def write2(conn): 218 | + trans = conn.begin() 219 | + conn.execute("INSERT INTO test VALUES (1, 1)") 220 | + trans.commit() 221 | + d2 = defer.Deferred() 222 | + d2.addCallback(lambda _ : 223 | + self.pool.do(write2)) 224 | + reactor.callLater(0.1, d2.callback, None) 225 | + 226 | + wfd = defer.waitForDeferred( 227 | + defer.DeferredList([ d1, d2 ])) 228 | + yield wfd 229 | + wfd.getResult() 230 | + 231 | + # don't run this test, since it takes 30s 232 | + del test_inserts 233 | + 234 | + 235 | class BasicWithDebug(Basic): 236 | 237 | # same thing, but with debug=True 238 | -------------------------------------------------------------------------------- /master/master.cfg.txt.sample: -------------------------------------------------------------------------------- 1 | env dev 2 | master_addy 1.1.1.1:9000 3 | git_source https://github.com/rust-lang/rust 4 | cargo_source https://github.com/rust-lang/cargo 5 | packaging_source https://github.com/rust-lang/rust-packaging 6 | buildbot_source https://github.com/rust-lang/rust-buildbot 7 | buildbot_branch master 8 | s3_addy s3://my-bucket 9 | s3_cargo_addy s3://my-bucket 10 | homu_secret 
my-secret 11 | dist_server_addy http://my-bucket.s3-website-us-west-1.amazonaws.com 12 | public_dist_server_addy http://dev-static.rust-lang.org 13 | -------------------------------------------------------------------------------- /master/passwords.py.sample: -------------------------------------------------------------------------------- 1 | users = [ 2 | ('any-build', '123'), 3 | ('no-dist', '123') 4 | ] 5 | -------------------------------------------------------------------------------- /master/prune-changes-in-batches-of-100.patch: -------------------------------------------------------------------------------- 1 | --- buildbot/db/changes.py 2013-07-19 16:10:55.000000000 -0700 2 | +++ buildbot/db/changes.py.orig 2012-11-14 10:38:43.000000000 -0800 3 | @@ -209,12 +182,9 @@ 4 | for table_name in ('scheduler_changes', 'sourcestamp_changes', 5 | 'change_files', 'change_links', 6 | 'change_properties', 'changes', 'change_users'): 7 | - remaining = ids_to_delete[:] 8 | - while remaining: 9 | - batch, remaining = remaining[:100], remaining[100:] 10 | - table = self.db.model.metadata.tables[table_name] 11 | - conn.execute( 12 | - table.delete(table.c.changeid.in_(batch))) 13 | + table = self.db.model.metadata.tables[table_name] 14 | + conn.execute( 15 | + table.delete(table.c.changeid.in_(ids_to_delete))) 16 | return self.db.pool.do(thd) 17 | 18 | def _chdict_from_change_row_thd(self, conn, ch_row): 19 | -------------------------------------------------------------------------------- /master/public_html/bg_gradient.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rust-lang-deprecated/rust-buildbot/e6af1eaa2b6273daf13754692601cd4a500a1885/master/public_html/bg_gradient.jpg -------------------------------------------------------------------------------- /master/public_html/default.css: -------------------------------------------------------------------------------- 1 | #logo { 2 | vertical-align: top; 3 | float: left; 4 | margin-right: 30px; 5 | } 6 | table.branches { 7 | font-family: Verdana, sans-serif; 8 | font-size: 120%; 9 | margin: 2em; 10 | } 11 | 12 | td.branch-name { 13 | background-color: #aaa; 14 | text-align: right; 15 | padding-right: 1em; 16 | } 17 | 18 | td.branch-view { 19 | margin: 10em; 20 | } 21 | 22 | body.interface { 23 | margin-left: 30px; 24 | margin-right: 30px; 25 | margin-top: 20px; 26 | margin-bottom: 50px; 27 | padding: 0; 28 | background: url(bg_gradient.jpg) repeat-x; 29 | font-family: Verdana, sans-serif; 30 | font-size: 10px; 31 | background-color: #fff; 32 | color: #333; 33 | } 34 | 35 | a:link,a:visited,a:active { 36 | color: #444; 37 | } 38 | 39 | table { 40 | border-spacing: 1px 1px; 41 | } 42 | 43 | table td { 44 | padding: 3px 4px 3px 4px; 45 | text-align: center; 46 | } 47 | 48 | .Project { 49 | min-width: 6em; 50 | } 51 | 52 | .LastBuild,.Activity { 53 | padding: 0 0 0 4px; 54 | } 55 | 56 | .LastBuild,.Activity,.Builder,.BuildStep { 57 | min-width: 5em; 58 | } 59 | 60 | /* Chromium Specific styles */ 61 | div.BuildResultInfo { 62 | color: #444; 63 | } 64 | 65 | div.Announcement { 66 | margin-bottom: 1em; 67 | } 68 | 69 | div.Announcement>a:hover { 70 | color: black; 71 | } 72 | 73 | div.Announcement>div.Notice { 74 | background-color: #afdaff; 75 | padding: 0.5em; 76 | font-size: 16px; 77 | text-align: center; 78 | } 79 | 80 | div.Announcement>div.Open { 81 | border: 3px solid #8fdf5f; 82 | padding: 0.5em; 83 | font-size: 16px; 84 | text-align: center; 85 | } 86 | 87 | 
div.Announcement>div.Closed { 88 | border: 5px solid #e98080; 89 | padding: 0.5em; 90 | font-size: 24px; 91 | font-weight: bold; 92 | text-align: center; 93 | } 94 | 95 | td.Time { 96 | color: #000; 97 | border-bottom: 1px solid #aaa; 98 | background-color: #eee; 99 | } 100 | 101 | td.Activity,td.Change,td.Builder { 102 | color: #333333; 103 | background-color: #CCCCCC; 104 | } 105 | 106 | td.Change { 107 | border-radius: 5px; 108 | -webkit-border-radius: 5px; 109 | -moz-border-radius: 5px; 110 | } 111 | 112 | td.Event { 113 | color: #777; 114 | background-color: #ddd; 115 | border-radius: 5px; 116 | -webkit-border-radius: 5px; 117 | -moz-border-radius: 5px; 118 | } 119 | 120 | td.Activity { 121 | border-top-left-radius: 10px; 122 | -webkit-border-top-left-radius: 10px; 123 | -moz-border-radius-topleft: 10px; 124 | min-height: 20px; 125 | padding: 2px 0 2px 0; 126 | } 127 | 128 | td.idle,td.waiting,td.offline,td.building { 129 | border-top-left-radius: 0px; 130 | -webkit-border-top-left-radius: 0px; 131 | -moz-border-radius-topleft: 0px; 132 | } 133 | 134 | .LastBuild { 135 | border-top-left-radius: 5px; 136 | -webkit-border-top-left-radius: 5px; 137 | -moz-border-radius-topleft: 5px; 138 | border-top-right-radius: 5px; 139 | -webkit-border-top-right-radius: 5px; 140 | -moz-border-radius-topright: 5px; 141 | } 142 | 143 | /* Console view styles */ 144 | td.DevRev { 145 | padding: 4px 8px 4px 8px; 146 | color: #333333; 147 | border-top-left-radius: 5px; 148 | -webkit-border-top-left-radius: 5px; 149 | -moz-border-radius-topleft: 5px; 150 | background-color: #eee; 151 | width: 1%; 152 | } 153 | 154 | td.DevRevCollapse { 155 | border-bottom-left-radius: 5px; 156 | -webkit-border-bottom-left-radius: 5px; 157 | -moz-border-radius-bottomleft: 5px; 158 | } 159 | 160 | td.DevName { 161 | padding: 4px 8px 4px 8px; 162 | color: #333333; 163 | background-color: #eee; 164 | width: 1%; 165 | text-align: left; 166 | } 167 | 168 | td.DevStatus { 169 | padding: 4px 4px 4px 4px; 170 | color: #333333; 171 | background-color: #eee; 172 | } 173 | 174 | td.DevSlave { 175 | padding: 4px 4px 4px 4px; 176 | color: #333333; 177 | background-color: #eee; 178 | } 179 | 180 | td.first { 181 | border-top-left-radius: 5px; 182 | -webkit-border-top-left-radius: 5px; 183 | -moz-border-radius-topleft: 5px; 184 | } 185 | 186 | td.last { 187 | border-top-right-radius: 5px; 188 | -webkit-border-top-right-radius: 5px; 189 | -moz-border-radius-topright: 5px; 190 | } 191 | 192 | td.DevStatusCategory { 193 | border-radius: 5px; 194 | -webkit-border-radius: 5px; 195 | -moz-border-radius: 5px; 196 | border-width: 1px; 197 | border-style: solid; 198 | } 199 | 200 | td.DevStatusCollapse { 201 | border-bottom-right-radius: 5px; 202 | -webkit-border-bottom-right-radius: 5px; 203 | -moz-border-radius-bottomright: 5px; 204 | } 205 | 206 | td.DevDetails { 207 | font-weight: normal; 208 | padding: 8px 8px 8px 8px; 209 | color: #333333; 210 | background-color: #eee; 211 | text-align: left; 212 | } 213 | 214 | td.DevDetails li a { 215 | padding-right: 5px; 216 | } 217 | 218 | td.DevComment { 219 | font-weight: normal; 220 | padding: 8px 8px 8px 8px; 221 | color: #333333; 222 | border-bottom-right-radius: 5px; 223 | -webkit-border-bottom-right-radius: 5px; 224 | -moz-border-radius-bottomright: 5px; 225 | border-bottom-left-radius: 5px; 226 | -webkit-border-bottom-left-radius: 5px; 227 | -moz-border-radius-bottomleft: 5px; 228 | background-color: #eee; 229 | text-align: left; 230 | } 231 | 232 | td.Alt { 233 | background-color: #ddd; 234 | } 
235 | 236 | tr.Alt { 237 | background-color: #ddd; 238 | } 239 | 240 | .legend { 241 | border-radius: 5px; 242 | -webkit-border-radius: 5px; 243 | -moz-border-radius: 5px; 244 | width: 100px; 245 | max-width: 100px; 246 | text-align: center; 247 | padding: 2px 2px 2px 2px; 248 | height: 14px; 249 | white-space: nowrap; 250 | } 251 | 252 | .DevStatusBox { 253 | text-align: center; 254 | height: 20px; 255 | padding: 0 2px; 256 | line-height: 0; 257 | white-space: nowrap; 258 | } 259 | 260 | .DevStatusBox a { 261 | opacity: 0.85; 262 | border-width: 1px; 263 | border-style: solid; 264 | border-radius: 4px; 265 | -webkit-border-radius: 4px; 266 | -moz-border-radius: 4px; 267 | display: block; 268 | width: 90%; 269 | height: 20px; 270 | line-height: 20px; 271 | margin-left: auto; 272 | margin-right: auto; 273 | } 274 | 275 | .DevSlaveBox { 276 | text-align: center; 277 | height: 10px; 278 | padding: 0 2px; 279 | line-height: 0; 280 | white-space: nowrap; 281 | } 282 | 283 | .DevSlaveBox a { 284 | opacity: 0.85; 285 | border-width: 1px; 286 | border-style: solid; 287 | border-radius: 4px; 288 | -webkit-border-radius: 4px; 289 | -moz-border-radius: 4px; 290 | display: block; 291 | width: 90%; 292 | height: 10px; 293 | line-height: 20px; 294 | margin-left: auto; 295 | margin-right: auto; 296 | } 297 | 298 | a.noround { 299 | border-radius: 0px; 300 | -webkit-border-radius: 0px; 301 | -moz-border-radius: 0px; 302 | position: relative; 303 | margin-top: -8px; 304 | margin-bottom: -8px; 305 | height: 36px; 306 | border-top-width: 0; 307 | border-bottom-width: 0; 308 | } 309 | 310 | a.begin { 311 | border-top-width: 1px; 312 | position: relative; 313 | margin-top: 0px; 314 | margin-bottom: -7px; 315 | height: 27px; 316 | border-top-left-radius: 4px; 317 | -webkit-border-top-left-radius: 4px; 318 | -moz-border-radius-topleft: 4px; 319 | border-top-right-radius: 4px; 320 | -webkit-border-top-right-radius: 4px; 321 | -moz-border-radius-topright: 4px; 322 | } 323 | 324 | a.end { 325 | border-bottom-width: 1px; 326 | position: relative; 327 | margin-top: -7px; 328 | margin-bottom: 0px; 329 | height: 27px; 330 | border-bottom-left-radius: 4px; 331 | -webkit-border-bottom-left-radius: 4px; 332 | -moz-border-radius-bottomleft: 4px; 333 | border-bottom-right-radius: 4px; 334 | -webkit-border-bottom-right-radius: 4px; 335 | -moz-border-radius-bottomright: 4px; 336 | } 337 | 338 | .center_align { 339 | text-align: center; 340 | } 341 | 342 | .right_align { 343 | text-align: right; 344 | } 345 | 346 | .left_align { 347 | text-align: left; 348 | } 349 | 350 | div.BuildWaterfall { 351 | border-radius: 7px; 352 | -webkit-border-radius: 7px; 353 | -moz-border-radius: 7px; 354 | position: absolute; 355 | left: 0px; 356 | top: 0px; 357 | background-color: #FFFFFF; 358 | padding: 4px 4px 4px 4px; 359 | float: left; 360 | display: none; 361 | border-width: 1px; 362 | border-style: solid; 363 | } 364 | 365 | /* LastBuild, BuildStep states */ 366 | .success { 367 | color: #000; 368 | background-color: #8d4; 369 | border-color: #4F8530; 370 | } 371 | 372 | .failure { 373 | color: #000; 374 | background-color: #e88; 375 | border-color: #A77272; 376 | } 377 | 378 | .warnings { 379 | color: #FFFFFF; 380 | background-color: #fa3; 381 | border-color: #C29D46; 382 | } 383 | 384 | .skipped { 385 | color: #000; 386 | background: #AADDEE; 387 | border-color: #AADDEE; 388 | } 389 | 390 | .exception,.retry { 391 | color: #FFFFFF; 392 | background-color: #c6c; 393 | border-color: #ACA0B3; 394 | } 395 | 396 | .start { 397 | color: #000; 
398 | background-color: #ccc; 399 | border-color: #ccc; 400 | } 401 | 402 | .running,.waiting,td.building { 403 | color: #000; 404 | background-color: #fd3; 405 | border-color: #C5C56D; 406 | } 407 | 408 | .offline,td.offline { 409 | color: #FFFFFF; 410 | background-color: #777777; 411 | border-color: #dddddd; 412 | } 413 | 414 | 415 | .start { 416 | border-bottom-left-radius: 10px; 417 | -webkit-border-bottom-left-radius: 10px; 418 | -moz-border-radius-bottomleft: 10px; 419 | border-bottom-right-radius: 10px; 420 | -webkit-border-bottom-right-radius: 10px; 421 | -moz-border-radius-bottomright: 10px; 422 | } 423 | 424 | .notstarted { 425 | border-width: 1px; 426 | border-style: solid; 427 | border-color: #aaa; 428 | background-color: #fff; 429 | } 430 | 431 | .closed { 432 | background-color: #ff0000; 433 | } 434 | 435 | .closed .large { 436 | font-size: 1.5em; 437 | font-weight: bolder; 438 | } 439 | 440 | td.Project a:hover,td.start a:hover { 441 | color: #000; 442 | } 443 | 444 | .mini-box { 445 | text-align: center; 446 | height: 20px; 447 | padding: 0 2px; 448 | line-height: 0; 449 | white-space: nowrap; 450 | } 451 | 452 | .mini-box a { 453 | border-radius: 0; 454 | -webkit-border-radius: 0; 455 | -moz-border-radius: 0; 456 | display: block; 457 | width: 100%; 458 | height: 20px; 459 | line-height: 20px; 460 | margin-top: -30px; 461 | } 462 | 463 | .mini-closed { 464 | -box-sizing: border-box; 465 | -webkit-box-sizing: border-box; 466 | border: 4px solid red; 467 | } 468 | 469 | /* grid styles */ 470 | table.Grid { 471 | border-collapse: collapse; 472 | } 473 | 474 | table.Grid tr td { 475 | padding: 0.2em; 476 | margin: 0px; 477 | text-align: center; 478 | } 479 | 480 | table.Grid tr td.title { 481 | font-size: 90%; 482 | border-right: 1px gray solid; 483 | border-bottom: 1px gray solid; 484 | } 485 | 486 | table.Grid tr td.sourcestamp { 487 | font-size: 90%; 488 | } 489 | 490 | table.Grid tr td.builder { 491 | text-align: right; 492 | font-size: 90%; 493 | } 494 | 495 | table.Grid tr td.build { 496 | border: 1px gray solid; 497 | } 498 | 499 | /* column container */ 500 | div.column { 501 | margin: 0 2em 2em 0; 502 | float: left; 503 | } 504 | 505 | /* info tables */ 506 | table.info { 507 | border-spacing: 1px; 508 | } 509 | 510 | table.info td { 511 | padding: 0.1em 1em 0.1em 1em; 512 | text-align: center; 513 | } 514 | 515 | table.info th { 516 | padding: 0.2em 1.5em 0.2em 1.5em; 517 | text-align: center; 518 | } 519 | 520 | table.info td.left { 521 | text-align: left 522 | } 523 | 524 | .alt { 525 | background-color: #f6f6f6; 526 | } 527 | 528 | li { 529 | padding: 0.1em 1em 0.1em 1em; 530 | } 531 | 532 | .result { 533 | padding: 0.3em 1em 0.3em 1em; 534 | } 535 | 536 | /* log view */ 537 | .log * { 538 | vlink: #800080; 539 | font-family: "Courier New", courier, monotype, monospace; 540 | } 541 | 542 | span.stdout { 543 | color: black; 544 | } 545 | 546 | span.stderr { 547 | color: red; 548 | } 549 | 550 | span.header { 551 | color: blue; 552 | } 553 | 554 | /* revision & email */ 555 | .revision .full { 556 | display: none; 557 | } 558 | 559 | .user .email { 560 | display: none; 561 | } 562 | 563 | /* change comments (use regular colors here) */ 564 | pre.comments>a:link,pre.comments>a:visited { 565 | color: blue; 566 | } 567 | 568 | pre.comments>a:active { 569 | color: purple; 570 | } 571 | -------------------------------------------------------------------------------- /master/public_html/favicon.ico: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/rust-lang-deprecated/rust-buildbot/e6af1eaa2b6273daf13754692601cd4a500a1885/master/public_html/favicon.ico -------------------------------------------------------------------------------- /master/public_html/robots.txt: -------------------------------------------------------------------------------- 1 | User-agent: * 2 | Disallow: /waterfall 3 | Disallow: /builders 4 | Disallow: /changes 5 | Disallow: /buildslaves 6 | Disallow: /schedulers 7 | Disallow: /one_line_per_build 8 | Disallow: /builders 9 | Disallow: /grid 10 | Disallow: /tgrid 11 | -------------------------------------------------------------------------------- /master/slave-list.txt.sample: -------------------------------------------------------------------------------- 1 | linux1 00000000000000000000 docker=alexcrichton/rust-slave-linux:2015-10-19b ami=ami-457cbc01 instance_type=c3.2xlarge max_builds=3 2 | linux2 00000000000000000000 docker=alexcrichton/rust-slave-linux:2015-10-19b ami=ami-457cbc01 instance_type=c3.2xlarge max_builds=3 3 | linux3 00000000000000000000 docker=alexcrichton/rust-slave-linux:2015-10-19b ami=ami-457cbc01 instance_type=c3.2xlarge max_builds=3 4 | linux4 00000000000000000000 docker=alexcrichton/rust-slave-linux:2015-10-19b ami=ami-457cbc01 instance_type=c3.2xlarge max_builds=3 5 | lincross1 00000000000000000000 docker=alexcrichton/rust-slave-linux-cross:2015-10-20 ami=ami-457cbc01 instance_type=c3.2xlarge max_builds=4 dist=true 6 | linux-64-x-android-t 00000000000000000000 docker=alexcrichton/rust-slave-android:2015-10-19 ami=ami-457cbc01 instance_type=c3.2xlarge max_builds=1 special=true 7 | dist-arm-android 00000000000000000000 docker=alexcrichton/rust-slave-android:2015-10-19 ami=ami-457cbc01 instance_type=c3.2xlarge max_builds=3 special=true dist=true 8 | linux-snap 00000000000000000000 docker=alexcrichton/rust-slave-dist:2015-10-20b ami=ami-457cbc01 instance_type=c3.2xlarge max_builds=3 snap=true dist=true special=true 9 | win1 00000000000000000000 ami=ami-8743bdc3 instance_type=c3.2xlarge max_builds=3 snap=true 10 | win2 00000000000000000000 ami=ami-8743bdc3 instance_type=c3.2xlarge max_builds=3 snap=true 11 | windist1 00000000000000000000 ami=ami-8743bdc3 instance_type=c3.2xlarge max_builds=1 snap=true dist=true special=true 12 | windist2 00000000000000000000 ami=ami-8743bdc3 instance_type=c3.2xlarge max_builds=1 snap=true dist=true special=true 13 | 14 | bitrig1 asdf max_builds=2 snap=true 15 | freebsd10_32-1 asfd max_builds=2 snap=true 16 | freebsd10_64-1 asdf max_builds=2 snap=true 17 | dragonflybsd-64-opt asdf max_builds=2 snap=true 18 | openbsd-64-opt asdf max_builds=2 snap=true 19 | 20 | mac1 asdf max_builds=2 snap=true dist=true 21 | mac2 asdf max_builds=2 snap=true dist=true ios=true 22 | -------------------------------------------------------------------------------- /master/templates/about.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

About this Buildbot

6 | 7 |
8 | 9 |

Version Information

10 | 20 | 21 |

Source code

22 | 23 |

Buildbot is a free software project, released under the terms of the 24 | GNU GPL.

25 | 26 |

Please visit the Buildbot Home Page for 27 | more information, including documentation, bug reports, and source 28 | downloads.

29 | 30 |
31 | 32 | {% endblock %} 33 | -------------------------------------------------------------------------------- /master/templates/authfail.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

Authentication Failed

6 | 7 |

The username or password you entered was not correct. 8 | Please go back and try again. 9 |

10 | 11 | {% endblock %} 12 | -------------------------------------------------------------------------------- /master/templates/box_macros.html: -------------------------------------------------------------------------------- 1 | {% macro box(text=[], comment=None) -%} 2 | 3 | {%- if comment -%}{%- endif -%} 4 | 8 | 9 | {%- if text is string -%} 10 | {{ text }} 11 | {%- else -%} 12 | {{- text|join("
") -}} 13 | {%- endif -%} 14 | 15 | {% endmacro %} 16 | 17 | {# this is currently just the text part of the boxes #} 18 | 19 | {% macro build_box(reason, url, number) -%} 20 | Build {{ number }} 21 | {%- endmacro %} 22 | 23 | {% macro step_box(text, logs, urls, stepinfo) -%} 24 | {%- if text is string -%} 25 | {{ text }} 26 | {%- else -%} 27 | {{- text|join("
") -}} 28 | {%- endif -%} 29 |
30 | {%- for l in logs %} 31 | {{ l.name|e }}
32 | {%- endfor -%} 33 | 34 | {%- for u in urls %} 35 | [{{ u.name|e }}]
36 | {%- endfor -%} 37 | {%- endmacro %} 38 | -------------------------------------------------------------------------------- /master/templates/build.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'forms.html' as forms %} 3 | {% from "change_macros.html" import change with context %} 4 | 5 | {% block content %} 6 | 7 |

8 | Builder {{ b.getBuilder().getName() }} 9 | Build #{{ b.getNumber() }} 10 |

11 | 12 |
13 | 14 | {% if not b.isFinished() %} 15 |

Build In Progress:

16 | 17 | {% if when_time %} 18 |

ETA: {{ when_time }} [{{ when }}]

19 | {% endif %} 20 | 21 | {{ current_step }} 22 | 23 | {% if authz.advertiseAction('stopBuild', request) %} 24 |

Stop Build

25 | {{ forms.stop_build(build_url+"/stop", authz, on_all=False, short=False, label='This Build') }} 26 | {% endif %} 27 | {% else %} 28 |

Results:

29 | 30 |

31 | {{ b.getText()|join(' ')|capitalize }} 32 |

33 | 34 | {% if b.getTestResults() %} 35 |

36 | {% endif %} 37 | {% endif %} 38 | 39 |

40 | {% if sourcestamps|count == 1 %} 41 | SourceStamp: 42 | {% else %} 43 | SourceStamps: 44 | {% endif %} 45 |

46 | 47 | {% for ss in sourcestamps %} 48 |

{{ ss.codebase }}

49 | 50 | {% set ss_class = cycler('alt','') %} 51 | 52 | {% if ss.project %} 53 | 54 | {% endif %} 55 | 56 | {% if ss.repository %} 57 | 58 | {% endif %} 59 | 60 | {% if ss.branch %} 61 | 62 | {% endif %} 63 | 64 | {% if ss.revision %} 65 | 66 | {% endif %} 67 | 68 | {% if got_revisions[ss.codebase] %} 69 | 70 | {% endif %} 71 | 72 | {% if ss.patch %} 73 | 74 | {% endif %} 75 | 76 | {% if ss.changes %} 77 | 78 | {% endif %} 79 | 80 | {% if not ss.branch and not ss.revision and not ss.patch and not ss.changes %} 81 | 82 | {% endif %} 83 |
Project{{ ss.project|projectlink }}
Repository{{ ss.repository|repolink }}
Branch{{ ss.branch|e }}
Revision{{ ss.revision|revlink(ss.repository) }}
Got Revision{{ got_revisions[ss.codebase]|revlink(ss.repository) }}
PatchYES
Changes{{ ss.changes|count }} change{{ 's' if ss.changes|count > 1 else '' }}
Build of most recent revision
84 | {% endfor %} 85 | 86 | {# 87 | # TODO: turn this into a table, or some other sort of definition-list 88 | # that doesn't take up quite so much vertical space 89 | #} 90 | 91 |

BuildSlave:

92 | 93 | {% if slave_url %} 94 |
{{ b.getSlavename()|e }} 95 | {% else %} 96 | {{ b.getSlavename()|e }} 97 | {% endif %} 98 | 99 |

Reason:

100 |

101 | {{ b.getReason()|e }} 102 |

103 | 104 |

Steps and Logfiles:

105 | 106 | {# 107 | # TODO: 108 | # urls = self.original.getURLs() 109 | # ex_url_class = "BuildStep external" 110 | # for name, target in urls.items(): 111 | # text.append('[%s]' % 112 | # (target, ex_url_class, html.escape(name))) 113 | #} 114 | 115 |
    116 | {% for s in steps %} 117 |
  1. 118 |
    119 | {{ s.name }} 120 | {{ s.text }} {{ '( ' + s.time_to_run + ' )' if s.time_to_run else '' }} 121 |
    122 | 123 |
      124 | {% set item_class = cycler('alt', '') %} 125 | {% for l in s.logs %} 126 |
    1. {{ l.name }}
    2. 127 | {% else %} 128 |
    3. - no logs -
    4. 129 | {% endfor %} 130 | 131 | {% for u in s.urls %} 132 |
    5. {{ u.logname }}
    6. 133 | {% endfor %} 134 |
    135 |
  2. 136 | {% endfor %} 137 |
138 | 139 |
140 |
141 | 142 |

Build Properties:

143 | 144 | 145 | 146 | 147 | {% for p in properties %} 148 | {% if p.source != "Force Build Form" %} 149 | 150 | 151 | {% if p.short_value %} 152 | 153 | {% else %} 154 | {% if p.value is not mapping %} 155 | 156 | {% else %} 157 | 164 | {% endif %} 165 | {% endif %} 166 | 167 | 168 | {% endif %} 169 | {% endfor %} 170 |
NameValueSource
{{ p.name|e }}{{ p.short_value|e }} .. [property value too long]{{ p.value|e }} 158 | 159 | {%- for key, value in p.value.items() recursive %} 160 | 161 | {% endfor %} 162 |
{{ key|e }}{{ value|e }}
163 |
{{ p.source|e }}
171 |

Forced Build Properties:

172 | 173 | 174 | 175 | {% for p in properties %} 176 | {% if p.source == "Force Build Form" %} 177 | 178 | 179 | 184 | {% if p.text %} 185 | 186 | {% else %} 187 | 188 | {% endif %} 189 | 190 | {% endif %} 191 | {% endfor %} 192 |
NameLabelValue
{{ p.name|e }} 180 | {% if p.label %} 181 | {{ p.label }} 182 | {% endif %} 183 | {{ p.value|e }}
193 | 194 |

Responsible Users:

195 | 196 | {% if responsible_users %} 197 |
    198 | {% for u in responsible_users %} 199 |
  1. {{ u|user }}
  2. 200 | {% endfor %} 201 |
202 | {% else %} 203 |

no responsible users

204 | {% endif %} 205 | 206 | 207 |

Timing:

208 | 209 | 210 | {% if end %} 211 | 212 | {% endif %} 213 | 214 |
Start{{ start }}
End{{ end }}
Elapsed{{ elapsed }}
215 | 216 | {% if authz.advertiseAction('forceBuild', request) %} 217 |

Resubmit Build:

218 | {{ forms.rebuild_build(build_url+"/rebuild", authz, sourcestamps[0]) }} 219 | {% endif %} 220 | 221 |
222 | 223 |
224 | 225 | {% if has_changes %} 226 |
227 |

All Changes:

228 | {% for ss in sourcestamps %} 229 | {% if ss.changes %} 230 |

{{ ss.codebase }}:

231 |
    232 | {% for c in ss.changes %} 233 |
  1. Change #{{ c.number }}

    234 | {{ change(c.asDict()) }} 235 |
  2. 236 | {% endfor %} 237 |
238 | {% endif %} 239 | {% endfor %} 240 |
241 | {% endif %} 242 | 243 | {% endblock %} 244 | -------------------------------------------------------------------------------- /master/templates/build_line.html: -------------------------------------------------------------------------------- 1 | {% macro build_line(b, include_builder=False) %} 2 | ({{ b.time }}) 3 | Rev: {{ b.rev|shortrev(b.rev_repo) }} 4 | {{ b.results }} 5 | {% if include_builder %} 6 | {{ b.builder_name }} 7 | {% endif %} 8 | #{{ b.buildnum }} - 9 | {{ b.text|capitalize }} 10 | {% endmacro %} 11 | 12 | {% macro build_tr(b, include_builder=False, loop=None) %} 13 | 14 | {{ b.time }} 15 | {{ b.rev|shortrev(b.rev_repo) }} 16 | {{ b.results }} 17 | {%- if include_builder %} 18 | {{ b.builder_name }} 19 | {% endif %} 20 | #{{ b.buildnum }} 21 | {{ b.text|capitalize }} 22 | 23 | {% endmacro %} 24 | 25 | {% macro build_table(builds, include_builder=False) %} 26 | {% if builds %} 27 | 28 | 29 | 30 | 31 | 32 | {%- if include_builder %} 33 | 34 | {% endif %} 35 | 36 | 37 | 38 | {% for b in builds %} 39 | {{ build_tr(b, include_builder, loop) }} 40 | {% endfor %} 41 |
TimeRevisionResultBuilderBuild #Info
42 | {% else %} 43 | No matching builds found 44 | {% endif %} 45 | {% endmacro %} 46 | -------------------------------------------------------------------------------- /master/templates/builder.html: -------------------------------------------------------------------------------- 1 | {% from 'build_line.html' import build_table %} 2 | {% import 'forms.html' as forms %} 3 | 4 | {% extends "layout.html" %} 5 | {% block content %} 6 | 7 |

Builder {{ name }}

8 | 9 |

(view in waterfall)

10 | 11 | {% if description %} 12 |
{{ description }}
13 | {% endif %} 14 | 15 |
16 | 17 | {% if current %} 18 |

Current Builds:

19 | 34 | {% else %} 35 |

No current builds

36 | {% endif %} 37 | 38 | {% if pending %} 39 |

Pending Build Requests:

40 | 64 | 65 | {% if authz.advertiseAction('cancelPendingBuild', request) %} 66 | {{ forms.cancel_pending_build(builder_url+"/cancelbuild", authz, short=False, id='all') }} 67 | {% endif %} 68 | 69 | {% else %} 70 |

No Pending Build Requests

71 | {% endif %} 72 | 73 |

Recent Builds:

74 | 75 | {{ build_table(recent) }} 76 | 77 | Show more 78 | 79 |
80 |
81 | 82 |

Buildslaves:

83 | 84 | {% if slaves %} 85 | 86 | 87 | 88 | 89 | 90 | {% endif %} 91 | {% for s in slaves %} 92 | 93 | 94 | {% if s.connected %} 95 | {% if s.paused %} 96 | 97 | {% else %} 98 | 99 | {% endif %} 100 | {% else %} 101 | 102 | {% endif %} 103 | 104 | 105 | {% else %} 106 | 107 | {% endfor %} 108 |
NameStatusAdmin
{{ s.name|e }}pausedconnectedoffline{{ s.admin|email if s.admin else ""}}
no slaves attached
109 | 110 | {% if authz.advertiseAction('pingBuilder', request) %} 111 |

Ping slaves

112 | {{ forms.ping_builder(builder_url+"/ping", authz) }} 113 | {% endif %} 114 | 115 | {% if authz.advertiseAction('forceBuild', request) and force_schedulers != {} %} 116 |

Force build

117 | {{ forms.force_build(builder_url+"/force", authz, request, False, force_schedulers=force_schedulers,default_props=default_props) }} 118 | {% endif %} 119 | 120 |
121 | 122 | {% endblock %} 123 | -------------------------------------------------------------------------------- /master/templates/builders.html: -------------------------------------------------------------------------------- 1 | {% extends 'layout.html' %} 2 | {% import 'forms.html' as forms %} 3 | {% from "box_macros.html" import box %} 4 | 5 | {% block content %} 6 |

Builders: {{ branches|join(', ')|e }}

7 | 8 | 9 | {% for b in builders %} 10 | 11 | 12 | {% if b.build_url %} 13 | 17 | {% else %} 18 | 19 | {% endif %} 20 | {{ box(**b.current_box) }} 21 | 22 | {% endfor %} 23 |
{{ b.name|e }} 14 | {{ b.build_label }} 15 |
{{ b.build_text }} 16 |
no build
24 | 25 | {% if num_building > 0 %} 26 | {% if authz.advertiseAction('stopAllBuilds', request) or authz.advertiseAction('stopBuild', request) %} 27 |

Stop Selected Builds

28 | {{ forms.stop_build(path_to_root+"builders/_selected/stopselected", authz, on_selected=True, builders=builders, label='Selected Builds') }} 29 |

Stop All Builds

30 | {{ forms.stop_build(path_to_root+"builders/_all/stopall", authz, on_all=True, label='All Builds') }} 31 | {% endif %} 32 | {% endif %} 33 | 34 | {% if num_online > 0 %} 35 | {% if authz.advertiseAction('forceAllBuilds', request) or authz.advertiseAction('forceBuild', request) %} 36 |

Force Selected Builds

37 | {{ forms.force_build(path_to_root+"builders/_selected/forceselected", authz, request, on_selected=True, builders=builders, force_schedulers=force_schedulers, default_props=default_props) }} 38 |

Force All Builds

39 | {{ forms.force_build(path_to_root+"builders/_all/forceall", authz,request, on_all=True, force_schedulers=force_schedulers, default_props=default_props) }} 40 | {% endif %} 41 | {% endif %} 42 | 43 | {% endblock %} 44 | -------------------------------------------------------------------------------- /master/templates/buildslave.html: -------------------------------------------------------------------------------- 1 | {% from 'build_line.html' import build_table, build_line %} 2 | {% import 'forms.html' as forms %} 3 | 4 | {% extends "layout.html" %} 5 | {% block content %} 6 |

Buildslave: {{ slavename|e }}

7 | 8 |
9 | 10 | {% if current %} 11 |

Currently building:

12 | 22 | {% else %} 23 |

No current builds

24 | {% endif %} 25 | 26 |

Recent builds

27 | {{ build_table(recent, True) }} 28 | 29 |
30 |
31 | {% if access_uri %} 32 | Click to Access Slave 33 | {% endif %} 34 | 35 | {% if admin %} 36 |

Administrator

37 |

{{ admin|email }}

38 | {% endif %} 39 | 40 | {% if host %} 41 |

Slave information

42 | Buildbot-Slave {{ slave_version }} 43 |
{{ host|e }}
44 | {% endif %} 45 | 46 |

Connection Status

47 |

48 | {{ connect_count }} connection(s) in the last hour 49 | {% if not slave.isConnected() %} 50 | (not currently connected) 51 | {% else %} 52 |

53 | {% if authz.advertiseAction('gracefulShutdown', request) %} 54 |

Graceful Shutdown

55 | {% if slave.getGraceful() %} 56 |

Slave will shut down gracefully when it is idle.

57 | {% else %} 58 | {{ forms.graceful_shutdown(shutdown_url, authz) }} 59 | {% endif %} 60 | {% endif %} 61 | {% if authz.advertiseAction('pauseSlave', request) %} 62 |

Pause Slave

63 | {{ forms.pause_slave(pause_url, authz, slave.isPaused()) }} 64 | {% endif %} 65 | {% endif %} 66 |
67 | 68 | {% endblock %} 69 | -------------------------------------------------------------------------------- /master/templates/buildslaves.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

Buildslaves

6 | 7 |
8 | 9 | 10 | 11 | 12 | 13 | {%- if show_builder_column %} 14 | 15 | {%- endif %} 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | {% for s in slaves %} 24 | 25 | 26 | 27 | {%- if show_builder_column %} 28 | 37 | {%- endif %} 38 | 39 | 40 | 41 | 42 | {%- if s.admin -%} 43 | 44 | {%- else -%} 45 | 46 | {%- endif -%} 47 | 48 | 53 | 56 | 57 | {% if s.connected %} 58 | {% if s.running_builds %} 59 | 60 | {% elif s.paused %} 61 | 62 | {% else %} 63 | 64 | {% endif %} 65 | 66 | {% else %} 67 | 68 | {% endif %} 69 | 70 | 71 | {% endfor %} 72 |
NameBuildersBuildBotAdminLast heard fromConnects/HourStatus
{{ s.name }} 29 | {%- if s.builders %} 30 | {%- for b in s.builders %} 31 | {{ b.name }} 32 | {%- endfor %} 33 | {%- else %} 34 | no builders 35 | {%- endif -%} 36 | {{ (s.version or '-')|e }}{{ s.admin|email }}- 49 | {%- if s.last_heard_from_age -%} 50 | {{ s.last_heard_from_age }} ({{ s.last_heard_from_time }}) 51 | {%- endif -%} 52 | 54 | {{ s.connectCount }} 55 | Running {{ s.running_builds }} build(s)PausedIdleNot connected
73 | 74 |
75 | 76 | {% endblock %} 77 | -------------------------------------------------------------------------------- /master/templates/buildstatus.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% from "box_macros.html" import box %} 3 | 4 | {% block header %} 5 | {% endblock %} 6 | 7 | {% block barecontent %} 8 | 9 | {% for r in rows %} 10 | {{ box(**r) }} 11 | {% endfor %} 12 | 13 | {{ box(**build) }} 14 |
15 | {% endblock %} 16 | 17 | {% block footer %} 18 | {% endblock %} 19 | 20 | -------------------------------------------------------------------------------- /master/templates/buildstep.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

6 | Builder {{ b.getBuilder().getName() }} 7 | build #{{ b.getNumber() }} 8 | step {{ s.getName() }} 9 |

10 | 11 |
12 | 13 | {% if s.isFinished() %} 14 |

Finished

15 |

16 | {%- set text = s.getText() -%} 17 | {%- if text is string %}{{ text|e }} 18 | {%- else %}{{ text|join(" ")|e }}{% endif -%} 19 |

20 | {% else %} 21 |

Not Finished

22 |

ETA {{ s.getETA()|e }} seconds

23 | {% endif %} 24 | 25 | {% set exp = s.getExpectations() %} 26 | {% if exp %} 27 |

Expectations

28 | 33 | {% endif %} 34 | 35 |

Timing

36 | {% if start %} 37 | 38 | 39 | 40 | 41 |
Start{{ start }}
End{{ end or "Not finished" }}
Elapsed{{ elapsed }}
42 | {% else %} 43 | Not started 44 | {% endif %} 45 | 46 |

Logs

47 | 60 | 61 | {% if statistics %} 62 |

Statistics

63 | 64 | 65 | {% for stat in statistics %} 66 | 67 | {% endfor %} 68 |
NameValue
{{ stat.name|e }}{{ stat.value|e }}
69 | {% endif %} 70 | 71 |
72 | 73 | {% endblock %} 74 | -------------------------------------------------------------------------------- /master/templates/change.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% from "change_macros.html" import change with context %} 3 | {% import 'forms.html' as forms %} 4 | 5 | {% block content %} 6 | 7 |

{{ pageTitle }}

8 | 9 |
10 | 11 | {{ change(c) }} 12 | 13 | {% if authz.advertiseAction('stopChange', request) %} 14 |

Cancel Builds For Change:

15 | {{ forms.stop_change_builds("/builders/_all/stopchangeall", c.number, authz) }} 16 | {% endif %} 17 | 18 |
19 | 20 | {% endblock %} 21 | -------------------------------------------------------------------------------- /master/templates/change_macros.html: -------------------------------------------------------------------------------- 1 | {% macro change(c) %} 2 | 3 | 4 | {% set row_class=cycler('alt','') %} 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | {% if c.repository %} 19 | 20 | 21 | 22 | 23 | {% endif %} {% if c.project %} 24 | 25 | 26 | 27 | 28 | {% endif %} {% if c.branch %} 29 | 30 | 31 | 32 | 33 | {% endif %} {% if c.rev %} 34 | 35 | 36 | 38 | 39 | {% endif %} 40 |
Category{{ c.category }}
Changed by{{ c.who|email }}
Changed at{{ c.at }}
Repository{{ c.repository|repolink }}
Project{{ c.project|projectlink }}
Branch{{ c.branch|e }}
Revision{%- if c.revlink -%}{{ c.rev|e }} 37 | {%- else -%}{{ c.rev|revlink(c.repository) }} {%- endif -%}
41 | 42 | {% if c.comments %} 43 |

Comments

44 |
{{ c.comments|changecomment(c.project) }}
45 | {% endif %} 46 | 47 |

Changed files

48 | 60 | 61 | {% if c.properties %} 62 |

Properties

63 | 64 | {% for p in c.properties %} 65 | 66 | 67 | 68 | 69 | {% endfor %} 70 |
{{ p[0]|capitalize|e }}{{ p[1]|e }}
71 | {% endif %} 72 | {%- endmacro %} 73 | 74 | {% macro box_contents(who, url, pageTitle, revision, project) -%} 75 | {{ who|user }} 76 | {%- endmacro %} 77 | -------------------------------------------------------------------------------- /master/templates/change_sources.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

Changesources

6 | 7 |
8 | 9 | {% if sources %} 10 |
    11 | {% for s in sources -%} 12 |
  1. {{ s.describe() }}
  2. 13 | {% endfor -%} 14 |
15 | {% else %} 16 | none (push only) 17 | {% endif %} 18 | 19 |
20 | 21 | {% endblock %} 22 | -------------------------------------------------------------------------------- /master/templates/console.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block title %} 4 | {{ title_str('console') }} 5 | {% endblock %} 6 | 7 | {% block head %} 8 | {{ super() }} 9 | 80 | {% endblock %} 81 | 82 | {% block content %} 83 | 84 |

{{ title_str('console') }}

85 | 86 |
87 | 88 | 89 | 103 | 120 | 140 | 141 |
90 | {% if categories|length > 1 %} 91 |
Categories: {% for c in categories %}{{ c.name|e }} {% endfor %} 92 | {% endif %} 93 | {% if codebase %} 94 |
Codebase: {{ codebase|e }} 95 | {% endif %} 96 | {% if repository %} 97 |
Repository: {{ repository|e }} 98 | {% endif %} 99 | {% if project %} 100 |
Project: {{ project|e }} 101 | {% endif %} 102 |
104 |
105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 |
Legend:  PassedFailedWarningsFailed AgainRunningExceptionOfflineNo data
118 |
119 |
121 | 132 |
133 | 137 | 138 |
139 |
142 |
143 | 144 |
145 | 146 | {% set alt_class = cycler('', 'Alt') %} 147 | 148 |
149 | 150 | 151 | {% if categories|length > 1 %} 152 | 153 | 155 | 157 | {% for c in categories %} 158 | 161 | {% endfor %} 162 | 163 | 164 | 165 | {% endif %} 166 | 167 | {% if slaves %} 168 | 169 | 171 | 173 | {% for c in categories %} 174 | 186 | {% endfor %} 187 | 188 | {% endif %} 189 | 190 | {% for r in revisions %} 191 | {% set alt = alt_class.next() %} 192 | {% set firstrev = "first" if loop.first else '' %} 193 | 194 | 195 | 199 | 202 | 203 | {% for c in categories %} 204 | {% set last = "last" if loop.last else "" %} 205 | 218 | {% endfor %} 219 | 220 | 221 | 222 | 225 | 226 | 227 | {% if r.details %} 228 | 229 | 240 | 241 | {% endif %} 242 | 243 | 244 | 246 | 247 | 248 | {% else %} 249 | 250 | {% endfor %} 251 | 252 |
154 | 156 | 159 | {{ c.name|e }} 160 |
170 | 172 | 175 | 176 | 177 | {% for s in slaves[c.name] %} 178 | 182 | {% endfor %} 183 | 184 |
179 | 180 | 181 |
185 |
196 | {{ r.id|shortrev(r.repository) }} 197 | {{ r.date|e }} 198 | 200 | {{ r.who|user }} 201 | 206 | 207 | 208 | {% for b in r.builds[c.name] %} 209 | 214 | {% endfor %} 215 | 216 |
210 | 213 |
217 |
223 | {{ r.comments|changecomment(r.project or None)|replace('\n', '
')|replace(' ','  ') }} 224 |
230 |
    231 | {% for d in r.details %} 232 |
  • {{ d.buildername }}: {{ d.status }} -   233 | {%- for l in d.logs -%} 234 | {{ l.name }} 235 | {%- endfor -%} 236 |
  • 237 | {% endfor %} 238 |
239 |
245 |
No revisions available
253 |
254 | 255 | 256 |
257 |
258 | 259 | 260 | 261 | 262 | 270 | 271 | {% endblock %} 272 | 273 | 274 | {% block footer %} 275 | 276 | {{ super() }} 277 | {#

Debug info: {{ debuginfo }}

#} 278 | {% endblock %} 279 | -------------------------------------------------------------------------------- /master/templates/directory.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

Directory listing for {{ path }}

6 | 7 | {% set row_class = cycler('alt', '') %} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | {% for d in directories %} 19 | 20 | 21 | 22 | 23 | 24 | 25 | {% endfor %} 26 | 27 | {% for f in files %} 28 | 29 | 30 | 31 | 32 | 33 | 34 | {% endfor %} 35 |
NameSizeTypeEncoding
{{ d.text }}{{ d.size }}{{ d.type }}{{ d.encoding }}
{{ f.text }}{{ f.size }}{{ f.type }}{{ f.encoding }}
36 | 37 | {% endblock %} 38 | -------------------------------------------------------------------------------- /master/templates/empty.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | {{ content }} 5 | {% endblock %} 6 | -------------------------------------------------------------------------------- /master/templates/feed_atom10.xml: -------------------------------------------------------------------------------- 1 | {% from 'feed_description.html' import item_desc %} 2 | 3 | 4 | 5 | {{ title_url }} 6 | {{ pageTitle|e }} 7 | {% if project_url -%} 8 | 9 | 10 | {% endif %} 11 | {%- if description -%} 12 | {{ description }} 13 | {% endif %} 14 | {%- if rfc3339_pubdate -%} 15 | {{ rfc3339_pubdate }} 16 | {% endif -%} 17 | 18 | BuildBot 19 | 20 | 21 | {% for b in builds -%} 22 | 23 | {{ b.pageTitle }} 24 | 25 | 26 |
27 | {{ item_desc(b, title_url, title)|indent(6) }} 28 |
{{ b.log_lines|join('\n')|e }}
29 |
30 |
31 | {% if b.rfc3339_pubdate -%} 32 | {{ b.rfc3339_pubdate }} 33 | {{ b.guid }} 34 | {% endif -%} 35 | Buildbot 36 |
37 | 38 | {% endfor -%} 39 | 40 |
41 | -------------------------------------------------------------------------------- /master/templates/feed_description.html: -------------------------------------------------------------------------------- 1 | {% from 'feed_sources.html' import srcs_desc %} 2 | 3 | {% macro item_desc(b, title_url, title) -%} 4 |

5 | Date: {{ b.date }}
6 | Project home: {{ title|e }}
7 | Builder summary: {{ b.name }}
8 | Build details: Build {{ b.number }}
9 | Author list: {{ b.responsible_users|join(', ') }}
10 | Failed step(s): {{ b.failed_steps|join(', ') }}
11 |

12 | {% for src in b.sources %} 13 | {{ srcs_desc(src) }} 14 | {% endfor %} 15 |

16 | Last lines of the build log: 17 |

18 | {%- endmacro %} 19 | -------------------------------------------------------------------------------- /master/templates/feed_rss20.xml: -------------------------------------------------------------------------------- 1 | {% from 'feed_description.html' import item_desc %} 2 | 3 | 4 | 5 | 6 | {{ pageTitle|e }} 7 | {{ title_url }} 8 | 9 | {% if language -%} 10 | {{ language }} 11 | {% endif %} 12 | {%- if description -%} 13 | {{ description }} 14 | {% endif %} 15 | {%- if rfc822_pubdate -%} 16 | {{ rfc822_pubdate }} 17 | {% endif %} 18 | 19 | {% for b in builds -%} 20 | 21 | {{ b.pageTitle }} 22 | {{ b.link }} 23 | 24 | {{ b.log_lines|join('\n')|e }} 27 | ]]> 28 | 29 | {% if b.rfc822_pubdate -%} 30 | {{ b.rfc822_pubdate }} 31 | {{ b.guid }} 32 | {%- endif %} 33 | 34 | 35 | 36 | {% endfor %} 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /master/templates/footer.html: -------------------------------------------------------------------------------- 1 |
2 | 23 | 24 | -------------------------------------------------------------------------------- /master/templates/forms.html: -------------------------------------------------------------------------------- 1 | 2 | {% macro cancel_pending_build(cancel_url, authz, short=False, id='all') %} 3 |
5 | {% if not short %} 6 | {% if id == 'all' %} 7 |

To cancel all builds, fill out the following fields and 8 | push the 'Cancel' button

9 |

To cancel individual builds, click the 'Cancel' buttons above.

10 | {% else %} 11 |

To cancel this build, fill out the following fields and 12 | push the 'Cancel' button

13 | {% endif %} 14 | {% endif %} 15 | 16 | 17 |
18 | {% endmacro %} 19 | 20 | {% macro stop_change_builds(stopchange_url, changenum, authz) %} 21 | {% if not changenum %} 22 |
23 | {% if changenum %} 24 |

To cancel all builds for this change, push the 'Cancel' button

25 | {% else %} 26 |

To cancel builds for this builder for a given change, fill out all 27 | fields and push the 'Cancel' button

28 | {% endif %} 29 | 30 | {% if changenum %} 31 | 32 | {% else %} 33 |
34 | Change #: 35 | 36 |
37 | {% endif %} 38 | 39 |
40 | {% endif %} 41 | {% endmacro %} 42 | 43 | {% macro stop_build(stop_url, authz, on_all=False, on_selected=False, builders=[], short=False, label="Build") %} 44 | {% if not short %} 45 |
47 | {% if not short %} 48 | {% if on_all %} 49 |

To stop all builds, fill out the following field and 50 | push the Stop {{ label }} button

51 | {% elif on_selected %} 52 |

To stop selected builds, select the builders, fill out the 53 | following field and push the Stop {{ label }} button

54 | 55 | {% for b in builders %} 56 | 57 | 58 | 59 | 60 | {% endfor %} 61 |
{{ b.name|e }}
62 | 63 | {% else %} 64 |

To stop this build, fill out the following field and 65 | push the Stop {{ label }} button

66 | {% endif %} 67 | {% endif %} 68 | 69 | {% if not short %} 70 |
71 | Reason: 72 | 73 |
74 | {% endif %} 75 | 76 | 77 |
78 | {% endif %} 79 | {% endmacro %} 80 | 81 | {% macro force_build_scheduler_parameter(f, authz, request, sch, default_props) %} 82 | {% if f and not f.hide and (f.fullName != "username" or not authz.authenticated(request)) %} 83 |
84 | {% if 'text' in f.type or 'int' in f.type %} 85 | {{f.label}} 86 | 87 | {% elif 'bool' in f.type%} 88 | 89 | {{f.label}} 90 | {% elif 'textarea' in f.type %} 91 | {{f.label}} 92 | 93 | {% elif 'list' in f.type %} 94 | {{f.label}} 95 | 96 | 101 | 102 | {% elif 'nested' in f.type %} 103 | {% if f.label %}{{f.label}}{% endif %} 104 | {% for subfield in f.fields %} 105 | {{ force_build_scheduler_parameter(subfield, authz, request, sch, default_props) }} 106 | {% endfor %} 107 | {% endif %} 108 |
109 | {% endif %} 110 | {% endmacro %} 111 | 112 | {% macro force_build_one_scheduler(force_url, authz, request, on_all, on_selected, builders, sch, default_props) %} 113 |
114 | 115 |

{{ sch.name|e }}

116 | {% if on_all %} 117 |

To force a build on all Builders, fill out the following fields 118 | and push the 'Force Build' button

119 | {% elif on_selected %} 120 |

To force a build on certain Builders, select the 121 | builders, fill out the following fields and push the 122 | 'Force Build' button

123 | 124 | 125 | {% for b in builders %} 126 | {% if b.name in sch.builderNames %} 127 | 128 | 129 | 130 | 131 | {% endif %} 132 | {% endfor %} 133 |
{{ b.name|e }}
134 | 135 | {% else %} 136 |

To force a build, fill out the following fields and 137 | push the 'Force Build' button

138 | {% endif %} 139 | 140 | {% for f in sch.all_fields %} 141 | {{ force_build_scheduler_parameter(f, authz, request, sch, default_props) }} 142 | {% endfor %} 143 | 144 | 145 |
146 | {% endmacro %} 147 | {% macro force_build(force_url, authz, request, on_all=False, on_selected=False, builders=[], force_schedulers={},default_props={}) %} 148 | {% for name, sch in force_schedulers.items() | sort %} 149 | {{ force_build_one_scheduler(force_url, authz, request, on_all, on_selected, builders, sch, default_props=default_props) }} 150 | {% endfor %} 151 | 152 | {% endmacro %} 153 | 154 | {% macro graceful_shutdown(shutdown_url, authz) %} 155 |
156 | 157 |

To cause this slave to shut down gracefully when it is idle, 158 | push the 'Graceful Shutdown' button

159 | 160 |
161 | {% endmacro %} 162 | 163 | {% macro pause_slave(pause_url, authz, paused) %} 164 |
165 | 166 | {% if paused %} 167 |

To cause this slave to start running new builds again, 168 | push the 'Unpause Slave' button

169 | {% else %} 170 |

To cause this slave to stop running new builds, 171 | push the 'Pause Slave' button

172 | {% endif %} 173 | 174 | {% if paused %} 175 | 176 | {% else %} 177 | 178 | {% endif %} 179 |
180 | {% endmacro %} 181 | 182 | {% macro clean_shutdown(shutdown_url, authz) %} 183 |
184 |

To cause this master to shut down cleanly, push the 'Clean Shutdown' button.

185 |

No other builds will be started on this master, and the master will 186 | stop once all current builds are finished.

187 | 188 | 189 |
190 | {% endmacro %} 191 | 192 | {% macro cancel_clean_shutdown(cancel_shutdown_url, authz) %} 193 |
194 |

To cancel a previously initiated shutdown, push the 'Cancel Shutdown' button.

195 | 196 | 197 |
198 | {% endmacro %} 199 | 200 | {% macro ping_builder(ping_url, authz) %} 201 |
202 |

To ping the buildslave(s), push the 'Ping' button

203 | 204 |
205 | {% endmacro %} 206 | 207 | {% macro rebuild_build(rebuild_url, authz, ss) %} 208 |
209 | 210 | {% if on_all %} 211 |

To force a build on all Builders, fill out the following fields 212 | and push the 'Force Build' button

213 | {% else %} 214 |

To force a build, fill out the following fields and 215 | push the 'Force Build' button

216 | {% endif %} 217 |
218 | Reason for re-running build: 219 | 220 |
221 | 222 |
223 | {% endmacro %} 224 | 225 | {% macro show_users(users_url, authz) %} 226 |
227 |

To show users, press the 'Show Users' button

228 | 229 | 230 |
231 | {% endmacro %} 232 | -------------------------------------------------------------------------------- /master/templates/grid.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'grid_macros.html' as grid with context %} 3 | 4 | {% block title %} 5 | {{ title_str('grid') }} 6 | {% endblock %} 7 | 8 | {% block content %} 9 | 10 |

{{ title_str('grid') }}

11 | 12 | 13 | 14 | 15 | 18 | 19 | {% for s in stamps %} 20 | {{ grid.stamp_td(s) }} 21 | {% endfor %} 22 | 23 | 24 | {% for builder in builders %} 25 | 26 | {{ grid.builder_td(builder) }} 27 | {% for build in builder.builds %} 28 | {{ grid.build_td(build) }} 29 | {% endfor %} 30 | 31 | {% endfor %} 32 | 33 |
{{ title }} 16 | {{ grid.category_title() }} 17 |
34 | 35 | {% endblock %} 36 | -------------------------------------------------------------------------------- /master/templates/grid_macros.html: -------------------------------------------------------------------------------- 1 | {% macro category_title() -%} 2 | {% if categories %} 3 |
4 | {% trans categories=categories %} 5 | Category:
6 | {% pluralize categories %} 7 | Categories:
8 | {% endtrans %} 9 | {% for c in categories %} 10 | {{ c|e }}
11 | {% endfor %} 12 | {% endif %} 13 | 14 | {% if branch != ANYBRANCH %} 15 |
Branch: {{ branch|e or "trunk" }} 16 | {% endif %} 17 | {%- endmacro %} 18 | 19 | 20 | {% macro stamp_td(sourcestamps) -%} 21 | 22 | {% for ss in sourcestamps %} 23 | {%- if ss.codebase %}{{ ss.codebase|e }}: {% endif %} 24 | {%- if ss.revision -%} 25 | {{ ss.revision|shortrev(ss.repository) }} 26 | {%- else %}latest{% endif %} 27 | {%- if ss.branch %} in {{ ss.branch|e }}{% endif %} 28 | {%- if ss.hasPatch %} [patch]{% endif -%} 29 |
30 | {%- endfor %} 31 | 32 | {%- endmacro %} 33 | 34 | {% macro builder_td(b) -%} 35 | 36 | {{ b.name }} 37 | {%- if b.state != 'idle' or b.n_pending > 0 -%} 38 |
({{ b.state }} 39 | {%- if b.n_pending > 0 -%} 40 | , plus {{ b.n_pending }} 41 | {%- endif -%} 42 | ) 43 | {%- endif -%} 44 | 45 | {%- endmacro %} 46 | 47 | {% macro build_td(build) -%} 48 | {% if build %} 49 | 50 | {{ build.text|join('
') }}
51 | 52 | {% else %} 53 |   54 | {% endif %} 55 | {%- endmacro %} 56 | 57 | -------------------------------------------------------------------------------- /master/templates/grid_transposed.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'grid_macros.html' as grid with context %} 3 | 4 | {% block title %} 5 | {{ title_str('tgrid') }} 6 | {% endblock %} 7 | 8 | {% block content %} 9 | 10 |

{{ title_str('tgrid') }}

11 | 12 | 13 | 14 | 15 | 18 | {% for builder in builders %} 19 | {{ grid.builder_td(builder) }} 20 | {% endfor %} 21 | 22 | 23 | {% for i in range %} 24 | 25 | {{ grid.stamp_td(stamps[i]) }} 26 | {% for b in builder_builds %} 27 | {{ grid.build_td(b[i]) }} 28 | {% endfor %} 29 | 30 | {% endfor %} 31 | 32 |
{{ title }} 16 | {{ grid.category_title() }} 17 |
33 | 34 | {% endblock %} 35 | -------------------------------------------------------------------------------- /master/templates/jsonhelp.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% block content %} 3 | 4 |

{{ text }}

5 |

More Help:

6 | 7 | {% if level != 1 %} 8 |

Parent's Help

9 | {% endif %} 10 | 11 | {% if children %} 12 |

Child Nodes

13 | 20 | {% endif %} 21 | 22 |

Flags:

23 | {{ flags }} 24 | 25 |

Examples:

26 | {{ examples }} 27 | 28 | {% endblock %} 29 | -------------------------------------------------------------------------------- /master/templates/layout.html: -------------------------------------------------------------------------------- 1 | {%- block doctype -%} 2 | 4 | 5 | {% endblock %} 6 | {%- macro title_str(view) %} 7 | Rust build {{ view }} 8 | {%- if branch is defined and branch != '' and branch != ANYBRANCH %} 9 | : {{ branch|e }} 10 | {%- endif %} 11 | {%- endmacro %} 12 | 13 | 14 | 15 | {% block head %} 16 | 17 | {% if metatags %} 18 | {{ metatags }} 19 | {% endif %} 20 | {% if refresh %} 21 | 22 | {% endif %} 23 | {% block title %}{{ pageTitle|e }}{% endblock %} 24 | 25 | 26 | 27 | 28 | {% endblock %} 29 | 30 | 31 | {% block header -%} 32 |
33 | 34 | {% set branches = ['auto', 'try', 'snap-stage3', 'dist-snap'] %} 35 | {% set views = ['console', 'tgrid', 'grid', 'waterfall'] %} 36 | {% set alt_class = cycler('', 'Alt') %} 37 | 38 | {% macro onelink(view,branch) %} 39 | {% if branch is sameas 'all' %} 40 | {{view}} 41 | {% else %} 42 | {{view}} 43 | {% endif %} 44 | {% endmacro %} 45 | 46 | {% macro branchlinks(branch) -%} 47 | 48 | {{ branch|e }} 49 | {% for view in views %} 50 | {{ onelink(view, branch) }} 51 | {% endfor %} 52 | 53 | {%- endmacro %} 54 | 55 | Home 56 | - Waterfall 57 | Grid 58 | T-Grid 59 | Console 60 | Builders 61 | Recent Builds 62 | Buildslaves 63 | Changesources 64 | {% if authz.advertiseAction('showUsersPage', request) %} 65 | Users 66 | {% endif %} 67 | - JSON API 68 | - About 69 |
70 | {% if authz.authenticated(request) %} 71 | {{ authz.getUsernameHTML(request) }} 72 | |Logout 73 | {% elif authz.useHttpHeader and authz.httpLoginUrl %} 74 | Login 75 | {% elif authz.auth %} 76 |
77 | 78 | 79 | 80 |
81 | {% endif %} 82 |
83 | 84 |
85 | 87 | 88 | {{ branchlinks('all') }} 89 | {% for branch in branches %} 90 | {{ branchlinks(branch) }} 91 | {% endfor %} 92 |
93 |
94 | 95 |
96 | {% endblock %} 97 | 98 | {%- block barecontent -%} 99 |
100 | 101 | {% if alert_msg != "" %} 102 |
103 | {{ alert_msg }} 104 |
105 | {% endif %} 106 | 107 |
108 | {%- block content -%} 109 | {%- endblock -%} 110 |
111 | {%- endblock -%} 112 | 113 | {%- block footer -%} 114 | 129 | {% endblock -%} 130 | 131 | 132 | -------------------------------------------------------------------------------- /master/templates/logs.html: -------------------------------------------------------------------------------- 1 | {%- macro page_header(pageTitle, path_to_root, texturl) -%} 2 | 4 | 5 | {{ pageTitle }} 6 | 7 | 8 | 9 | (view as text)
10 |
  
11 | {%- endmacro -%}
12 | 
13 | {%- macro chunks(entries) -%}
14 | {%- for entry in entries -%}
15 |     {{ entry.text|e }}
16 | {%- endfor -%}
17 | {%- endmacro -%}
18 | 
19 | {%- macro page_footer() -%}
20 | 
21 | 22 | 23 | {%- endmacro -%} 24 | -------------------------------------------------------------------------------- /master/templates/onelineperbuild.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% from 'build_line.html' import build_table %} 3 | {% import 'forms.html' as forms %} 4 | 5 | {% block content %} 6 |

Last {{ num_builds }} finished builds: {{ branches|join(', ')|e }}

7 | 8 | {% if builders %} 9 |

of builders: {{ builders|join(", ")|e }}

10 | {% endif %} 11 | 12 |
13 | 14 | {{ build_table(builds, True) }} 15 | 16 |
17 |
18 | 19 | {% if num_building > 0 %} 20 | {% if authz.advertiseAction('stopBuild', request) %} 21 |

Stop All Builds

22 | {{ forms.stop_build("builders/_all/stopall", authz, on_all=True, label='All Builds') }} 23 | {% endif %} 24 | {% endif %} 25 | 26 | {% if num_online > 0 %} 27 | {% if authz.advertiseAction('forceAllBuilds', request) %} 28 |

Force All Builds

29 | {{ forms.force_build("builders/_all/forceall", authz, request, True, force_schedulers=force_schedulers, default_props=default_props) }} 30 | {% endif %} 31 | {% endif %} 32 | 33 | 34 |
35 | 36 | {% endblock %} 37 | -------------------------------------------------------------------------------- /master/templates/onelineperbuildonebuilder.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% from 'build_line.html' import build_line %} 3 | 4 | {% block content %} 5 | 6 |

Last {{ num_builds }} builds of builder {{ builder_name|e }}: 7 | {{ branches|join(', ')|e }} 8 |

9 | 10 | 17 | 18 | {% endblock %} 19 | -------------------------------------------------------------------------------- /master/templates/revmacros.html: -------------------------------------------------------------------------------- 1 | {# both macro pairs must have the same signature #} 2 | 3 | {% macro id_replace(rev, url) -%} 4 | 5 | {%- if rev|length > 40 %}{{ rev[:40] }}... 6 | {%- else %}{{ rev }} 7 | {%- endif -%} 8 | 9 | {%- endmacro %} 10 | 11 | {% macro shorten_replace(short, rev, url) %} 12 |
13 |
14 | {{ short }}... 15 |
16 |
17 | {{ rev }} 18 |
19 |
20 | {% endmacro %} 21 | 22 | {% macro id(rev, url) -%} 23 | 24 | {%- if rev|length > 40 %}{{ rev[:40] }}... 25 | {%- else %}{{ rev }} 26 | {%- endif -%} 27 | 28 | {%- endmacro %} 29 | 30 | {% macro shorten(short, rev, url) %} 31 |
32 |
{{ short }}...
33 |
{{ rev }}
34 |
35 | {% endmacro %} 36 | -------------------------------------------------------------------------------- /master/templates/root.html: -------------------------------------------------------------------------------- 1 | {% extends 'layout.html' %} 2 | {% import 'forms.html' as forms %} 3 | 4 | {% block content %} 5 | 6 |

Welcome to the Buildbot 7 | {%- if title -%} 8 |  for the  9 | {%- if title_url -%} 10 | {{ title }} 11 | {%- else -%} 12 | {{ title }} 13 | {%- endif -%} 14 |  project 15 | {%- endif -%} 16 | ! 17 |

18 | 19 |
20 | 21 | 47 | 48 | {%- if authz.advertiseAction('cleanShutdown', request) -%} 49 | {%- if shutting_down -%} 50 | Master is shutting down
51 | {{ forms.cancel_clean_shutdown(cancel_shutdown_url, authz) }} 52 | {%- else -%} 53 | {{ forms.clean_shutdown(shutdown_url, authz) }} 54 | {%- endif -%} 55 | {%- endif -%} 56 | 57 |

This and other pages can be overridden and customized.

58 | 59 |
60 | 61 | {% endblock %} 62 | -------------------------------------------------------------------------------- /master/templates/testresult.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

6 | Builder {{ b.getBuilder().getName() }} 7 | build #{{ b.getNumber() }} 8 | test {{ '.'.join(tr.getName()) }} 9 |

10 | 11 |
12 | 13 |

Result

14 |

15 | {{ result_word }} 16 | {%- set text = tr.getText() -%} 17 | {%- if text is string %}{{ text|e }} 18 | {%- else %}{{ text|join(" ")|e }}{% endif -%} 19 |

20 | 21 |

Logs

22 | 31 | 32 |
33 | 34 | {% endblock %} 35 | -------------------------------------------------------------------------------- /master/templates/user.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

User: {{ user_identifier|e }}

6 | 7 |
8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | {% for attr in user %} 17 | 18 | 19 | 20 | 21 | 22 | 23 | {% endfor %} 24 | 25 |
Attribute TypeAttribute Value
{{ attr|e }}{{ user[attr]|e }}
26 | 27 |
28 | 29 | {% endblock %} 30 | -------------------------------------------------------------------------------- /master/templates/users.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

Users

6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | {% for user in users %} 15 | 16 | 17 | 18 | 19 | 20 | 21 | {% endfor %} 22 | 23 |
UidIdentifier
{{ user.uid }}{{ user.identifier|e }}
24 | 25 | 26 | 27 | {% endblock %} 28 | -------------------------------------------------------------------------------- /master/templates/users_table.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | 3 | {% block content %} 4 | 5 |

Users

6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | {% for user in users %} 15 | 16 | 17 | 18 | 19 | 20 | 21 | {% endfor %} 22 | 23 |
UidIdentifier
{{ user.uid }}{{ user.identifier|e }}
24 | 25 | 26 | 27 | {% endblock %} 28 | -------------------------------------------------------------------------------- /master/templates/waterfall.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% from "box_macros.html" import box %} 3 | 4 | {% block title %} 5 | {{ title_str('waterfall') }} 6 | {% endblock %} 7 | 8 | {% block content %} 9 | 10 |
11 |

{{ title_str('waterfall') }}

12 | waterfall help 13 |
14 | 15 | 16 | 17 | 18 | 21 | 22 | {% for b in builders %} 23 | 27 | {% endfor %} 28 | 29 | 30 | 31 | 32 | 33 | {% for b in builders %} 34 | 37 | {% endfor %} 38 | 39 | 40 | 41 | 42 | 43 | 44 | {% for b in builders %} 45 | 46 | {% endfor %} 47 | 48 | 49 | {# waterfall contents goes here #} 50 | {% for i in range(gridlen) -%} 51 | 52 | {% for strip in grid -%} 53 | {%- if strip[i] -%}{{ box(**strip[i]) }} 54 | {%- elif no_bubble -%}{{ box() }} 55 | {%- endif -%} 56 | {%- endfor -%} 57 | 58 | {% endfor %} 59 | 60 |
19 | last build 20 | 24 | {{ b.name }}
25 | {{ " ".join(b.top) }} 26 |
current activity 35 | {{ "
".join(b.status) }} 36 |
{{ tz }}changes{{ b.name }}
61 | 62 | {% if nextpage %} 63 | next page 64 | {% endif %} 65 | 66 | {% if no_reload_page %} 67 | Stop Reloading 68 | {% endif %} 69 | 70 | {% endblock %} 71 | -------------------------------------------------------------------------------- /master/templates/waterfallhelp.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% block content %} 3 | 4 |
5 | 6 |

The Waterfall Display

7 | 8 |

The Waterfall display can be controlled by adding query arguments to the 9 | URL. For example, if your Waterfall is accessed via the URL 10 | http://buildbot.example.org:8080, then you could add a 11 | branch= argument (described below) by going to 12 | http://buildbot.example.org:8080?branch=beta4 instead. Remember that 13 | query arguments are separated from each other with ampersands, but they are 14 | separated from the main URL with a question mark, so to add a 15 | branch= and two builder= arguments, you would use 16 | http://buildbot.example.org:8080?branch=beta4&builder=unix&builder=macos.

17 | 18 |

Limiting the Displayed Interval

19 | 20 |

The last_time= argument is a unix timestamp (seconds since the 21 | start of 1970) that will be used as an upper bound on the interval of events 22 | displayed: nothing will be shown that is more recent than the given time. 23 | When no argument is provided, all events up to and including the most recent 24 | steps are included.

25 | 26 |

The first_time= argument provides the lower bound. No events will 27 | be displayed that occurred before this timestamp. Instead of providing 28 | first_time=, you can provide show_time=: in this case, 29 | first_time will be set equal to last_time minus 30 | show_time. show_time overrides first_time.

31 | 32 |

The display normally shows the latest 200 events that occurred in the 33 | given interval, where each timestamp on the left hand edge counts as a single 34 | event. You can add a num_events= argument to override this.

35 | 36 |

Showing non-Build events

37 | 38 |

By passing show_events=true, you can add the "buildslave 39 | attached", "buildslave detached", and "builder reconfigured" events that 40 | appear in-between the actual builds.

41 | 42 |

43 | 45 | Show non-Build events 46 |

47 | 48 |

Showing only Certain Branches

49 | 50 |

If you provide one or more branch= arguments, the display will be 51 | limited to builds that used one of the given branches. If no branch= 52 | arguments are given, builds from all branches will be displayed.

53 | 54 | Erase the text from these "Show Branch:" boxes to remove that branch filter. 55 | 56 | {% if branches %} 57 | 58 | {% for b in branches %} 59 | 60 | 63 | 64 | {% endfor %} 65 |
Show Branch: 61 | 62 |
66 | {% endif %} 67 | 68 |

Limiting the Builders that are Displayed

69 | 70 |

By adding one or more builder= arguments, the display will be 71 | limited to showing builds that ran on the given builders. This serves to 72 | limit the display to the specific named columns. If no builder= 73 | arguments are provided, all Builders will be displayed.

74 | 75 |

To view a Waterfall page with only a subset of Builders displayed, select 76 | the Builders you are interested in here.

77 | 78 | 79 | {% for bn in all_builders %} 80 | 83 | {% endfor %} 84 |
82 | {{bn}}
85 | 86 |

Limiting the Builds that are Displayed

87 | 88 |

By adding one or more committer= arguments, the display will be 89 | limited to showing builds that were started by the given committer. If no 90 | committer= arguments are provided, all builds will be displayed.

91 | 92 |

To view a Waterfall page with only a subset of Builds displayed, select 93 | the committers you are interested in here.

94 | 95 | Erase the text from these "Show Committer:" boxes to remove that filter. 96 | 97 | {% if committers %} 98 | 99 | {% for cn in committers %} 100 | 101 | 104 | 105 | {% endfor %} 106 |
102 | Show Committer: 103 |
107 | {% endif %} 108 | 109 |

Showing only the Builders with failures

110 | 111 |

By adding the failures_only=true argument, the display will be limited 112 | to showing builders that are currently failing. A builder is considered 113 | failing if the last finished build was not successful, a step in the current 114 | build(s) failed, or if the builder is offline.

115 | 116 |

117 | 119 | Show failures only 120 |

121 | 122 |

Auto-reloading the Page

123 | 124 |

Adding a reload= argument will cause the page to automatically 125 | reload itself after that many seconds.

126 | 127 | 128 | {% for value, name in times %} 129 | 132 | {% endfor %} 133 |
131 | {{ name|e }}
134 | 135 | 136 |

Reload Waterfall Page

137 | 138 | 139 |
140 | {% endblock %} 141 | -------------------------------------------------------------------------------- /osx-adduser.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # ========================= 3 | # Add User OSX Command Line 4 | # ========================= 5 | 6 | # An easy add user script for Max OSX. 7 | # Although I wrote this for 10.7 Lion Server, these commands have been the same since 10.5 Leopard. 8 | # It's pretty simple as it uses and strings together the (rustic and ancient) commands that OSX 9 | # already uses to add users. 10 | 11 | # Customized to be less interactive for Rust Buildbot slaves as of Jan 2016 12 | 13 | # Fail early if insufficient permissions 14 | if [[ $UID -ne 0 ]]; then echo "Please run $0 as root." && exit 1; fi 15 | 16 | USERNAME='rustbuild' 17 | FULLNAME='Rust Buildbot' 18 | # We already have admin credentials on the machine if we're running this 19 | # script, and access the user by su-ing to it. 20 | PASSWORD=$(openssl rand -base64 30) 21 | 22 | SECONDARY_GROUPS="admin _lpadmin _appserveradm _appserverusr" # for an admin user 23 | 24 | # Create a UID that is not currently in use 25 | 26 | # Find out the next available user ID 27 | MAXID=$(dscl . -list /Users UniqueID | awk '{print $2}' | sort -ug | tail -1) 28 | USERID=$((MAXID+1)) 29 | 30 | # Create the user account by running dscl (normally you would have to do each 31 | # of these commands one by one in an obnoxious and time consuming way. 32 | echo "Creating necessary files..." 33 | 34 | dscl . -create /Users/$USERNAME 35 | dscl . -create /Users/$USERNAME UserShell /bin/bash 36 | dscl . -create /Users/$USERNAME RealName "$FULLNAME" 37 | dscl . -create /Users/$USERNAME UniqueID "$USERID" 38 | dscl . -create /Users/$USERNAME PrimaryGroupID 20 39 | dscl . -create /Users/$USERNAME NFSHomeDirectory /Users/$USERNAME 40 | dscl . 
-passwd /Users/$USERNAME $PASSWORD 41 | 42 | # Add user to any specified groups 43 | 44 | for GROUP in $SECONDARY_GROUPS ; do 45 | dseditgroup -o edit -t user -a $USERNAME $GROUP 46 | done 47 | 48 | # Create the home directory 49 | createhomedir -c 2>&1 | grep -v "shell-init" 50 | 51 | echo "Created user #$USERID: $USERNAME ($FULLNAME)" 52 | -------------------------------------------------------------------------------- /rust-bot-cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDxTCCAq2gAwIBAgIJAJI49DJdvpfAMA0GCSqGSIb3DQEBCwUAMHkxCzAJBgNV 3 | BAYTAlVTMRMwEQYDVQQIDApTb21lLVN0YXRlMRUwEwYDVQQKDAxSdXN0IFByb2pl 4 | Y3QxGjAYBgNVBAMMEWJvdC5ydXN0LWxhbmcub3JnMSIwIAYJKoZIhvcNAQkBFhNh 5 | ZG1pbkBydXN0LWxhbmcub3JnMB4XDTE1MDcxNjA2MDgzM1oXDTI1MDcxMzA2MDgz 6 | M1oweTELMAkGA1UEBhMCVVMxEzARBgNVBAgMClNvbWUtU3RhdGUxFTATBgNVBAoM 7 | DFJ1c3QgUHJvamVjdDEaMBgGA1UEAwwRYm90LnJ1c3QtbGFuZy5vcmcxIjAgBgkq 8 | hkiG9w0BCQEWE2FkbWluQHJ1c3QtbGFuZy5vcmcwggEiMA0GCSqGSIb3DQEBAQUA 9 | A4IBDwAwggEKAoIBAQC1oNKcKesRTPIE9EmQ3HpEsr//fLBucfiiVurwAI8TWYn3 10 | Zcpxo7x0cjRAZ1GKQXyLwqSO1GJ88NvwBa2TXreRUq0mNjWODh2nYnNkdI03MPNP 11 | YIvSTuVYP7soGzNkKBKBWiPMhXj7p4Hcaskb0JG6VvbWzecCOVFpaf3SEC2PKm19 12 | HKC87I+wVsqc7pDnHm4lSpINvKumPMSVupu0DZFQNWB+azyXMRU/aCwH6l/aEkIn 13 | e8InmOEZBEnY/TRg5K1kYdMwGMp5fSPKjMui4UWnyrUFDmt8Bc/DPeBGtMU/jebu 14 | xXavQV4SXq+4j3kjNmCkCIX75bmVBFcNDvZVw/5jAgMBAAGjUDBOMB0GA1UdDgQW 15 | BBRqk4utKImBUDCpHbqSGFtRi4iRzDAfBgNVHSMEGDAWgBRqk4utKImBUDCpHbqS 16 | GFtRi4iRzDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCNtQ2DjNvH 17 | HsAP41FBrzD5SAe5rIiWcpkZlFtKat2IDFcQvShY+Mp5VAAHwqxvm80S/LRqPycy 18 | s0nE2nbG28bl7A9NbMiy3C3igFVwctKGZbV6Tufv2ASH8xwdB1a8KxK4tTg8eX20 19 | tVRFR8q26VQgk0REOM/A0cElI/myeHQ8CpYlyywLNfHeVY2AMOCZrylpI6fP7xDt 20 | dHL/NOI4mbFOadYzA0sGV1yhefJ661Crm4sxglcjGb8ak8QPhP0YXH6oqptvsjmm 21 | SAQCtaZVo+wq3vOdC0ca3rJVgXckhD9RPoEbwNrWoTORDexBVngQ8PqJn5jrxUUF 22 | ospzOGMYw3Vc 23 | -----END CERTIFICATE----- 24 | -------------------------------------------------------------------------------- /rust-buildbot-master-stunnel.conf: -------------------------------------------------------------------------------- 1 | pid = 2 | 3 | [9988] 4 | accept = 9988 5 | connect = 127.0.0.1:9989 6 | cert = ./rust-bot-cert.pem 7 | key = ./rust-bot-privkey.pem 8 | -------------------------------------------------------------------------------- /rust-buildbot-slave-stunnel.conf: -------------------------------------------------------------------------------- 1 | pid = 2 | 3 | [9987] 4 | client = yes 5 | accept = 127.0.0.1:9987 6 | cafile = ./rust-bot-cert.pem 7 | verify = 3 8 | -------------------------------------------------------------------------------- /rust-buildbot-win32-slave-stunnel.conf: -------------------------------------------------------------------------------- 1 | [9987] 2 | client = yes 3 | accept = 127.0.0.1:9987 4 | connect = 10.22.112.51:9988 5 | cafile = c:/bot/rust-bot-cert.pem 6 | verify = 3 -------------------------------------------------------------------------------- /setup-slave.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cd `dirname $0` 4 | 5 | if [ "$1" = "--manual" ] 6 | then 7 | echo "Enter slave name: " 8 | read SLAVENAME 9 | echo "Enter slave password: " 10 | read PASSWORD 11 | echo "Enter master address: " 12 | read MASTER_ADDY 13 | else 14 | # Some images take time for the user data to appear 15 | sleep 1 16 | echo "Guessing we are on EC2, reading 
user-data" 17 | read SLAVENAME PASSWORD MASTER_ADDY <slave/info/admin 26 | echo $HOSTNAME >slave/info/host 27 | 28 | cp rust-buildbot-slave-stunnel.conf rust-buildbot-slave-stunnel-final.conf 29 | echo "connect = ${MASTER_ADDY:?}" >> rust-buildbot-slave-stunnel-final.conf 30 | 31 | case $MACHTYPE in 32 | *-msys) 33 | # strip out a line that doesn't work on windows 34 | cat rust-buildbot-slave-stunnel-final.conf | sed 's/pid =//' > stunnel-tmp.conf && mv stunnel-tmp.conf rust-buildbot-slave-stunnel-final.conf 35 | cp rust-buildbot-slave-stunnel-final.conf "/c/Program Files (x86)/stunnel/stunnel.conf" 36 | net start stunnel 37 | net start buildbot 38 | ;; 39 | *) 40 | echo "starting stunnel..." 41 | for s in stunnel4 stunnel 42 | do 43 | if which $s 44 | then 45 | $s rust-buildbot-slave-stunnel-final.conf || echo "stunnel startup failed, already running?" 46 | fi 47 | done 48 | echo "starting slave..." 49 | if [ "$NODAEMON" = "1" ]; then 50 | buildslave restart --nodaemon slave 51 | else 52 | buildslave restart slave 53 | fi 54 | ;; 55 | esac 56 | -------------------------------------------------------------------------------- /slaves/README.md: -------------------------------------------------------------------------------- 1 | # Buildslaves 2 | 3 | Starting recently we've been trying to ensure that all of our build slaves are 4 | defined via a docker image instead of just using "some random AMI" and then 5 | randomly pulling that AMI forward from time to time. This has some nice concrete 6 | benefits: 7 | 8 | * It's easy to review changes to build slaves 9 | * The build slaves can be reproduced locally 10 | * Knowing what's actually on a build slave is much easier 11 | 12 | Currently the buildslave daemon is run inside of a docker container, but 13 | eventually it would also be nice to run each **build** inside of its own docker 14 | container (to ensure isolation). This is not currently set up, however. 15 | 16 | ## Image architecture 17 | 18 | Each image does a few simple tasks: 19 | 20 | 1. Installs the buildbot buildslave 21 | 2. Sets up the `rustbuild` user 22 | 3. Installs build dependencies 23 | 4. Preps the `buildslave` command to run 24 | 25 | Whenever an image boots it will first clone this repo and then run the 26 | `setup-slave.sh` script at the top of the repo. These actions are controlled via 27 | the entry script of the image, `start-docker-slave.sh`. One notable part of this 28 | script is that the URL and branch of this repo to clone are passed in via "User 29 | Data" on the AMI booted. 30 | 31 | ## Building a docker image 32 | 33 | In the directory of this readme, run: 34 | 35 | ``` 36 | docker build -f linux/Dockerfile . 37 | ``` 38 | 39 | If you want to tag it you can also pass the `-t` flag with the image name. 40 | 41 | ## Publishing a new docker image 42 | 43 | Run this inside this directory: 44 | 45 | ``` 46 | docker build -t alexcrichton/rust-slave-linux:2015-10-15 -f linux/Dockerfile . 47 | ``` 48 | 49 | (note that today's date should be used in the tag name) 50 | 51 | ``` 52 | docker push alexcrichton/rust-slave-linux:2015-10-15 53 | ``` 54 | 55 | ## Debugging a docker image in prod 56 | 57 | 1. Obtain the IP of the slave from the AWS console 58 | 2. SSH into the VM, currently with the username `ec2-user` 59 | 3. Run `docker ps` and look under `NAMES` to find the name of the currently 60 | running container 61 | 4. Run `docker exec -it <container name> bash` 62 | 63 | That'll give you a shell into the container so you can poke around and do 64 | whatever you like.
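For example (the container name here is only illustrative; use whatever `docker ps` reported on your slave):

```
docker exec -it dreamy_ritchie bash
```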
Note, though, that if the buildslave daemon is killed it will 65 | likely kill the container and need to be re-run from the external command line. 66 | 67 | ## Recreating the "base AMI" 68 | 69 | All our build slaves run inside of a bare-bones AMI. It should be recreatable as 70 | follows: 71 | 72 | 1. Start up a fresh AMI. 73 | 2. Install docker (go to docker's home page to see how) 74 | 3. Make docker runnable without `sudo` (the install normally says how) 75 | 4. Add the following to `crontab`: 76 | 77 | ``` 78 | @reboot sh -c 'sleep 20 && docker run --privileged `curl -s http://169.254.169.254/latest/user-data | tail -n +2 | head -n 1`' 2>&1 | logger 79 | ``` 80 | 81 | To break this down: 82 | 83 | * `@reboot` - this command is run whenever the AMI is booted 84 | * `sh -c '...' 2>&1 | logger` - run a shell command, piping all of its output 85 | into the `logger` command (e.g. syslog). On the current images this makes 86 | it appear at `/var/log/messages` 87 | * `sleep 20` - wait for the docker daemon to start 88 | * `docker run ...` - run a docker image 89 | * `--privileged` - needed for gdb tests to work (enables `ptrace` I believe) 90 | * `curl ... | tail | head` - the name of the docker image to run is in the 91 | "User Data" of the AMI when buildbot boots it, and this is what fetches it 92 | and parses it out. 93 | 94 | Note that this means that whenever the AMI boots the first thing it will do 95 | is download a likely multi-gigabyte image from the Docker hub to run locally, 96 | but hey that's not so bad! 97 | -------------------------------------------------------------------------------- /slaves/android/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:16.04 2 | 3 | RUN dpkg --add-architecture i386 4 | RUN apt-get -y update 5 | RUN apt-get -y install --force-yes \ 6 | curl make git expect libncurses5:i386 libstdc++6:i386 zlib1g:i386 \ 7 | python-dev python-pip stunnel \ 8 | g++-multilib openjdk-9-jre psmisc unzip cmake 9 | 10 | # Install buildbot and prep it to run 11 | RUN pip install buildbot-slave 12 | RUN groupadd -r rustbuild && useradd -r -g rustbuild rustbuild 13 | RUN mkdir /buildslave && chown rustbuild:rustbuild /buildslave 14 | 15 | # Setup PATH to allow running android tools. 16 | ENV PATH=$PATH:/android/ndk-arm/bin:/android/ndk-aarch64/bin:/android/ndk-x86:/android/sdk/tools:/android/sdk/platform-tools 17 | 18 | # Not sure how to install 64-bit binaries in the sdk? 
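# For now we just force the 32-bit emulator engine instead; that is what the
# env var below is for.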
19 | ENV ANDROID_EMULATOR_FORCE_32BIT=true 20 | 21 | RUN mkdir /android && chown rustbuild:rustbuild /android 22 | RUN mkdir /home/rustbuild && chown rustbuild:rustbuild /home/rustbuild 23 | 24 | WORKDIR /android 25 | USER rustbuild 26 | 27 | COPY android/install-ndk.sh android/install-sdk.sh android/accept-licenses.sh \ 28 | /android/ 29 | 30 | RUN sh install-ndk.sh 31 | RUN sh install-sdk.sh 32 | RUN rm *.sh 33 | 34 | # When running this container, startup buildbot 35 | WORKDIR /buildslave 36 | COPY start-docker-slave.sh start-docker-slave.sh 37 | ENTRYPOINT ["sh", "start-docker-slave.sh"] 38 | -------------------------------------------------------------------------------- /slaves/android/accept-licenses.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/expect -f 2 | 3 | set timeout 1800 4 | set cmd [lindex $argv 0] 5 | set licenses [lindex $argv 1] 6 | 7 | spawn {*}$cmd 8 | expect { 9 | "Do you accept the license '*'*" { 10 | exp_send "y\r" 11 | exp_continue 12 | } 13 | eof 14 | } 15 | -------------------------------------------------------------------------------- /slaves/android/install-ndk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -ex 4 | 5 | cpgdb() { 6 | cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb /android/$1/bin/$2-gdb 7 | cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb-orig /android/$1/bin/gdb-orig 8 | cp -r android-ndk-r11c/prebuilt/linux-x86_64/share /android/$1/share 9 | } 10 | 11 | # Prep the Android NDK 12 | # 13 | # See https://github.com/servo/servo/wiki/Building-for-Android 14 | curl -O http://dl.google.com/android/repository/android-ndk-r11c-linux-x86_64.zip 15 | unzip -q android-ndk-r11c-linux-x86_64.zip 16 | bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \ 17 | --platform=android-9 \ 18 | --toolchain=arm-linux-androideabi-4.9 \ 19 | --install-dir=/android/ndk-arm-9 \ 20 | --ndk-dir=/android/android-ndk-r11c \ 21 | --arch=arm 22 | cpgdb ndk-arm-9 arm-linux-androideabi 23 | bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \ 24 | --platform=android-21 \ 25 | --toolchain=arm-linux-androideabi-4.9 \ 26 | --install-dir=/android/ndk-arm \ 27 | --ndk-dir=/android/android-ndk-r11c \ 28 | --arch=arm 29 | cpgdb ndk-arm arm-linux-androideabi 30 | bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \ 31 | --platform=android-21 \ 32 | --toolchain=aarch64-linux-android-4.9 \ 33 | --install-dir=/android/ndk-aarch64 \ 34 | --ndk-dir=/android/android-ndk-r11c \ 35 | --arch=arm64 36 | bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \ 37 | --platform=android-9 \ 38 | --toolchain=x86-4.9 \ 39 | --install-dir=/android/ndk-x86-9 \ 40 | --ndk-dir=/android/android-ndk-r11c \ 41 | --arch=x86 42 | bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \ 43 | --platform=android-21 \ 44 | --toolchain=x86_64-4.9 \ 45 | --install-dir=/android/ndk-x86_64 \ 46 | --ndk-dir=/android/android-ndk-r11c \ 47 | --arch=x86_64 48 | 49 | rm -rf ./android-ndk-r11c-linux-x86_64.zip ./android-ndk-r11c 50 | -------------------------------------------------------------------------------- /slaves/android/install-sdk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -ex 4 | 5 | # Prep the SDK and emulator 6 | # 7 | # Note that the update process requires that we accept a bunch of licenses, and 8 | # we can't just pipe `yes` into it for some reason, so we take the same 
strategy 9 | # located in https://github.com/appunite/docker by just wrapping it in a script 10 | # which apparently magically accepts the licenses. 11 | 12 | mkdir sdk 13 | curl http://dl.google.com/android/android-sdk_r24.4-linux.tgz | \ 14 | tar xzf - -C sdk --strip-components=1 15 | 16 | filter="platform-tools,android-18,android-21" 17 | filter="$filter,sys-img-x86-android-18" 18 | filter="$filter,sys-img-x86_64-android-18" 19 | filter="$filter,sys-img-armeabi-v7a-android-18" 20 | filter="$filter,sys-img-x86-android-21" 21 | filter="$filter,sys-img-x86_64-android-21" 22 | filter="$filter,sys-img-armeabi-v7a-android-21" 23 | 24 | ./accept-licenses.sh "android - update sdk -a --no-ui --filter $filter" 25 | 26 | echo "no" | android create avd \ 27 | --name arm-18 \ 28 | --target android-18 \ 29 | --abi armeabi-v7a 30 | echo "no" | android create avd \ 31 | --name arm-21 \ 32 | --target android-21 \ 33 | --abi armeabi-v7a 34 | echo "no" | android create avd \ 35 | --name x86-21 \ 36 | --target android-21 \ 37 | --abi x86 38 | echo "no" | android create avd \ 39 | --name x86_64-21 \ 40 | --target android-21 \ 41 | --abi x86_64 42 | -------------------------------------------------------------------------------- /slaves/dist/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:5 2 | 3 | WORKDIR /build 4 | 5 | # Install updates. 6 | RUN yum upgrade -y 7 | 8 | # curl == now we can download things 9 | # bzip2 == now we can download bz2 things 10 | # gcc == now we can build gcc 11 | # make == now we can build gcc 12 | # glibc-devel == libs for gcc to compile against 13 | # perl == run openssl configure script + runtime dep of git 14 | # zlib-devel == needed by basically everyone 15 | # file == needed by the rust build 16 | # xz == needed to extract LLVM sources 17 | # which, stunnel == needed by rust-buildbot startup scripts 18 | RUN yum install -y curl bzip2 gcc make glibc-devel perl zlib-devel file xz \ 19 | which stunnel pkg-config 20 | 21 | ENV PATH=/rustroot/bin:/rust/bin:$PATH 22 | ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib 23 | 24 | # prep the buildslave user and some directories 25 | RUN groupadd -r rustbuild && useradd -r -g rustbuild rustbuild 26 | RUN mkdir /buildslave && chown rustbuild:rustbuild /buildslave 27 | RUN mkdir /home/rustbuild 28 | RUN chown rustbuild:rustbuild /home/rustbuild 29 | 30 | # We need a build of openssl which supports SNI to download artifacts from 31 | # static.rust-lang.org. This'll be used to link into libcurl below (and used 32 | # later as well), so build a copy of OpenSSL with dynamic libraries into our 33 | # generic root. 34 | COPY dist/build_openssl.sh /build/ 35 | RUN /bin/bash build_openssl.sh && rm -rf /build 36 | 37 | # The `curl` binary on CentOS doesn't support SNI which is needed for fetching 38 | # some https urls we have, so install a new version of libcurl + curl which is 39 | # using the openssl we just built previously. 40 | # 41 | # Note that we also disable a bunch of optional features of curl that we don't 42 | # really need. 43 | COPY dist/build_curl.sh /build/ 44 | RUN /bin/bash build_curl.sh 45 | 46 | # Install gcc 4.7 which has C++11 support which is required by LLVM 47 | # 48 | # After we're done building we erase the binutils/gcc installs from CentOS to 49 | # ensure that we always use the ones that we just built. 
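# (Each build_*.sh script from here on follows the same pattern: download a
# pinned release tarball, verify its sha256, and install under /rustroot, which
# the PATH and LD_LIBRARY_PATH settings above already point at.)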
50 | COPY dist/build_gcc.sh /build/ 51 | RUN /bin/bash build_gcc.sh && rm -rf /build 52 | 53 | # binutils < 2.22 has a bug where the 32-bit executables it generates 54 | # immediately segfault in Rust, so we need to install our own binutils. 55 | # 56 | # See https://github.com/rust-lang/rust/issues/20440 for more info 57 | COPY dist/build_binutils.sh /build/ 58 | RUN /bin/bash build_binutils.sh && rm -rf /build 59 | 60 | # libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS 61 | # only has 2.6.4, so build our own 62 | COPY dist/build_cmake.sh /build/ 63 | RUN /bin/bash build_cmake.sh && rm -rf /build 64 | 65 | # tar on CentOS is too old as it doesn't understand the --exclude-vcs option 66 | # that the Rust build system passes it, so install a new version. 67 | COPY dist/build_tar.sh /build/ 68 | RUN /bin/bash build_tar.sh && rm -rf /build 69 | 70 | # CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+ 71 | COPY dist/build_python.sh /build/ 72 | RUN /bin/bash build_python.sh && rm -rf /build 73 | 74 | # The Rust test suite requires a relatively new version of gdb, much newer than 75 | # CentOS has to offer by default, and we want it to use the newly installed 76 | # python so it's ordered here. 77 | COPY dist/build_gdb.sh /build/ 78 | RUN /bin/bash build_gdb.sh && rm -rf /build 79 | 80 | # Apparently CentOS 5.5 desn't have `git` in yum, but we're gonna need it for 81 | # cloning, so download and build it here. 82 | COPY dist/build_git.sh /build/ 83 | RUN /bin/bash build_git.sh && rm -rf /build 84 | 85 | # Install buildbot and prep it to run 86 | RUN curl https://bootstrap.pypa.io/get-pip.py | python 87 | RUN pip install buildbot-slave 88 | 89 | # Clean up after ourselves, make sure that `cc` is a thing, and then make the 90 | # default working directory a "home-ish" directory 91 | WORKDIR /buildslave 92 | RUN rm -rf /build 93 | USER rustbuild 94 | COPY start-docker-slave.sh start-docker-slave.sh 95 | ENTRYPOINT ["sh", "start-docker-slave.sh"] 96 | -------------------------------------------------------------------------------- /slaves/dist/build_binutils.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=2.25.1 6 | SHA256=b5b14added7d78a8d1ca70b5cb75fef57ce2197264f4f5835326b0df22ac9f22 7 | 8 | curl https://ftp.gnu.org/gnu/binutils/binutils-$VERSION.tar.bz2 | \ 9 | tee >(sha256sum > binutils-$VERSION.tar.bz2.sha256) | tar xjf - 10 | test $SHA256 = $(cut -d ' ' -f 1 binutils-$VERSION.tar.bz2.sha256) || exit 1 11 | 12 | mkdir binutils-build 13 | cd binutils-build 14 | ../binutils-$VERSION/configure --prefix=/rustroot 15 | make -j10 16 | make install 17 | yum erase -y binutils 18 | -------------------------------------------------------------------------------- /slaves/dist/build_cmake.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=3.6.3 6 | SHA256=7d73ee4fae572eb2d7cd3feb48971aea903bb30a20ea5ae8b4da826d8ccad5fe 7 | 8 | curl https://cmake.org/files/v${VERSION%\.*}/cmake-$VERSION.tar.gz | \ 9 | tee >(sha256sum > cmake-$VERSION.tar.gz.sha256) | tar xzf - 10 | test $SHA256 = $(cut -d ' ' -f 1 cmake-$VERSION.tar.gz.sha256) || exit 1 11 | 12 | mkdir cmake-build 13 | cd cmake-build 14 | ../cmake-$VERSION/configure --prefix=/rustroot 15 | make -j10 16 | make install 17 | -------------------------------------------------------------------------------- /slaves/dist/build_curl.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=7.52.1 6 | SHA256=d16185a767cb2c1ba3d5b9096ec54e5ec198b213f45864a38b3bda4bbf87389b 7 | 8 | curl http://cool.haxx.se/download/curl-$VERSION.tar.bz2 | \ 9 | tee >(sha256sum > curl-$VERSION.tar.bz2.sha256) | tar xjf - 10 | test $SHA256 = $(cut -d ' ' -f 1 curl-$VERSION.tar.bz2.sha256) || exit 1 11 | 12 | mkdir curl-build 13 | cd curl-build 14 | ../curl-$VERSION/configure --prefix=/rustroot --with-ssl=/rustroot \ 15 | --disable-sspi --disable-gopher --disable-smtp --disable-smb \ 16 | --disable-imap --disable-pop3 --disable-tftp --disable-telnet \ 17 | --disable-manual --disable-dict --disable-rtsp --disable-ldaps \ 18 | --disable-ldap 19 | make -j10 20 | make install 21 | yum erase -y curl 22 | -------------------------------------------------------------------------------- /slaves/dist/build_gcc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=4.7.4 6 | SHA256=92e61c6dc3a0a449e62d72a38185fda550168a86702dea07125ebd3ec3996282 7 | 8 | yum install -y wget 9 | curl https://ftp.gnu.org/gnu/gcc/gcc-$VERSION/gcc-$VERSION.tar.bz2 | \ 10 | tee >(sha256sum > gcc-$VERSION.tar.bz2.sha256) | tar xjf - 11 | test $SHA256 = $(cut -d ' ' -f 1 gcc-$VERSION.tar.bz2.sha256) || exit 1 12 | 13 | cd gcc-$VERSION 14 | ./contrib/download_prerequisites 15 | mkdir ../gcc-$VERSION-build 16 | cd ../gcc-$VERSION-build 17 | ../gcc-$VERSION/configure --prefix=/rustroot --enable-languages=c,c++ 18 | make -j10 19 | make install 20 | ln -nsf gcc /rustroot/bin/cc 21 | yum erase -y gcc wget 22 | -------------------------------------------------------------------------------- /slaves/dist/build_gdb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=7.11.1 6 | SHA256=57e9e9aa3172ee16aa1e9c66fef08b4393b51872cc153e3f1ffdf18a57440586 7 | 8 | yum install -y texinfo ncurses-devel 9 | curl https://ftp.gnu.org/gnu/gdb/gdb-$VERSION.tar.gz | \ 10 | tee >(sha256sum > gdb-$VERSION.tar.gz.sha256) | tar xzf - 11 | test $SHA256 = $(cut -d ' ' -f 1 gdb-$VERSION.tar.gz.sha256) || exit 1 12 | 13 | mkdir gdb-build 14 | cd gdb-build 15 | ../gdb-$VERSION/configure --prefix=/rustroot 16 | make -j10 17 | make install 18 | yum erase -y texinfo ncurses-devel 19 | -------------------------------------------------------------------------------- /slaves/dist/build_git.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=2.11.0 6 | SHA256=d3be9961c799562565f158ce5b836e2b90f38502d3992a115dfb653d7825fd7e 7 | 8 | yum install -y gettext autoconf 9 | curl https://www.kernel.org/pub/software/scm/git/git-$VERSION.tar.gz | \ 10 | tee >(sha256sum > git-$VERSION.tar.gz.sha256) | tar xzf - 11 | test $SHA256 = $(cut -d ' ' -f 1 git-$VERSION.tar.gz.sha256) || exit 1 12 | 13 | cd git-$VERSION 14 | make configure 15 | ./configure --prefix=/rustroot 16 | make -j10 17 | make install 18 | yum erase -y gettext autoconf 19 | -------------------------------------------------------------------------------- /slaves/dist/build_openssl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=1.0.2k 6 | SHA256=6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0 7 | 8 | yum install -y setarch 9 | curl 
ftp://ftp.openssl.org/source/openssl-$VERSION.tar.gz | \ 10 | tee >(sha256sum > openssl-$VERSION.tar.gz.sha256) | tar xzf - 11 | test $SHA256 = $(cut -d ' ' -f 1 openssl-$VERSION.tar.gz.sha256) || exit 1 12 | 13 | cp -r openssl-$VERSION openssl-static-64 14 | cp -r openssl-$VERSION openssl-static-32 15 | cd openssl-$VERSION 16 | ./config --prefix=/rustroot shared -fPIC 17 | make -j10 18 | make install 19 | 20 | # Cargo is going to want to link to OpenSSL statically, so build OpenSSL 21 | # statically for 32/64 bit 22 | cd ../openssl-static-64 23 | ./config --prefix=/rustroot/cargo64 no-dso -fPIC 24 | make -j10 25 | make install 26 | 27 | cd ../openssl-static-32 28 | setarch i386 ./config --prefix=/rustroot/cargo32 no-dso -m32 29 | make -j10 30 | make install 31 | 32 | ln -nsf /rustroot/cargo32 /home/rustbuild/root32 33 | ln -nsf /rustroot/cargo64 /home/rustbuild/root64 34 | 35 | # Make the system cert collection available to the new install. 36 | ln -nsf /etc/pki/tls/cert.pem /rustroot/ssl/ 37 | -------------------------------------------------------------------------------- /slaves/dist/build_pkgconfig.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=0.29.1 6 | SHA256=beb43c9e064555469bd4390dcfd8030b1536e0aa103f08d7abf7ae8cac0cb001 7 | 8 | curl http://pkgconfig.freedesktop.org/releases/pkg-config-$VERSION.tar.gz | \ 9 | tee >(sha256sum > pkg-config-$VERSION.tar.gz.sha256) | tar xzf - 10 | test $SHA256 = $(cut -d ' ' -f 1 pkg-config-$VERSION.tar.gz.sha256) || exit 1 11 | 12 | mkdir pkg-config-build 13 | cd pkg-config-build 14 | ../pkg-config-$VERSION/configure --prefix=/rustroot --with-internal-glib 15 | make -j10 16 | make install 17 | -------------------------------------------------------------------------------- /slaves/dist/build_python.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=2.7.13 6 | SHA256=35d543986882f78261f97787fd3e06274bfa6df29fac9b4a94f73930ff98f731 7 | 8 | yum install -y bzip2-devel 9 | curl https://www.python.org/ftp/python/$VERSION/Python-$VERSION.tar.xz | \ 10 | tee >(sha256sum > Python-$VERSION.tar.xz.sha256) | tar xJf - 11 | test $SHA256 = $(cut -d ' ' -f 1 Python-$VERSION.tar.xz.sha256) || exit 1 12 | 13 | mkdir python-build 14 | cd python-build 15 | 16 | # Gotta do some hackery to tell python about our custom OpenSSL build, 17 | # but other than that fairly normal. 18 | CFLAGS='-I /rustroot/include' LDFLAGS='-L /rustroot/lib -L /rustroot/lib64' \ 19 | ../Python-$VERSION/configure --prefix=/rustroot 20 | make -j10 21 | make install 22 | yum erase -y bzip2-devel 23 | -------------------------------------------------------------------------------- /slaves/dist/build_tar.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | VERSION=1.29 6 | SHA256=236b11190c0a3a6885bdb8d61424f2b36a5872869aa3f7f695dea4b4843ae2f2 7 | 8 | curl https://ftp.gnu.org/gnu/tar/tar-$VERSION.tar.bz2 | \ 9 | tee >(sha256sum > tar-$VERSION.tar.bz2.sha256) | tar xjf - 10 | test $SHA256 = $(cut -d ' ' -f 1 tar-$VERSION.tar.bz2.sha256) || exit 1 11 | 12 | mkdir tar-build 13 | cd tar-build 14 | 15 | # The weird _FORTIFY_SOURCE option here is passed as a last-ditch attempt to get 16 | # this to build. 
Apparently there are some inline functions in 17 | # /usr/include/bits/unistd.h which get emitted if _FORTIFY_SOURCE is bigger than 18 | # 0, and apparently tar wants to set this value higher than 0 by default. We 19 | # move it back to get things building (if it works without it though feel free!) 20 | # 21 | # We also pass FORCE_UNSAFE_CONFIGURE as apparently the configure script 22 | # requires us to do that if we're running as root (which we are). Trust me 23 | # though, "I got this". 24 | CFLAGS=-D_FORTIFY_SOURCE=0 FORCE_UNSAFE_CONFIGURE=1 \ 25 | ../tar-$VERSION/configure --prefix=/rustroot 26 | 27 | make -j10 28 | make install 29 | yum erase -y tar 30 | 31 | # Apparently tar's configure script creates this massive directory tree of a 32 | # bunch of nested directories called 'confdir3'. This ends up thwarting rm -rf 33 | # once we try to blow this directory away, which is generally not cool, so we 34 | # use a small C program (shell doesn't work?) to just rename all confdir3 35 | # directories to 'a' so rm -rf will succeed (path name limits shouldn't be hit). 36 | cat > foo.c <<-EOF 37 | #include <assert.h> 38 | #include <stdio.h> 39 | #include <stdlib.h> 40 | #include <sys/types.h> 41 | #include <sys/stat.h> 42 | #include <unistd.h> 43 | 44 | int main() { 45 | struct stat buf; 46 | 47 | while (stat("confdir3", &buf) == 0) { 48 | assert(chdir("confdir3") == 0); 49 | } 50 | assert(chdir("..") == 0); 51 | while (stat("confdir3", &buf) == 0) { 52 | assert(rename("confdir3", "a") == 0); 53 | assert(chdir("..") == 0); 54 | } 55 | return 0; 56 | } 57 | EOF 58 | gcc foo.c 59 | ./a.out 60 | -------------------------------------------------------------------------------- /slaves/linux-cross/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:16.04 2 | 3 | RUN apt-get update 4 | RUN apt-get install -y --force-yes --no-install-recommends \ 5 | curl make cmake git wget file \ 6 | python-dev python-pip python-setuptools stunnel \ 7 | zlib1g-dev \ 8 | bzip2 xz-utils \ 9 | g++ libc6-dev \ 10 | bsdtar \ 11 | cmake \ 12 | rpm2cpio cpio \ 13 | g++-5-mips-linux-gnu libc6-dev-mips-cross \ 14 | g++-5-mipsel-linux-gnu libc6-dev-mipsel-cross \ 15 | pkg-config 16 | 17 | # Rename compilers to variants without version numbers so the build 18 | # configuration in the standard library can pick them up. 19 | RUN \ 20 | for f in `ls /usr/bin/mips*-linux-*-*-5`; do \ 21 | ln -vs $f `echo $f | sed -e 's/-5$//'`; \ 22 | done && \ 23 | for f in `ls /usr/bin/*-linux-*-*-4.8`; do \ 24 | ln -vs $f `echo $f | sed -e 's/-4.8$//'`; \ 25 | done && \ 26 | for f in `ls /usr/bin/*-linux-*-*-4.7`; do \ 27 | ln -vs $f `echo $f | sed -e 's/-4.7$//'`; \ 28 | done 29 | 30 | # Install buildbot and prep it to run 31 | RUN pip install buildbot-slave 32 | RUN groupadd -r rustbuild && useradd -m -r -g rustbuild rustbuild 33 | RUN mkdir /buildslave && chown rustbuild:rustbuild /buildslave 34 | 35 | # Install rumprun cross compiler 36 | WORKDIR /build 37 | COPY linux-cross/build_rumprun.sh /build/ 38 | RUN /bin/bash build_rumprun.sh && rm -rf /build 39 | 40 | # Build/install crosstool-ng cross compilers 41 | # NOTE crosstool-ng can't be executed by root so we execute it under the 42 | # rustbuild user.
/x-tools is the crosstool-ng output directory and /build is 43 | # the crosstool-ng build directory so both must be writable by rustbuild 44 | WORKDIR /build 45 | COPY linux-cross/build_toolchain_root.sh /build/ 46 | RUN /bin/bash build_toolchain_root.sh && \ 47 | mkdir /x-tools && \ 48 | chown rustbuild:rustbuild /build && \ 49 | chown rustbuild:rustbuild /x-tools 50 | COPY linux-cross/build_toolchain.sh \ 51 | linux-cross/aarch64-linux-gnu.config \ 52 | linux-cross/arm-linux-gnueabi.config \ 53 | linux-cross/arm-linux-musleabi.config \ 54 | linux-cross/arm-linux-gnueabihf.config \ 55 | linux-cross/arm-linux-musleabihf.config \ 56 | linux-cross/mips-linux-musl.config \ 57 | linux-cross/mipsel-linux-musl.config \ 58 | linux-cross/armv7-linux-gnueabihf.config \ 59 | linux-cross/armv7-linux-musleabihf.config \ 60 | linux-cross/powerpc-linux-gnu.config \ 61 | linux-cross/powerpc64-linux-gnu.config \ 62 | linux-cross/s390x-linux-gnu.config \ 63 | /build/ 64 | COPY linux-cross/patches /build/patches 65 | USER rustbuild 66 | 67 | # Build three full toolchains for the `arm-unknown-linux-gnueabi`, 68 | # `arm-unknown-linux-gnueabihf` and `aarch64-unknown-linux-gnu` targets. We 69 | # build toolchains from scratch primarily to move to an older glibc. Ubuntu 70 | # does indeed have these toolchains in its repositories (so we could install 71 | # those packages), but they package a relatively newer version of glibc. In order 72 | # for the binaries we produce to be maximally compatible, we push the glibc 73 | # version back to 2.14 for arm and 2.17 for aarch64. 74 | RUN /bin/bash build_toolchain.sh arm-linux-gnueabi 75 | RUN /bin/bash build_toolchain.sh arm-linux-gnueabihf 76 | RUN /bin/bash build_toolchain.sh aarch64-linux-gnu 77 | 78 | # Also build two full toolchains for the `{mips,mipsel}-unknown-linux-musl` 79 | # targets. Currently these are essentially aliases to run on OpenWRT devices and 80 | # are different from the x86_64/i686 MUSL targets in that MUSL is dynamically 81 | # linked instead of statically. As a result, we also need to dynamically link to 82 | # an unwinder and other various runtime bits. 83 | # 84 | # We in theory could *only* build the MUSL library itself and use the standard 85 | # MIPS toolchains installed above to link against the library, except it gets 86 | # difficult figuring out how to link, for example, `gcc_s` dynamically. For that 87 | # reason we just give up and build a whole toolchain which is dedicated to 88 | # targeting this triple. 89 | RUN /bin/bash build_toolchain.sh mips-linux-musl 90 | RUN /bin/bash build_toolchain.sh mipsel-linux-musl 91 | 92 | # Also build a toolchain tuned for the armv7 architecture which is going to be 93 | # used with the armv7-unknown-linux-gnueabihf target. 94 | # 95 | # Why are we not using the arm-linux-gnueabihf toolchain with the armv7 target? 96 | # We actually tried that setup but we hit `ar` errors caused by the different 97 | # codegen options used by crosstool-ng and the rust build system. crosstool-ng 98 | # uses `-march=armv6` to build the toolchain and related C(++) libraries, like 99 | # libstdc++ which gets statically linked to LLVM; on the other hand the rust 100 | # build system builds its C(++) libraries, like LLVM, with `-march=armv7-a`. 101 | # 102 | # By using this armv7 compiler we can ensure the same codegen options are used 103 | # everywhere and avoid these codegen mismatch issues. Also compiling libstdc++ 104 | # for armv7 instead of for armv6 should make rustc (slightly) faster.
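# (The build is pointed at this toolchain via the
# AR/CC/CXX_armv7_unknown_linux_gnueabihf variables in the ENV block near the
# end of this Dockerfile.)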
105 | RUN /bin/bash build_toolchain.sh armv7-linux-gnueabihf 106 | 107 | # Build a bunch of toolchains for ARM musl targets 108 | RUN /bin/bash build_toolchain.sh arm-linux-musleabi 109 | RUN /bin/bash build_toolchain.sh arm-linux-musleabihf 110 | RUN /bin/bash build_toolchain.sh armv7-linux-musleabihf 111 | 112 | # Also build toolchains for {powerpc{,64},s390x}-unknown-linux-gnu, 113 | # primarily to support older glibc than found in the Ubuntu root. 114 | RUN /bin/bash build_toolchain.sh powerpc-linux-gnu 115 | RUN /bin/bash build_toolchain.sh powerpc64-linux-gnu 116 | RUN /bin/bash build_toolchain.sh s390x-linux-gnu 117 | 118 | USER root 119 | 120 | # Rename all the compilers we just built into /usr/bin and also without 121 | # `-unknown-` in the name because it appears lots of other compilers in Ubuntu 122 | # don't have this name in the component by default either. 123 | # Also rename `-ibm-` out of the s390x compilers. 124 | # Also the aarch64 compiler is prefixed with `aarch64-unknown-linux-gnueabi` 125 | # by crosstool-ng, but Ubuntu just prefixes it with `aarch64-linux-gnu` so 126 | # we'll, additionally, strip the eabi part from its binaries. 127 | RUN \ 128 | for f in `ls /x-tools/*-unknown-linux-*/bin/*-unknown-linux-*`; do \ 129 | g=`basename $f`; \ 130 | ln -vs $f /usr/bin/`echo $g | sed -e 's/-unknown//'`; \ 131 | done && \ 132 | for f in `ls /x-tools/*-ibm-linux-*/bin/*-ibm-linux-*`; do \ 133 | g=`basename $f`; \ 134 | ln -vs $f /usr/bin/`echo $g | sed -e 's/-ibm//'`; \ 135 | done && \ 136 | for f in `ls /usr/bin/aarch64-linux-gnueabi-*`; do \ 137 | g=`basename $f`; \ 138 | mv -v $f /usr/bin/`echo $g | sed -e 's/eabi//'`; \ 139 | done 140 | 141 | COPY linux-cross/build_freebsd_toolchain.sh /tmp/ 142 | RUN bash /tmp/build_freebsd_toolchain.sh i686 143 | RUN bash /tmp/build_freebsd_toolchain.sh x86_64 144 | COPY linux-cross/build_dragonfly_toolchain.sh /tmp/ 145 | RUN bash /tmp/build_dragonfly_toolchain.sh 146 | COPY linux-cross/build_netbsd_toolchain.sh /tmp/ 147 | RUN bash /tmp/build_netbsd_toolchain.sh 148 | 149 | # powerpc64le is built using centos7 glibc, because that has 150 | # backports that weren't committed upstream until glibc-2.19. 
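# (build_powerpc64le_linux_toolchain.sh, copied below, unpacks the el7 glibc,
# glibc-devel/headers and kernel-headers RPMs with rpm2cpio/cpio into a sysroot
# and then builds binutils and gcc with --with-sysroot pointing at it.)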
151 | COPY linux-cross/build_powerpc64le_linux_toolchain.sh /tmp/ 152 | RUN bash /tmp/build_powerpc64le_linux_toolchain.sh 153 | 154 | # Also build libunwind.a for the ARM musl targets 155 | COPY linux-cross/build-libunwind.sh \ 156 | /build/ 157 | RUN /bin/bash build-libunwind.sh arm-unknown-linux-musleabi 158 | RUN /bin/bash build-libunwind.sh arm-unknown-linux-musleabihf 159 | RUN /bin/bash build-libunwind.sh armv7-unknown-linux-musleabihf 160 | 161 | RUN apt-get install -y --force-yes --no-install-recommends \ 162 | g++-mips64-linux-gnuabi64 \ 163 | g++-mips64el-linux-gnuabi64 164 | 165 | # Instruct rustbuild to use the armv7-linux-gnueabihf toolchain instead of the 166 | # default arm-linux-gnueabihf one 167 | ENV AR_armv7_unknown_linux_gnueabihf=armv7-linux-gnueabihf-ar \ 168 | CC_armv7_unknown_linux_gnueabihf=armv7-linux-gnueabihf-gcc \ 169 | CXX_armv7_unknown_linux_gnueabihf=armv7-linux-gnueabihf-g++ \ 170 | AR_arm_unknown_linux_musleabi=arm-linux-musleabi-ar \ 171 | CC_arm_unknown_linux_musleabi=arm-linux-musleabi-gcc \ 172 | CXX_arm_unknown_linux_musleabi=arm-linux-musleabi-g++ \ 173 | AR_arm_unknown_linux_musleabihf=arm-linux-musleabihf-ar \ 174 | CC_arm_unknown_linux_musleabihf=arm-linux-musleabihf-gcc \ 175 | CXX_arm_unknown_linux_musleabihf=arm-linux-musleabihf-g++ \ 176 | AR_armv7_unknown_linux_musleabihf=armv7-linux-musleabihf-ar \ 177 | CC_armv7_unknown_linux_musleabihf=armv7-linux-musleabihf-gcc \ 178 | CXX_armv7_unknown_linux_musleabihf=armv7-linux-musleabihf-g++ \ 179 | AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-ar \ 180 | CC_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-gcc \ 181 | CXX_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-g++ \ 182 | AR_i686_unknown_freebsd=i686-unknown-freebsd10-ar \ 183 | CC_i686_unknown_freebsd=i686-unknown-freebsd10-gcc \ 184 | CXX_i686_unknown_freebsd=i686-unknown-freebsd10-g++ \ 185 | AR_x86_64_unknown_netbsd=x86_64-unknown-netbsd-ar \ 186 | CC_x86_64_unknown_netbsd=x86_64-unknown-netbsd-gcc \ 187 | CXX_x86_64_unknown_netbsd=x86_64-unknown-netbsd-g++ \ 188 | AR_x86_64_unknown_dragonfly=x86_64-unknown-dragonfly-ar \ 189 | CC_x86_64_unknown_dragonfly=x86_64-unknown-dragonfly-gcc \ 190 | CXX_x86_64_unknown_dragonfly=x86_64-unknown-dragonfly-g++ \ 191 | AR_mips_unknown_linux_gnu=mips-linux-gnu-ar \ 192 | CC_mips_unknown_linux_gnu=mips-linux-gnu-gcc-5 \ 193 | CXX_mips_unknown_linux_gnu=mips-linux-gnu-g++-5 \ 194 | AR_mips_unknown_linux_musl=mips-linux-musl-ar \ 195 | CC_mips_unknown_linux_musl=mips-linux-musl-gcc \ 196 | CXX_mips_unknown_linux_musl=mips-linux-musl-g++ \ 197 | AR_mipsel_unknown_linux_gnu=mipsel-linux-gnu-ar \ 198 | CC_mipsel_unknown_linux_gnu=mipsel-linux-gnu-gcc-5 \ 199 | CXX_mipsel_unknown_linux_gnu=mipsel-linux-gnu-g++-5 \ 200 | AR_mipsel_unknown_linux_musl=mipsel-linux-musl-ar \ 201 | CC_mipsel_unknown_linux_musl=mipsel-linux-musl-gcc \ 202 | CXX_mipsel_unknown_linux_musl=mipsel-linux-musl-g++ \ 203 | AR_powerpc_unknown_linux_gnu=powerpc-linux-gnu-ar \ 204 | CC_powerpc_unknown_linux_gnu=powerpc-linux-gnu-gcc \ 205 | CXX_powerpc_unknown_linux_gnu=powerpc-linux-gnu-g++ \ 206 | AR_powerpc64_unknown_linux_gnu=powerpc64-linux-gnu-ar \ 207 | CC_powerpc64_unknown_linux_gnu=powerpc64-linux-gnu-gcc \ 208 | CXX_powerpc64_unknown_linux_gnu=powerpc64-linux-gnu-g++ \ 209 | AR_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-ar \ 210 | CC_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-gcc \ 211 | CXX_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-g++ \ 212 | AR_s390x_unknown_linux_gnu=s390x-linux-gnu-ar \ 213 | 
CC_s390x_unknown_linux_gnu=s390x-linux-gnu-gcc \ 214 | CXX_s390x_unknown_linux_gnu=s390x-linux-gnu-g++ \ 215 | AR_mips64_unknown_linux_gnuabi64=mips64-linux-gnuabi64-ar \ 216 | CC_mips64_unknown_linux_gnuabi64=mips64-linux-gnuabi64-gcc \ 217 | CXX_mips64_unknown_linux_gnuabi64=mips64-linux-gnuabi64-g++ \ 218 | AR_mips64el_unknown_linux_gnuabi64=mips64el-linux-gnuabi64-ar \ 219 | CC_mips64el_unknown_linux_gnuabi64=mips64el-linux-gnuabi64-gcc \ 220 | CXX_mips64el_unknown_linux_gnuabi64=mips64el-linux-gnuabi64-g++ 221 | 222 | # When running this container, startup buildbot 223 | WORKDIR /buildslave 224 | USER rustbuild 225 | COPY start-docker-slave.sh start-docker-slave.sh 226 | ENTRYPOINT ["sh", "start-docker-slave.sh"] 227 | -------------------------------------------------------------------------------- /slaves/linux-cross/build-libunwind.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -ex 4 | 5 | # To build MUSL targets we're going to need a libunwind lying around, so acquire that 6 | # here and build it. 7 | if [ ! -d "llvm-3.8.0.src" ]; then 8 | curl http://releases.llvm.org/3.8.0/llvm-3.8.0.src.tar.xz | tar xJf - 9 | fi 10 | 11 | if [ ! -d "libunwind-3.8.0.src" ]; then 12 | curl http://releases.llvm.org/3.8.0/libunwind-3.8.0.src.tar.xz | tar xJf - 13 | fi 14 | 15 | rm -rf libunwind-build 16 | mkdir libunwind-build 17 | cd libunwind-build 18 | CC=${1/unknown-/}-gcc CXX=${1/unknown-/}-gcc cmake \ 19 | ../libunwind-3.8.0.src \ 20 | -DLLVM_PATH=../llvm-3.8.0.src \ 21 | -DLIBUNWIND_ENABLE_SHARED=0 22 | VERBOSE=1 make -j1 23 | cp lib/libunwind.a /x-tools/${1}/${1}/sysroot/usr/lib/ 24 | -------------------------------------------------------------------------------- /slaves/linux-cross/build_dragonfly_toolchain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | ARCH=x86_64 6 | BINUTILS=2.25.1 7 | GCC=5.3.0 8 | DF_VERSION=4.6.0_REL 9 | URL_DFLY_ISO=https://mirror-master.dragonflybsd.org/iso-images/dfly-x86_64-${DF_VERSION}.iso.bz2 10 | 11 | mkdir binutils 12 | cd binutils 13 | 14 | # First up, build binutils 15 | curl https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILS.tar.bz2 | tar xjf - 16 | mkdir binutils-build 17 | cd binutils-build 18 | ../binutils-$BINUTILS/configure \ 19 | --target=$ARCH-unknown-dragonfly 20 | make -j10 21 | make install 22 | cd ../.. 
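# (binutils has now installed its $ARCH-unknown-dragonfly-prefixed tools under
# the default /usr/local prefix, so the source and build trees can be removed.)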
23 | rm -rf binutils 24 | 25 | # Next, download the DragonFly libc and relevant header files 26 | mkdir dragonfly 27 | curl $URL_DFLY_ISO | bzcat | bsdtar xf - -C dragonfly ./usr/include ./usr/lib ./lib 28 | 29 | dst=/usr/local/$ARCH-unknown-dragonfly 30 | 31 | cp -r dragonfly/usr/include $dst/ 32 | cp dragonfly/usr/lib/crt1.o $dst/lib 33 | cp dragonfly/usr/lib/Scrt1.o $dst/lib 34 | cp dragonfly/usr/lib/crti.o $dst/lib 35 | cp dragonfly/usr/lib/crtn.o $dst/lib 36 | cp dragonfly/usr/lib/libc.a $dst/lib 37 | cp dragonfly/usr/lib/libutil.a $dst/lib 38 | #cp dragonfly/usr/lib/libutil_p.a $dst/lib 39 | cp dragonfly/usr/lib/libm.a $dst/lib 40 | cp dragonfly/usr/lib/librt.so.0 $dst/lib 41 | cp dragonfly/usr/lib/libexecinfo.so.1 $dst/lib 42 | cp dragonfly/lib/libc.so.8 $dst/lib 43 | cp dragonfly/lib/libm.so.4 $dst/lib 44 | cp dragonfly/lib/libutil.so.4 $dst/lib 45 | #cp dragonfly/lib/libthr.so.3 $dst/lib/libpthread.so 46 | cp dragonfly/usr/lib/libpthread.so $dst/lib/libpthread.so 47 | cp dragonfly/usr/lib/thread/libthread_xu.so.2 $dst/lib/libpthread.so.0 48 | 49 | ln -s libc.so.8 $dst/lib/libc.so 50 | ln -s libm.so.4 $dst/lib/libm.so 51 | ln -s librt.so.0 $dst/lib/librt.so 52 | ln -s libutil.so.4 $dst/lib/libutil.so 53 | ln -s libexecinfo.so.1 $dst/lib/libexecinfo.so 54 | rm -rf dragonfly 55 | 56 | # Finally, download and build gcc to target DragonFly 57 | mkdir gcc 58 | cd gcc 59 | curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.bz2 | tar xjf - 60 | cd gcc-$GCC 61 | 62 | # The following three patches are taken from DragonFly's dports collection: 63 | # https://github.com/DragonFlyBSD/DPorts/tree/master/lang/gcc5 64 | # The dports specification for gcc5 contains a few more patches, but they are 65 | # not relevant in this situation, as they are for a language we don't need 66 | # (e.g. java), or a platform which is not supported by DragonFly (e.g. i386, 67 | # powerpc64, ia64, arm). 68 | # 69 | # These patches probably only need to be updated in case the gcc version is 70 | # updated. 71 | 72 | patch -p0 <<'EOF' 73 | --- libatomic/configure.tgt.orig 2015-07-09 16:08:55 UTC 74 | +++ libatomic/configure.tgt 75 | @@ -110,7 +110,7 @@ case "${target}" in 76 | ;; 77 | 78 | *-*-linux* | *-*-gnu* | *-*-k*bsd*-gnu \ 79 | - | *-*-netbsd* | *-*-freebsd* | *-*-openbsd* \ 80 | + | *-*-netbsd* | *-*-freebsd* | *-*-openbsd* | *-*-dragonfly* \ 81 | | *-*-solaris2* | *-*-sysv4* | *-*-irix6* | *-*-osf* | *-*-hpux11* \ 82 | | *-*-darwin* | *-*-aix* | *-*-cygwin*) 83 | # POSIX system. The OS is supported. 84 | EOF 85 | 86 | patch -p0 <<'EOF' 87 | --- libstdc++-v3/config/os/bsd/dragonfly/os_defines.h.orig 2015-07-09 16:08:54 UTC 88 | +++ libstdc++-v3/config/os/bsd/dragonfly/os_defines.h 89 | @@ -29,4 +29,9 @@ 90 | // System-specific #define, typedefs, corrections, etc, go here. This 91 | // file will come before all others. 
92 | 93 | +#define _GLIBCXX_USE_C99_CHECK 1 94 | +#define _GLIBCXX_USE_C99_DYNAMIC (!(__ISO_C_VISIBLE >= 1999)) 95 | +#define _GLIBCXX_USE_C99_LONG_LONG_CHECK 1 96 | +#define _GLIBCXX_USE_C99_LONG_LONG_DYNAMIC (_GLIBCXX_USE_C99_DYNAMIC || !defined __LONG_LONG_SUPPORTED) 97 | + 98 | #endif 99 | EOF 100 | 101 | patch -p0 <<'EOF' 102 | --- libstdc++-v3/configure.orig 2016-05-26 18:34:47.163132921 +0200 103 | +++ libstdc++-v3/configure 2016-05-26 18:35:29.594590648 +0200 104 | @@ -52013,7 +52013,7 @@ 105 | 106 | ;; 107 | 108 | - *-freebsd*) 109 | + *-freebsd* | *-dragonfly*) 110 | SECTION_FLAGS='-ffunction-sections -fdata-sections' 111 | 112 | 113 | EOF 114 | 115 | ./contrib/download_prerequisites 116 | 117 | mkdir ../gcc-build 118 | cd ../gcc-build 119 | ../gcc-$GCC/configure \ 120 | --enable-languages=c,c++ \ 121 | --target=$ARCH-unknown-dragonfly \ 122 | --disable-multilib \ 123 | --disable-nls \ 124 | --disable-libgomp \ 125 | --disable-libquadmath \ 126 | --disable-libssp \ 127 | --disable-libvtv \ 128 | --disable-libcilkrts \ 129 | --disable-libada \ 130 | --disable-libsanitizer \ 131 | --disable-libquadmath-support \ 132 | --disable-lto 133 | make -j10 134 | make install 135 | cd ../.. 136 | rm -rf gcc 137 | -------------------------------------------------------------------------------- /slaves/linux-cross/build_freebsd_toolchain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | ARCH=$1 6 | BINUTILS=2.25.1 7 | GCC=5.3.0 8 | 9 | mkdir binutils 10 | cd binutils 11 | 12 | # First up, build binutils 13 | curl https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILS.tar.bz2 | tar xjf - 14 | mkdir binutils-build 15 | cd binutils-build 16 | ../binutils-$BINUTILS/configure \ 17 | --target=$ARCH-unknown-freebsd10 18 | make -j10 19 | make install 20 | cd ../.. 
21 | rm -rf binutils 22 | 23 | # Next, download the FreeBSD libc and relevant header files 24 | 25 | mkdir freebsd 26 | case "$ARCH" in 27 | x86_64) 28 | URL=ftp://ftp.freebsd.org/pub/FreeBSD/releases/amd64/10.2-RELEASE/base.txz 29 | ;; 30 | i686) 31 | URL=ftp://ftp.freebsd.org/pub/FreeBSD/releases/i386/10.2-RELEASE/base.txz 32 | ;; 33 | esac 34 | curl $URL | tar xJf - -C freebsd ./usr/include ./usr/lib ./lib 35 | 36 | dst=/usr/local/$ARCH-unknown-freebsd10 37 | 38 | cp -r freebsd/usr/include $dst/ 39 | cp freebsd/usr/lib/crt1.o $dst/lib 40 | cp freebsd/usr/lib/Scrt1.o $dst/lib 41 | cp freebsd/usr/lib/crti.o $dst/lib 42 | cp freebsd/usr/lib/crtn.o $dst/lib 43 | cp freebsd/usr/lib/libc.a $dst/lib 44 | cp freebsd/usr/lib/libutil.a $dst/lib 45 | cp freebsd/usr/lib/libutil_p.a $dst/lib 46 | cp freebsd/usr/lib/libm.a $dst/lib 47 | cp freebsd/usr/lib/librt.so.1 $dst/lib 48 | cp freebsd/usr/lib/libexecinfo.so.1 $dst/lib 49 | cp freebsd/lib/libc.so.7 $dst/lib 50 | cp freebsd/lib/libm.so.5 $dst/lib 51 | cp freebsd/lib/libutil.so.9 $dst/lib 52 | cp freebsd/lib/libthr.so.3 $dst/lib/libpthread.so 53 | 54 | ln -s libc.so.7 $dst/lib/libc.so 55 | ln -s libm.so.5 $dst/lib/libm.so 56 | ln -s librt.so.1 $dst/lib/librt.so 57 | ln -s libutil.so.9 $dst/lib/libutil.so 58 | ln -s libexecinfo.so.1 $dst/lib/libexecinfo.so 59 | rm -rf freebsd 60 | 61 | # Finally, download and build gcc to target FreeBSD 62 | mkdir gcc 63 | cd gcc 64 | curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.bz2 | tar xjf - 65 | cd gcc-$GCC 66 | ./contrib/download_prerequisites 67 | 68 | mkdir ../gcc-build 69 | cd ../gcc-build 70 | ../gcc-$GCC/configure \ 71 | --enable-languages=c,c++ \ 72 | --target=$ARCH-unknown-freebsd10 \ 73 | --disable-multilib \ 74 | --disable-nls \ 75 | --disable-libgomp \ 76 | --disable-libquadmath \ 77 | --disable-libssp \ 78 | --disable-libvtv \ 79 | --disable-libcilkrts \ 80 | --disable-libada \ 81 | --disable-libsanitizer \ 82 | --disable-libquadmath-support \ 83 | --disable-lto 84 | make -j10 85 | make install 86 | cd ../.. 87 | rm -rf gcc 88 | -------------------------------------------------------------------------------- /slaves/linux-cross/build_netbsd_toolchain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | BINUTILS=2.25.1 6 | GCC=5.3.0 7 | 8 | # First up, build binutils 9 | mkdir binutils 10 | cd binutils 11 | curl https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILS.tar.bz2 | tar xjf - 12 | mkdir binutils-build 13 | cd binutils-build 14 | ../binutils-$BINUTILS/configure \ 15 | --target=x86_64-unknown-netbsd 16 | make -j10 17 | make install 18 | cd ../.. 
19 | rm -rf binutils 20 | 21 | # Next, download the NetBSD libc and relevant header files 22 | mkdir netbsd 23 | curl ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-7.0/amd64/binary/sets/base.tgz | \ 24 | tar xzf - -C netbsd ./usr/include ./usr/lib ./lib 25 | curl ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-7.0/amd64/binary/sets/comp.tgz | \ 26 | tar xzf - -C netbsd ./usr/include ./usr/lib 27 | 28 | dst=/usr/local/x86_64-unknown-netbsd 29 | cp -r netbsd/usr/include $dst 30 | cp netbsd/usr/lib/crt0.o $dst/lib 31 | cp netbsd/usr/lib/crti.o $dst/lib 32 | cp netbsd/usr/lib/crtn.o $dst/lib 33 | cp netbsd/usr/lib/crtbeginS.o $dst/lib 34 | cp netbsd/usr/lib/crtendS.o $dst/lib 35 | cp netbsd/usr/lib/crtbegin.o $dst/lib 36 | cp netbsd/usr/lib/crtend.o $dst/lib 37 | cp netbsd/usr/lib/gcrt0.o $dst/lib 38 | cp netbsd/usr/lib/libc.a $dst/lib 39 | cp netbsd/usr/lib/libc_p.a $dst/lib 40 | cp netbsd/usr/lib/libc_pic.a $dst/lib 41 | cp netbsd/lib/libc.so.12.193.1 $dst/lib 42 | cp netbsd/lib/libutil.so.7.21 $dst/lib 43 | cp netbsd/usr/lib/libm.a $dst/lib 44 | cp netbsd/usr/lib/libm_p.a $dst/lib 45 | cp netbsd/usr/lib/libm_pic.a $dst/lib 46 | cp netbsd/lib/libm.so.0.11 $dst/lib 47 | cp netbsd/usr/lib/librt.so.1.1 $dst/lib 48 | cp netbsd/usr/lib/libpthread.a $dst/lib 49 | cp netbsd/usr/lib/libpthread_p.a $dst/lib 50 | cp netbsd/usr/lib/libpthread_pic.a $dst/lib 51 | cp netbsd/usr/lib/libpthread.so.1.2 $dst/lib 52 | 53 | ln -s libc.so.12.193.1 $dst/lib/libc.so 54 | ln -s libc.so.12.193.1 $dst/lib/libc.so.12 55 | ln -s libm.so.0.11 $dst/lib/libm.so 56 | ln -s libm.so.0.11 $dst/lib/libm.so.0 57 | ln -s libutil.so.7.21 $dst/lib/libutil.so 58 | ln -s libutil.so.7.21 $dst/lib/libutil.so.7 59 | ln -s libpthread.so.1.2 $dst/lib/libpthread.so 60 | ln -s libpthread.so.1.2 $dst/lib/libpthread.so.1 61 | ln -s librt.so.1.1 $dst/lib/librt.so 62 | 63 | rm -rf netbsd 64 | 65 | # Finally, download and build gcc to target NetBSD 66 | mkdir gcc 67 | cd gcc 68 | curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.bz2 | tar xjf - 69 | cd gcc-$GCC 70 | ./contrib/download_prerequisites 71 | PATCHES="ftp://ftp.netbsd.org/pub/pkgsrc/pkgsrc-2016Q4/pkgsrc/lang/gcc5/patches/patch-libstdc%2B%2B-v3_config_os_bsd_netbsd_ctype__base.h \ 72 | ftp://ftp.netbsd.org/pub/pkgsrc/pkgsrc-2016Q4/pkgsrc/lang/gcc5/patches/patch-libstdc%2B%2B-v3_config_os_bsd_netbsd_ctype__configure__char.cc" 73 | 74 | for patch in $PATCHES; do 75 | curl $patch | patch -Np0 76 | done 77 | 78 | mkdir ../gcc-build 79 | cd ../gcc-build 80 | ../gcc-$GCC/configure \ 81 | --enable-languages=c,c++ \ 82 | --target=x86_64-unknown-netbsd \ 83 | --disable-libcilkrts \ 84 | --disable-multilib \ 85 | --disable-nls \ 86 | --disable-libgomp \ 87 | --disable-libquadmath \ 88 | --disable-libssp \ 89 | --disable-libvtv \ 90 | --disable-libcilkrt \ 91 | --disable-libada \ 92 | --disable-libsanitizer \ 93 | --disable-libquadmath-support \ 94 | --disable-lto 95 | make -j10 96 | make install 97 | 98 | cd ../.. 99 | rm -rf gcc 100 | -------------------------------------------------------------------------------- /slaves/linux-cross/build_powerpc64le_linux_toolchain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | BINUTILS=2.25.1 6 | GCC=5.3.0 7 | TARGET=powerpc64le-linux-gnu 8 | SYSROOT=/usr/local/$TARGET/sysroot 9 | 10 | # First, download the CentOS7 glibc.ppc64le and relevant header files. 11 | # (upstream ppc64le support wasn't added until 2.19, which el7 backported.) 
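# The RPMs are simply unpacked into the sysroot with rpm2cpio/cpio below;
# nothing is installed through rpm itself on the Ubuntu build host.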
12 | mkdir -p $SYSROOT 13 | pushd $SYSROOT 14 | 15 | centos_base=http://mirror.centos.org/altarch/7.3.1611/os/ppc64le/Packages 16 | glibc_v=2.17-157.el7 17 | kernel_v=3.10.0-514.el7 18 | for package in glibc{,-devel,-headers}-$glibc_v kernel-headers-$kernel_v; do 19 | curl $centos_base/$package.ppc64le.rpm | \ 20 | rpm2cpio - | cpio -idm 21 | done 22 | 23 | ln -sT lib64 lib 24 | ln -sT lib64 usr/lib 25 | 26 | popd 27 | 28 | # Next, download and build binutils. 29 | mkdir binutils-$TARGET 30 | pushd binutils-$TARGET 31 | curl https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILS.tar.bz2 | tar xjf - 32 | mkdir binutils-build 33 | cd binutils-build 34 | ../binutils-$BINUTILS/configure --target=$TARGET --with-sysroot=$SYSROOT 35 | make -j10 36 | make install 37 | popd 38 | rm -rf binutils-$TARGET 39 | 40 | # Finally, download and build gcc. 41 | mkdir gcc-$TARGET 42 | pushd gcc-$TARGET 43 | curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.bz2 | tar xjf - 44 | cd gcc-$GCC 45 | ./contrib/download_prerequisites 46 | 47 | mkdir ../gcc-build 48 | cd ../gcc-build 49 | ../gcc-$GCC/configure \ 50 | --enable-languages=c,c++ \ 51 | --target=$TARGET \ 52 | --with-cpu=power8 \ 53 | --with-sysroot=$SYSROOT \ 54 | --disable-libcilkrts \ 55 | --disable-multilib \ 56 | --disable-nls \ 57 | --disable-libgomp \ 58 | --disable-libquadmath \ 59 | --disable-libssp \ 60 | --disable-libvtv \ 61 | --disable-libcilkrt \ 62 | --disable-libada \ 63 | --disable-libsanitizer \ 64 | --disable-libquadmath-support \ 65 | --disable-lto 66 | make -j10 67 | make install 68 | 69 | popd 70 | rm -rf gcc-$TARGET 71 | -------------------------------------------------------------------------------- /slaves/linux-cross/build_rumprun.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | git clone --recursive https://github.com/rumpkernel/rumprun 6 | cd rumprun 7 | CC=cc ./build-rr.sh -d /usr/local hw 8 | -------------------------------------------------------------------------------- /slaves/linux-cross/build_toolchain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | mkdir $1 6 | pushd $1 7 | cp ../${1}.config .config 8 | ct-ng oldconfig 9 | ct-ng build 10 | rm -rf .build 11 | popd 12 | rm -rf $1 13 | -------------------------------------------------------------------------------- /slaves/linux-cross/build_toolchain_root.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | install_deps() { 6 | apt-get install -y --force-yes --no-install-recommends \ 7 | automake bison bzip2 ca-certificates curl flex g++ gawk gperf help2man libncurses-dev \ 8 | libtool-bin make texinfo patch wget 9 | } 10 | 11 | # gcc-4.8 can't be built with the make-4 that's ships with Ubuntu 15.10. 
This overrides it with 12 | # make-3 13 | mk_make() { 14 | local version=3.81 15 | 16 | curl ftp://ftp.gnu.org/gnu/make/make-${version}.tar.gz | tar xz 17 | pushd make-${version} 18 | ./configure --prefix=/usr 19 | make 20 | make install 21 | popd 22 | rm -rf make-${version} 23 | } 24 | 25 | mk_crosstool_ng() { 26 | local version=1.22.0 27 | 28 | curl http://crosstool-ng.org/download/crosstool-ng/crosstool-ng-${version}.tar.bz2 | tar xj 29 | pushd crosstool-ng 30 | ./configure --prefix=/usr/local 31 | make 32 | make install 33 | popd 34 | rm -rf crosstool-ng 35 | } 36 | 37 | main() { 38 | install_deps 39 | mk_make 40 | mk_crosstool_ng 41 | } 42 | 43 | main 44 | -------------------------------------------------------------------------------- /slaves/linux-cross/mips-linux-musl.config: -------------------------------------------------------------------------------- 1 | # 2 | # Automatically generated file; DO NOT EDIT. 3 | # Crosstool-NG Configuration 4 | # 5 | CT_CONFIGURE_has_make381=y 6 | CT_MODULES=y 7 | 8 | # 9 | # Paths and misc options 10 | # 11 | 12 | # 13 | # crosstool-NG behavior 14 | # 15 | # CT_OBSOLETE is not set 16 | # CT_EXPERIMENTAL is not set 17 | # CT_DEBUG_CT is not set 18 | 19 | # 20 | # Paths 21 | # 22 | CT_LOCAL_TARBALLS_DIR="" 23 | CT_WORK_DIR="${CT_TOP_DIR}/.build" 24 | CT_PREFIX_DIR="/x-tools/${CT_TARGET}" 25 | CT_INSTALL_DIR="${CT_PREFIX_DIR}" 26 | CT_RM_RF_PREFIX_DIR=y 27 | CT_REMOVE_DOCS=y 28 | CT_INSTALL_DIR_RO=y 29 | CT_STRIP_HOST_TOOLCHAIN_EXECUTABLES=y 30 | # CT_STRIP_TARGET_TOOLCHAIN_EXECUTABLES is not set 31 | 32 | # 33 | # Downloading 34 | # 35 | # CT_FORBID_DOWNLOAD is not set 36 | # CT_FORCE_DOWNLOAD is not set 37 | CT_CONNECT_TIMEOUT=10 38 | # CT_ONLY_DOWNLOAD is not set 39 | # CT_USE_MIRROR is not set 40 | 41 | # 42 | # Extracting 43 | # 44 | # CT_FORCE_EXTRACT is not set 45 | CT_OVERIDE_CONFIG_GUESS_SUB=y 46 | # CT_ONLY_EXTRACT is not set 47 | CT_PATCH_BUNDLED=y 48 | # CT_PATCH_LOCAL is not set 49 | # CT_PATCH_BUNDLED_LOCAL is not set 50 | # CT_PATCH_LOCAL_BUNDLED is not set 51 | # CT_PATCH_BUNDLED_FALLBACK_LOCAL is not set 52 | # CT_PATCH_LOCAL_FALLBACK_BUNDLED is not set 53 | # CT_PATCH_NONE is not set 54 | CT_PATCH_ORDER="bundled" 55 | 56 | # 57 | # Build behavior 58 | # 59 | CT_PARALLEL_JOBS=0 60 | CT_LOAD="" 61 | CT_USE_PIPES=y 62 | CT_EXTRA_CFLAGS_FOR_BUILD="" 63 | CT_EXTRA_LDFLAGS_FOR_BUILD="" 64 | CT_EXTRA_CFLAGS_FOR_HOST="" 65 | CT_EXTRA_LDFLAGS_FOR_HOST="" 66 | # CT_CONFIG_SHELL_SH is not set 67 | # CT_CONFIG_SHELL_ASH is not set 68 | CT_CONFIG_SHELL_BASH=y 69 | # CT_CONFIG_SHELL_CUSTOM is not set 70 | CT_CONFIG_SHELL="${bash}" 71 | 72 | # 73 | # Logging 74 | # 75 | # CT_LOG_ERROR is not set 76 | # CT_LOG_WARN is not set 77 | CT_LOG_INFO=y 78 | # CT_LOG_EXTRA is not set 79 | # CT_LOG_ALL is not set 80 | # CT_LOG_DEBUG is not set 81 | CT_LOG_LEVEL_MAX="INFO" 82 | # CT_LOG_SEE_TOOLS_WARN is not set 83 | CT_LOG_PROGRESS_BAR=y 84 | CT_LOG_TO_FILE=y 85 | CT_LOG_FILE_COMPRESS=y 86 | 87 | # 88 | # Target options 89 | # 90 | CT_ARCH="mips" 91 | CT_ARCH_SUPPORTS_BOTH_ENDIAN=y 92 | CT_ARCH_SUPPORTS_32=y 93 | CT_ARCH_SUPPORTS_64=y 94 | CT_ARCH_SUPPORTS_WITH_ARCH=y 95 | CT_ARCH_SUPPORTS_WITH_TUNE=y 96 | CT_ARCH_SUPPORTS_WITH_FLOAT=y 97 | CT_ARCH_DEFAULT_BE=y 98 | CT_ARCH_DEFAULT_32=y 99 | CT_ARCH_ARCH="mips32r2" 100 | CT_ARCH_TUNE="" 101 | CT_ARCH_BE=y 102 | # CT_ARCH_LE is not set 103 | CT_ARCH_32=y 104 | # CT_ARCH_64 is not set 105 | CT_ARCH_BITNESS=32 106 | # CT_ARCH_FLOAT_HW is not set 107 | CT_ARCH_FLOAT_SW=y 108 | CT_TARGET_CFLAGS="" 109 | 
CT_TARGET_LDFLAGS="" 110 | # CT_ARCH_alpha is not set 111 | # CT_ARCH_arm is not set 112 | # CT_ARCH_avr is not set 113 | # CT_ARCH_m68k is not set 114 | CT_ARCH_mips=y 115 | # CT_ARCH_nios2 is not set 116 | # CT_ARCH_powerpc is not set 117 | # CT_ARCH_s390 is not set 118 | # CT_ARCH_sh is not set 119 | # CT_ARCH_sparc is not set 120 | # CT_ARCH_x86 is not set 121 | # CT_ARCH_xtensa is not set 122 | CT_ARCH_alpha_AVAILABLE=y 123 | CT_ARCH_arm_AVAILABLE=y 124 | CT_ARCH_avr_AVAILABLE=y 125 | CT_ARCH_m68k_AVAILABLE=y 126 | CT_ARCH_microblaze_AVAILABLE=y 127 | CT_ARCH_mips_AVAILABLE=y 128 | CT_ARCH_nios2_AVAILABLE=y 129 | CT_ARCH_powerpc_AVAILABLE=y 130 | CT_ARCH_s390_AVAILABLE=y 131 | CT_ARCH_sh_AVAILABLE=y 132 | CT_ARCH_sparc_AVAILABLE=y 133 | CT_ARCH_x86_AVAILABLE=y 134 | CT_ARCH_xtensa_AVAILABLE=y 135 | CT_ARCH_SUFFIX="" 136 | 137 | # 138 | # Generic target options 139 | # 140 | # CT_MULTILIB is not set 141 | CT_ARCH_USE_MMU=y 142 | CT_ARCH_ENDIAN="big" 143 | 144 | # 145 | # Target optimisations 146 | # 147 | # CT_ARCH_FLOAT_AUTO is not set 148 | CT_ARCH_FLOAT="soft" 149 | 150 | # 151 | # mips other options 152 | # 153 | CT_ARCH_mips_o32=y 154 | CT_ARCH_mips_ABI="32" 155 | 156 | # 157 | # Toolchain options 158 | # 159 | 160 | # 161 | # General toolchain options 162 | # 163 | CT_FORCE_SYSROOT=y 164 | CT_USE_SYSROOT=y 165 | CT_SYSROOT_NAME="sysroot" 166 | CT_SYSROOT_DIR_PREFIX="" 167 | CT_WANTS_STATIC_LINK=y 168 | # CT_STATIC_TOOLCHAIN is not set 169 | CT_TOOLCHAIN_PKGVERSION="" 170 | CT_TOOLCHAIN_BUGURL="" 171 | 172 | # 173 | # Tuple completion and aliasing 174 | # 175 | CT_TARGET_VENDOR="unknown" 176 | CT_TARGET_ALIAS_SED_EXPR="" 177 | CT_TARGET_ALIAS="" 178 | 179 | # 180 | # Toolchain type 181 | # 182 | CT_CROSS=y 183 | # CT_CANADIAN is not set 184 | CT_TOOLCHAIN_TYPE="cross" 185 | 186 | # 187 | # Build system 188 | # 189 | CT_BUILD="" 190 | CT_BUILD_PREFIX="" 191 | CT_BUILD_SUFFIX="" 192 | 193 | # 194 | # Misc options 195 | # 196 | # CT_TOOLCHAIN_ENABLE_NLS is not set 197 | 198 | # 199 | # Operating System 200 | # 201 | CT_KERNEL_SUPPORTS_SHARED_LIBS=y 202 | CT_KERNEL="linux" 203 | CT_KERNEL_VERSION="4.3" 204 | # CT_KERNEL_bare_metal is not set 205 | CT_KERNEL_linux=y 206 | CT_KERNEL_bare_metal_AVAILABLE=y 207 | CT_KERNEL_linux_AVAILABLE=y 208 | CT_KERNEL_V_4_3=y 209 | # CT_KERNEL_V_4_2 is not set 210 | # CT_KERNEL_V_4_1 is not set 211 | # CT_KERNEL_V_3_18 is not set 212 | # CT_KERNEL_V_3_14 is not set 213 | # CT_KERNEL_V_3_12 is not set 214 | # CT_KERNEL_V_3_10 is not set 215 | # CT_KERNEL_V_3_4 is not set 216 | # CT_KERNEL_V_3_2 is not set 217 | # CT_KERNEL_V_2_6_32 is not set 218 | # CT_KERNEL_LINUX_CUSTOM is not set 219 | CT_KERNEL_windows_AVAILABLE=y 220 | 221 | # 222 | # Common kernel options 223 | # 224 | CT_SHARED_LIBS=y 225 | 226 | # 227 | # linux other options 228 | # 229 | CT_KERNEL_LINUX_VERBOSITY_0=y 230 | # CT_KERNEL_LINUX_VERBOSITY_1 is not set 231 | # CT_KERNEL_LINUX_VERBOSITY_2 is not set 232 | CT_KERNEL_LINUX_VERBOSE_LEVEL=0 233 | CT_KERNEL_LINUX_INSTALL_CHECK=y 234 | 235 | # 236 | # Binary utilities 237 | # 238 | CT_ARCH_BINFMT_ELF=y 239 | CT_BINUTILS="binutils" 240 | CT_BINUTILS_binutils=y 241 | 242 | # 243 | # GNU binutils 244 | # 245 | # CT_CC_BINUTILS_SHOW_LINARO is not set 246 | CT_BINUTILS_V_2_25_1=y 247 | # CT_BINUTILS_V_2_25 is not set 248 | # CT_BINUTILS_V_2_24 is not set 249 | # CT_BINUTILS_V_2_23_2 is not set 250 | # CT_BINUTILS_V_2_23_1 is not set 251 | # CT_BINUTILS_V_2_22 is not set 252 | # CT_BINUTILS_V_2_21_53 is not set 253 | # CT_BINUTILS_V_2_21_1a is 
not set 254 | # CT_BINUTILS_V_2_20_1a is not set 255 | # CT_BINUTILS_V_2_19_1a is not set 256 | # CT_BINUTILS_V_2_18a is not set 257 | CT_BINUTILS_VERSION="2.25.1" 258 | CT_BINUTILS_2_25_1_or_later=y 259 | CT_BINUTILS_2_25_or_later=y 260 | CT_BINUTILS_2_24_or_later=y 261 | CT_BINUTILS_2_23_or_later=y 262 | CT_BINUTILS_2_22_or_later=y 263 | CT_BINUTILS_2_21_or_later=y 264 | CT_BINUTILS_2_20_or_later=y 265 | CT_BINUTILS_2_19_or_later=y 266 | CT_BINUTILS_2_18_or_later=y 267 | CT_BINUTILS_HAS_HASH_STYLE=y 268 | CT_BINUTILS_HAS_GOLD=y 269 | CT_BINUTILS_HAS_PLUGINS=y 270 | CT_BINUTILS_HAS_PKGVERSION_BUGURL=y 271 | CT_BINUTILS_LINKER_LD=y 272 | CT_BINUTILS_LINKERS_LIST="ld" 273 | CT_BINUTILS_LINKER_DEFAULT="bfd" 274 | # CT_BINUTILS_PLUGINS is not set 275 | CT_BINUTILS_EXTRA_CONFIG_ARRAY="" 276 | # CT_BINUTILS_FOR_TARGET is not set 277 | 278 | # 279 | # binutils other options 280 | # 281 | 282 | # 283 | # C-library 284 | # 285 | CT_LIBC="musl" 286 | CT_LIBC_VERSION="1.0.5" 287 | # CT_LIBC_glibc is not set 288 | CT_LIBC_musl=y 289 | # CT_LIBC_uClibc is not set 290 | CT_LIBC_avr_libc_AVAILABLE=y 291 | CT_LIBC_glibc_AVAILABLE=y 292 | CT_THREADS="musl" 293 | CT_LIBC_mingw_AVAILABLE=y 294 | CT_LIBC_musl_AVAILABLE=y 295 | CT_LIBC_MUSL_V_1_0=y 296 | CT_LIBC_newlib_AVAILABLE=y 297 | CT_LIBC_none_AVAILABLE=y 298 | CT_LIBC_uClibc_AVAILABLE=y 299 | CT_LIBC_SUPPORT_THREADS_ANY=y 300 | CT_LIBC_SUPPORT_THREADS_NATIVE=y 301 | 302 | # 303 | # Common C library options 304 | # 305 | CT_THREADS_NATIVE=y 306 | CT_LIBC_XLDD=y 307 | 308 | # 309 | # musl other options 310 | # 311 | # CT_LIBC_MUSL_DEBUG is not set 312 | # CT_LIBC_MUSL_WARNINGS is not set 313 | # CT_LIBC_MUSL_OPTIMIZE_NONE is not set 314 | CT_LIBC_MUSL_OPTIMIZE_AUTO=y 315 | # CT_LIBC_MUSL_OPTIMIZE_SPEED is not set 316 | # CT_LIBC_MUSL_OPTIMIZE_SIZE is not set 317 | CT_LIBC_MUSL_OPTIMIZE="auto" 318 | 319 | # 320 | # C compiler 321 | # 322 | CT_CC="gcc" 323 | CT_CC_CORE_PASSES_NEEDED=y 324 | CT_CC_CORE_PASS_1_NEEDED=y 325 | CT_CC_CORE_PASS_2_NEEDED=y 326 | CT_CC_gcc=y 327 | # CT_CC_GCC_SHOW_LINARO is not set 328 | CT_CC_GCC_V_5_2_0=y 329 | # CT_CC_GCC_V_4_9_3 is not set 330 | # CT_CC_GCC_V_4_8_5 is not set 331 | # CT_CC_GCC_V_4_7_4 is not set 332 | # CT_CC_GCC_V_4_6_4 is not set 333 | # CT_CC_GCC_V_4_5_4 is not set 334 | # CT_CC_GCC_V_4_4_7 is not set 335 | # CT_CC_GCC_V_4_3_6 is not set 336 | # CT_CC_GCC_V_4_2_4 is not set 337 | CT_CC_GCC_4_2_or_later=y 338 | CT_CC_GCC_4_3_or_later=y 339 | CT_CC_GCC_4_4_or_later=y 340 | CT_CC_GCC_4_5_or_later=y 341 | CT_CC_GCC_4_6_or_later=y 342 | CT_CC_GCC_4_7_or_later=y 343 | CT_CC_GCC_4_8_or_later=y 344 | CT_CC_GCC_4_9_or_later=y 345 | CT_CC_GCC_5=y 346 | CT_CC_GCC_5_or_later=y 347 | CT_CC_GCC_HAS_GRAPHITE=y 348 | CT_CC_GCC_USE_GRAPHITE=y 349 | CT_CC_GCC_HAS_LTO=y 350 | CT_CC_GCC_USE_LTO=y 351 | CT_CC_GCC_HAS_PKGVERSION_BUGURL=y 352 | CT_CC_GCC_HAS_BUILD_ID=y 353 | CT_CC_GCC_HAS_LNK_HASH_STYLE=y 354 | CT_CC_GCC_USE_GMP_MPFR=y 355 | CT_CC_GCC_USE_MPC=y 356 | CT_CC_GCC_HAS_LIBQUADMATH=y 357 | CT_CC_GCC_HAS_LIBSANITIZER=y 358 | CT_CC_GCC_VERSION="5.2.0" 359 | # CT_CC_LANG_FORTRAN is not set 360 | CT_CC_GCC_ENABLE_CXX_FLAGS="" 361 | CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY="" 362 | CT_CC_GCC_EXTRA_CONFIG_ARRAY="" 363 | CT_CC_GCC_EXTRA_ENV_ARRAY="" 364 | CT_CC_GCC_STATIC_LIBSTDCXX=y 365 | # CT_CC_GCC_SYSTEM_ZLIB is not set 366 | 367 | # 368 | # Optimisation features 369 | # 370 | 371 | # 372 | # Settings for libraries running on target 373 | # 374 | CT_CC_GCC_ENABLE_TARGET_OPTSPACE=y 375 | # CT_CC_GCC_LIBMUDFLAP is not set 376 | # 
CT_CC_GCC_LIBGOMP is not set 377 | # CT_CC_GCC_LIBSSP is not set 378 | # CT_CC_GCC_LIBQUADMATH is not set 379 | 380 | # 381 | # Misc. obscure options. 382 | # 383 | CT_CC_CXA_ATEXIT=y 384 | # CT_CC_GCC_DISABLE_PCH is not set 385 | CT_CC_GCC_SJLJ_EXCEPTIONS=m 386 | CT_CC_GCC_LDBL_128=m 387 | # CT_CC_GCC_BUILD_ID is not set 388 | CT_CC_GCC_LNK_HASH_STYLE_DEFAULT=y 389 | # CT_CC_GCC_LNK_HASH_STYLE_SYSV is not set 390 | # CT_CC_GCC_LNK_HASH_STYLE_GNU is not set 391 | # CT_CC_GCC_LNK_HASH_STYLE_BOTH is not set 392 | CT_CC_GCC_LNK_HASH_STYLE="" 393 | CT_CC_GCC_DEC_FLOAT_AUTO=y 394 | # CT_CC_GCC_DEC_FLOAT_BID is not set 395 | # CT_CC_GCC_DEC_FLOAT_DPD is not set 396 | # CT_CC_GCC_DEC_FLOATS_NO is not set 397 | CT_CC_GCC_HAS_ARCH_OPTIONS=y 398 | 399 | # 400 | # archictecture-specific options 401 | # 402 | CT_CC_GCC_mips_llsc=m 403 | CT_CC_GCC_mips_synci=m 404 | # CT_CC_GCC_mips_plt is not set 405 | CT_CC_SUPPORT_CXX=y 406 | CT_CC_SUPPORT_FORTRAN=y 407 | CT_CC_SUPPORT_JAVA=y 408 | CT_CC_SUPPORT_ADA=y 409 | CT_CC_SUPPORT_OBJC=y 410 | CT_CC_SUPPORT_OBJCXX=y 411 | CT_CC_SUPPORT_GOLANG=y 412 | 413 | # 414 | # Additional supported languages: 415 | # 416 | CT_CC_LANG_CXX=y 417 | # CT_CC_LANG_JAVA is not set 418 | 419 | # 420 | # Debug facilities 421 | # 422 | # CT_DEBUG_dmalloc is not set 423 | # CT_DEBUG_duma is not set 424 | # CT_DEBUG_gdb is not set 425 | # CT_DEBUG_ltrace is not set 426 | # CT_DEBUG_strace is not set 427 | 428 | # 429 | # Companion libraries 430 | # 431 | CT_COMPLIBS_NEEDED=y 432 | CT_GMP_NEEDED=y 433 | CT_MPFR_NEEDED=y 434 | CT_ISL_NEEDED=y 435 | CT_MPC_NEEDED=y 436 | CT_COMPLIBS=y 437 | CT_GMP=y 438 | CT_MPFR=y 439 | CT_ISL=y 440 | CT_MPC=y 441 | CT_GMP_V_6_0_0=y 442 | # CT_GMP_V_5_1_3 is not set 443 | # CT_GMP_V_5_1_1 is not set 444 | # CT_GMP_V_5_0_2 is not set 445 | # CT_GMP_V_5_0_1 is not set 446 | # CT_GMP_V_4_3_2 is not set 447 | # CT_GMP_V_4_3_1 is not set 448 | # CT_GMP_V_4_3_0 is not set 449 | CT_GMP_5_0_2_or_later=y 450 | CT_GMP_VERSION="6.0.0a" 451 | CT_MPFR_V_3_1_3=y 452 | # CT_MPFR_V_3_1_2 is not set 453 | # CT_MPFR_V_3_1_0 is not set 454 | # CT_MPFR_V_3_0_1 is not set 455 | # CT_MPFR_V_3_0_0 is not set 456 | # CT_MPFR_V_2_4_2 is not set 457 | # CT_MPFR_V_2_4_1 is not set 458 | # CT_MPFR_V_2_4_0 is not set 459 | CT_MPFR_VERSION="3.1.3" 460 | CT_ISL_V_0_14=y 461 | # CT_ISL_V_0_12_2 is not set 462 | CT_ISL_V_0_14_or_later=y 463 | CT_ISL_V_0_12_or_later=y 464 | CT_ISL_VERSION="0.14" 465 | CT_MPC_V_1_0_3=y 466 | # CT_MPC_V_1_0_2 is not set 467 | # CT_MPC_V_1_0_1 is not set 468 | # CT_MPC_V_1_0 is not set 469 | # CT_MPC_V_0_9 is not set 470 | # CT_MPC_V_0_8_2 is not set 471 | # CT_MPC_V_0_8_1 is not set 472 | # CT_MPC_V_0_7 is not set 473 | CT_MPC_VERSION="1.0.3" 474 | 475 | # 476 | # Companion libraries common options 477 | # 478 | # CT_COMPLIBS_CHECK is not set 479 | 480 | # 481 | # Companion tools 482 | # 483 | 484 | # 485 | # READ HELP before you say 'Y' below !!! 486 | # 487 | # CT_COMP_TOOLS is not set 488 | -------------------------------------------------------------------------------- /slaves/linux-cross/patches/glibc/2.12.2/001-PowerPC-Remove-unnecessary-mnew-mnemonics.patch: -------------------------------------------------------------------------------- 1 | From b3563932f85d60bb0d38b0a5f3b8f4abc133f890 Mon Sep 17 00:00:00 2001 2 | From: Tulio Magno Quites Machado Filho 3 | Date: Thu, 1 Nov 2012 18:00:06 -0500 4 | Subject: [PATCH] PowerPC: Remove unnecessary -mnew-mnemonics. 
5 | 6 | --- 7 | sysdeps/powerpc/Makefile | 4 ---- 8 | 1 file changed, 4 deletions(-) 9 | 10 | diff --git a/sysdeps/powerpc/Makefile b/sysdeps/powerpc/Makefile 11 | index 79dd6fa976d5..7442b6709ad1 100644 12 | --- a/sysdeps/powerpc/Makefile 13 | +++ b/sysdeps/powerpc/Makefile 14 | @@ -1,7 +1,3 @@ 15 | -# We always want to use the new mnemonic syntax even if we are on a RS6000 16 | -# machine. 17 | -+cflags += -mnew-mnemonics 18 | - 19 | ifeq ($(subdir),gmon) 20 | sysdep_routines += ppc-mcount 21 | endif 22 | -- 23 | 2.9.3 24 | 25 | -------------------------------------------------------------------------------- /slaves/linux-cross/patches/glibc/2.12.2/001-Prevent-inlining-in-PPC64-initfini.s.patch: -------------------------------------------------------------------------------- 1 | From a4f388e111ce05e2ab7912cff3c9070334bb74ae Mon Sep 17 00:00:00 2001 2 | From: Josh Stone 3 | Date: Fri, 20 Jan 2017 15:41:56 -0800 4 | Subject: [PATCH] Prevent inlining in PPC64 initfini.s 5 | 6 | Ref: https://sourceware.org/ml/libc-alpha/2012-01/msg00195.html 7 | --- 8 | sysdeps/powerpc/powerpc64/Makefile | 2 +- 9 | 1 file changed, 1 insertion(+), 1 deletion(-) 10 | 11 | diff --git a/sysdeps/powerpc/powerpc64/Makefile b/sysdeps/powerpc/powerpc64/Makefile 12 | index 78d4f07e575f..fe96aae4d43e 100644 13 | --- a/sysdeps/powerpc/powerpc64/Makefile 14 | +++ b/sysdeps/powerpc/powerpc64/Makefile 15 | @@ -28,7 +28,7 @@ elide-routines.os += hp-timing 16 | ifneq ($(elf),no) 17 | # The initfini generation code doesn't work in the presence of -fPIC, so 18 | # we use -fpic instead which is much better. 19 | -CFLAGS-initfini.s += -fpic -O1 20 | +CFLAGS-initfini.s += -fpic -O1 -fno-inline 21 | endif 22 | endif 23 | 24 | -- 25 | 2.9.3 26 | 27 | -------------------------------------------------------------------------------- /slaves/linux-cross/patches/glibc/2.12.2/001-Use-.machine-to-prevent-AS-from-complaining-about-z9.patch: -------------------------------------------------------------------------------- 1 | From 2739047682590b1df473401b4febf424f857fccf Mon Sep 17 00:00:00 2001 2 | From: Andreas Krebbel 3 | Date: Sun, 17 Apr 2011 20:43:59 -0400 4 | Subject: [PATCH] Use .machine to prevent AS from complaining about z9-109 5 | instructions in iconv modules 6 | 7 | --- 8 | sysdeps/s390/s390-64/utf16-utf32-z9.c | 5 ++++- 9 | sysdeps/s390/s390-64/utf8-utf16-z9.c | 5 ++++- 10 | sysdeps/s390/s390-64/utf8-utf32-z9.c | 5 ++++- 11 | 3 files changed, 12 insertions(+), 3 deletions(-) 12 | 13 | diff --git a/sysdeps/s390/s390-64/utf16-utf32-z9.c b/sysdeps/s390/s390-64/utf16-utf32-z9.c 14 | index 14daf2118fe5..5bcaaaedec9c 100644 15 | --- a/sysdeps/s390/s390-64/utf16-utf32-z9.c 16 | +++ b/sysdeps/s390/s390-64/utf16-utf32-z9.c 17 | @@ -169,7 +169,10 @@ gconv_end (struct __gconv_step *data) 18 | register unsigned long long outlen asm("11") = outend - outptr; \ 19 | uint64_t cc = 0; \ 20 | \ 21 | - asm volatile ("0: " INSTRUCTION " \n\t" \ 22 | + asm volatile (".machine push \n\t" \ 23 | + ".machine \"z9-109\" \n\t" \ 24 | + "0: " INSTRUCTION " \n\t" \ 25 | + ".machine pop \n\t" \ 26 | " jo 0b \n\t" \ 27 | " ipm %2 \n" \ 28 | : "+a" (pOutput), "+a" (pInput), "+d" (cc), \ 29 | diff --git a/sysdeps/s390/s390-64/utf8-utf16-z9.c b/sysdeps/s390/s390-64/utf8-utf16-z9.c 30 | index 5f73f3c59e21..812a42fae44c 100644 31 | --- a/sysdeps/s390/s390-64/utf8-utf16-z9.c 32 | +++ b/sysdeps/s390/s390-64/utf8-utf16-z9.c 33 | @@ -151,7 +151,10 @@ gconv_end (struct __gconv_step *data) 34 | register unsigned long long outlen asm("11") = outend - outptr; \ 35 | 
uint64_t cc = 0; \ 36 | \ 37 | - asm volatile ("0: " INSTRUCTION " \n\t" \ 38 | + asm volatile (".machine push \n\t" \ 39 | + ".machine \"z9-109\" \n\t" \ 40 | + "0: " INSTRUCTION " \n\t" \ 41 | + ".machine pop \n\t" \ 42 | " jo 0b \n\t" \ 43 | " ipm %2 \n" \ 44 | : "+a" (pOutput), "+a" (pInput), "+d" (cc), \ 45 | diff --git a/sysdeps/s390/s390-64/utf8-utf32-z9.c b/sysdeps/s390/s390-64/utf8-utf32-z9.c 46 | index 17ef8bc890c3..0ffd848c8124 100644 47 | --- a/sysdeps/s390/s390-64/utf8-utf32-z9.c 48 | +++ b/sysdeps/s390/s390-64/utf8-utf32-z9.c 49 | @@ -155,7 +155,10 @@ gconv_end (struct __gconv_step *data) 50 | register unsigned long long outlen asm("11") = outend - outptr; \ 51 | uint64_t cc = 0; \ 52 | \ 53 | - asm volatile ("0: " INSTRUCTION " \n\t" \ 54 | + asm volatile (".machine push \n\t" \ 55 | + ".machine \"z9-109\" \n\t" \ 56 | + "0: " INSTRUCTION " \n\t" \ 57 | + ".machine pop \n\t" \ 58 | " jo 0b \n\t" \ 59 | " ipm %2 \n" \ 60 | : "+a" (pOutput), "+a" (pInput), "+d" (cc), \ 61 | -- 62 | 2.9.3 63 | 64 | -------------------------------------------------------------------------------- /slaves/linux/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:16.04 2 | 3 | RUN dpkg --add-architecture i386 4 | RUN apt-get update 5 | RUN apt-get install -y \ 6 | curl make xz-utils git \ 7 | python-dev python-pip stunnel \ 8 | g++-multilib libssl-dev libssl-dev:i386 gdb \ 9 | valgrind libc6-dbg:i386 \ 10 | cmake pkg-config 11 | 12 | # Install buildbot and prep it to run 13 | RUN pip install buildbot-slave 14 | RUN groupadd -r rustbuild && useradd -r -g rustbuild rustbuild 15 | RUN mkdir /buildslave && chown rustbuild:rustbuild /buildslave 16 | 17 | WORKDIR /build 18 | COPY linux/build-musl.sh /build/ 19 | 20 | # Install MUSL to support crossing to that target 21 | RUN sh build-musl.sh 22 | 23 | # When running this container, startup buildbot 24 | WORKDIR /buildslave 25 | RUN rm -rf /build 26 | USER rustbuild 27 | COPY start-docker-slave.sh start-docker-slave.sh 28 | ENTRYPOINT ["sh", "start-docker-slave.sh"] 29 | -------------------------------------------------------------------------------- /slaves/linux/build-musl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -ex 4 | 5 | export CFLAGS="-fPIC -Wa,-mrelax-relocations=no" 6 | export CXXFLAGS="-Wa,-mrelax-relocations=no" 7 | MUSL=musl-1.1.14 8 | # Support building MUSL 9 | curl http://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf - 10 | cd $MUSL 11 | # for x86_64 12 | ./configure --prefix=/musl-x86_64 --disable-shared 13 | make -j10 14 | make install 15 | make clean 16 | # for i686 17 | CFLAGS="$CFLAGS -m32" ./configure --prefix=/musl-i686 --disable-shared --target=i686 18 | make -j10 19 | make install 20 | cd .. 21 | 22 | # To build MUSL we're going to need a libunwind lying around, so acquire that 23 | # here and build it. 24 | curl http://releases.llvm.org/3.7.0/llvm-3.7.0.src.tar.xz | tar xJf - 25 | curl http://releases.llvm.org/3.7.0/libunwind-3.7.0.src.tar.xz | tar xJf - 26 | mkdir libunwind-build 27 | cd libunwind-build 28 | # for x86_64 29 | cmake ../libunwind-3.7.0.src -DLLVM_PATH=/build/llvm-3.7.0.src \ 30 | -DLIBUNWIND_ENABLE_SHARED=0 31 | make -j10 32 | cp lib/libunwind.a /musl-x86_64/lib 33 | 34 | # (Note: the next cmake call doesn't fully override the previous cached one, so remove the cached 35 | # configuration manually. 
IOW, if we don't do this or call make clean we'll end up building libunwind 36 | # for x86_64 again) 37 | rm -rf * 38 | # for i686 39 | CFLAGS="$CFLAGS -m32" CXXFLAGS="$CXXFLAGS -m32" cmake /build/libunwind-3.7.0.src \ 40 | -DLLVM_PATH=/build/llvm-3.7.0.src \ 41 | -DLIBUNWIND_ENABLE_SHARED=0 42 | make -j10 43 | cp lib/libunwind.a /musl-i686/lib 44 | -------------------------------------------------------------------------------- /slaves/start-docker-slave.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | url=`curl http://169.254.169.254/latest/user-data | tail -n +3 | head -n 1` 4 | branch=`curl http://169.254.169.254/latest/user-data | tail -n +4 | head -n 1` 5 | git clone $url --branch $branch 6 | export NODAEMON=1 7 | exec sh rust-buildbot/setup-slave.sh 8 | --------------------------------------------------------------------------------
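A minimal sketch (not a file in this repository) of the EC2 user-data layout that slaves/start-docker-slave.sh appears to assume: it reads line 3 of the instance user-data as the git URL to clone and line 4 as the branch, then hands off to setup-slave.sh in the foreground (NODAEMON=1). The first two placeholder lines and the branch name below are illustrative assumptions; only the positions of lines 3 and 4 matter to the script.

    # hypothetical user-data blob; lines 1-2 are placeholders, lines 3-4 carry the values the script uses
    cat > user-data.txt <<'EOF'
    placeholder-line-1
    placeholder-line-2
    https://github.com/rust-lang/rust-buildbot.git
    master
    EOF
    # the same extraction the script performs against http://169.254.169.254/latest/user-data
    url=`tail -n +3 user-data.txt | head -n 1`      # line 3 -> repository URL
    branch=`tail -n +4 user-data.txt | head -n 1`   # line 4 -> branch name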