├── .github └── workflows │ └── docker-publish.yml ├── Dockerbuild.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── build-env.sh ├── kernel_patch_verify ├── kp_common ├── kps ├── kpv ├── llvm-config └── etc │ └── apt │ ├── sources.list.d │ └── llvm.list │ └── trusted.gpg.d │ └── apt.llvm.org.asc └── other-configs ├── etc └── profile.d │ ├── 99-ccache-dir.sh │ ├── 99-coccinelle-path.sh │ └── 99-compiler-path.sh ├── init └── usr └── local └── etc └── gitconfig /.github/workflows/docker-publish.yml: -------------------------------------------------------------------------------- 1 | name: Docker Image CI 2 | 3 | on: 4 | push: 5 | # Publish `master` as Docker `latest` image. 6 | branches: 7 | - master 8 | 9 | # Publish `v1.2.3` tags as releases. 10 | tags: 11 | - v* 12 | 13 | # Run tests for any PRs. 14 | pull_request: 15 | 16 | env: 17 | # TODO: Change variable to your image's name. 18 | IMAGE_NAME: arm-kernel-dev 19 | 20 | jobs: 21 | # Run tests. 22 | # See also https://docs.docker.com/docker-hub/builds/automated-testing/ 23 | test: 24 | runs-on: ubuntu-latest 25 | 26 | steps: 27 | - uses: actions/checkout@v3 28 | 29 | - name: Run tests 30 | run: | 31 | if [ -f docker-compose.test.yml ]; then 32 | docker-compose --file docker-compose.test.yml build 33 | docker-compose --file docker-compose.test.yml run sut 34 | else 35 | docker build --build-arg BASE_DISTRO=debian:stable-slim --build-arg USER_UID=1000 --build-arg INSTALL_GCC=0 . --file Dockerfile 36 | fi 37 | 38 | # Push image to GitHub Packages. 39 | # See also https://docs.docker.com/docker-hub/builds/ 40 | push: 41 | # Ensure test job passes before pushing image. 42 | needs: test 43 | 44 | runs-on: ubuntu-latest 45 | if: github.event_name == 'push' 46 | 47 | steps: 48 | - uses: actions/checkout@v3 49 | 50 | - name: Build image 51 | run: docker build --build-arg BASE_DISTRO=debian:stable-slim --build-arg USER_UID=1000 --build-arg INSTALL_GCC=0 . 
--file Dockerfile --tag $IMAGE_NAME 52 | 53 | - name: Log into GitHub Container Registry 54 | run: echo "${{ secrets.CR_PAT }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin 55 | 56 | - name: Push image to GitHub Container Registry 57 | run: | 58 | IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME 59 | 60 | # Change all uppercase to lowercase 61 | IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]') 62 | 63 | # Strip git ref prefix from version 64 | VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') 65 | 66 | # Strip "v" prefix from tag name 67 | [[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//') 68 | 69 | # Use Docker `latest` tag convention 70 | [ "$VERSION" == "master" ] && VERSION=latest 71 | 72 | echo IMAGE_ID=$IMAGE_ID 73 | echo VERSION=$VERSION 74 | 75 | docker tag $IMAGE_NAME $IMAGE_ID:$VERSION 76 | docker push $IMAGE_ID:$VERSION 77 | 78 | - name: Log into Docker Registry 79 | run: echo "${{ secrets.DOCKER_TOKEN }}" | docker login -u "${{ secrets.DOCKER_USER }}" --password-stdin 80 | 81 | - name: Push image to Docker Registry 82 | run: | 83 | IMAGE_ID="${{ secrets.DOCKER_USER }}"/$IMAGE_NAME 84 | # Strip git ref prefix from version 85 | VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') 86 | 87 | # Strip "v" prefix from tag name 88 | [[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//') 89 | 90 | # Use Docker `latest` tag convention 91 | [ "$VERSION" == "master" ] && VERSION=latest 92 | 93 | echo IMAGE_ID=$IMAGE_ID 94 | echo VERSION=$VERSION 95 | 96 | docker tag $IMAGE_NAME $IMAGE_ID:$VERSION 97 | docker push $IMAGE_ID:$VERSION 98 | -------------------------------------------------------------------------------- /Dockerbuild.md: -------------------------------------------------------------------------------- 1 | Introduction 2 | ============ 3 | 4 | Quite often we need to build quite a bunch of applications to get the very 5 | latest tools and environment. Instead of hand holding every developer to get the 6 | latest environment, let's use Docker to bootstrap a common build environment. 7 | 8 | Versions of packages 9 | ==================== 10 | 11 | Update the script `build-env.sh` to pick up the various latest tags and versions 12 | of the app as needed. 13 | 14 | Using a local toolchain 15 | ======================= 16 | 17 | I do provide an option to build a Docker image with tool chain installed 18 | (downloaded from ARM's site), but ARM Toolchain installs can be big (2.8G or 19 | so). Because of this, I would recommend installing them on the host, mounting 20 | the install as a volume, and pointing kpv to that folder. I have assumed you 21 | have all compilers available in `/opt/cross-gcc-linux-9/bin` - customize as 22 | desired. 
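For illustration, mounting a host toolchain into a manually started container could look roughly like the following. This is a sketch, not the exact command the `kpv` wrapper runs: the `/opt/cross-gcc-linux-14` prefix matches what `build-env.sh` currently installs (older setups used `/opt/cross-gcc-linux-9`), `arm-kernel-dev` is the image name the Makefile builds, and both paths are assumptions to adjust for your setup.

```
# Sketch: bind-mount the host toolchain (read-only) and the kernel tree,
# then drop into a shell inside the image built by `make`.
docker run --rm -it \
	-v /opt/cross-gcc-linux-14:/opt/cross-gcc-linux-14:ro \
	-v "$(pwd)":/workdir \
	arm-kernel-dev
```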
23 | 24 | Building the docker image 25 | ========================= 26 | 27 | The dependencies to build docker are: 28 | * `docker` or `docker.io` on Debian/Ubuntu 29 | * Proxy settings for docker to pull in required images (if you are behind a 30 | proxy) 31 | 32 | The image Makefile takes the following override variables: 33 | * `INSTALL_GCC` : 0 is default - aka, wont install gcc, you can pick 1, where it 34 | downloads gcc 35 | * `USER_ID` : takes the current user's uid for the docker environment, you can 36 | override this if desired 37 | * `REPO` : if you have your own Docker registry, you can use this along with the 38 | make deploy rule 39 | 40 | Build commands: 41 | * `make` : build image arm-kernel-dev 42 | * `make clean` : I strongly recommend NOT to use my version if you have other 43 | docker images running in your system. 44 | * `make deploy REPO=xyz` : Deploy image to an docker registry 45 | 46 | Using the Docker image with `kernel_patch_verify` 47 | ================================================= 48 | 49 | Use the script `kpv` packaged here just like you would use `kernel_patch_verify` 50 | on your local PC. The `kpv` script is just a wrapper around Docker to run the 51 | container and execute `kernel_patch_verify`. 52 | 53 | You can also start up a shell with the same set of script steps documented in 54 | `kpv` manually. 55 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_DISTRO=debian:stable-slim 2 | FROM $BASE_DISTRO 3 | 4 | ARG INSTALL_GCC=0 5 | 6 | ARG DEBIAN_FRONTEND noninteractive 7 | RUN apt-get update \ 8 | && apt-get install -y --no-install-recommends \ 9 | aria2 \ 10 | autoconf \ 11 | automake \ 12 | bc \ 13 | bison \ 14 | build-essential \ 15 | ca-certificates \ 16 | ccache \ 17 | codespell \ 18 | diffstat \ 19 | dumb-init \ 20 | flex \ 21 | gcc \ 22 | gettext \ 23 | gnupg2 \ 24 | gosu \ 25 | libcurl4-gnutls-dev \ 26 | libelf-dev \ 27 | libexpat1-dev \ 28 | libgmp-dev \ 29 | libmenhir-ocaml-dev \ 30 | libmpc-dev \ 31 | libparmap-ocaml-dev \ 32 | libpcre-ocaml-dev \ 33 | libpython3.11 \ 34 | libpython3.11-dev \ 35 | libsqlite3-dev \ 36 | libssl-dev \ 37 | libyaml-dev \ 38 | libz-dev \ 39 | menhir \ 40 | ncurses-dev \ 41 | ocaml-findlib \ 42 | ocaml-native-compilers \ 43 | ocaml-nox \ 44 | pkg-config \ 45 | python-is-python3 \ 46 | python3 \ 47 | python3-dev \ 48 | python3-pip \ 49 | python3-ruamel.yaml \ 50 | sqlite3 \ 51 | swig \ 52 | wget \ 53 | xz-utils \ 54 | yamllint \ 55 | && echo "**** cleanup ****" \ 56 | && apt-get autoremove \ 57 | && apt-get clean \ 58 | && rm -rf \ 59 | /tmp/* \ 60 | /var/lib/apt/lists/* \ 61 | /var/tmp/* \ 62 | /var/log/* 63 | 64 | RUN --mount=type=bind,source=build-env.sh,target=/tmp/build-env.sh \ 65 | INSTALL_GCC=$INSTALL_GCC /tmp/build-env.sh 66 | 67 | # Publish the source repository 68 | LABEL org.opencontainers.image.source https://github.com/nmenon/kernel_patch_verify 69 | 70 | # Add our llvm repo configs 71 | COPY llvm-config / 72 | RUN apt-get update \ 73 | && apt-get install -y --no-install-recommends \ 74 | llvm \ 75 | clang \ 76 | lld \ 77 | && echo "**** cleanup ****" \ 78 | && apt-get autoremove \ 79 | && apt-get clean \ 80 | && rm -rf \ 81 | /tmp/* \ 82 | /var/lib/apt/lists/* \ 83 | /var/tmp/* \ 84 | /var/log/* 85 | 86 | COPY other-configs/ / 87 | 88 | RUN echo "**** create developer user and make our folders ****" \ 89 | && useradd -u 1000 -U -d /config -s /bin/false developer 
\ 90 | && usermod -G users developer \ 91 | && mkdir /workdir && chown developer:developer /workdir \ 92 | && mkdir /ccache && chown developer:developer /ccache \ 93 | && mkdir /config && chown developer:developer /config 94 | 95 | ENTRYPOINT ["/init"] 96 | 97 | CMD ["/usr/bin/bash"] 98 | 99 | VOLUME /workdir 100 | 101 | COPY kernel_patch_verify /usr/bin/kernel_patch_verify 102 | 103 | WORKDIR /workdir 104 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 2, June 1991 3 | 4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc., 5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 8 | 9 | Preamble 10 | 11 | The licenses for most software are designed to take away your 12 | freedom to share and change it. By contrast, the GNU General Public 13 | License is intended to guarantee your freedom to share and change free 14 | software--to make sure the software is free for all its users. This 15 | General Public License applies to most of the Free Software 16 | Foundation's software and to any other program whose authors commit to 17 | using it. (Some other Free Software Foundation software is covered by 18 | the GNU Lesser General Public License instead.) You can apply it to 19 | your programs, too. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | this service if you wish), that you receive source code or can get it 25 | if you want it, that you can change the software or use pieces of it 26 | in new free programs; and that you know you can do these things. 27 | 28 | To protect your rights, we need to make restrictions that forbid 29 | anyone to deny you these rights or to ask you to surrender the rights. 30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 
55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. 
If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. (This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. 
Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. 
If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 
287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 292 | 293 | {description} 294 | Copyright (C) {year} {fullname} 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 305 | 306 | You should have received a copy of the GNU General Public License along 307 | with this program; if not, write to the Free Software Foundation, Inc., 308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 309 | 310 | Also add information on how to contact you by electronic and paper mail. 311 | 312 | If the program is interactive, make it output a short notice like this 313 | when it starts in an interactive mode: 314 | 315 | Gnomovision version 69, Copyright (C) year name of author 316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 317 | This is free software, and you are welcome to redistribute it 318 | under certain conditions; type `show c' for details. 319 | 320 | The hypothetical commands `show w' and `show c' should show the appropriate 321 | parts of the General Public License. Of course, the commands you use may 322 | be called something other than `show w' and `show c'; they could even be 323 | mouse-clicks or menu items--whatever suits your program. 324 | 325 | You should also get your employer (if you work as a programmer) or your 326 | school, if any, to sign a "copyright disclaimer" for the program, if 327 | necessary. Here is a sample; alter the names: 328 | 329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 331 | 332 | {signature of Ty Coon}, 1 April 1989 333 | Ty Coon, President of Vice 334 | 335 | This General Public License does not permit incorporating your program into 336 | proprietary programs. If your program is a subroutine library, you may 337 | consider it more useful to permit linking proprietary applications with the 338 | library. If this is what you want to do, use the GNU Lesser General 339 | Public License instead of this License. 340 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all 2 | 3 | BASE_DISTRO ?= debian:stable-slim 4 | IMAGE_TOBUILD ?= arm-kernel-dev 5 | 6 | REPO ?= 7 | INSTALL_GCC ?= 0 8 | 9 | all: 10 | docker build -t ${IMAGE_TOBUILD} --build-arg BASE_DISTRO=${BASE_DISTRO} \ 11 | --build-arg INSTALL_GCC=${INSTALL_GCC} \ 12 | --pull . 
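# Illustrative invocations (example values only, not project defaults):
#   make BASE_DISTRO=ubuntu:24.04 INSTALL_GCC=1
#   make deploy REPO=registry.example.com/youruser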
13 | 
14 | clean:
15 | 	docker container prune; \
16 | 	docker image prune -f; \
17 | 	docker rmi ${IMAGE_TOBUILD}
18 | 
19 | deploy:
20 | 	docker push ${REPO}:${IMAGE_TOBUILD}
21 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | `kernel_patch_verify`
2 | =====================
3 | 
4 | Linux kernel patch static verification helper tool
5 | 
6 | [![Kernel Patch Verify Intro video](https://img.youtube.com/vi/HzW4DrDj32w/0.jpg)](https://www.youtube.com/watch?v=HzW4DrDj32w "Kernel Patch Verify Intro video")
7 | 
8 | Background and Motivation:
9 | ==========================
10 | 
11 | This script came about as a result of various beatings I received and saw others
12 | receive in upstream kernel discussions over the last several years. Did you run
13 | `checkpatch.pl --strict`, did you run `sparse`, is your patch series bisectable,
14 | or the newer ones - `coccinelle` and `smatch`? I mean, why debug or even catch
15 | these on mailing lists, if you can programmatically catch them at the
16 | developer's end?
17 | 
18 | Many of these were due to:
19 | + Too many tools constantly evolving in the Linux kernel - too many goodies, and
20 |   too little time for every developer to learn them
21 | + Having to run too many utilities using varied command lines - most normal
22 |   folks don't remember everything every time.
23 | + In general, a few developers are lazy; even knowing the tools is not enough,
24 |   and almost everyone would like to have something quick and handy.. and, if
25 |   possible, automated..
26 | 
27 | As a result, while working on an "evil vendor" Android kernel as a domain
28 | maintainer, I had originally written a dumb application called Kmake (unrelated
29 | to the current meta build systems). This was my first attempt at automating this
30 | entire process... it was pretty crappy code and was meant for me to test one
31 | single patch. It was born out of pure desperation from getting a whole lot of
32 | crappy patches from too many internal developers.
33 | 
34 | As I shared this original code around over the years, I got feedback from
35 | various folks from varied angles:
36 | - Dan Murphy: why can't it report the applications required for it to run?
37 | - Tero Kristo: rewrote the entire script and got me thinking along the lines of a
38 |   'patch set that needs to be applied'
39 | - And, others who suggested tiny little things...
40 | 
41 | It was obvious that this original script needed a respin and an update to the
42 | newer kernel world. So, here is another attempt with as much error handling as I
43 | could reasonably think of in the few hours I spent rewriting... And I do not think
44 | it is good enough yet... there are a few things I dropped - storing dtbs+zImage
45 | per patch to allow for separate boot verification per patch, and similar tiny
46 | little thingies... Maybe later... I guess...
47 | 
48 | CONTRIBUTIONS:
49 | ==============
50 | 
51 | Let's just keep this on GitHub and send pull requests to merge things up. As
52 | script developers, we all know that various developers have widely varying tastes.
53 | If you have a bug fix or other improvements to share, send me a pull request
54 | and we can collaborate.
55 | 
56 | For folks wanting to write Python or Perl alternatives, please go ahead and
57 | create your own tool, just don't ask me to even touch Python ;) - this remains in
58 | bash.
59 | 
60 | QUICK START NOTES:
61 | ==================
62 | 
63 | The easiest way of getting started is to use the Docker container; most of
64 | the tools are pre-installed in the image (with the exception of gcc).
65 | 
66 | There are two scripts to use when you are in the kernel directory:
67 | 
68 | * kpv: This is a wrapper script for kernel\_patch\_verify
69 | * kps: This drops you into a shell using the same environment that kpv runs in.
70 | 
71 | You can either create a softlink to these scripts or add this git repo to your
72 | $PATH variable.
73 | 
74 | Side note: LLVM is the only cross compiler pre-installed in the Docker
75 | container. GCC mount paths are in the kp\_common script - see the function
76 | `extra_paths` - customize it as needed for your local environment.
77 | 
78 | A simplified example for a single patch:
79 | 
80 | ```
81 | cd ~/src/linux
82 | git checkout master
83 | git pull
84 | git checkout -b test_branch
85 | git am ~/src/my_patches/something_patch
86 | 
87 | # do build and local functional testing - use kernel_self_test if you can..
88 | 
89 | # visually review the patch again for coding style, logic improvements etc.
90 | 
91 | # Use the prepackaged LLVM to run the checks
92 | kpv -V -C -L -n 1
93 | 
94 | # go check and identify the issues found
95 | vim report-kernel-patch-verify.txt
96 | 
97 | # do my fixes (alternatively - I can build and check the fixups in the kps env)
98 | 
99 | # Final checkup - functional tests
100 | 
101 | # Final confirmation report checks
102 | kpv -V -C -L -n 1
103 | 
104 | # go check and verify the issues are all closed out
105 | vim report-kernel-patch-verify.txt
106 | 
107 | # Generate the patch
108 | git format-patch -M -C -o . -1
109 | 
110 | # visually review the patch once again for coding style, logic improvements etc.
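# optional extra sanity check (illustrative - kpv already runs checkpatch as part of its tests)
./scripts/checkpatch.pl --strict --codespell 0001-abc.patch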
111 | vim 0001-abc.patch
112 | 
113 | # run the get_maintainer script to identify who to send the patch to
114 | ./scripts/get_maintainer.pl ./0001-abc.patch
115 | 
116 | # Send the patch
117 | git send-email --to maintainer1@kernel.org --cc list@vger.kernel.org 0001-abc.patch
118 | 
119 | ```
120 | 
121 | INSTALL NOTES:
122 | ==============
123 | 
124 | Almost nothing - I hope you are already using bash and building the kernel.
125 | The script should complain and provide recommendations for missing packages (or
126 | is supposed to ;) )
127 | 
128 | For `-C` or "complete tests", the following are needed:
129 | - smatch: (sudo apt-get install sqlite3 libsqlite3-dev llvm)
130 | 
131 |   https://blog.linuxplumbersconf.org/2011/ocw/system/presentations/165/original/transcript.txt
132 | 
133 |   https://smatch.sourceforge.net/
134 | 
135 | - spatch: is provided by the coccinelle package in Ubuntu
136 | 
137 |   NOTE: for older Ubuntu installs, use https://launchpad.net/ubuntu/+source/coccinelle
138 | 
139 | :warning: **If your changes include dtb changes, then please optimize your
140 | `.config`, since dtbs_check will take significant time!**
141 | 
142 | ```
143 | sed -n 's/^config \(ARCH.*\)/CONFIG_\1=n/p' arch/arm64/Kconfig.platforms | grep -v K3 >> .config
144 | ```
145 | 
146 | Usage:
147 | ======
148 | 
149 | ```
150 | ./kernel_patch_verify [-d] [-j CPUs] [-B build_target] [-T tmp_dir_base] [-l logfile] [-C] [-c defconfig_name] [-1]|[-p patch_dir]|[-b base_branch [-t head_branch]]
151 | -d: if not already defined, use CROSS_COMPILE=arm-linux-gnueabi-, ARCH=arm, and builds for ' zImage dtbs' build targets
152 | -j CPUs: override default CPUs count with build (default is 4)
153 | -B build_target: override default build target and use provided build_target
154 | -T temp_dir_base: temporary directory base (default is /tmp)
155 | -l logfile: report file (defaults to ./report-kernel-patch-verify.txt)
156 | -C: run Complete tests(WARNING: could take significant time!)
157 | -c defconfig:_name (default uses current .config + oldconfig)
158 | -1: test the tip of current branch (just a single patch)
159 | -p patch_dir: which directory to take patches from (expects sorted in order)
160 | -b base_branch: test patches from base_branch
161 | -t test_branch: optionally used with -b, till head branch, if not provided, along with -b, default will be tip of current branch
162 | 
163 | ```
164 | 
165 | NOTE:
166 | * Only one of -1, -p or (-b,-t) should be used - but at least one of these should be used
167 | * You cannot have a diff pending or be on a dangling branch; `base_branch` should exist as well
168 | * The default tests are selected with a view to taking as little time as possible, while the `-C`
169 |   tests are the comprehensive tests, which are strongly recommended before showing your patches to
170 |   any other human being.
171 | 
172 | Example usages:
173 | ===============
174 | 
175 | * Verify the last committed patch:
176 | 
177 | ```
178 | ./kernel_patch_verify -1
179 | ```
180 | 
181 | * Verify, on top of the current branch, patches from the location `~/tmp/test-patches`:
182 | 
183 | ```
184 | ./kernel_patch_verify -p ~/tmp/test-patches
185 | ```
186 | 
187 | * Verify *from* branch `base_branch` *until* the current branch:
188 | 
189 | ```
190 | ./kernel_patch_verify -b base_branch
191 | ```
192 | 
193 | * Verify all patches *from* the current branch *until* `test_branch`:
194 | 
195 | ```
196 | ./kernel_patch_verify -t test_branch
197 | ```
198 | 
199 | * Verify all patches *from* `base_branch` *until* `test_branch`:
200 | 
201 | ```
202 | ./kernel_patch_verify -b base_branch -t test_branch
203 | ```
204 | 
205 | * Verify, with complete tests, all patches *from* `base_branch` *until*
206 |   `test_branch`:
207 | 
208 | ```
209 | ./kernel_patch_verify -b base_branch -t test_branch -C
210 | ```
211 | 
212 | * Verify the last committed patch on a native x86 build using `make`, `gcc`, and the
213 |   `bzImage` target:
214 | 
215 | ```
216 | ./kernel_patch_verify -B bzImage -1
217 | ```
218 | 
219 | * Verify the last committed patch on a cross-compiled ARM build using defaults:
220 | 
221 | ```
222 | ./kernel_patch_verify -d -1
223 | ```
224 | 
225 | Some script design stuff:
226 | =========================
227 | 
228 | Alright, the shell script should be readable on its own, I hope... anyway,
229 | tests are organized as:
230 | * `ptest_xyz` -> these tests take the patch as the argument
231 | * `ftest_xyz` -> these tests take the C files (impacted by the patch) as the argument
232 | * `btest_xyz` -> these are of two types: the ones that take .o files as arguments
233 |   and those that build the entire kernel
234 | 
235 | Tests are organized per patch OR overall (basically run before the patch series
236 | and after the patch series). Reports are generated after all the tests are run.
237 | I have not tried to standardize the reports in any way, except that if there is
238 | a 'change' in the log, for example:
239 | * A build warning was created with a patch.
240 | * A build warning was removed with a patch in the series.
241 | 
242 | This will appear as a diff (a build warning being removed or added is
243 | considered similar), and that diff is provided in the report log. The final report
244 | log is a consolidation of every single patch and overall results, and also provides
245 | information about the tools and versions used.
246 | 
247 | Without the `-C` switch, only tests that take a short amount of time will be
248 | run. Tests that take significant time are listed under `-C`. I recommend
249 | reading the code to see the list of tests executed. This will also be printed as
250 | you execute the tests. Just remember that false positives are irritating to
251 | developers, so be careful with the results.
252 | 
253 | The generic strategy for the tests is that everything on stderr is logged; a test
254 | should never print anything on stdout, as that just clutters up the developer's
255 | screen. If a test provides results on stdout, redirect them to stderr. Pass/fail
256 | criteria are as follows (a minimal sketch of the idea is shown after this list):
257 | * For `ftest_`, `btest_`, the before and after logs should show zero diff. If there
258 |   is one, a new failure is assumed to have been introduced.
259 | * For `ptest_`, no output is a pass; any output tends to be a fail.
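For illustration only, here is a minimal bash sketch of that diff-based pass/fail idea. It is not the tool's actual implementation (the real logic lives in `run_test` and `report_tests` inside `kernel_patch_verify`); the `check_test` helper, the temporary directory and the sample warning lines are made up for the example.

```
#!/bin/bash
# Minimal sketch: "zero diff between the -start and -end logs" == pass.
TEST_DIR=$(mktemp -d)

check_test() {
	local test_name=$1
	local start_log="$TEST_DIR/$test_name-start"
	local end_log="$TEST_DIR/$test_name-end"
	local diff_log="$TEST_DIR/$test_name-diff"

	# ftest_/btest_ style: compare what was logged before and after the patch
	diff -purN "$start_log" "$end_log" > "$diff_log"
	if [ -s "$diff_log" ]; then
		echo "$test_name: FAIL (log changed, see $diff_log)" >&2
	else
		echo "$test_name: PASS" >&2
	fi
}

# Pretend a btest_sparse run logged these warnings before and after the patch
echo "drivers/foo.c:10: warning: old warning" > "$TEST_DIR/btest_sparse-start"
printf '%s\n' "drivers/foo.c:10: warning: old warning" \
	"drivers/foo.c:42: warning: new warning" > "$TEST_DIR/btest_sparse-end"
check_test btest_sparse   # -> FAIL, because the patch added a warning
```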
260 | 261 | 262 | Author and versioning highlights (chronological): 263 | ------------------------------------------------- 264 | * Nishanth Menon Dec 2013, Dallas, TX, while lying in bed with a slight migraine 265 | staring at a cloudy sky and spewing nonsense into vim... and guessing that no one 266 | might even care about this... 267 | -------------------------------------------------------------------------------- /build-env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | export HOME=/tmp 4 | source /etc/profile 5 | shopt -s expand_aliases 6 | NPROC=$(nproc) 7 | 8 | # https://git.kernel.org/pub/scm/git/git.git/ 9 | export GIT_TAG=2.48.1 10 | # https://git.kernel.org/pub/scm/utils/dtc/dtc.git 11 | export DTC_TAG=v1.7.2 12 | # https://git.kernel.org/pub/scm/devel/sparse/sparse.git 13 | export SPARSE_TAG=master 14 | # https://repo.or.cz/smatch.git 15 | export SMATCH_TAG=master 16 | # https://github.com/coccinelle/coccinelle/tags 17 | export COCCI_TAG=1.3.0 18 | # https://github.com/devicetree-org/dt-schema/tags 19 | export DTSCHEMA_REV=v2025.02 20 | 21 | ARIA_OPTS=( --summary-interval=5 --timeout=180 --retry-wait=10 -m 0 -x 10 -j 10 ) 22 | 23 | download_build_install_git() 24 | { 25 | local FILE URL 26 | set -x 27 | FILE=git-"$GIT_TAG".tar.gz 28 | URL="https://git.kernel.org/pub/scm/git/git.git/snapshot/${FILE}" 29 | 30 | cd /tmp/ 31 | aria2c "${ARIA_OPTS[@]}" -o "$FILE" "$URL" 32 | mkdir /tmp/git 33 | tar -C /tmp/git --strip-components=1 -xvf "$FILE" 34 | rm $FILE 35 | cd /tmp/git 36 | make -j "$NPROC" prefix=/usr/local 37 | make -j "$NPROC" prefix=/usr/local install 38 | cd /tmp 39 | rm -rf /tmp/git* 40 | set +x 41 | } 42 | 43 | download_build_install_python_deps() 44 | { 45 | # Get latest pip 46 | python -m pip install --upgrade --break-system-packages pip 47 | python -m pip install --upgrade --break-system-packages setuptools 48 | python -m pip install --upgrade --break-system-packages six jsonschema 49 | # scripts/spdxcheck.py dependencies 50 | python -m pip install --upgrade --break-system-packages ply gitpython yamllint rfc3987 pylibfdt 51 | python -m pip install --break-system-packages git+https://github.com/devicetree-org/dt-schema.git@$DTSCHEMA_REV 52 | rm -rf "/tmp/.cache/" 53 | } 54 | 55 | clone_and_cd() 56 | { 57 | cd /tmp && 58 | git clone --progress --depth=1 --branch "$1" "$2" "$3" && 59 | cd /tmp/"$3" 60 | return $? 
61 | } 62 | 63 | download_build_install_dtc() 64 | { 65 | local FILE URL 66 | FILE='dtc' 67 | URL="https://git.kernel.org/pub/scm/utils/dtc/dtc.git" 68 | 69 | clone_and_cd "$DTC_TAG" "$URL" "$FILE" 70 | make -j "$NPROC" PREFIX=/usr/local SETUP_PREFIX=/usr/local install NO_PYTHON=1 71 | cd /tmp 72 | rm -rf /tmp/"$FILE" 73 | } 74 | 75 | download_build_install_sparse() 76 | { 77 | local FILE URL 78 | FILE='sparse' 79 | URL="https://git.kernel.org/pub/scm/devel/sparse/sparse.git" 80 | 81 | clone_and_cd "$SPARSE_TAG" "$URL" "$FILE" 82 | make -j "$NPROC" PREFIX=/usr/local install 83 | cd /tmp 84 | rm -rf /tmp/"$FILE" 85 | } 86 | 87 | download_build_install_smatch() 88 | { 89 | local FILE URL 90 | FILE='smatch' 91 | URL="https://repo.or.cz/smatch.git" 92 | 93 | clone_and_cd "$SMATCH_TAG" "$URL" "$FILE" 94 | make -j "$NPROC" PREFIX=/usr/local/smatch install 95 | echo -e '#!/bin/bash\n/usr/local/smatch/bin/smatch -p=kernel $@'>/usr/local/smatch/bin/k_sm_check_script 96 | chmod +x /usr/local/smatch/bin/k_sm_check_script 97 | cd /tmp 98 | rm -rf /tmp/"$FILE" 99 | } 100 | 101 | download_build_install_coccinelle() 102 | { 103 | local FILE URL 104 | FILE='coccinelle' 105 | URL="https://github.com/coccinelle/coccinelle.git" 106 | 107 | clone_and_cd "$COCCI_TAG" "$URL" "$FILE" 108 | ./autogen 109 | ./configure --prefix=/usr/local 110 | make install 111 | cd /tmp 112 | rm -rf /tmp/"$FILE" 113 | } 114 | 115 | download_and_install_armgcc_64() 116 | { 117 | local FILE URL 118 | FILE='aarch64-gcc.tar.xz' 119 | URL="https://developer.arm.com/-/media/Files/downloads/gnu/14.2.rel1/binrel/arm-gnu-toolchain-14.2.rel1-x86_64-aarch64-none-linux-gnu.tar.xz" 120 | 121 | cd /tmp 122 | mkdir -p /opt/cross-gcc-linux-14/ 123 | aria2c "${ARIA_OPTS[@]}" -o "$FILE" "$URL" 124 | tar -C /opt/cross-gcc-linux-14/ --strip-components=1 -xf "$FILE" 125 | rm -f /tmp/"$FILE" 126 | } 127 | 128 | download_and_install_armgcc_32() 129 | { 130 | local FILE URL 131 | FILE='aarch32-gcc.tar.xz' 132 | URL="https://developer.arm.com/-/media/Files/downloads/gnu/14.2.rel1/binrel/arm-gnu-toolchain-14.2.rel1-x86_64-arm-none-linux-gnueabihf.tar.xz" 133 | 134 | cd /tmp 135 | mkdir -p /opt/cross-gcc-linux-14/ 136 | aria2c "${ARIA_OPTS[@]}" -o "$FILE" "$URL" 137 | tar -C /opt/cross-gcc-linux-14/ --strip-components=1 -xf "$FILE" 138 | rm -f /tmp/"$FILE" 139 | } 140 | 141 | download_build_install_git 142 | download_build_install_python_deps 143 | download_build_install_dtc 144 | download_build_install_smatch 145 | download_build_install_sparse 146 | download_build_install_coccinelle 147 | if [ "$INSTALL_GCC" == "1" ]; then 148 | download_and_install_armgcc_64 149 | download_and_install_armgcc_32 150 | else 151 | echo "Skipping install GCC. INSTALL_GCC!=1. make sure that /opt/cross-gcc-linux-14/bin has aarch64-none-linux-gnu- and arm-none-linux-gnueabihf-" 152 | fi 153 | -------------------------------------------------------------------------------- /kernel_patch_verify: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Kernel Patch tester to run some generic kernel static checks 4 | # 5 | # Dec 14, 2013 6 | # 7 | # Copyright (C) 2013 Texas Instruments Incorporated - http://www.ti.com/ 8 | # Nishanth Menon 9 | # 10 | # This program is free software; you can redistribute it and/or modify 11 | # it under the terms of the GNU General Public License version 2 as 12 | # published by the Free Software Foundation. 
13 | # 14 | # This program is distributed "as is" WITHOUT ANY WARRANTY of any 15 | # kind, whether express or implied; without even the implied warranty 16 | # of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 17 | # GNU General Public License for more details. 18 | # 19 | # NOTE: smatch: (sudo apt-get install sqlite3 libsqlite3-dev llvm) 20 | # - http://linuxplumbersconf.com/2011/ocw//system/presentations/165/original/transcript.txt 21 | # - http://smatch.sf.net 22 | # spatch is provided by the coccinelle package in ubuntu 23 | 24 | # if started by docker, it is usually a good idea to start ourselves up with right 25 | # env variables. 26 | source /etc/profile 27 | shopt -s expand_aliases 28 | 29 | # extend the path with the supplied extra directories 30 | if [ -n "$KP_PATH" ]; then 31 | export PATH=${KP_PATH}:${PATH} 32 | fi 33 | 34 | ccache=$(which ccache) 35 | 36 | # We would rather that we hit cache more often, than rebuild.. 37 | # See https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=87c94bfb8ad354fb43d2caf870d7ca0b3f98dab3 38 | if [ -z "$KBUILD_BUILD_TIMESTAMP" ]; then 39 | export KBUILD_BUILD_TIMESTAMP='' 40 | fi 41 | if [ -z "$KBUILD_BUILD_VERSION" ]; then 42 | export KBUILD_BUILD_VERSION='kernel_patch_verify_build' 43 | fi 44 | DEF_ARCH=arm 45 | DEF_CROSS_COMPILE="$ccache arm-none-linux-gnueabihf-" 46 | DEF_BUILDTARGETS="zImage dtbs" 47 | 48 | DEF_V8_ARCH=arm64 49 | DEF_V8_CROSS_COMPILE="$ccache aarch64-none-linux-gnu-" 50 | DEF_V8_BUILDTARGETS="Image dtbs" 51 | 52 | # Default parameters 53 | APPS_NEEDED="perl make dtc sparse patch git realpath basename codespell" 54 | 55 | # Use all max num CPUs 56 | KM_CPUS=$(grep -c '^processor' /proc/cpuinfo) 57 | LOG_FILE="./report-kernel-patch-verify.txt" 58 | TEST_B_DIR="/tmp" 59 | 60 | COVER_LETTER="cover-letter.[patch\|diff]" 61 | 62 | LINE_LENGTH=100 63 | 64 | UBOOT_TESTING=0 65 | # We will add to this later.. but allow user to provide his own choice of stuff 66 | if [ -z "$KP_PARAMS" ]; then 67 | KP_PARAMS="" 68 | fi 69 | if [ -z "$KP_TARGETS" ]; then 70 | KP_TARGETS="" 71 | fi 72 | 73 | kmake_single() { 74 | # XXX: kmake operations depend on variable expansion- do not quote variables here. 75 | # Except for compiler option since ccache will be involved 76 | make $KM_A $KP_PARAMS "$KM_C" $KM_L -j1 $@ 77 | } 78 | 79 | kmake() { 80 | # XXX: kmake operations depend on variable expansion- do not quote variables here. 81 | # Except for compiler option since ccache will be involved 82 | make $KM_A $KP_PARAMS "$KM_C" $KM_L -j$KM_CPUS $@ 83 | } 84 | 85 | to_time() { 86 | if [[ "$1" =~ ^-?[0-9]+$ ]]; then 87 | date -d@$1 -u "+%H hrs, %M mins, %S seconds" 88 | else 89 | echo $1 90 | fi 91 | } 92 | ################### 93 | # Run generic test operation 94 | run_test() { 95 | LOG_EXT=$1 96 | shift 97 | LOG_DIR=$1 98 | shift 99 | TEST=$1 100 | shift 101 | echo -e "\tRunning test: $TEST ($LOG_EXT)" 102 | SSEC=$(date "+%s") 103 | "$TEST" "$@" 2> "$LOG_DIR/$TEST-$LOG_EXT" 104 | ESEC=$(date "+%s") 105 | DELTA=$((ESEC - SSEC)) 106 | echo "$DELTA seconds: completed $TEST" 107 | } 108 | 109 | run_test_dummy() { 110 | LOG_EXT=$1 111 | shift 112 | LOG_DIR=$1 113 | shift 114 | TEST=$1 115 | shift 116 | echo -e "\tRunning test: $TEST ($LOG_EXT)" 117 | touch "$LOG_DIR/$TEST-$LOG_EXT" 118 | } 119 | 120 | get_sorted_existing_files() 121 | { 122 | test_files=() 123 | sorted_test_files=() 124 | # If there are no files, then there is nothing to sort.. 
return empty 125 | if [ -z "$*" ]; then 126 | echo 127 | return 128 | fi 129 | for i in "$@" 130 | do 131 | # If the file exists.. straight forward.. else: 132 | # If the file is an object file, then, it can be a C or assembly file.. 133 | if [ -f "$i" -o -f "${i/%.o/.c}" -o -f "${i/%.o/.S}" -o -f "${i/%.o/.s}" ]; then 134 | test_files+=("$i") 135 | fi 136 | done 137 | IFS=$'\n' sorted_test_files=($(sort <<<"${test_files[*]}")); unset IFS 138 | printf '%s\n' "${sorted_test_files[@]}" 139 | } 140 | 141 | ################### 142 | # Basic tests to run on the patch itself 143 | ptest_am() { 144 | git am "$1" >/dev/null 145 | } 146 | 147 | ptest_check() { 148 | ( "$KDIR"/scripts/checkpatch.pl --codespell --strict "$1" --max-line-length="$LINE_LENGTH" |grep -v "$(basename "$1")" |grep -v '^$'|grep -v '^total'|grep -v '^NOTE:' )1>&2 149 | } 150 | 151 | ################### 152 | # Basic tests to run on the files impacted by the patch 153 | ftest_check_kdoc() { 154 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 155 | if [ -n "${test_files[*]}" ]; then 156 | ( ( ( "$KDIR"/scripts/kernel-doc "${test_files[@]}" >/dev/null ) 2>&1 ) | cut -d ':' -f1,3- ) 1>&2 157 | fi 158 | } 159 | 160 | ftest_check_includes() { 161 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 162 | if [ -n "${test_files[*]}" ]; then 163 | "$KDIR"/scripts/checkincludes.pl "${test_files[@]}" 1>&2 164 | fi 165 | } 166 | 167 | ftest_check_headerdeps() { 168 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 169 | if [ -n "${test_files[*]}" ]; then 170 | "$KDIR"/scripts/headerdep.pl "${test_files[@]}" 1>&2 171 | fi 172 | } 173 | 174 | ytest_dt_binding_check() { 175 | if [ "$UBOOT_TESTING" -eq 0 ]; then 176 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 177 | for test_file in "${test_files[@]}" 178 | do 179 | # If adding a new file 180 | if [ -f "$test_file" ]; then 181 | rm -f Documentation/devicetree/bindings/processed-schema-examples.json 182 | D=$(dirname "$test_file") 183 | rm -f "$D"/*.example.* 184 | kmake_single dt_binding_check DT_CHECKER_FLAGS=-m DT_SCHEMA_FILES="$test_file" >/dev/null 185 | fi 186 | done 187 | fi 188 | } 189 | 190 | ytest_dtbs_check() { 191 | if [ "$UBOOT_TESTING" -eq 0 ]; then 192 | # If we have no yamls to check, nothing to run. 193 | if [ -z "$*" ]; then 194 | return 195 | fi 196 | 197 | # Re-Build all the dtbs to get a list (Ignore log) 198 | kmake dtbs > /dev/null 2>&1 199 | 200 | all_dtb_files=$(find . -iname '*.dtb') 201 | readarray -t test_files <<< "$(get_sorted_existing_files "$all_dtb_files")" 202 | if [ -z "${test_files[*]}" ]; then 203 | return 204 | fi 205 | rm -f "${test_files[@]}" 206 | rm -f Documentation/devicetree/bindings/processed-schema-examples.json 207 | find Documentation/devicetree -iname "*.example.*" -print0 | xargs -0 rm -f 2>/dev/null >/dev/null 208 | ( ( ( make -j"$KM_CPUS" dtbs_check > /dev/null ) 2>&1 )|grep -v '^\s\s*'|sort -u ) 1>&2 209 | fi 210 | } 211 | 212 | # property_name_char_strict: requires fixes all the way to bindings fixes 213 | # Which gets tricky to fix in a platform specific manner, so we skip 214 | # by default unless -Z is used. 
215 | # dr_mode, bus_freq, mmc-ddr-[0-9]_[0-9]v, mmc-hs[0-9]00-[0-9]_[0-9]v are already standards 216 | # snps,dis_u3_susphy_quirk is new, but still a standard 217 | DTB_FILTER_LIST="property_name_chars_strict|dr_mode|bus_freq|mmc-ddr-[0-9]_[0-9]v|mmc-hs[0-9]00-[0-9]_[0-9]v|snps,dis_u3_susphy_quirk" 218 | dtest_build_dtb() { 219 | if [ "$UBOOT_TESTING" -eq 0 ]; then 220 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 221 | if [ -z "${test_files[*]}" ]; then 222 | return 223 | fi 224 | if [ "$DTB_NOSKIP" -eq 1 ]; then 225 | DTB_FILTER_LIST="Eitha3Ohyohngah1mai2" 226 | fi 227 | for test_file in $test_files 228 | do 229 | D=$(dirname "$test_file") 230 | rm -f "$D"/*.dtb 231 | done 232 | rm -f Documentation/devicetree/bindings/processed-schema-examples.json 233 | find Documentation/devicetree -iname "*.example.*" -print0 | xargs -0 rm -f 2>/dev/null >/dev/null 234 | ( ( ( kmake_single W=2 dtbs > /dev/null ) 2>&1 )|cut -d ':' -f1,4- | grep -v '^make$'|grep -v -E "$DTB_FILTER_LIST" | grep -v "__overlay__: Character '_' not recommended in node name") 1>&2 235 | fi 236 | } 237 | 238 | # We use dtb- += test-dtb.dtb to allow for building various devices tree overlays 239 | # However this cannot be tested normally without setting CONFIG_OF_ALL_DTBS 240 | # Unfortunately CONFIG_OF_ALL_DTBS will build every single dtb in platform 241 | # Instead, we hack the Makefile to replace dtb- with dtb-y to force build 242 | # from a test perspective 243 | dtest_hack_for_test_overlays() { 244 | D=$1 245 | DTBMAKEFILE="$D/Makefile" 246 | if [ -f "$DTBMAKEFILE" ]; then 247 | sed -i -e "s/dtb- /dtb-y /g" $DTBMAKEFILE 248 | fi 249 | } 250 | 251 | dtest_revert_hack_for_test_overlays() { 252 | D=$1 253 | DTBMAKEFILE="$D/Makefile" 254 | if [ -f "$DTBMAKEFILE" ]; then 255 | git checkout $DTBMAKEFILE 256 | fi 257 | } 258 | 259 | dtest_build_dtb_to_dts() { 260 | if [ "$UBOOT_TESTING" -eq 0 ]; then 261 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 262 | if [ -z "${test_files[*]}" ]; then 263 | return 264 | fi 265 | for test_file in "${test_files[@]}" 266 | do 267 | D=$(dirname "$test_file") 268 | dtest_hack_for_test_overlays $D 269 | rm -f "$D"/*.dtb 270 | done 271 | rm -f Documentation/devicetree/bindings/processed-schema-examples.json 272 | find Documentation/devicetree -iname "*.example.*" -print0 | xargs -0 rm -f 2>/dev/null >/dev/null 273 | (kmake_single W=2 dtbs > /dev/null) 2>/dev/null 274 | TEST_DTBS_FULL="" 275 | for test_file in $test_files 276 | do 277 | D=$(dirname "$test_file") 278 | TEST_DTBS=$(ls "$D"/*.dtb) 279 | TEST_DTBS_FULL=$(echo "$TEST_DTBS_FULL" "$TEST_DTBS" | tr ' ' '\n'|sort -u) 280 | done 281 | for dtb in $TEST_DTBS_FULL 282 | do 283 | dtc -I dtb -O dts "$dtb" >/dev/null 284 | done 285 | for test_file in "${test_files[@]}" 286 | do 287 | dtest_revert_hack_for_test_overlays $D 288 | done 289 | fi 290 | } 291 | 292 | dtest_dtbs_check() { 293 | if [ "$UBOOT_TESTING" -eq 0 ]; then 294 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 295 | if [ -z "${test_files[*]}" ]; then 296 | return 297 | fi 298 | for test_file in "${test_files[@]}" 299 | do 300 | D=$(dirname "$test_file") 301 | dtest_hack_for_test_overlays $D 302 | rm -f "$D"/*.dtb "$D"/*.yaml 303 | done 304 | rm -f Documentation/devicetree/bindings/processed-schema-examples.json 305 | find Documentation/devicetree -iname "*.example.*" -print0 | xargs -0 rm -f 2>/dev/null >/dev/null 306 | ( ( ( make -j"$KM_CPUS" dtbs_check > /dev/null ) 2>&1 )|grep -v '^\s\s*'|sort -u ) 1>&2 307 | for test_file 
in "${test_files[@]}" 308 | do 309 | dtest_revert_hack_for_test_overlays $D 310 | done 311 | fi 312 | } 313 | 314 | dtest_lower_case() { 315 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 316 | if [ -z "${test_files[*]}" ]; then 317 | return 318 | fi 319 | grep '0x[0-9A-Z][0-9A-Z]*[, >]' "${test_files[@]}" | grep -v '0x[0-9][0-9]*[, >]' 1>&2 320 | } 321 | 322 | ################### 323 | # Basic build test 324 | btest_mrproper() { 325 | kmake mrproper 1>/dev/null 326 | } 327 | 328 | btest_basic() { 329 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 330 | if [ -n "${test_files[*]}" ]; then 331 | rm -f "${test_files[@]}" 2>/dev/null 332 | ( ( ( kmake_single "${test_files[@]}" > /dev/null ) 2>&1 )|cut -d ':' -f1,4- | grep -v '^make$' ) 1>&2 333 | fi 334 | } 335 | 336 | btest_sparse() { 337 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 338 | if [ -n "${test_files[*]}" ]; then 339 | ( ( ( kmake_single C=2 "${test_files[@]}" > /dev/null ) 2>&1 )|cut -d ':' -f1,4- ) |grep -v '^mv$' |grep -v '^make$' | grep -v '__ksymtab' 1>&2 340 | fi 341 | } 342 | 343 | btest_smatch() { 344 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 345 | if [ -n "${test_files[*]}" ]; then 346 | kmake_single CHECK="$SMATCH" C=2 "${test_files[@]}" | grep -E '(warn|error):' 1>&2 347 | fi 348 | } 349 | 350 | btest_cocci() { 351 | readarray -t test_files <<< "$(get_sorted_existing_files "$@")" 352 | if [ -n "${test_files[*]}" ]; then 353 | kmake_single C=2 CHECK="scripts/coccicheck" MODE=report "${test_files[@]}" 1>&2 354 | fi 355 | } 356 | 357 | btest_stack() { 358 | kmake checkstack 1>&2 359 | } 360 | 361 | btest_include() { 362 | kmake_single includecheck 1>&2 363 | } 364 | 365 | btest_refcheckdocs() { 366 | kmake_single refcheckdocs 1>&2 367 | } 368 | 369 | btest_kbuild() { 370 | if [ "$UBOOT_TESTING" -eq 0 ]; then 371 | kmake C=1 $KP_TARGETS "$MODULES" > /dev/null 372 | else 373 | kmake C=1 $KP_TARGETS "$MODULES" > /dev/null 2> "$TEST_DIR"/err_stuff 374 | # Get rid of standard sparse mess with u-boot 375 | printf '%s\n' "$(< "$TEST_DIR"/err_stuff)"|grep -v 'efi.h'|grep -v 'version.c' |grep -v '_u_boot_list_' 1>&2 376 | fi 377 | } 378 | 379 | defconfig() { 380 | if [ -n "$DEFCONFIG" ]; then 381 | kmake "$DEFCONFIG" >/dev/null 382 | else 383 | cp "$TEST_DIR"/.config .config 384 | kmake olddefconfig >/dev/null 385 | fi 386 | } 387 | 388 | build_all_clean() { 389 | (kmake clean 2>/dev/null )>/dev/null 390 | kmake_single $KP_TARGETS "$MODULES" >/dev/null 391 | } 392 | build_all() { 393 | (kmake $KP_TARGETS "$MODULES" 2>/dev/null) >/dev/null 394 | } 395 | 396 | # executed in sequence 397 | tests_start() { 398 | echo "Running start tests.." 399 | TESTS_ALL_SET="btest_mrproper defconfig" 400 | if [ -n "$COMPLETE_TESTS" ]; then 401 | TESTS_ALL_SET="$TESTS_ALL_SET build_all" 402 | if [ $UBOOT_TESTING -eq 0 ]; then 403 | TESTS_ALL1_SET="btest_stack btest_include btest_refcheckdocs" 404 | fi 405 | fi 406 | echo "Sequential tests to run: $TESTS_ALL_SET" 407 | echo "Parallel tests to run: $TESTS_ALL1_SET" 408 | 409 | for test_s in $TESTS_ALL_SET 410 | do 411 | run_test start "$TEST_DIR" "$test_s" 412 | done 413 | # Run parallel tests 414 | PIDS="" 415 | for test_s in $TESTS_ALL1_SET 416 | do 417 | run_test start "$TEST_DIR" "$test_s" & 418 | PIDS="$PIDS $!" 
419 | done 420 | echo "Waiting for PIDs: $PIDS" 421 | for pid in $PIDS 422 | do 423 | wait "$pid" 424 | done 425 | PIDS="" 426 | 427 | if [ -n "$COMPLETE_TESTS" ]; then 428 | build_all 429 | fi 430 | } 431 | 432 | tests_end() { 433 | echo "Running END tests.." 434 | for test_s in $TESTS_ALL_SET 435 | do 436 | run_test end "$TEST_DIR" "$test_s" 437 | done 438 | 439 | # Run parallel tests 440 | PIDS="" 441 | for test_s in $TESTS_ALL1_SET 442 | do 443 | run_test start "$TEST_DIR" "$test_s" & 444 | PIDS="$PIDS $!" 445 | done 446 | echo "Waiting for PIDs: $PIDS" 447 | for pid in $PIDS 448 | do 449 | wait "$pid" 450 | done 451 | PIDS="" 452 | } 453 | 454 | report_tests_end() { 455 | log_marker "::Complete test results START::" 456 | echo -en "\nGeneral Tests: " >> "$LOG_SUMMARY_FILE" 457 | report_tests "$TESTS_ALL_SET" "$TESTS_ALL1_SET" 458 | log_marker "::Complete test results END::" 459 | } 460 | 461 | test_patch() { 462 | patch=$1 463 | readarray -t cfiles <<< "$(diffstat -lp1 "$patch"|grep -P '\.c$'|sort)" 464 | readarray -t ofiles <<< "$(diffstat -lp1 "$patch"|grep -P '\.[Sc]$'|sort|sed -e "s/[Sc]$/o/g")" 465 | readarray -t yfiles <<< "$(diffstat -lp1 "$patch"|grep -P '\.yaml$'|sort)" 466 | readarray -t dfiles <<< "$(diffstat -lp1 "$patch"|grep 'boot/dts'|grep -v 'Makefile'|sort)" 467 | 468 | # Run sequential tests 469 | TESTS_P_SET="ptest_am ptest_check" 470 | TESTS_B_SET="btest_basic btest_sparse" 471 | if [ -n "$COMPLETE_TESTS" ]; then 472 | TESTS_B_SET="$TESTS_B_SET btest_cocci btest_smatch" 473 | fi 474 | # Run the following in parallel 475 | TESTS_C_SET="ftest_check_kdoc" 476 | if [ $UBOOT_TESTING -eq 0 ]; then 477 | TESTS_C_SET="$TESTS_C_SET ftest_check_includes ftest_check_headerdeps" 478 | fi 479 | # Run YAML tests 480 | TESTS_Y_SET="ytest_dt_binding_check" 481 | if [ -n "$COMPLETE_TESTS" ]; then 482 | # Nothing special to do here.. 483 | TESTS_Y_SET="$TESTS_Y_SET ytest_dtbs_check" 484 | fi 485 | # Run DTB tests 486 | TESTS_D_SET="dtest_build_dtb dtest_build_dtb_to_dts dtest_lower_case" 487 | if [ -n "$COMPLETE_TESTS" ]; then 488 | # we need to introduce dtbs_check in a manner which does'nt take time.. 489 | TESTS_D_SET="$TESTS_D_SET dtest_dtbs_check" 490 | DWARNING="(dtbs_check is enabled, HIGHLY RECOMMEND custom .config to save time)" 491 | fi 492 | 493 | echo "Tests to run on C files(parallel): $TESTS_C_SET" 494 | echo "Tests to run on yaml files: $TESTS_Y_SET" 495 | echo "Tests to run on dtb files$DWARNING: $TESTS_D_SET" 496 | echo "Tests to run on Patch: $TESTS_P_SET" 497 | echo "Tests to run on Build: $TESTS_B_SET" 498 | 499 | run_test start "$TEST_DIR" defconfig 500 | # run twice - we just want end build errors.. 501 | run_test start "$TEST_DIR" btest_kbuild "${ofiles[@]}" 502 | run_test start "$TEST_DIR" btest_kbuild "${ofiles[@]}" 503 | 504 | for test_s in $TESTS_B_SET 505 | do 506 | run_test start "$TEST_DIR" "$test_s" "${ofiles[@]}" 507 | done 508 | 509 | for test_s in $TESTS_D_SET 510 | do 511 | run_test start "$TEST_DIR" "$test_s" "${dfiles[@]}" 512 | done 513 | 514 | for test_s in $TESTS_Y_SET 515 | do 516 | run_test start "$TEST_DIR" "$test_s" "${yfiles[@]}" 517 | done 518 | 519 | PIDS="" 520 | for test_s in $TESTS_C_SET 521 | do 522 | run_test start "$TEST_DIR" "$test_s" "${cfiles[@]}" & 523 | PIDS="$PIDS $!" 
524 | done 525 | 526 | # wait for all to come back 527 | echo "Waiting for test PIDs: $PIDS" 528 | for pid in $PIDS 529 | do 530 | wait "$pid" 531 | done 532 | PIDS="" 533 | 534 | for test_s in $TESTS_P_SET 535 | do 536 | run_test_dummy start "$TEST_DIR" "$test_s" "$patch" 537 | run_test end "$TEST_DIR" "$test_s" "$patch" 538 | done 539 | 540 | run_test end "$TEST_DIR" defconfig 541 | # run twice - we just want end build errors.. 542 | run_test end "$TEST_DIR" btest_kbuild "${ofiles[@]}" 543 | run_test end "$TEST_DIR" btest_kbuild "${ofiles[@]}" 544 | 545 | for test_s in $TESTS_B_SET 546 | do 547 | run_test end "$TEST_DIR" "$test_s" "${ofiles[@]}" 548 | done 549 | 550 | for test_s in $TESTS_D_SET 551 | do 552 | run_test end "$TEST_DIR" "$test_s" "${dfiles[@]}" 553 | done 554 | 555 | for test_s in $TESTS_Y_SET 556 | do 557 | run_test end "$TEST_DIR" "$test_s" "${yfiles[@]}" 558 | done 559 | 560 | PIDS="" 561 | for test_s in $TESTS_C_SET 562 | do 563 | run_test end "$TEST_DIR" "$test_s" "${cfiles[@]}" & 564 | PIDS="$PIDS $!" 565 | done 566 | 567 | # wait for all to come back 568 | echo "Waiting for test PIDs: $PIDS" 569 | for pid in $PIDS 570 | do 571 | wait "$pid" 572 | done 573 | PIDS="" 574 | } 575 | 576 | report_patch() { 577 | Subject=$(grep '^Subject' "$1") 578 | log_marker "::test results START " "$(basename "$1")" "::" 579 | log_me "Subject: $Subject" 580 | echo -en "\n" "$(basename "$1")" "Tests: " >> "$LOG_SUMMARY_FILE" 581 | report_tests defconfig btest_kbuild "$TESTS_C_SET" "$TESTS_B_SET" "$TESTS_P_SET" "$TESTS_Y_SET" "$TESTS_D_SET" 582 | log_marker "::test results END" "$(basename "$1")" "::" 583 | } 584 | 585 | ################### 586 | # Cleanup handler 587 | on_exit() { 588 | echo -e "\e[0mCleaning up..." 589 | 590 | if [ x != x"$PIDS" ]; then 591 | echo "Killing $PIDS" 592 | killall "$PIDS" 2>/dev/null 593 | fi 594 | 595 | if [ -n "$DEBUG_MODE" ]; then 596 | return 0; 597 | fi 598 | if [ -f "$TEST_DIR/.config" ]; then 599 | echo "restoring .config" 600 | cp "$TEST_DIR"/.config .config 601 | fi 602 | if [ -n "$TEST_DIR" ] && [ -d "$TEST_DIR" ]; then 603 | echo "Removing temp dir" 604 | rm -rf "$TEST_DIR" 2>/dev/null 605 | fi 606 | if [ -n "$CURRENT_BRANCH" ]; then 607 | echo "Restoring to $CURRENT_BRANCH branch" 608 | git reset --hard 2>/dev/null 609 | git checkout "$CURRENT_BRANCH" 2>/dev/null 610 | fi 611 | if [ -n "$TEST_BRANCH_NAME" ]; then 612 | bexists=$(git branch|grep "$TEST_BRANCH_NAME" 2>/dev/null) 613 | if [ -n "$bexists" ]; then 614 | echo "Cleaning up testing branch" 615 | git branch -D "$TEST_BRANCH_NAME" 616 | fi 617 | fi 618 | } 619 | 620 | ################### 621 | # Logging stuff 622 | log_marker() { 623 | MARKER_STRING="================" 624 | if [ "$*" ]; then 625 | echo "$MARKER_STRING" 626 | echo "$*" 627 | echo -e "$MARKER_STRING\n" 628 | else 629 | echo -e "$MARKER_STRING\n\n" 630 | fi 631 | } >> "$LOG_FILE" 632 | 633 | log_me() { 634 | echo "$*" 635 | } >> "$LOG_FILE" 636 | 637 | logs_me() { 638 | echo -e "$*" 639 | } >> "$LOG_SUMMARY_FILE" 640 | 641 | report_tests() { 642 | TESTS=$* 643 | PASS=1 644 | for test in $TESTS 645 | do 646 | start_log=$TEST_DIR/$test-start 647 | end_log=$TEST_DIR/$test-end 648 | diff_log=$TEST_DIR/$test-diff 649 | if [ -f "$start_log" ] && [ -f "$end_log" ]; then 650 | diff -purN "$start_log" "$end_log" > "$diff_log" 651 | fi 652 | done 653 | FAIL_TEST="" 654 | PASS_TEST="" 655 | for test in $TESTS 656 | do 657 | diff_log=$TEST_DIR/$test-diff 658 | if [ -f "$diff_log" ]; then 659 | size=$(stat --format "%s" "$diff_log") 660 
| if [ "$size" -ne 0 ]; then 661 | log_me "$test FAILED?" 662 | PASS=0 663 | FAIL_TEST="$FAIL_TEST $test" 664 | else 665 | PASS_TEST="$PASS_TEST $test" 666 | fi 667 | fi 668 | done 669 | if [ $PASS -eq 1 ]; then 670 | log_me Passed: "$TESTS" 671 | else 672 | for test in $TESTS 673 | do 674 | diff_log=$TEST_DIR/$test-diff 675 | if [ -f "$diff_log" ]; then 676 | size=$(stat --format "%s" "$diff_log") 677 | if [ "$size" -ne 0 ]; then 678 | log_marker "$test results:" 679 | printf '%s\n' "$(< "$diff_log")" >> "$LOG_FILE" 680 | fi 681 | fi 682 | done 683 | fi 684 | if [ -z "$FAIL_TEST" ]; then 685 | logs_me "Passed(ALL): $PASS_TEST" 686 | else 687 | logs_me "Failed" 688 | logs_me "\tFail tests: $FAIL_TEST" 689 | logs_me "\tPass tests: $PASS_TEST" 690 | fi 691 | 692 | } 693 | 694 | report_end() { 695 | 696 | log_marker "CPUS used: $KM_CPUS" 697 | log_marker "Application versions" 698 | { 699 | for app in $APPS_NEEDED 700 | do 701 | if [ "$app" = "smatch" ]; then 702 | app=$SMATCH 703 | fi 704 | echo "version of $app:" 705 | which "$app" 2>&1 706 | "$app" --version 2>&1 707 | echo 708 | done 709 | echo "version of dtschema python3 package:" 710 | python3 -m pip list|grep 'dtschema' 711 | echo 712 | } >> "$LOG_FILE" 713 | 714 | log_marker 715 | END_DATE=$(date) 716 | END_SEC=$(date "+%s") 717 | DELTA=$((END_SEC - START_SEC)) 718 | log_marker "Test duration: $DELTA seconds (Started $START_DATE, ended $END_DATE)" 719 | if [ -f "$LOG_SUMMARY_FILE" ]; then 720 | echo -e "\e[106m\e[4m\e[1mTest Summary:\e[0m" 721 | fail=0 722 | while read -r ln 723 | do 724 | empty=${#ln} 725 | # Colored reporting to ensure people dont miss errors 726 | # See http://misc.flogisoft.com/bash/tip_colors_and_formatting 727 | if [ "$empty" -gt 2 ]; then 728 | pass=$(echo "$ln" | grep 'Passed(ALL)') 729 | if [ -z "$pass" ]; then 730 | # Red back, white foreground 731 | echo -e "\e[1m\e[97m\e[101m$ln\e[0m" 732 | fail=1 733 | else 734 | # Green back, white foreground 735 | echo -e "\e[1m\e[97m\e[102m$ln\e[0m" 736 | fi 737 | fi 738 | done < "$LOG_SUMMARY_FILE" 739 | echo "***************** DETAILED RESULTS *********" >> "$LOG_SUMMARY_FILE" 740 | printf '%s\n' "$(< "$LOG_FILE")" >> "$LOG_SUMMARY_FILE" 741 | mv "$LOG_SUMMARY_FILE" "$LOG_FILE" 742 | echo -ne "\e[96m\e[40mComplete report is available here: " 743 | if [ "$fail" -eq 1 ]; then 744 | echo -e "\e[92m\e[41m\e[5m$LOG_FILE\e[0m" 745 | else 746 | echo -e "\e[97m\e[42m\e[5m$LOG_FILE\e[0m" 747 | fi 748 | fi 749 | } 750 | 751 | ################### 752 | # Lets see if we can recommend any missing apps 753 | check_missing_application() { 754 | APPS_MISSING="" 755 | for i in $APPS_NEEDED 756 | do 757 | if [ "$i" = "smatch" ]; then 758 | i=$(printf '%s\n' "$(< "$SMATCH")" | grep 'smatch'|cut -d ' ' -f1) 759 | if [ -z "$i" ]; then 760 | i=smatch 761 | fi 762 | fi 763 | 764 | if ! 
which "$i" > /dev/null; then 765 | APPS_MISSING="$APPS_MISSING $i" 766 | fi 767 | done 768 | if [ -n "$APPS_MISSING" ]; then 769 | return 2 770 | fi 771 | return 0 772 | } 773 | 774 | recommend_missing_application() { 775 | check_missing_application 776 | if [ -n "$APPS_MISSING" ]; then 777 | echo "Missing Applications in system: $APPS_MISSING" >&2 778 | # Lets see if we can recommend an application 779 | if [ -x /usr/lib/command-not-found ]; then 780 | for i in $APPS_MISSING 781 | do 782 | /usr/lib/command-not-found --no-failure-msg "$i" 783 | done 784 | fi 785 | return 2 786 | fi 787 | return 0 788 | } 789 | 790 | APP_NAME=$0 791 | ################### 792 | # Help 793 | usage() { 794 | # Red background, white foreground 795 | echo -e "\e[1m\e[97m\e[101mError: $*\e[0m" 796 | 797 | printf '%s\n' \ 798 | '' \ 799 | "Usage: $APP_NAME [-d | -V] [-j CPUs] [-B build_target] [-T tmp_dir_base] [-l logfile] [-C] [-c defconfig_name] [-n N][-1..9]|[-p patch_dir]|[-b base_branch [-t head_branch]] [-S smatch_script] -U -Z" \ 800 | '' 801 | 802 | printf '\t%s\n' \ 803 | "-d: if not already defined, use CROSS_COMPILE=$DEF_CROSS_COMPILE, ARCH=$DEF_ARCH, and builds for '$KP_TARGETS $DEF_BUILDTARGETS' build targets" \ 804 | "-V: (default armV8 targets) if not already defined, use CROSS_COMPILE=$DEF_V8_CROSS_COMPILE, ARCH=$DEF_V8_ARCH, and builds for '$KP_TARGETS $DEF_V8_BUILDTARGETS' build targets" \ 805 | "-j CPUs: override default CPUs count with build (default is $KM_CPUS)" \ 806 | "-B build_target: override default build target and use provided build_target" \ 807 | "-T temp_dir_base: temporary directory base (default is $TEST_B_DIR)" \ 808 | "-l logfile: report file (defaults to $LOG_FILE)" \ 809 | "-L Use llvm to build 'LLVM=1 CC='$ccache clang''" \ 810 | "-C: run Complete tests(WARNING: could take significant time!)" \ 811 | "-c defconfig: name (default uses current .config + olddefconfig)" \ 812 | "-[1..9]: test the tip of current branch (1 to 9 number of patches)" \ 813 | "-n N: test the tip of current branch with 'N' number of patches" \ 814 | "-p patch_dir: which directory to take patches from (expects sorted in order)" \ 815 | "-b base_branch: test patches from base_branch" \ 816 | "-t test_branch: optionally used with -b, till head branch, if not provided, along with -b, default will be tip of current branch" \ 817 | "-U : Do u-boot basic sanity tests" \ 818 | "-Z : Dont filter dtb warnings ($DTB_FILTER_LIST)" \ 819 | "-m : maximum line length number to be passed on to checkpatch.pl" \ 820 | "-S smatch_script : Provide a custom smatch_script instead of creating our own" 821 | 822 | printf '%s\n' \ 823 | '' \ 824 | "NOTE: only one of -1, -c, -p or (-b,-t) should be used - but at least one of these should be used" \ 825 | "NOTE: cannot have a diff pending OR be on a dangling branch base_branch should exist as well" \ 826 | '' 827 | 828 | printf '%s\n\t%s\n' \ 829 | "Example usage 1: verify last commmitted patch" \ 830 | "$APP_NAME -1" \ 831 | "Example usage 2: verify on top of current branch patches from location ~/tmp/test-patches" \ 832 | "$APP_NAME -p ~/tmp/test-patches" \ 833 | "Example usage 3: verify *from* branch 'base_branch' till current branch" \ 834 | "$APP_NAME -b base_branch" \ 835 | "Example usage 4: verify from current branch, all patches *until* 'test_branch'" \ 836 | "$APP_NAME -t test_branch" \ 837 | "Example usage 5: verify from branch, all patches from 'base_branch' until 'test_branch'" \ 838 | "$APP_NAME -b base_branch -t test_branch" \ 839 | "Example usage 6: verify from 
branch Complete tests, all patches from 'base_branch' until 'test_branch'" \ 840 | "$APP_NAME -b base_branch -t test_branch -C" \ 841 | "Example usage 7: on a native x86 build using make, gcc and bzImage, 1 patch" \ 842 | "$APP_NAME -B bzImage -1" \ 843 | "Example usage 7: on a cross_compiled ARM build using defaults, 1 patch" \ 844 | "$APP_NAME -d -1" \ 845 | "Example usage 8: on a cross_compiled ARM build using defaults,15 patches" \ 846 | "$APP_NAME -d -n 15" 847 | 848 | printf '%s\n' '' 849 | 850 | if ! check_missing_application; then 851 | recommend_missing_application 852 | fi 853 | } >&2 854 | 855 | ORIDE=0 856 | DTB_NOSKIP=0 857 | while getopts "S:n:j:c:T:B:l:p:b:t:m:123456789CdDUVZL" opt; do 858 | case $opt in 859 | j) 860 | KM_CPUS=$OPTARG 861 | ;; 862 | Z) 863 | DTB_NOSKIP=1 864 | ;; 865 | B) 866 | export KP_TARGETS="$OPTARG" 867 | ORIDE=1 868 | ;; 869 | d) 870 | if [ -z "$LLVM" ] && [ -z "$CROSS_COMPILE" ]; then 871 | export CROSS_COMPILE="$DEF_CROSS_COMPILE" 872 | fi 873 | if [ -z "$ARCH" ]; then 874 | export ARCH="$DEF_ARCH" 875 | fi 876 | if [ -z "$KP_TARGETS" ] && [ $ORIDE -eq 0 ]; then 877 | export KP_TARGETS="$KP_TARGETS $DEF_BUILDTARGETS" 878 | fi 879 | ;; 880 | V) 881 | if [ -z "$LLVM" ] && [ -z "$CROSS_COMPILE" ]; then 882 | export CROSS_COMPILE="$DEF_V8_CROSS_COMPILE" 883 | fi 884 | if [ -z "$ARCH" ]; then 885 | export ARCH="$DEF_V8_ARCH" 886 | fi 887 | if [ -z "$KP_TARGETS" ] && [ $ORIDE -eq 0 ]; then 888 | export KP_TARGETS="$KP_TARGETS $DEF_V8_BUILDTARGETS" 889 | fi 890 | ;; 891 | L) 892 | export LLVM=1 893 | if [ -n "$CROSS_COMPILE" ]; then 894 | unset CROSS_COMPILE 895 | fi 896 | CC="$ccache clang" 897 | APPS_NEEDED="$APPS_NEEDED clang lld" 898 | ;; 899 | U) 900 | DEF_BUILDTARGETS="" 901 | if [ -n "$KP_TARGETS" ] && [ $ORIDE -eq 0 ]; then 902 | export KP_TARGETS="" 903 | fi 904 | if [ -n "$COMPLETE_TESTS" ]; then 905 | usage "Cannot run complete tests yet on u-boot" 906 | exit 1 907 | fi 908 | UBOOT_TESTING=1 909 | ;; 910 | D) 911 | DEBUG_MODE=1 912 | ;; 913 | c) 914 | DEFCONFIG=$OPTARG 915 | ;; 916 | l) 917 | LOG_FILE=$OPTARG 918 | ;; 919 | T) 920 | TEST_B_DIR=$OPTARG 921 | if [ ! -d "$TEST_B_DIR" ]; then 922 | usage "$TEST_B_DIR does not exist" 923 | exit 1 924 | fi 925 | if [ ! -w "$TEST_B_DIR" ]; then 926 | usage "$TEST_B_DIR is not writable?" 927 | exit 1 928 | fi 929 | ;; 930 | C) 931 | COMPLETE_TESTS=1 932 | KP_PARAMS="$KP_PARAMS W=12 EXTRA_CFLAGS=-W" 933 | APPS_NEEDED="$APPS_NEEDED smatch spatch" 934 | MODULES=modules 935 | if [ "$UBOOT_TESTING" -eq 1 ]; then 936 | usage "Cannot run complete tests yet on u-boot" 937 | exit 1 938 | fi 939 | ;; 940 | [1-9]) 941 | TEST_TOP=yes 942 | if [ -n "${PATCH_DIR}${BASE_BRANCH}${TEST_BRANCH}" ]; then 943 | usage "cannot use -$opt with other options" 944 | exit 1; 945 | fi 946 | PATCHCOUNT=$opt 947 | ;; 948 | n) 949 | TEST_TOP=yes 950 | if [ -n "${PATCH_DIR}${BASE_BRANCH}${TEST_BRANCH}" ]; then 951 | usage "cannot use -n with other options" 952 | exit 1; 953 | fi 954 | PATCHCOUNT=$OPTARG 955 | if [ "$PATCHCOUNT" -eq 0 ]; then 956 | usage "Hey! Do your own '0' patch testing!!!" 957 | exit 1; 958 | fi 959 | ;; 960 | p) 961 | PATCH_DIR=$OPTARG 962 | if [ -n "${TEST_TOP}${BASE_BRANCH}${TEST_BRANCH}" ]; then 963 | usage "cannot use -p with other options" 964 | exit 1; 965 | fi 966 | if [ ! -d "$PATCH_DIR" ]; then 967 | usage "Patch Directory $PATCH_DIR does not exist?" 
968 | exit 1; 969 | fi 970 | PATCHES=$(realpath "$PATCH_DIR"/*.patch|grep -v "$COVER_LETTER") 971 | PATCHCOUNT=$(echo "$PATCHES" |wc -l) 972 | if [ "$PATCHCOUNT" -eq 0 ]; then 973 | usage "Patch directory $PATCH_DIR has no patches?" 974 | exit 1; 975 | fi 976 | ;; 977 | b) 978 | BASE_BRANCH=$OPTARG 979 | if [ -n "${TEST_TOP}${PATCH_DIR}" ]; then 980 | usage "cannot use -b with other options" 981 | exit 1; 982 | fi 983 | ;; 984 | t) 985 | TEST_BRANCH=$OPTARG 986 | if [ -n "${TEST_TOP}${PATCH_DIR}" ]; then 987 | usage "cannot use -t with other options" 988 | exit 1; 989 | fi 990 | CHECK=$(git branch|grep "$TEST_BRANCH" 2>/dev/null) 991 | if [ -z "$CHECK" ]; then 992 | usage "Test branch $TEST_BRANCH does not exist?" 993 | exit 1 994 | fi 995 | ;; 996 | m) 997 | LINE_LENGTH=$OPTARG 998 | ;; 999 | S) 1000 | SMATCH=$OPTARG 1001 | ;; 1002 | \?) 1003 | usage "Invalid option: -$OPTARG" 1004 | exit 1 1005 | ;; 1006 | :) 1007 | usage "Option -$OPTARG requires an argument." 1008 | exit 1 1009 | ;; 1010 | esac 1011 | done 1012 | 1013 | if [ -z "${TEST_BRANCH}${BASE_BRANCH}${PATCH_DIR}${TEST_TOP}" ]; then 1014 | usage "Need at least 1 test type" 1015 | exit 2 1016 | fi 1017 | 1018 | if [ -n "${CROSS_COMPILE}" ]; then 1019 | APPS_NEEDED="$APPS_NEEDED ${CROSS_COMPILE}gcc" 1020 | fi 1021 | 1022 | if ! check_missing_application; then 1023 | usage "Missing apps" 1024 | exit 2 1025 | fi 1026 | 1027 | TEST_BRANCH_NAME=kernel-patch-verify.$RANDOM 1028 | TEST_DIR=$TEST_B_DIR/$TEST_BRANCH_NAME 1029 | PATCHD=$TEST_DIR/patches 1030 | LOG_SUMMARY_FILE=$TEST_DIR/summary 1031 | 1032 | # NOW, hook on. cleanup.. we are about to start doing serious stuff. 1033 | trap on_exit EXIT SIGINT SIGTERM 1034 | 1035 | mkdir -p "$TEST_DIR" "$PATCHD" 1036 | 1037 | # Set up custom git config 1038 | export GIT_CONFIG_GLOBAL="$TEST_DIR"/gitconfig 1039 | export GIT_CONFIG_NOSYSTEM=1 1040 | touch "$GIT_CONFIG_GLOBAL" 1041 | git config --global user.name "Kernel Patch Verifier" 1042 | git config --global user.email "noreply@kernel.org" 1043 | git config --global core.abbrev 12 1044 | 1045 | if [ -n "$CC" ]; then 1046 | KM_C="CC=$CC" 1047 | fi 1048 | 1049 | if [ -n "$CROSS_COMPILE" ]; then 1050 | KM_C="CROSS_COMPILE=$CROSS_COMPILE" 1051 | fi 1052 | 1053 | if [ -n "$LLVM" ]; then 1054 | KM_L="LLVM=$LLVM" 1055 | fi 1056 | 1057 | if [ -n "$ARCH" ]; then 1058 | KM_A="ARCH=$ARCH" 1059 | fi 1060 | 1061 | KDIR=$(pwd) 1062 | 1063 | CURRENT_BRANCH=$(git branch | grep '^\*' | cut -d " " -f 2) 1064 | # if we have base or testing branch missing, populate the other as the current branch 1065 | if [ -n "$TEST_BRANCH" ] && [ -z "$BASE_BRANCH" ]; then 1066 | BASE_BRANCH=$CURRENT_BRANCH 1067 | fi 1068 | 1069 | if [ -n "$BASE_BRANCH" ] && [ -z "$TEST_BRANCH" ]; then 1070 | TEST_BRANCH=$CURRENT_BRANCH 1071 | fi 1072 | if [ -n "$TEST_BRANCH" ] && [ "$TEST_BRANCH" = "$BASE_BRANCH" ]; then 1073 | usage "Test branch and base branch are the same '$TEST_BRANCH'.. Hmm.. not sleeping lately?" 1074 | exit 3 1075 | fi 1076 | 1077 | if [ ! -e ".config" ] && [ -z "$DEFCONFIG" ]; then 1078 | usage "No default .config exists nor is a defconfig specified with -c" 1079 | exit 3 1080 | fi 1081 | 1082 | # lets do some basic verification 1083 | gdiff=$(git diff) 1084 | if [ -n "$gdiff" ]; then 1085 | usage "git diff returned data.. 
you may want to do git reset --hard or stash changes" 1086 | exit 3 1087 | fi 1088 | 1089 | if [ "$CURRENT_BRANCH" = "(no" ]; then 1090 | usage "You are currently on a dangling branch - please checkout a branch to proceed" 1091 | exit 3 1092 | fi 1093 | 1094 | GIT_RM_DIR=$(git rev-parse --git-path rebase-merge) 1095 | GIT_RA_DIR=$(git rev-parse --git-path rebase-apply) 1096 | if [ -e "$GIT_RM_DIR" ] || [ -e "$GIT_RA_DIR" ]; then 1097 | usage "$GIT_RA_DIR or $GIT_RM_DIR exists - implying rebase or am in progress. please cleanup to proceed - 'git am --abort;git rebase --abort' perhaps?" 1098 | exit 3 1099 | fi 1100 | 1101 | cp .config "$TEST_DIR"/.config 2>/dev/null 1102 | if [ -z "$SMATCH" ]; then 1103 | SMATCH=$TEST_DIR/smatch 1104 | echo -e '#!/bin/bash\nsmatch -p=kernel $@'> "$SMATCH" 1105 | chmod +x "$SMATCH" 1106 | fi 1107 | 1108 | # First create a list of patches to test.. 1109 | if [ -n "$TEST_TOP" ]; then 1110 | if ! [[ "$PATCHCOUNT" =~ ^[0-9]+$ ]] ; then 1111 | usage "error: requested number of patches '$PATCHCOUNT' Not a number" 1112 | exit 4 1113 | fi 1114 | git format-patch --no-cover-letter -M -C -o "$PATCHD" -"$PATCHCOUNT" >/dev/null 1115 | git checkout -b "$TEST_BRANCH_NAME" 1116 | git reset --hard HEAD~"$PATCHCOUNT" 1117 | fi 1118 | 1119 | if [ -n "$PATCHES" ]; then 1120 | cp -rf $PATCHES "$PATCHD" 1121 | git checkout -b "$TEST_BRANCH_NAME" 1122 | fi 1123 | 1124 | if [ -n "$TEST_BRANCH" ]; then 1125 | git format-patch --no-cover-letter -M -C -o "$PATCHD" "$BASE_BRANCH".."$TEST_BRANCH" >/dev/null 1126 | PATCHES=$(realpath "$PATCHD"/*.patch|grep -v "$COVER_LETTER") 1127 | PATCHCOUNT=$(echo "$PATCHES" |wc -l) 1128 | if [ "$PATCHCOUNT" -eq 0 ]; then 1129 | usage "$BASE_BRANCH..$TEST_BRANCH generated no patches!" 1130 | exit 4; 1131 | fi 1132 | git branch "$TEST_BRANCH_NAME" "$BASE_BRANCH" >/dev/null 1133 | git checkout "$TEST_BRANCH_NAME" 1134 | fi 1135 | 1136 | if [ -e "$LOG_FILE" ]; then 1137 | echo "$LOG_FILE exists, taking a backup" 1138 | mv "$LOG_FILE" "$LOG_FILE".bak 1139 | fi 1140 | 1141 | START_DATE=$(date) 1142 | START_SEC=$(date "+%s") 1143 | 1144 | #=========== MAIN TEST TRIGGER LOOP ========= 1145 | tests_start 1146 | 1147 | PATCHES=$(realpath "$PATCHD"/*.patch|grep -v "$COVER_LETTER") 1148 | PATCHCOUNT=$(echo "$PATCHES" |wc -l) 1149 | PATCH_NUM=1 1150 | EST_TOTAL="unknown" 1151 | ETA_REMAIN="unknown" 1152 | DELTAP="unknown" 1153 | STARTP_SEC=$(date "+%s") 1154 | for patch in $PATCHES 1155 | do 1156 | echo "Testing Patch ($PATCH_NUM/$PATCHCOUNT):" "$(basename "$patch")" "$(to_time $DELTAP) elapsed, estimated: remaining $(to_time $ETA_REMAIN) / total $(to_time $EST_TOTAL)" 1157 | test_patch "$patch" 1158 | report_patch "$patch" 1159 | NOW_SEC=$(date "+%s") 1160 | DELTAP=$((NOW_SEC - STARTP_SEC)) 1161 | AVG=$((DELTAP / PATCH_NUM)) 1162 | EST_TOTAL=$((AVG * PATCHCOUNT)) 1163 | ETA_REMAIN=$((EST_TOTAL - DELTAP)) 1164 | PATCH_NUM=$((PATCH_NUM + 1)) 1165 | done 1166 | 1167 | tests_end 1168 | 1169 | NOW_SEC=$(date "+%s") 1170 | DELTAP=$((NOW_SEC - STARTP_SEC)) 1171 | echo 1172 | echo "Tests Complete. 
Time taken: $(to_time $DELTAP)" 1173 | echo 1174 | 1175 | report_tests_end 1176 | report_end 1177 | -------------------------------------------------------------------------------- /kp_common: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | export DOCKER_ARGS IMG_NAME KP_PATH 4 | 5 | # Check if Docker image exists 6 | 7 | # Use Directly from github container registry if not provided: 8 | # dockerhub location is nishanthmenon/arm-kernel-dev 9 | IMG_NAME="${IMG_NAME:-ghcr.io/nmenon/arm-kernel-dev}" 10 | 11 | # TBD: If we are using from github -> I need to figure out how to get that working.. 12 | 13 | # If we are building locally 14 | # IMG_NAME=arm-kernel-dev 15 | 16 | # Check if docker exists 17 | docker=$(which docker) 18 | if [ -z "$docker" ]; then 19 | echo "Please install Docker to be able to function" 20 | exit 1 21 | fi 22 | 23 | # If we are working off docker image from github container reg, make sure 24 | # we have the latest. 25 | if [ "$IMG_NAME" = "ghcr.io/nmenon/arm-kernel-dev" ]; then 26 | docker pull "$IMG_NAME" 27 | fi 28 | ccache=$(which ccache 2> /dev/null) 29 | if [ -z "$ccache" ]; then 30 | if [ ! -d "/tmp/ccache" ]; then 31 | mkdir /tmp/ccache 32 | fi 33 | CCACHEDIR=/tmp/ccache 34 | else 35 | CCACHEDIR=$(ccache -s|grep "cache directory"|sed -e "s/\s\s*/ /g"|cut -d ' ' -f3|xargs echo) 36 | if [ -z "$CCACHEDIR" ]; then 37 | CCACHEDIR=$(ccache -v -s|grep -i "cache directory"|sed -e "s/\s\s*/ /g"|cut -d ':' -f2|xargs echo) 38 | fi 39 | fi 40 | 41 | DOCKER_ARGS=() 42 | DOCKER_ARGS+=(-v /tmp:/tmp) 43 | DOCKER_ARGS+=(-v /opt:/opt) 44 | DOCKER_ARGS+=(-v "$CCACHEDIR":/ccache) 45 | 46 | # Check if current directory is a git directory 47 | if ! git rev-parse --is-inside-work-tree > /dev/null; then 48 | exit 1 49 | fi 50 | 51 | ROOT_GIT=$(git rev-parse --show-toplevel) 52 | # if a submodule or worktree then we need to fetch and preserve the gitdir path 53 | if [ -f "${ROOT_GIT}/.git" ]; then 54 | gitdir_str=$(grep -P -o '(?<=gitdir:\s).*' "${ROOT_GIT}/.git") 55 | relative_str=$(echo "$gitdir_str" | grep -P -o '^(../)*') 56 | if [ -n "$relative_str" ]; then 57 | # relative paths need to be preserved 58 | common_path=$(realpath "$ROOT_GIT/$relative_str") 59 | relative_pwd=$(realpath --relative-to="$common_path" "$PWD") 60 | DOCKER_ARGS+=(-v "$common_path":"/workdir") 61 | DOCKER_ARGS+=(-w "/workdir/${relative_pwd}") 62 | else 63 | # absolute paths can be passed through 64 | gitdir_path=$(git rev-parse --git-common-dir) 65 | DOCKER_ARGS+=(-v "$ROOT_GIT":"/workdir") 66 | DOCKER_ARGS+=(-v "$gitdir_path":"$gitdir_path") 67 | fi 68 | else 69 | # normal git directory, just toss everything in 70 | DOCKER_ARGS+=(-v "$ROOT_GIT":"/workdir") 71 | fi 72 | 73 | # list of paths to append to the PATH variable in the container 74 | extra_paths=( 75 | /workdir/scripts/dtc 76 | /opt/cross-gcc-linux-14/bin 77 | /opt/cross-gcc-linux-13/bin 78 | /opt/cross-gcc-linux-12/bin 79 | /opt/cross-gcc-linux-11/bin 80 | /opt/cross-gcc-linux-10/bin 81 | /opt/cross-gcc-linux-9/bin 82 | ) 83 | 84 | KP_PATH=$(IFS=:; printf '%s' "${extra_paths[*]}") 85 | -------------------------------------------------------------------------------- /kps: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source "$(dirname "$(readlink -f "$0")")/kp_common" 4 | 5 | # If we wanted to get to bash shell: 6 | docker run --rm -ti \ 7 | -e KP_PATH -e CROSS_COMPILE -e ARCH \ 8 | "${DOCKER_ARGS[@]}" \ 9 | "$IMG_NAME" \ 10 | bash 
--init-file /etc/profile 11 | -------------------------------------------------------------------------------- /kpv: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source "$(dirname "$(readlink -f "$0")")/kp_common" 4 | 5 | docker run --rm -ti \ 6 | -e KP_PATH -e CROSS_COMPILE -e ARCH \ 7 | "${DOCKER_ARGS[@]}" \ 8 | "$IMG_NAME" \ 9 | kernel_patch_verify -S /usr/local/smatch/bin/k_sm_check_script "$@" 10 | -------------------------------------------------------------------------------- /llvm-config/etc/apt/sources.list.d/llvm.list: -------------------------------------------------------------------------------- 1 | deb [signed-by=/etc/apt/trusted.gpg.d/apt.llvm.org.asc] http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm main 2 | deb-src [signed-by=/etc/apt/trusted.gpg.d/apt.llvm.org.asc] http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm main 3 | -------------------------------------------------------------------------------- /llvm-config/etc/apt/trusted.gpg.d/apt.llvm.org.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP PUBLIC KEY BLOCK----- 2 | Version: GnuPG v1.4.12 (GNU/Linux) 3 | 4 | mQINBFE9lCwBEADi0WUAApM/mgHJRU8lVkkw0CHsZNpqaQDNaHefD6Rw3S4LxNmM 5 | EZaOTkhP200XZM8lVdbfUW9xSjA3oPldc1HG26NjbqqCmWpdo2fb+r7VmU2dq3NM 6 | R18ZlKixiLDE6OUfaXWKamZsXb6ITTYmgTO6orQWYrnW6ckYHSeaAkW0wkDAryl2 7 | B5v8aoFnQ1rFiVEMo4NGzw4UX+MelF7rxaaregmKVTPiqCOSPJ1McC1dHFN533FY 8 | Wh/RVLKWo6npu+owtwYFQW+zyQhKzSIMvNujFRzhIxzxR9Gn87MoLAyfgKEzrbbT 9 | DhqqNXTxS4UMUKCQaO93TzetX/EBrRpJj+vP640yio80h4Dr5pAd7+LnKwgpTDk1 10 | G88bBXJAcPZnTSKu9I2c6KY4iRNbvRz4i+ZdwwZtdW4nSdl2792L7Sl7Nc44uLL/ 11 | ZqkKDXEBF6lsX5XpABwyK89S/SbHOytXv9o4puv+65Ac5/UShspQTMSKGZgvDauU 12 | cs8kE1U9dPOqVNCYq9Nfwinkf6RxV1k1+gwtclxQuY7UpKXP0hNAXjAiA5KS5Crq 13 | 7aaJg9q2F4bub0mNU6n7UI6vXguF2n4SEtzPRk6RP+4TiT3bZUsmr+1ktogyOJCc 14 | Ha8G5VdL+NBIYQthOcieYCBnTeIH7D3Sp6FYQTYtVbKFzmMK+36ERreL/wARAQAB 15 | tD1TeWx2ZXN0cmUgTGVkcnUgLSBEZWJpYW4gTExWTSBwYWNrYWdlcyA8c3lsdmVz 16 | dHJlQGRlYmlhbi5vcmc+iQI4BBMBAgAiBQJRPZQsAhsDBgsJCAcDAgYVCAIJCgsE 17 | FgIDAQIeAQIXgAAKCRAVz00Yr090Ibx+EADArS/hvkDF8juWMXxh17CgR0WZlHCC 18 | 9CTBWkg5a0bNN/3bb97cPQt/vIKWjQtkQpav6/5JTVCSx2riL4FHYhH0iuo4iAPR 19 | udC7Cvg8g7bSPrKO6tenQZNvQm+tUmBHgFiMBJi92AjZ/Qn1Shg7p9ITivFxpLyX 20 | wpmnF1OKyI2Kof2rm4BFwfSWuf8Fvh7kDMRLHv+MlnK/7j/BNpKdozXxLcwoFBmn 21 | l0WjpAH3OFF7Pvm1LJdf1DjWKH0Dc3sc6zxtmBR/KHHg6kK4BGQNnFKujcP7TVdv 22 | gMYv84kun14pnwjZcqOtN3UJtcx22880DOQzinoMs3Q4w4o05oIF+sSgHViFpc3W 23 | R0v+RllnH05vKZo+LDzc83DQVrdwliV12eHxrMQ8UYg88zCbF/cHHnlzZWAJgftg 24 | hB08v1BKPgYRUzwJ6VdVqXYcZWEaUJmQAPuAALyZESw94hSo28FAn0/gzEc5uOYx 25 | K+xG/lFwgAGYNb3uGM5m0P6LVTfdg6vDwwOeTNIExVk3KVFXeSQef2ZMkhwA7wya 26 | KJptkb62wBHFE+o9TUdtMCY6qONxMMdwioRE5BYNwAsS1PnRD2+jtlI0DzvKHt7B 27 | MWd8hnoUKhMeZ9TNmo+8CpsAtXZcBho0zPGz/R8NlJhAWpdAZ1CmcPo83EW86Yq7 28 | BxQUKnNHcwj2ebkCDQRRPZQsARAA4jxYmbTHwmMjqSizlMJYNuGOpIidEdx9zQ5g 29 | zOr431/VfWq4S+VhMDhs15j9lyml0y4ok215VRFwrAREDg6UPMr7ajLmBQGau0Fc 30 | bvZJ90l4NjXp5p0NEE/qOb9UEHT7EGkEhaZ1ekkWFTWCgsy7rRXfZLxB6sk7pzLC 31 | DshyW3zjIakWAnpQ5j5obiDy708pReAuGB94NSyb1HoW/xGsGgvvCw4r0w3xPStw 32 | F1PhmScE6NTBIfLliea3pl8vhKPlCh54Hk7I8QGjo1ETlRP4Qll1ZxHJ8u25f/ta 33 | RES2Aw8Hi7j0EVcZ6MT9JWTI83yUcnUlZPZS2HyeWcUj+8nUC8W4N8An+aNps9l/ 34 | 21inIl2TbGo3Yn1JQLnA1YCoGwC34g8QZTJhElEQBN0X29ayWW6OdFx8MDvllbBV 35 | ymmKq2lK1U55mQTfDli7S3vfGz9Gp/oQwZ8bQpOeUkc5hbZszYwP4RX+68xDPfn+ 36 | M9udl+qW9wu+LyePbW6HX90LmkhNkkY2ZzUPRPDHZANU5btaPXc2H7edX4y4maQa 37 | 
xenqD0lGh9LGz/mps4HEZtCI5CY8o0uCMF3lT0XfXhuLksr7Pxv57yue8LLTItOJ 38 | d9Hmzp9G97SRYYeqU+8lyNXtU2PdrLLq7QHkzrsloG78lCpQcalHGACJzrlUWVP/ 39 | fN3Ht3kAEQEAAYkCHwQYAQIACQUCUT2ULAIbDAAKCRAVz00Yr090IbhWEADbr50X 40 | OEXMIMGRLe+YMjeMX9NG4jxs0jZaWHc/WrGR+CCSUb9r6aPXeLo+45949uEfdSsB 41 | pbaEdNWxF5Vr1CSjuO5siIlgDjmT655voXo67xVpEN4HhMrxugDJfCa6z97P0+ML 42 | PdDxim57uNqkam9XIq9hKQaurxMAECDPmlEXI4QT3eu5qw5/knMzDMZj4Vi6hovL 43 | wvvAeLHO/jsyfIdNmhBGU2RWCEZ9uo/MeerPHtRPfg74g+9PPfP6nyHD2Wes6yGd 44 | oVQwtPNAQD6Cj7EaA2xdZYLJ7/jW6yiPu98FFWP74FN2dlyEA2uVziLsfBrgpS4l 45 | tVOlrO2YzkkqUGrybzbLpj6eeHx+Cd7wcjI8CalsqtL6cG8cUEjtWQUHyTbQWAgG 46 | 5VPEgIAVhJ6RTZ26i/G+4J8neKyRs4vz+57UGwY6zI4AB1ZcWGEE3Bf+CDEDgmnP 47 | LSwbnHefK9IljT9XU98PelSryUO/5UPw7leE0akXKB4DtekToO226px1VnGp3Bov 48 | 1GBGvpHvL2WizEwdk+nfk8LtrLzej+9FtIcq3uIrYnsac47Pf7p0otcFeTJTjSq3 49 | krCaoG4Hx0zGQG2ZFpHrSrZTVy6lxvIdfi0beMgY6h78p6M9eYZHQHc02DjFkQXN 50 | bXb5c6gCHESH5PXwPU4jQEE7Ib9J6sbk7ZT2Mw== 51 | =j+4q 52 | -----END PGP PUBLIC KEY BLOCK----- 53 | -------------------------------------------------------------------------------- /other-configs/etc/profile.d/99-ccache-dir.sh: -------------------------------------------------------------------------------- 1 | export CCACHE_DIR=/ccache 2 | -------------------------------------------------------------------------------- /other-configs/etc/profile.d/99-coccinelle-path.sh: -------------------------------------------------------------------------------- 1 | export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib 2 | export PYTHONPATH=$PYTHONPATH:/usr/local/lib/coccinelle/python 3 | export COCCINELLE_HOME=/usr/local/lib/coccinelle/ 4 | -------------------------------------------------------------------------------- /other-configs/etc/profile.d/99-compiler-path.sh: -------------------------------------------------------------------------------- 1 | export PATH="$PATH:/opt/cross-gcc-linux-14/bin" 2 | export PATH="$PATH:/opt/cross-gcc-linux-13/bin" 3 | export PATH="$PATH:/opt/cross-gcc-linux-12/bin" 4 | export PATH="$PATH:/opt/cross-gcc-linux-11/bin" 5 | export PATH="$PATH:/opt/cross-gcc-linux-10/bin" 6 | export PATH="$PATH:/opt/cross-gcc-linux-9/bin" 7 | -------------------------------------------------------------------------------- /other-configs/init: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | get_attribs() { 4 | local file_stats file_to_test useful_attribs 5 | if file_to_test=$(realpath "$1") && [[ $2 =~ ^[0-9]+$ ]] ; then 6 | useful_attribs=$(stat "$file_to_test" -t) 7 | read -r -a file_stats <<< "${useful_attribs#"$file_to_test"}" 8 | echo "${file_stats["$2"]}" 9 | else 10 | return 1 11 | fi 12 | } 13 | 14 | get_build_uid() { 15 | get_attribs /workdir 3 16 | } 17 | 18 | get_build_gid() { 19 | get_attribs /workdir 4 20 | } 21 | 22 | if NEW_GID=$(get_build_gid) && NEW_UID=$(get_build_uid); then 23 | # bypass everything if podman is remapping the id to root 24 | if [ "${NEW_UID}" == "0" ]; then 25 | if [ "$(id -u)" == "0" ]; then 26 | exec dumb-init -- "$@" 27 | else 28 | echo "Unable to resolve ns mapping!" 29 | fi 30 | fi 31 | 32 | # change the uid and gid of developer otherwise 33 | [ "$NEW_GID" != "$(id -g developer)" ] && groupmod -g "${NEW_GID}" developer 34 | [ "$NEW_UID" != "$(id -u developer)" ] && usermod -u "${NEW_UID}" developer 35 | else 36 | echo "Not able to detect UID/GID for remapping!" 
37 | fi 38 | 39 | if [ "$(id -u)" == "$(id -u developer)" ]; then 40 | exec dumb-init -- "$@" 41 | else 42 | exec dumb-init -- gosu developer "$@" 43 | fi 44 | -------------------------------------------------------------------------------- /other-configs/usr/local/etc/gitconfig: -------------------------------------------------------------------------------- 1 | [user] 2 | name = Kernel Test Developer Environment 3 | email = no-reply@kernel.org 4 | --------------------------------------------------------------------------------
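For convenience, here is a short, hedged usage sketch of the kps and kpv wrappers defined above. It only restates invocations already described in the kernel_patch_verify usage() text; the branch names are placeholders, and it assumes Docker is installed and the scripts are run from inside a kernel git tree.

```bash
# Open an interactive shell inside the build container (uses the kp_common mounts):
./kps

# Verify the last committed patch with the ARM cross-compile defaults (-d -1):
./kpv -d -1

# Verify every patch between 'base_branch' and 'test_branch' (placeholder
# names), enabling the complete-but-slow test set with -C:
./kpv -b base_branch -t test_branch -C
```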
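As a reading aid for report_tests() in kernel_patch_verify above: a check "passes" when the log captured before the patch is applied is identical to the log captured after it. The standalone sketch below illustrates only that comparison; the file paths and test name are hypothetical and not part of the scripts.

```bash
#!/bin/bash
# Sketch of the pass/fail rule used by report_tests(): an empty diff between
# the "-start" and "-end" logs means the patch introduced no new output.
start_log=/tmp/kpv-demo/btest_sparse-start   # hypothetical paths, for illustration
end_log=/tmp/kpv-demo/btest_sparse-end
diff_log=/tmp/kpv-demo/btest_sparse-diff

diff -purN "$start_log" "$end_log" > "$diff_log"
if [ -s "$diff_log" ]; then
    echo "btest_sparse FAILED?"   # non-empty diff: new warnings or errors appeared
else
    echo "btest_sparse passed"
fi
```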
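Likewise, a note on the device-tree warning filter: dtest_build_dtb pipes the W=2 dtbs build output through grep -v -E "$DTB_FILTER_LIST" so that a handful of known, standards-compliant property names do not drown out real warnings, and the -Z option (DTB_NOSKIP=1) swaps in a throw-away pattern so nothing is filtered. A minimal sketch of the same idea, using a hypothetical warnings file:

```bash
# Keep only the device-tree warnings that are not in the known-noise list
# (pattern copied from DTB_FILTER_LIST; the input file name is hypothetical).
DTB_FILTER_LIST="property_name_chars_strict|dr_mode|bus_freq|mmc-ddr-[0-9]_[0-9]v|mmc-hs[0-9]00-[0-9]_[0-9]v|snps,dis_u3_susphy_quirk"
grep -v -E "$DTB_FILTER_LIST" dtbs-build-warnings.txt
```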