├── .github
│   ├── assets
│   │   ├── gdb-static_logo_dark.inkscape.svg
│   │   ├── gdb-static_logo_dark.svg
│   │   ├── gdb-static_logo_light.inkscape.svg
│   │   └── gdb-static_logo_light.svg
│   └── workflows
│       ├── pr-pipeline.yaml
│       └── release-pipeline.yaml
├── .gitignore
├── .gitmodules
├── Dockerfile
├── Makefile
├── README.md
├── compilation.md
├── package.json
└── src
    ├── compilation
    │   ├── build.sh
    │   ├── download_packages.sh
    │   ├── frozen_python_modules.txt
    │   ├── full_build_conf.sh
    │   └── utils.sh
    └── docker_utils
        └── download_musl_toolchains.py
/.github/assets/gdb-static_logo_dark.svg:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.github/assets/gdb-static_logo_light.svg:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.github/workflows/pr-pipeline.yaml:
--------------------------------------------------------------------------------
1 | name: gdb-static-pr-pipeline
2 |
3 | on:
4 | pull_request:
5 | branches:
6 | - '*'
7 |
8 | jobs:
9 | build:
10 | env:
11 | TTY_ARG: "" # Disable interactive build mode in CI, since the CI environment does not provide a TTY device.
12 |
13 | strategy:
14 | matrix:
15 | build_type: ["slim", "full"]
16 | architecture: ["x86_64", "arm", "aarch64", "powerpc", "mips", "mipsel"]
17 |
18 | runs-on: ubuntu-latest
19 | steps:
20 | - uses: actions/checkout@v4
21 | with:
22 | submodules: recursive
23 |
24 | - name: Install dependencies
25 | run: sudo apt-get install -y wget
26 |
27 | - name: Build
28 | run: make build-${{ matrix.architecture }}-${{ matrix.build_type }} -j$((`nproc`+1))
29 |
--------------------------------------------------------------------------------
/.github/workflows/release-pipeline.yaml:
--------------------------------------------------------------------------------
1 | name: gdb-static-release-pipeline
2 |
3 | on:
4 | push:
5 | tags:
6 | - 'v*'
7 |
8 | # Use a single, non-parallel job because the artifacts are too large to pass between jobs. Instead,
9 | # simply build the files in the same job that releases them.
10 | jobs:
11 | build_and_publish:
12 | env:
13 | TTY_ARG: "" # Disable interactive build mode in CI, since the CI environment does not provide a TTY device.
14 |
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/checkout@v4
18 | with:
19 | submodules: recursive
20 |
21 | - name: Install dependencies
22 | run: sudo apt-get install -y wget
23 |
24 | - name: Build
25 | run: make build -j$((`nproc`+1))
26 |
27 | - name: Pack
28 | run: make pack
29 |
30 | - name: Publish release
31 | uses: softprops/action-gh-release@v2
32 | with:
33 | files: build/artifacts/gdb-static*.tar.gz
34 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # IDE Folders
2 | .idea/
3 | .vscode/
4 |
5 | # Build folders
6 | build/
7 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "cpython-static"]
2 | path = src/submodule_packages/cpython-static
3 | url = git@github.com:guyush1/cpython-static.git
4 | branch = python3.12-static
5 | [submodule "binutils-gdb-static"]
6 | path = src/submodule_packages/binutils-gdb
7 | url = git@github.com:guyush1/binutils-gdb.git
8 | branch = gdb-static
9 | [submodule "src/submodule_packages/pygments"]
10 | path = src/submodule_packages/pygments
11 | url = git@github.com:pygments/pygments.git
12 | [submodule "src/submodule_packages/libexpat"]
13 | path = src/submodule_packages/libexpat
14 | url = git@github.com:guyush1/libexpat.git
15 | [submodule "xz"]
16 | path = src/submodule_packages/xz
17 | url = https://github.com/tukaani-project/xz.git
18 | [submodule "src/submodule_packages/libffi"]
19 | path = src/submodule_packages/libffi
20 | url = git@github.com:libffi/libffi.git
21 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:24.04
2 |
3 | RUN apt update && apt install -y \
4 | autopoint \
5 | binutils-multiarch \
6 | bison \
7 | file \
8 | flex \
9 | g++ \
10 | g++-aarch64-linux-gnu \
11 | g++-arm-linux-gnueabi \
12 | g++-mips-linux-gnu \
13 | g++-mipsel-linux-gnu \
14 | g++-powerpc-linux-gnu \
15 | gcc \
16 | gcc-aarch64-linux-gnu \
17 | gcc-arm-linux-gnueabi \
18 | gcc-mips-linux-gnu \
19 | gcc-mipsel-linux-gnu \
20 | gcc-powerpc-linux-gnu \
21 | git \
22 | libtool \
23 | m4 \
24 | make \
25 | patch \
26 | pkg-config \
27 | python3.12 \
28 | python3-aiohttp \
29 | libpython3-dev \
30 | texinfo \
31 | wget \
32 | xz-utils
33 |
34 | COPY src/docker_utils/download_musl_toolchains.py .
35 | RUN python3.12 -u download_musl_toolchains.py
36 |
37 | WORKDIR /app/gdb
38 |
39 | ENTRYPOINT ["/entrypoint.sh"]
40 | CMD ["bash"]
41 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | ARCHS := x86_64 arm aarch64 powerpc mips mipsel
2 | GDB_BFD_ARCHS := $(shell echo $(ARCHS) | awk '{for(i=1;i<=NF;i++) $$i=$$i"-linux"; print}' OFS=,)
3 |
4 | BASE_BUILD_TARGETS := $(addprefix build-, $(ARCHS))
5 |
6 | SLIM_BUILD_TARGETS := $(addsuffix -slim, $(BASE_BUILD_TARGETS))
7 | FULL_BUILD_TARGETS := $(addsuffix -full, $(BASE_BUILD_TARGETS))
8 | ALL_BUILD_TARGETS := $(SLIM_BUILD_TARGETS) $(FULL_BUILD_TARGETS)
9 |
10 | BASE_PACK_TARGETS := $(addprefix pack-, $(ARCHS))
11 |
12 | FULL_PACK_TARGETS := $(addsuffix -full, $(BASE_PACK_TARGETS))
13 | SLIM_PACK_TARGETS := $(addsuffix -slim, $(BASE_PACK_TARGETS))
14 | ALL_PACK_TARGETS := $(SLIM_PACK_TARGETS) $(FULL_PACK_TARGETS)
15 |
16 | SUBMODULE_PACKAGES := $(wildcard src/submodule_packages/*)
17 | BUILD_PACKAGES_DIR := "build/packages"
18 |
19 | # We would like to run in interactive mode when available (usually outside of CI).
20 | # The CI automation disables this explicitly.
21 | TTY_ARG ?= -it
22 |
23 | .PHONY: clean help download_packages build build-docker-image $(ALL_BUILD_TARGETS) $(ALL_PACK_TARGETS)
24 |
25 | .NOTPARALLEL: build pack
26 |
27 | help:
28 | @echo "Usage:"
29 | @echo " make build"
30 | @echo ""
31 |
32 | @for target in $(ALL_BUILD_TARGETS); do \
33 | echo " $$target"; \
34 | done
35 |
36 | @echo ""
37 | @echo " make clean"
38 |
39 | build/build-docker-image.stamp: Dockerfile src/docker_utils/download_musl_toolchains.py
40 | mkdir -p build
41 | docker buildx build --tag gdb-static .
42 | touch build/build-docker-image.stamp
43 |
44 | build-docker-image: build/build-docker-image.stamp
45 |
46 | build/download-packages.stamp: build/build-docker-image.stamp src/compilation/download_packages.sh
47 | mkdir -p $(BUILD_PACKAGES_DIR)
48 | docker run $(TTY_ARG) --user $(shell id -u):$(shell id -g) \
49 | --rm --volume .:/app/gdb gdb-static env TERM=xterm-256color \
50 | /app/gdb/src/compilation/download_packages.sh /app/gdb/$(BUILD_PACKAGES_DIR)/
51 | touch build/download-packages.stamp
52 |
53 | build/symlink-git-packages.stamp: $(SUBMODULE_PACKAGES)
54 | mkdir -p $(BUILD_PACKAGES_DIR)
55 | ln -sf $(addprefix /app/gdb/, $(SUBMODULE_PACKAGES)) $(BUILD_PACKAGES_DIR)/
56 |
57 | symlink-git-packages: build/symlink-git-packages.stamp
58 |
59 | download-packages: build/download-packages.stamp
60 |
61 | build: $(ALL_BUILD_TARGETS)
62 |
63 | $(SLIM_BUILD_TARGETS): build-%-slim:
64 | @BUILD_TYPE="slim" $(MAKE) _build-$*
65 |
66 | $(FULL_BUILD_TARGETS): build-%-full:
67 | @BUILD_TYPE="full" GDB_BFD_ARCHS=$(GDB_BFD_ARCHS) $(MAKE) _build-$*
68 |
69 | _build-%: symlink-git-packages download-packages build-docker-image
70 | mkdir -p build
71 | docker run $(TTY_ARG) --user $(shell id -u):$(shell id -g) \
72 | --rm --volume .:/app/gdb gdb-static env TERM=xterm-256color \
73 | /app/gdb/src/compilation/build.sh $* /app/gdb/build/ /app/gdb/src $(BUILD_TYPE) $(GDB_BFD_ARCHS)
74 |
75 | pack: $(ALL_PACK_TARGETS)
76 |
77 | $(SLIM_PACK_TARGETS): pack-%-slim:
78 | @BUILD_TYPE="slim" $(MAKE) _pack-$*
79 |
80 | $(FULL_PACK_TARGETS): pack-%-full:
81 | @BUILD_TYPE="full" $(MAKE) _pack-$*
82 |
83 | _pack-%: build-%-$(BUILD_TYPE)
84 | if [ ! -f "build/artifacts/gdb-static-$(BUILD_TYPE)-$*.tar.gz" ]; then \
85 | tar -czf "build/artifacts/gdb-static-$(BUILD_TYPE)-$*.tar.gz" -C "build/artifacts/$*_$(BUILD_TYPE)" .; \
86 | fi
87 |
88 | clean-git-packages:
89 | git submodule foreach 'echo "$$sm_path" | grep "^src/submodule_packages/.*" && git clean -xffd && git restore .'
90 |
91 | clean: clean-git-packages
92 | rm -rf build
93 | # Kill and remove all containers of image gdb-static
94 | docker ps -a | grep -P "^[a-f0-9]+\s+gdb-static\s+" | awk '{print $$1}' | xargs docker rm -f 2>/dev/null || true
95 | docker rmi -f gdb-static 2>/dev/null || true
96 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 | Frozen static builds of everyone's favorite debugger!🧊
11 |
12 |
13 |
26 |
27 | ## TL;DR
28 |
29 | - **Download**: Get the latest release from the [releases page](https://github.com/guyush1/gdb-static/releases/latest).
30 |
31 | ## Introduction
32 |
33 | Who doesn't love GDB? It's such a powerful tool, with such a great package.
34 | But sometimes, you run into one of these problems:
35 | - You can't install GDB on your machine
36 | - You can't install an updated version of GDB on your machine
37 | - Some other strange embedded reasons...
38 |
39 | This is where `gdb-static` comes in! We provide static builds of `gdb` (and `gdbserver` of course), so you can run them on any machine, without any dependencies!
40 |
41 |
42 |
43 | ## Features
44 |
45 |
46 | - **Static Builds**: No dependencies, no installation, just download and run!
47 | - **Musl Based**: We use Musl in order to create distribution-independent binaries that can work anywhere.
48 | - **Latest Versions**: We keep our builds up-to-date with the latest versions of GDB.
49 | - **Builtin Python (Optional)**: We provide builds with Python support built-in.
50 | - **XML Support**: Our builds come with XML support built-in, which is useful for some GDB commands.
51 | - **Wide Architecture Support**: We support a wide range of architectures:
52 | - aarch64
53 | - arm
54 | - mips
55 | - mipsel
56 | - powerpc
57 | - x86_64
58 |
59 |
60 |
61 | ## Usage
62 |
63 | To get started with `gdb-static`, simply download the build for your architecture from the [releases page](https://github.com/guyush1/gdb-static/releases/latest), extract the archive, and copy the binary to your desired platform.
64 |
65 | You may choose to copy the `gdb` binary to the platform, or use `gdbserver` to debug remotely.
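
For example, a typical remote-debugging session (the program name, port, and IP below are placeholders) might look like this:

```bash
# On the target machine: start gdbserver, listening on TCP port 1234
./gdbserver :1234 ./my_program

# On your workstation: run the matching gdb build and connect to the target
./gdb ./my_program
(gdb) target remote <target-ip>:1234
```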
66 |
67 | ## Build types
68 |
69 | We provide two types of builds:
70 | 1. Slim builds, which contain most of the features, aside from the ones mentioned below.
71 | 2. Full builds, which contain all of the slim build features, and additionally contain:
72 | * Python support
73 | * Cross-architecture debugging.
74 | Note that in order to enable cross-architecture debugging, we have to disable the simulator feature, since not all targets have a simulator.
75 |
76 | Slim builds are approximately 10MB, while full builds are approximately 70MB.
77 | You can edit the `full_build_conf.sh` file to disable full-build-exclusive features.
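
For example, to produce a full build without Python support (while keeping cross-architecture debugging), you could flip the corresponding flag in `src/compilation/full_build_conf.sh` before building:

```bash
# src/compilation/full_build_conf.sh
full_build_cross_arch_debugging=1
full_build_python_support=0
```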
78 |
79 | ## Development
80 |
81 | > [!NOTE]
82 | > Before building, make sure to initialize & sync the git submodules.
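
A typical way to do that, using standard git commands, is:

```bash
git submodule sync --recursive
git submodule update --init --recursive
```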
83 |
84 | Alternatively, you can build `gdb-static` from source. To do so, follow the instructions below:
85 |
86 |
87 |
88 | ### Pre-requisites
89 |
90 | To be able to build `gdb-static`, you will need the following tools installed on your machine:
91 |
92 |
93 |
94 | - Docker
95 | - Docker buildx
96 | - Git
97 |
98 |
99 |
100 |
101 | ### Building for a specific architecture
102 |
103 |
104 | To build `gdb-static` for a specific architecture, run the following command:
105 |
106 | ```bash
107 | make build-<architecture>-[slim/full]
108 | ```
109 |
110 | Where `<architecture>` is the architecture you want to build for, and `slim/full` determines the build type (see [here](#build-types)).
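
For example, a full build for aarch64 (optionally parallelized, as in the CI pipelines):

```bash
make build-aarch64-full -j$(($(nproc) + 1))
```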
111 |
112 | The resulting binary will be placed in the `build/artifacts/` directory:
113 |
114 | ```bash
115 | build/
116 | └── artifacts/
117 | └── <architecture>/
118 | └── ...
119 | ```
120 |
121 |
122 |
123 |
124 |
125 | ### Building for all architectures
126 |
127 |
128 | To build `gdb-static` for all supported architectures, run the following command:
129 |
130 | ```bash
131 | make build
132 | ```
133 |
134 | The resulting binary will be placed in the `build/artifacts/` directory.
135 |
136 |
137 |
138 |
139 | ## Contributing
140 |
141 | - Bug Report: If you see an error message or encounter an issue while using gdb-static, please create a [bug report](https://github.com/guyush1/gdb-static/issues/new?assignees=&labels=bug&title=%F0%9F%90%9B+Bug+Report%3A+).
142 |
143 | - Feature Request: If you have an idea or if there is a capability that is missing and would make `gdb-static` more robust, please submit a [feature request](https://github.com/guyush1/gdb-static/issues/new?assignees=&labels=enhancement&title=%F0%9F%9A%80+Feature+Request%3A+).
144 |
145 | ## Contributors
146 |
147 |
152 |
153 | [//]: contributor-faces
154 |
155 |
156 |
157 |
158 | [//]: contributor-faces
159 |
--------------------------------------------------------------------------------
/compilation.md:
--------------------------------------------------------------------------------
1 | # Notes about this file - read before proceeding!
2 |
3 | While we already provide statically compiled gdb/gdbserver binaries, some people might want to compile gdb without our build scripts, or compile a newer version of gdb in the future :).
4 | The rest of this file documents the compilation process, in order to help you out.
5 |
6 | NOTE: This guide describes the compilation process for a minimal working version of gdb. Our build scripts also provide further capabilities, such as Python and XML support, which are not documented in this file.
7 |
8 | ## In this file
9 |
10 | Environment variables are denoted by <...> throughout this file.
11 |
12 | Please note that when specifying a compilation dir throughout the compilation process (via the environment variable), DO NOT use relative pathing, or special bash characters such as `~`. Relative pathing / special bash characters will not get parsed correctly!
13 |
14 | Instead, always use absolute paths.
15 |
16 | Examples of the environment variables used throughout this guide:
17 | - `<CC>` - arm-linux-musleabi-gcc
18 | - `<CXX>` - arm-linux-musleabi-g++
19 | - `<HOST>` - arm-linux-musleabi
20 | - `<GMP_BUILD_DIR>` - /home/username/projects/libgmp-x.y.z/build-arm/
21 |
22 | ## Choosing the right compiler
23 |
24 | It is recommended to use a musl-based compiler. Unlike glibc, musl is not dependent on your distribution.
25 | Using a regular, glibc-based gcc should work most of the time, but some people reported crashes (mainly in gdbserver), so we switched to musl-based toolchains.
26 |
27 | # Compiling gdb statically to the host platform
28 |
29 | ## 1) Compiling iconv
30 |
31 | While compiling iconv is not a must, the libc-provided iconv (a utility to convert between encodings) may fail on different architectures,
32 | at least in my experience.
33 | Thus, I recommend using a custom libiconv and compiling it into gdb.
34 |
35 | Download the source from https://github.com/roboticslibrary/libiconv.git
36 |
37 | Make sure to check out to a stable tag (in my case - v1.17).
38 |
39 | Work according to the following steps:
40 | 1. run `./gitsub.sh pull`
41 | 2. run `./autogen.sh` to create the configure script from configure.sh.
42 | 3. create a build dir (e.g build), and then cd into it.
43 | 4. run `../configure --enable-static`
44 | 5. run `cp -r ./include ./lib/.libs/`
45 | 6. run `mkdir ./lib/.libs/lib/`
46 | 7. run `cp ./lib/.libs/libiconv.a ./lib/.libs/lib/`
47 |
48 | ## 2) Compiling gdb
49 |
50 | Clone gdb from my forked repository - https://github.com/guyush1/binutils-gdb/tree/gdb-static.
51 |
52 | Make sure to check out the **gdb-static** branch - this branch contains all of the changes I had to make to the build system in order for it to compile gdb statically.
53 |
54 | Work according to the following steps:
55 | 1. create a build dir.
56 | 2. run `../configure --enable-static --with-static-standard-libraries --disable-tui --disable-inprocess-agent --with-libiconv-prefix=<ICONV_BUILD_DIR>/lib/.libs/ --with-libiconv-type=static`
57 | 3. run `make all-gdb -j$(nproc)` - for gdbserver, run `make all-gdbserver -j$(nproc)`.
58 |
59 | gdb will sit under gdb/gdb.
60 | gdbserver will sit under gdbserver/gdbserver.
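
To sanity-check the result, you can verify that the produced binaries are indeed statically linked, for example:

```bash
file gdb/gdb               # should report "statically linked"
file gdbserver/gdbserver   # same here
```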
61 |
62 | # Cross compiling gdb statically to other architectures.
63 |
64 | Cross compiling gdb statically is a bit more complicated than regular static compilation. In order to cross compile gdb statically, we will need to compile libgmp and libmpfr, as well as iconv.
65 |
66 | ## 1) Compiling iconv
67 |
68 | Work according to the same process as described for the host platform, aside from the configure invocation (step 4):
69 | 4. run `../configure --enable-static CC=<CC> CXX=<CXX> --host=<HOST>`
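
For example, for an arm target with a musl toolchain, this might look like:

```bash
../configure --enable-static CC=arm-linux-musleabi-gcc CXX=arm-linux-musleabi-g++ --host=arm-linux-musleabi
```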
70 |
71 | ## 2) Compiling libgmp
72 |
73 | Download and extract the latest edition from https://gmplib.org/.
74 | I used the 6.3.0 edition.
75 |
76 | Work according to the following steps:
77 | 1. Create a build dir and cd into it.
78 | 2. run `../configure CC=<CC> CXX=<CXX> --enable-static --host=<HOST>`
79 | 3. run `make -j$(nproc)`
80 | 4. run `mkdir ./.libs/include/`
81 | 5. run `cp gmp.h ./.libs/include/`
82 | 6. run `mkdir ./.libs/lib`
83 | 7. run `cp ./.libs/libgmp.a ./.libs/lib`
84 |
85 | ## 3) Compiling libmpfr
86 |
87 | Download and extract the latest edition from https://www.mpfr.org/.
88 | I used the 4.2.1 edition.
89 |
90 | Work according to the following steps:
91 | 1. Create a build dir and cd into it.
92 | 2. run `../configure CC=<CC> CXX=<CXX> --enable-static --with-gmp-build=<GMP_BUILD_DIR> --host=<HOST>`
93 | 3. run `make -j$(nproc)`
94 | 4. run `mkdir ./src/.libs/lib`
95 | 5. run `cp ./src/.libs/libmpfr.a ./src/.libs/lib`
96 | 6. run `mkdir ./src/.libs/include`
97 | 7. run `cp ../src/mpfr.h ./src/.libs/include/`
98 |
99 | ## 4) Compiling gdb
100 |
101 | Work according to the same process as described for the host platform, aside from the configure invocation (step 2):
102 |
103 | 2. run `../configure --enable-static --with-static-standard-libraries --disable-tui --disable-inprocess-agent --with-libiconv-prefix=<ICONV_BUILD_DIR>/lib/.libs/ --with-libiconv-type=static --with-gmp=<GMP_BUILD_DIR>/.libs/ --with-mpfr=<MPFR_BUILD_DIR>/src/.libs/ CC=<CC> CXX=<CXX> --host=<HOST>`
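
As an illustrative example for an arm target (the dependency build paths below are placeholders - substitute your own absolute paths), the invocation might look like:

```bash
../configure --enable-static --with-static-standard-libraries --disable-tui --disable-inprocess-agent \
    --with-libiconv-prefix=/home/username/projects/libiconv-1.17/build-arm/lib/.libs/ --with-libiconv-type=static \
    --with-gmp=/home/username/projects/gmp-6.3.0/build-arm/.libs/ \
    --with-mpfr=/home/username/projects/mpfr-4.2.1/build-arm/src/.libs/ \
    CC=arm-linux-musleabi-gcc CXX=arm-linux-musleabi-g++ --host=arm-linux-musleabi
```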
104 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "gdb-static",
3 | "repository": {
4 | "type": "git",
5 | "url": "https://github.com/guyush1/gdb-static"
6 | }
7 | }
--------------------------------------------------------------------------------
/src/compilation/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Include utils library
4 | script_dir=$(dirname "$0")
5 | source "$script_dir/utils.sh"
6 | source "$script_dir/full_build_conf.sh"
7 |
8 | # Don't want random unknown things to fail in the build process!
9 | set -e
10 |
11 | function set_compilation_variables() {
12 | # Set compilation variables such as which compiler to use.
13 | #
14 | # Parameters:
15 | # $1: target architecture
16 | #
17 | # Returns:
18 | # 0: success
19 | # 1: failure
20 | supported_archs=("arm" "aarch64" "powerpc" "x86_64" "mips" "mipsel")
21 |
22 | local target_arch="$1"
23 |
24 | if [[ ! " ${supported_archs[@]} " =~ " ${target_arch} " ]]; then
25 | >&2 echo "Error: unsupported target architecture: $target_arch"
26 | return 1
27 | fi
28 |
29 | >&2 fancy_title "Setting compilation variables for $target_arch"
30 |
31 | if [[ "$target_arch" == "arm" ]]; then
32 | CROSS=arm-linux-musleabi-
33 | export HOST=arm-linux-musleabi
34 | elif [[ "$target_arch" == "aarch64" ]]; then
35 | CROSS=aarch64-linux-musl-
36 | export HOST=aarch64-linux-musl
37 | elif [[ "$target_arch" == "powerpc" ]]; then
38 | CROSS=powerpc-linux-musl-
39 | export HOST=powerpc-linux-musl
40 | elif [[ "$target_arch" == "mips" ]]; then
41 | CROSS=mips-linux-musl-
42 | export HOST=mips-linux-musl
43 | elif [[ "$target_arch" == "mipsel" ]]; then
44 | CROSS=mipsel-linux-musl-
45 | export HOST=mipsel-linux-musl
46 | elif [[ "$target_arch" == "x86_64" ]]; then
47 | CROSS=x86_64-linux-musl-
48 | export HOST=x86_64-linux-musl
49 | fi
50 |
51 | export CC="${CROSS}gcc"
52 | export CXX="${CROSS}g++"
53 |
54 | export CFLAGS="-Os"
55 | export CXXFLAGS="-Os"
56 |
57 | # Strip the binary to reduce its size.
58 | export LDFLAGS="-s"
59 | }
60 |
61 | function set_up_lib_search_path() {
62 | # Set up library-related linker search paths.
63 | #
64 | # Parameters:
65 | # $1: library install dir
66 | # $2: whether to add linker search path or not (include path is always added).
67 | local lib_install_dir="$1"
68 | local add_linker_include_path="$2"
69 |
70 | if [[ $add_linker_include_path == 1 ]]; then
71 | # Add library to the linker's include path.
72 | export LDFLAGS="-L$lib_install_dir/lib $LDFLAGS"
73 | fi
74 |
75 | # Add library standard headers to the CC / CXX flags.
76 | local include_paths="-I$lib_install_dir/include"
77 | export CC="$CC $include_paths"
78 | export CXX="$CXX $include_paths"
79 | }
80 |
81 | function set_up_base_lib_search_paths() {
82 | # Set up library-related linker search paths.
83 | #
84 | # Parameters:
85 | # $1: iconv build dir
86 | # $2: gmp build dir
87 | # $3: mpfr build dir
88 | # $4: ncursesw build dir
89 | # $5: expat build dir
90 | local iconv_build_dir="$1"
91 | local gmp_build_dir="$2"
92 | local mpfr_build_dir="$3"
93 | local ncursesw_build_dir="$4"
94 | local expat_build_dir="$5"
95 |
96 | set_up_lib_search_path $iconv_build_dir 0
97 | set_up_lib_search_path $gmp_build_dir 0
98 | set_up_lib_search_path $mpfr_build_dir 0
99 | set_up_lib_search_path $ncursesw_build_dir 1
100 | set_up_lib_search_path $expat_build_dir 1
101 | }
102 |
103 | function build_iconv() {
104 | # Build libiconv.
105 | #
106 | # Parameters:
107 | # $1: iconv package directory
108 | # $2: target architecture
109 | #
110 | # Echoes:
111 | # The libiconv build directory
112 | #
113 | # Returns:
114 | # 0: success
115 | # 1: failure
116 |
117 | local iconv_dir="$1"
118 | local target_arch="$2"
119 | local iconv_build_dir="$(realpath "$iconv_dir/build-$target_arch")"
120 |
121 | echo "$iconv_build_dir"
122 | mkdir -p "$iconv_build_dir"
123 |
124 | if [[ -f "$iconv_build_dir/lib/libiconv.a" ]]; then
125 | >&2 echo "Skipping build: iconv already built for $target_arch"
126 | return 0
127 | fi
128 |
129 | pushd "$iconv_build_dir" > /dev/null
130 |
131 | >&2 fancy_title "Building libiconv for $target_arch"
132 |
133 | ../configure --enable-static "CC=$CC" "CXX=$CXX" "--host=$HOST" \
134 | "CFLAGS=$CFLAGS" "CXXFLAGS=$CXXFLAGS" --prefix="$(realpath .)" 1>&2
135 | if [[ $? -ne 0 ]]; then
136 | return 1
137 | fi
138 |
139 | make -j$(nproc) 1>&2
140 | if [[ $? -ne 0 ]]; then
141 | return 1
142 | fi
143 |
144 | make -j$(nproc) install 1>&2
145 | if [[ $? -ne 0 ]]; then
146 | return 1
147 | fi
148 |
149 | >&2 fancy_title "Finished building libiconv for $target_arch"
150 |
151 | popd > /dev/null
152 | }
153 |
154 | function build_lzma() {
155 | # Build liblzma.
156 | #
157 | # Parameters:
158 | # $1: lzma package directory
159 | # $2: target architecture
160 | #
161 | # Echoes:
162 | # The lzma build directory
163 | #
164 | # Returns:
165 | # 0: success
166 | # 1: failure
167 |
168 | local lzma_dir="$1"
169 | local target_arch="$2"
170 | local lzma_build_dir="$(realpath "$lzma_dir/build-$target_arch")"
171 |
172 | echo "$lzma_build_dir"
173 | mkdir -p "$lzma_build_dir"
174 |
175 | if [[ -f "$lzma_build_dir/lib/liblzma.a" ]]; then
176 | >&2 echo "Skipping build: lzma already built for $target_arch"
177 | return 0
178 | fi
179 |
180 | pushd "$lzma_build_dir" > /dev/null
181 |
182 | >&2 fancy_title "Building liblzma for $target_arch"
183 |
184 | # Make sure configure exists by running autogen.sh
185 | (
186 | cd .. && ./autogen.sh 1>&2
187 | )
188 |
189 | # lzma's autoconf contains a bug: its install prefix is relative
190 | # to the current build directory.
191 | # Hence, we set the prefix here to "/" instead of "$(realpath .)".
192 | ../configure --enable-static "CC=$CC" "CXX=$CXX" "--host=$HOST" \
193 | "CFLAGS=$CFLAGS" "CXXFLAGS=$CXXFLAGS" --prefix="/" 1>&2
194 | if [[ $? -ne 0 ]]; then
195 | return 1
196 | fi
197 |
198 | make -j$(nproc) 1>&2
199 | if [[ $? -ne 0 ]]; then
200 | return 1
201 | fi
202 |
203 | make -j$(nproc) install DESTDIR=$lzma_build_dir 1>&2
204 | if [[ $? -ne 0 ]]; then
205 | return 1
206 | fi
207 |
208 | >&2 fancy_title "Finished building liblzma for $target_arch"
209 |
210 | popd > /dev/null
211 | }
212 |
213 |
214 | function build_libgmp() {
215 | # Build libgmp.
216 | #
217 | # Parameters:
218 | # $1: libgmp package directory
219 | # $2: target architecture
220 | #
221 | # Echoes:
222 | # The libgmp build directory
223 | #
224 | # Returns:
225 | # 0: success
226 | # 1: failure
227 |
228 | local gmp_dir="$1"
229 | local target_arch="$2"
230 | local gmp_build_dir="$(realpath "$gmp_dir/build-$target_arch")"
231 |
232 | echo "$gmp_build_dir"
233 | mkdir -p "$gmp_build_dir"
234 |
235 | if [[ -f "$gmp_build_dir/lib/libgmp.a" ]]; then
236 | >&2 echo "Skipping build: libgmp already built for $target_arch"
237 | return 0
238 | fi
239 |
240 | pushd "$gmp_build_dir" > /dev/null
241 |
242 | >&2 fancy_title "Building libgmp for $target_arch"
243 |
244 | ../configure --enable-static "CC=$CC" "CXX=$CXX" "--host=$HOST" \
245 | "CFLAGS=$CFLAGS" "CXXFLAGS=$CXXFLAGS" --prefix="$(realpath .)" 1>&2
246 | if [[ $? -ne 0 ]]; then
247 | return 1
248 | fi
249 |
250 | make -j$(nproc) 1>&2
251 | if [[ $? -ne 0 ]]; then
252 | return 1
253 | fi
254 |
255 | make -j$(nproc) install 1>&2
256 | if [[ $? -ne 0 ]]; then
257 | return 1
258 | fi
259 |
260 | >&2 fancy_title "Finished building libgmp for $target_arch"
261 |
262 | popd > /dev/null
263 | }
264 |
265 | function build_ncurses() {
266 | # Build libncursesw.
267 | #
268 | # Parameters:
269 | # $1: libncursesw package directory
270 | # $2: target architecture
271 | #
272 | # Echoes:
273 | # The libncursesw build directory
274 | #
275 | # Returns:
276 | # 0: success
277 | # 1: failure
278 | local ncurses_dir="$1"
279 | local target_arch="$2"
280 | local ncurses_build_dir="$(realpath "$ncurses_dir/build-$target_arch")"
281 |
282 | # ncurses needs a custom install dir due to its non-standard compilation directories.
283 | local ncurses_install_dir="$ncurses_build_dir/output"
284 |
285 | echo "$ncurses_install_dir"
286 | mkdir -p "$ncurses_install_dir"
287 |
288 | if [[ -f "$ncurses_install_dir/lib/libncursesw.a" ]]; then
289 | >&2 echo "Skipping build: libncursesw already built for $target_arch"
290 | return 0
291 | fi
292 |
293 | pushd "$ncurses_build_dir" > /dev/null
294 |
295 | >&2 fancy_title "Building libncursesw for $target_arch"
296 |
297 | ../configure --enable-static "CC=$CC" "CXX=$CXX" "--host=$HOST" \
298 | "CFLAGS=$CFLAGS" "CXXFLAGS=$CXXFLAGS" "--enable-widec" \
299 | --prefix="$ncurses_install_dir" --with-default-terminfo-dir="/usr/share/terminfo" 1>&2
300 | if [[ $? -ne 0 ]]; then
301 | return 1
302 | fi
303 |
304 | make -j$(nproc) 1>&2
305 | if [[ $? -ne 0 ]]; then
306 | return 1
307 | fi
308 |
309 | # Install the include & library dirs, but not the terminfo database.
310 | # The user is responsible for supplying the terminal database.
311 | make -j$(nproc) install.includes install.libs 1>&2
312 | if [[ $? -ne 0 ]]; then
313 | return 1
314 | fi
315 |
316 | >&2 fancy_title "Finished building libncursesw for $target_arch"
317 |
318 | popd > /dev/null
319 | }
320 |
321 | function build_libexpat() {
322 | # Build libexpat.
323 | #
324 | # Parameters:
325 | # $1: libexpat package directory
326 | # $2: target architecture
327 | #
328 | # Echoes:
329 | # The libexpat build directory
330 | #
331 | # Returns:
332 | # 0: success
333 | # 1: failure
334 | local libexpat_dir="$1"
335 | local target_arch="$2"
336 | local libexpat_build_dir="$(realpath "$libexpat_dir/build-$target_arch")"
337 |
338 | echo "$libexpat_build_dir"
339 | mkdir -p "$libexpat_build_dir"
340 |
341 | if [[ -f "$libexpat_build_dir/lib/libexpat.a" ]]; then
342 | >&2 echo "Skipping build: libexpat already built for $target_arch"
343 | return 0
344 | fi
345 |
346 | pushd "$libexpat_build_dir" > /dev/null
347 |
348 | >&2 fancy_title "Building libexpat for $target_arch"
349 |
350 | # Generate configure if it doesn't exist.
351 | if [[ ! -f "$libexpat_build_dir/../expat/configure" ]]; then
352 | >&2 ../expat/buildconf.sh ../expat/
353 | fi
354 |
355 | ../expat/configure --enable-static "CC=$CC" "CXX=$CXX" "--host=$HOST" \
356 | "CFLAGS=$CFLAGS" "CXXFLAGS=$CXXFLAGS" --prefix="$(realpath .)" 1>&2
357 | if [[ $? -ne 0 ]]; then
358 | return 1
359 | fi
360 |
361 | make -j$(nproc) 1>&2
362 | if [[ $? -ne 0 ]]; then
363 | return 1
364 | fi
365 |
366 | make -j$(nproc) install 1>&2
367 | if [[ $? -ne 0 ]]; then
368 | return 1
369 | fi
370 |
371 | >&2 fancy_title "Finished building libexpat for $target_arch"
372 |
373 | popd > /dev/null
374 | }
375 |
376 | function build_libffi() {
377 | # Build libffi, for the ctypes python module.
378 | #
379 | # Parameters:
380 | # $1: libffi package directory
381 | # $2: Target architecture
382 | local libffi_dir="$1"
383 | local target_arch="$2"
384 |
385 | pushd "${libffi_dir}" > /dev/null
386 |
387 | local libffi_build_dir="$(realpath "$libffi_dir/build-$target_arch")"
388 |
389 | # libffi needs a custom install dir due to its non-standard compilation directories.
390 | local libffi_install_dir="$libffi_build_dir/output"
391 | echo "${libffi_install_dir}"
392 |
393 | # Creates both the installation and build dirs because install is in build.
394 | mkdir -p "${libffi_install_dir}"
395 |
396 | if [[ -f "$libffi_install_dir/lib/libffi.a" ]]; then
397 | >&2 echo "Skipping build: libffi already built for $target_arch"
398 | return 0
399 | fi
400 |
401 | >&2 ./autogen.sh
402 | pushd "${libffi_build_dir}" > /dev/null
403 |
404 | >&2 fancy_title "Building libffi for $target_arch"
405 |
406 | >&2 CFLAGS="${CFLAGS} -DNO_JAVA_RAW_API" ../configure \
407 | --enable-silent-rules \
408 | --enable-static \
409 | --disable-shared \
410 | --disable-docs \
411 | --prefix="${libffi_install_dir}"
412 | if [[ $? -ne 0 ]]; then
413 | return 1
414 | fi
415 |
416 | >&2 make -j$(nproc)
417 | if [[ $? -ne 0 ]]; then
418 | return 1
419 | fi
420 |
421 | >&2 make -j$(nproc) install
422 | if [[ $? -ne 0 ]]; then
423 | return 1
424 | fi
425 |
426 | >&2 fancy_title "Finished building libffi for $target_arch"
427 |
428 | popd > /dev/null
429 | popd > /dev/null
430 | }
431 |
432 | function add_to_pkg_config_path() {
433 | # This method adds a directory to the list of directories that pkg-config searches for .pc (package config)
434 | # files when resolving the correct flags for modules.
435 | #
436 | # Parameters:
437 | # $1: The directory to add to the package-config path.
438 | local new_pkg_config_dir="${1}"
439 |
440 | if [[ -n "${PKG_CONFIG_PATH}" ]]; then
441 | export PKG_CONFIG_PATH="${PKG_CONFIG_PATH}:${new_pkg_config_dir}"
442 | else
443 | export PKG_CONFIG_PATH="${new_pkg_config_dir}"
444 | fi
445 | }
446 |
447 | function setup_libffi_env() {
448 | # We need a valid pkg-config file for libffi in order for Python to recognize the package and
449 | # know that it exists. Because we have a pkg-config file, we might as well use it in order to ensure
450 | # that we get the correct flags instead of manually typing them.
451 | # Because of this, the setup of libffi isn't done in set_up_lib_search_path, as we don't need it.
452 | #
453 | # Parameters:
454 | # $1: Libffi installation dir
455 | local libffi_install_dir="$1"
456 |
457 | # Needed because this is how Python recognizes the available packages.
458 | add_to_pkg_config_path "${libffi_install_dir}/lib/pkgconfig/"
459 |
460 | # If we have a pc file, might as well use it.
461 | local libffi_cflags="$(pkg-config --cflags libffi)"
462 | local libffi_libs="$(pkg-config --libs --static libffi)"
463 |
464 | export CC="${CC} ${libffi_cflags}"
465 | export CXX="${CXX} ${libffi_cflags}"
466 |
467 | export LDFLAGS="${libffi_libs} ${LDFLAGS}"
468 | }
469 |
470 | function build_python() {
471 | # Build python.
472 | #
473 | # Parameters:
474 | # $1: python package directory
475 | # $2: target architecture
476 | # $3: gdb's python module directory parent
477 | # $4: pygment's toplevel source dir.
478 | #
479 | # Echoes:
480 | # The python build directory
481 | #
482 | # Returns:
483 | # 0: success
484 | # 1: failure
485 | local python_dir="$1"
486 | local target_arch="$2"
487 | local gdb_python_parent="$3"
488 | local pygments_source_dir="$4"
489 | local python_lib_dir="$(realpath "$python_dir/build-$target_arch")"
490 |
491 | echo "$python_lib_dir"
492 | mkdir -p "$python_lib_dir"
493 |
494 | # Having a python-config file is an indication that we successfully built python.
495 | if [[ -f "$python_lib_dir/python-config" ]]; then
496 | >&2 echo "Skipping build: libpython already built for $target_arch"
497 | return 0
498 | fi
499 |
500 | pushd "$python_lib_dir" > /dev/null
501 | >&2 fancy_title "Building python for $target_arch"
502 |
503 | export LINKFORSHARED=" "
504 | export MODULE_BUILDTYPE="static"
505 | export CONFIG_SITE="$python_dir/config.site-static"
506 | >&2 CFLAGS="-static" LDFLAGS="-static" ../configure \
507 | --prefix="$(realpath .)" \
508 | --disable-test-modules \
509 | --with-ensurepip=no \
510 | --without-decimal-contextvar \
511 | --build=x86_64-pc-linux-gnu \
512 | --host=$HOST \
513 | --with-build-python=/usr/bin/python3.12 \
514 | --disable-ipv6 \
515 | --disable-shared
516 |
517 | # Extract the regular standard library modules that are to be frozen and include the gdb and pygments custom libraries.
518 | export EXTRA_FROZEN_MODULES="$(printf "%s" "$(< ${script_dir}/frozen_python_modules.txt)" | tr $'\n' ";")"
519 | export EXTRA_FROZEN_MODULES="${EXTRA_FROZEN_MODULES};: gdb = ${gdb_python_parent};: pygments = ${pygments_source_dir}"
520 | >&2 echo "Frozen Modules: ${EXTRA_FROZEN_MODULES}"
521 |
522 | # Regenerate the frozen modules with the gdb env variable set. Do it after the configure because we need
523 | # the `regen-frozen` makefile rule.
524 | >&2 python3.12 ../Tools/build/freeze_modules.py
525 | if [[ $? -ne 0 ]]; then
526 | return 1
527 | fi
528 |
529 | >&2 make regen-frozen
530 | if [[ $? -ne 0 ]]; then
531 | return 1
532 | fi
533 |
534 | # Build python after configuring the project and regenerating the frozen files.
535 | >&2 make -j$(nproc)
536 | if [[ $? -ne 0 ]]; then
537 | return 1
538 | fi
539 |
540 | # Install python (into the build dir, using the prefix set above), in order to have a bash-based python3-config that works for cross-compilation.
541 | >&2 make install
542 | if [[ $? -ne 0 ]]; then
543 | return 1
544 | fi
545 |
546 | >&2 fancy_title "Finished building python for $target_arch"
547 | popd > /dev/null
548 | }
549 |
550 | function build_libmpfr() {
551 | # Build libmpfr.
552 | #
553 | # Parameters:
554 | # $1: mpfr package directory
555 | # $2: libgmp build directory
556 | # $3: target architecture
557 | #
558 | # Echoes:
559 | # The libmpfr build directory
560 | #
561 | # Returns:
562 | # 0: success
563 | # 1: failure
564 |
565 | local mpfr_dir="$1"
566 | local libgmp_build_dir="$2"
567 | local target_arch="$3"
568 | local mpfr_build_dir="$(realpath "$mpfr_dir/build-$target_arch")"
569 |
570 | mkdir -p "$mpfr_build_dir"
571 | echo "$mpfr_build_dir"
572 |
573 | if [[ -f "$mpfr_build_dir/lib/libmpfr.a" ]]; then
574 | >&2 echo "Skipping build: libmpfr already built for $target_arch"
575 | return 0
576 | fi
577 |
578 | pushd "$mpfr_dir/build-$target_arch" > /dev/null
579 |
580 | >&2 fancy_title "Building libmpfr for $target_arch"
581 |
582 | ../configure --enable-static --prefix="$(realpath .)" "--with-gmp=$libgmp_build_dir" \
583 | "CC=$CC" "CXX=$CXX" "--host=$HOST" \
584 | "CFLAGS=$CFLAGS" "CXXFLAGS=$CXXFLAGS" 1>&2
585 | if [[ $? -ne 0 ]]; then
586 | return 1
587 | fi
588 |
589 | make -j$(nproc) 1>&2
590 | if [[ $? -ne 0 ]]; then
591 | return 1
592 | fi
593 |
594 | make -j$(nproc) install 1>&2
595 | if [[ $? -ne 0 ]]; then
596 | return 1
597 | fi
598 |
599 | >&2 fancy_title "Finished building libmpfr for $target_arch"
600 |
601 | popd > /dev/null
602 | }
603 |
604 | function build_gdb() {
605 | # Configure and build gdb.
606 | #
607 | # Parameters:
608 | # $1: gdb directory
609 | # $2: target architecture
610 | # $3: libiconv prefix
611 | # $4: libgmp prefix
612 | # $5: libmpfr prefix
613 | # $6: liblzma prefix
614 | # $7: build mode: slim / full.
615 | # $8: gdb cross-architecture binary format support formats (relevant for full builds only).
616 | #
617 | # Echoes:
618 | # The gdb build directory
619 | #
620 | # Returns:
621 | # 0: success
622 | # 1: failure
623 |
624 | local gdb_dir="$1"
625 | local target_arch="$2"
626 | local libiconv_prefix="$3"
627 | local libgmp_prefix="$4"
628 | local libmpfr_prefix="$5"
629 | local liblzma_prefix="$6"
630 | local full_build="$7"
631 | local gdb_bfd_archs="$8"
632 |
633 | local extra_flags=()
634 | if [[ "$full_build" == "yes" ]]; then
635 | if [[ $full_build_cross_arch_debugging -eq 1 ]]; then
636 | extra_flags+=("--enable-targets=$gdb_bfd_archs" "--enable-64-bit-bfd" "--disable-sim")
637 | fi
638 |
639 | if [[ $full_build_python_support -eq 1 ]]; then
640 | extra_flags+=("--with-python=/app/gdb/build/packages/cpython-static/build-$target_arch/bin/python3-config")
641 | else
642 | extra_flags+=("--without-python")
643 | fi
644 |
645 | local gdb_build_dir="$(realpath "$gdb_dir/build-${target_arch}-full")"
646 | else
647 | extra_flags+=("--without-python")
648 | local gdb_build_dir="$(realpath "$gdb_dir/build-${target_arch}-slim")"
649 | fi
650 |
651 | echo "$gdb_build_dir"
652 | mkdir -p "$gdb_build_dir"
653 |
654 | if [[ -f "$gdb_build_dir/gdb/gdb" ]]; then
655 | >&2 echo "Skipping build: gdb already built for $target_arch"
656 | return 0
657 | fi
658 |
659 | pushd "$gdb_build_dir" > /dev/null
660 |
661 | >&2 fancy_title "Building gdb for $target_arch"
662 |
663 | ../configure --enable-static --with-static-standard-libraries --disable-inprocess-agent \
664 | --with-gdb-datadir="/usr/share/gdb" --with-separate-debug-dir="/usr/lib/debug" \
665 | --with-system-gdbinit="/etc/gdb/gdbinit" --with-system-gdbinit-dir="/etc/gdb/gdbinit.d" \
666 | --with-jit-reader-dir="/usr/lib/gdb" \
667 | --with-libiconv-prefix="$libiconv_prefix" --with-libiconv-type=static \
668 | --with-gmp="$libgmp_prefix" \
669 | --with-mpfr="$libmpfr_prefix" \
670 | --enable-tui \
671 | --with-expat --with-libexpat-type=static \
672 | --with-lzma=yes --with-liblzma-prefix="$liblzma_prefix" --with-liblzma-type="static" \
673 | "${extra_flags[@]}" \
674 | "CC=$CC" "CXX=$CXX" "LDFLAGS=$LDFLAGS" "--host=$HOST" \
675 | "CFLAGS=$CFLAGS" "CXXFLAGS=$CXXFLAGS" 1>&2
676 | if [[ $? -ne 0 ]]; then
677 | return 1
678 | fi
679 |
680 | make -j$(nproc) 1>&2
681 | if [[ $? -ne 0 ]]; then
682 | return 1
683 | fi
684 |
685 | >&2 fancy_title "Finished building gdb for $target_arch"
686 |
687 | popd > /dev/null
688 | }
689 |
690 | function install_gdb() {
691 | # Install gdb binaries to an artifacts directory.
692 | #
693 | # Parameters:
694 | # $1: gdb build directory
695 | # $2: artifacts directory
696 | # $3: target architecture
697 | # $4: build mode: slim / full.
698 | #
699 | # Returns:
700 | # 0: success
701 | # 1: failure
702 |
703 | local gdb_build_dir="$1"
704 | local artifacts_dir="$2"
705 | local target_arch="$3"
706 | local full_build="$4"
707 |
708 | if [[ "$full_build" == "yes" ]]; then
709 | local artifacts_location="$artifacts_dir/${target_arch}_full"
710 | else
711 | local artifacts_location="$artifacts_dir/${target_arch}_slim"
712 | fi
713 |
714 | if [[ -d "$artifacts_location" && -n "$(ls -A "$artifacts_location")" ]]; then
715 | >&2 echo "Skipping install: gdb already installed for $target_arch"
716 | return 0
717 | fi
718 |
719 | temp_artifacts_dir="$(mktemp -d)"
720 |
721 | mkdir -p "$artifacts_location"
722 |
723 | make -j$(nproc) -C "$gdb_build_dir" install "DESTDIR=$temp_artifacts_dir" 1>&2
724 | if [[ $? -ne 0 ]]; then
725 | rm -rf "$temp_artifacts_dir"
726 | return 1
727 | fi
728 |
729 | while read file; do
730 | cp "$file" "$artifacts_location/"
731 | done < <(find "$temp_artifacts_dir/usr/local/bin" -type f -executable)
732 |
733 | rm -rf "$temp_artifacts_dir"
734 | }
735 |
736 | function build_and_install_gdb() {
737 | # Build gdb and install it to an artifacts directory.
738 | #
739 | # Parameters:
740 | # $1: gdb package directory
741 | # $2: libiconv prefix
742 | # $3: libgmp prefix
743 | # $4: libmpfr prefix
744 | # $5: liblzma prefix.
745 | # $6: build mode: slim / full.
746 | # $7: gdb cross-architecture binary format support formats (relevant for full builds only).
747 | # $8: install directory
748 | # $9: target architecture
749 | #
750 | # Returns:
751 | # 0: success
752 | # 1: failure
753 |
754 | local gdb_dir="$1"
755 | local libiconv_prefix="$2"
756 | local libgmp_prefix="$3"
757 | local libmpfr_prefix="$4"
758 | local liblzma_prefix="$5"
759 | local full_build="$6"
760 | local gdb_bfd_archs="$7"
761 | local artifacts_dir="$8"
762 | local target_arch="$9"
763 |
764 | gdb_build_dir="$(build_gdb "$gdb_dir" "$target_arch" "$libiconv_prefix" "$libgmp_prefix" "$libmpfr_prefix" "$liblzma_prefix" "$full_build" "$gdb_bfd_archs")"
765 | if [[ $? -ne 0 ]]; then
766 | return 1
767 | fi
768 |
769 | install_gdb "$gdb_build_dir" "$artifacts_dir" "$target_arch" "$full_build"
770 | if [[ $? -ne 0 ]]; then
771 | return 1
772 | fi
773 | }
774 |
775 | function build_gdb_with_dependencies() {
776 | # Build gdb for a specific target architecture.
777 | #
778 | # Parameters:
779 | # $1: target architecture
780 | # $2: build directory
781 | # $3: src directory
782 | # $4: build mode: slim / full.
783 | # $5: gdb cross-architecture binary format support formats (relevant for full builds only).
784 |
785 | local target_arch="$1"
786 | local build_dir="$2"
787 | local source_dir="$3"
788 | local full_build="$4"
789 | local gdb_bfd_archs="$5"
790 | local packages_dir="$build_dir/packages"
791 | local artifacts_dir="$build_dir/artifacts"
792 |
793 | set_compilation_variables "$target_arch"
794 | if [[ $? -ne 0 ]]; then
795 | return 1
796 | fi
797 |
798 | mkdir -p "$packages_dir"
799 |
800 | iconv_build_dir="$(build_iconv "$packages_dir/libiconv" "$target_arch")"
801 | if [[ $? -ne 0 ]]; then
802 | return 1
803 | fi
804 |
805 | gmp_build_dir="$(build_libgmp "$packages_dir/gmp" "$target_arch")"
806 | if [[ $? -ne 0 ]]; then
807 | return 1
808 | fi
809 |
810 | mpfr_build_dir="$(build_libmpfr "$packages_dir/mpfr" "$gmp_build_dir" "$target_arch")"
811 | if [[ $? -ne 0 ]]; then
812 | return 1
813 | fi
814 |
815 | ncursesw_build_dir="$(build_ncurses "$packages_dir/ncurses" "$target_arch")"
816 | if [[ $? -ne 0 ]]; then
817 | return 1
818 | fi
819 |
820 | libexpat_build_dir="$(build_libexpat "$packages_dir/libexpat" "$target_arch")"
821 | if [[ $? -ne 0 ]]; then
822 | return 1
823 | fi
824 |
825 | lzma_build_dir="$(build_lzma "$packages_dir/xz" "$target_arch")"
826 | if [[ $? -ne 0 ]]; then
827 | return 1
828 | fi
829 |
830 | set_up_base_lib_search_paths "$iconv_build_dir" \
831 | "$gmp_build_dir" \
832 | "$mpfr_build_dir" \
833 | "$ncursesw_build_dir" \
834 | "$libexpat_build_dir"
835 |
836 | # Optional build components
837 | if [[ $full_build == "yes" && $full_build_python_support -eq 1 ]]; then
838 | local libffi_install_dir="$(build_libffi "${packages_dir}/libffi" "${target_arch}")"
839 | setup_libffi_env "${libffi_install_dir}"
840 |
841 | local gdb_python_dir="$packages_dir/binutils-gdb/gdb/python/lib/"
842 | local pygments_source_dir="$packages_dir/pygments/"
843 | local python_build_dir="$(build_python "$packages_dir/cpython-static" "$target_arch" "$gdb_python_dir" "$pygments_source_dir")"
844 | if [[ $? -ne 0 ]]; then
845 | return 1
846 | fi
847 | fi
848 |
849 | build_and_install_gdb "$packages_dir/binutils-gdb" \
850 | "$iconv_build_dir" \
851 | "$gmp_build_dir" \
852 | "$mpfr_build_dir" \
853 | "$lzma_build_dir" \
854 | "$full_build" \
855 | "$gdb_bfd_archs" \
856 | "$artifacts_dir" \
857 | "$target_arch"
858 | if [[ $? -ne 0 ]]; then
859 | return 1
860 | fi
861 | }
862 |
863 | function main() {
864 | if [[ $# -lt 4 ]]; then
865 | >&2 echo "Usage: $0 <target_arch> <build_dir> <src_dir> <build_type> [gdb-bfd-archs]"
866 | exit 1
867 | fi
868 |
869 | local full_build="no"
870 | if [[ "$4" == "full" ]]; then
871 | full_build="yes"
872 | else
873 | full_build="no"
874 | fi
875 |
876 | build_gdb_with_dependencies "$1" "$2" "$3" "$full_build" "$5"
877 | if [[ $? -ne 0 ]]; then
878 | >&2 echo "Error: failed to build gdb with dependencies"
879 | exit 1
880 | fi
881 | }
882 |
883 | main "$@"
884 |
--------------------------------------------------------------------------------
/src/compilation/download_packages.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Include utils library
4 | script_dir=$(dirname "$0")
5 | source "$script_dir/utils.sh"
6 |
7 | # List of package URLs to download
8 | SOURCE_URLS=(
9 | "https://ftp.gnu.org/pub/gnu/libiconv/libiconv-1.17.tar.gz"
10 | "https://ftp.gnu.org/pub/gnu/gmp/gmp-6.3.0.tar.xz"
11 | "https://ftp.gnu.org/pub/gnu/mpfr/mpfr-4.2.1.tar.xz"
12 | "https://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.5.tar.gz"
13 | )
14 |
15 | function unpack_tarball() {
16 | # Unpack a tarball based on its extension.
17 | # Supported extensions: tar, gz, xz.
18 | #
19 | # Parameters:
20 | # $1: tarball
21 | #
22 | # Returns:
23 | # 0: success
24 | # 1: failure
25 |
26 | local tarball="$1"
27 | local extension="${tarball##*.}"
28 |
29 | if [[ ! -f "$tarball" ]]; then
30 | >&2 echo "Error: $tarball does not exist"
31 | return 1
32 | fi
33 |
34 | case "$extension" in
35 | tar | xz)
36 | tar xf "$tarball"
37 | ;;
38 | gz)
39 | tar xzf "$tarball"
40 | ;;
41 | *)
42 | >&2 echo "Error: unknown extension $extension"
43 | return 1
44 | ;;
45 | esac
46 |
47 | if [[ $? -ne 0 ]]; then
48 | >&2 echo "Error: failed to unpack $tarball"
49 | return 1
50 | fi
51 | }
52 |
53 | function download_package() {
54 | # Download a package. Will skip download if the output file already exists.
55 | #
56 | # Parameters:
57 | # $1: URL of the package
58 | # $2: output file
59 | #
60 | # Returns:
61 | # 0: success
62 | # 1: failure
63 |
64 | local url="$1"
65 | local output="$2"
66 |
67 | wget "$url" -O "$output"
68 | if [[ $? -ne 0 ]]; then
69 | >&2 echo "Error: failed to download $url"
70 | return 1
71 | fi
72 | }
73 |
74 | function extract_package() {
75 | # Extract a package. Will skip extraction if the package directory already exists.
76 | #
77 | # Parameters:
78 | # $1: package tarball
79 | # $2: output directory
80 | #
81 | # Returns:
82 | # 0: success
83 | # 1: failure
84 |
85 | local tarball="$1"
86 | local output_dir="$2"
87 | local package_dir="${tarball%.tar*}"
88 | local tarball_realpath="$(realpath "$tarball")"
89 | local temp_dir="$(mktemp -d)"
90 |
91 | if [[ ! -f "$tarball" ]]; then
92 | >&2 echo "Error: $tarball does not exist"
93 | return 1
94 | fi
95 |
96 | pushd "$temp_dir" > /dev/null
97 |
98 | unpack_tarball "$tarball_realpath"
99 | if [[ $? -ne 0 ]]; then
100 | popd > /dev/null
101 | return 1
102 | fi
103 |
104 | popd > /dev/null
105 |
106 | # Make sure the output dir is empty, so we can move content into it.
107 | # The directory might not exist, so we need to pass || true so that set -e won't fail us.
108 | rm -rf "$output_dir" || true
109 |
110 | mv "$temp_dir/$package_dir" "$output_dir"
111 | if [[ $? -ne 0 ]]; then
112 | return 1
113 | fi
114 |
115 | rm -rf "$temp_dir"
116 | }
117 |
118 | function download_and_extract_package() {
119 | # Download and extract a package.
120 | #
121 | # Parameters:
122 | # $1: URL of the package
123 | # $2: output directory
124 | #
125 | # Returns:
126 | # 0: success
127 | # 1: failure
128 |
129 | local url="$1"
130 | local output_dir="$2"
131 | local tarball=$(basename "$url")
132 |
133 | download_package "$url" "$tarball"
134 | if [[ $? -ne 0 ]]; then
135 | return 1
136 | fi
137 |
138 | extract_package "$tarball" "$output_dir"
139 | if [[ $? -ne 0 ]]; then
140 | return 1
141 | fi
142 | }
143 |
144 | function package_url_to_dir() {
145 | # Convert a package URL to a directory name.
146 | #
147 | # Parameters:
148 | # $1: package URL
149 | #
150 | # Echoes:
151 | # The package directory name
152 | #
153 | # Returns:
154 | # 0: success
155 | # 1: failure
156 |
157 | local url="$1"
158 |
159 | # The name of the package is the basename of the URL without the version number.
160 | local package_dir=$(basename "$url")
161 | package_dir="${package_dir%%-*}"
162 |
163 | echo "$package_dir"
164 | }
165 |
166 | function download_gdb_packages() {
167 | # Download and extract all required packages for building GDB.
168 | #
169 | # Parameters:
170 | # $1: packages directory
171 | #
172 | # Returns:
173 | # 0: success
174 | # 1: failure
175 |
176 | local packages_dir="$1"
177 | pushd "$packages_dir"
178 |
179 | # Run downloads in parallel
180 | download_pids=()
181 |
182 | fancy_title "Starting download of GDB packages"
183 |
184 | for url in "${SOURCE_URLS[@]}"; do
185 | package_dir=$(package_url_to_dir "$url")
186 | download_and_extract_package "$url" "$package_dir" &
187 | download_pids+=($!)
188 | done
189 |
190 | for pid in "${download_pids[@]}"; do
191 | wait "$pid"
192 | if [[ $? -ne 0 ]]; then
193 | popd
194 | return 1
195 | fi
196 | done
197 |
198 | fancy_title "Finished downloading GDB packages"
199 | popd
200 | }
201 |
202 | function main() {
203 | if [[ $# -ne 1 ]]; then
204 | >&2 echo "Usage: $0 <packages_dir>"
205 | exit 1
206 | fi
207 |
208 | download_gdb_packages "$1"
209 | if [[ $? -ne 0 ]]; then
210 | >&2 echo "Error: failed to download GDB packages"
211 | exit 1
212 | fi
213 | }
214 |
215 | main "$@"
216 |
--------------------------------------------------------------------------------
/src/compilation/frozen_python_modules.txt:
--------------------------------------------------------------------------------
1 | _aix_support
2 | antigravity
3 | argparse
4 | ast
5 | base64
6 | bdb
7 | bisect
8 | calendar
9 | cmd
10 | codeop
11 | code
12 |
13 | _collections_abc
14 | colorsys
15 | _compat_pickle
16 | compileall
17 | _compression
18 |
19 | configparser
20 | contextlib
21 | contextvars
22 | copy
23 | copyreg
24 | cProfile
25 | csv
26 |
27 | dataclasses
28 | datetime
29 |
30 | decimal
31 | difflib
32 | dis
33 |
34 |
35 |
36 | enum
37 | filecmp
38 | fileinput
39 | fnmatch
40 | fractions
41 | ftplib
42 | functools
43 | __future__
44 | genericpath
45 | getopt
46 | getpass
47 | gettext
48 | glob
49 | graphlib
50 | gzip
51 | hashlib
52 | heapq
53 | hmac
54 |
55 | imaplib
56 |
57 | inspect
58 | ipaddress
59 |
60 | keyword
61 | linecache
62 | locale
63 |
64 | lzma
65 | _markupbase
66 | mimetypes
67 | modulefinder
68 |
69 | netrc
70 | ntpath
71 | nturl2path
72 | numbers
73 | opcode
74 | operator
75 | optparse
76 | os
77 | _osx_support
78 | pathlib
79 | pdb
80 | <__phello__.**.*>
81 | pickle
82 | pickletools
83 | pkgutil
84 | platform
85 | plistlib
86 | poplib
87 | posixpath
88 | pprint
89 | profile
90 | pstats
91 | pty
92 | _py_abc
93 | pyclbr
94 | py_compile
95 | _pydatetime
96 | _pydecimal
97 | _pyio
98 | _pylong
99 | queue
100 | quopri
101 | random
102 |
103 | reprlib
104 | rlcompleter
105 | sched
106 | selectors
107 | shelve
108 | shlex
109 | shutil
110 | signal
111 | smtplib
112 | socket
113 | socketserver
114 | statistics
115 | stat
116 | stringprep
117 | string
118 | _strptime
119 | struct
120 | subprocess
121 | symtable
122 | sysconfig
123 | tabnanny
124 | tempfile
125 | textwrap
126 | this
127 | _threading_local
128 | threading
129 | timeit
130 | tokenize
131 | token
132 |
133 | traceback
134 | tracemalloc
135 | trace
136 | tty
137 | types
138 | typing
139 |
140 | uuid
141 | warnings
142 | wave
143 | weakref
144 | _weakrefset
145 | webbrowser
146 |
147 | zipapp
148 |
149 |
150 |
--------------------------------------------------------------------------------
/src/compilation/full_build_conf.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This file contains the build configuration for a full build.
4 | # In order to disable a component, simply change it to 0.
5 | # This allows us to fully control the binary extensions.
6 |
7 | full_build_cross_arch_debugging=1
8 | full_build_python_support=1
9 |
--------------------------------------------------------------------------------
/src/compilation/utils.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | GREEN="\033[0;32m"
4 | BOLD="\033[1m"
5 | RESET="\033[0m"
6 |
7 | function print_centered() {
8 | # Print a string centered in the terminal.
9 | #
10 | # Parameters:
11 | # $1: string
12 | # $2: line width
13 | #
14 | # Returns:
15 | # 0: success
16 |
17 | local string="$1"
18 | local length=${#string}
19 |
20 | printf "%*s\n" $((($2 + length) / 2)) "$string"
21 | }
22 |
23 | function fancy_title() {
24 | # Print a fancy title.
25 | # The title is centered and surrounded by a line of dashes.
26 | #
27 | # Parameters:
28 | # $1: title
29 | #
30 | # Returns:
31 | # 0: success
32 |
33 | local title="$1"
34 | local length=80
35 | local maximum_title_length=60
36 |
37 | # Set color to green and bold
38 | tput setaf 2
39 | tput bold
40 |
41 | printf "%${length}s\n" | tr ' ' -
42 |
43 | # Split the title into words and print them centered
44 | IFS=' ' read -r -a words <<< "$title"
45 |
46 | line=""
47 | for word in "${words[@]}"; do
48 | if [[ ${#line} -eq 0 ]]; then
49 | line="$word"
50 | elif [[ $(( ${#line} + ${#word} + 1 )) -gt $maximum_title_length ]]; then
51 | print_centered "$line" "$length"
52 | line="$word"
53 | else
54 | line="$line $word"
55 | fi
56 | done
57 |
58 | # Print the last line
59 | if [[ ${#line} -gt 0 ]]; then
60 | print_centered "$line" "$length"
61 | fi
62 |
63 | printf "%${length}s\n" | tr ' ' -
64 |
65 | # Reset color and style
66 | tput sgr0
67 | }
68 |
--------------------------------------------------------------------------------
/src/docker_utils/download_musl_toolchains.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3.12
2 |
3 | from typing import List
4 | from pathlib import Path
5 |
6 | import tarfile
7 | import tempfile
8 | import os
9 | import shutil
10 | import asyncio
11 |
12 | import aiohttp
13 |
14 | ARCHS = {
15 | "x86_64" : "https://more.musl.cc/11/x86_64-linux-musl/x86_64-linux-musl-cross.tgz",
16 | "arm" : "https://more.musl.cc/10/x86_64-linux-musl/arm-linux-musleabi-cross.tgz",
17 | "aarch64" : "https://more.musl.cc/11/x86_64-linux-musl/aarch64-linux-musl-cross.tgz",
18 | "powerpc" : "https://more.musl.cc/11/x86_64-linux-musl/powerpc-linux-musl-cross.tgz",
19 | "mips" : "https://more.musl.cc/11/x86_64-linux-musl/mips-linux-musl-cross.tgz",
20 | "mipsel" : "https://more.musl.cc/11/x86_64-linux-musl/mipsel-linux-musl-cross.tgz",
21 | }
22 | CHUNK_SIZE = 65536
23 | MUSL_TOOLCHAINS_DIR = Path("/musl-toolchains")
24 | ENTRYPOINT = Path("/entrypoint.sh")
25 |
26 | async def download_file(url: str, filename: str):
27 | async with aiohttp.ClientSession() as session:
28 | async with session.get(url) as response:
29 | with open(filename, 'wb') as f:
30 | async for data in response.content.iter_chunked(CHUNK_SIZE):
31 | f.write(data)
32 |
33 | def extract_tarfile(filename: str, dst: Path):
34 | with tarfile.open(filename, "r") as tar:
35 | tar.extractall(path=dst)
36 |
37 | async def download_tarfile(tar_url: str, extraction_dir: Path):
38 | with tempfile.NamedTemporaryFile() as named_tempfile:
39 | await download_file(tar_url, named_tempfile.name)
40 |
41 | # Tarfile extraction is still being done synchronously.
42 | extract_tarfile(named_tempfile.name, extraction_dir)
43 |
44 | print(f"Downloaded & Extracted: {tar_url!r}")
45 |
46 | async def download_archs() -> None:
47 | print(f"Downloading toolchains for architectures: {', '.join(ARCHS.keys())}")
48 |
49 | async with asyncio.TaskGroup() as tg:
50 | for url in ARCHS.values():
51 | tg.create_task(download_tarfile(url, MUSL_TOOLCHAINS_DIR))
52 |
53 | def add_to_path(curr_path: str, package_path: Path):
54 | new_path = str((package_path / "bin").resolve())
55 | if curr_path != "":
56 | return new_path + ":" + curr_path
57 | return new_path
58 |
59 | def main():
60 | os.mkdir(MUSL_TOOLCHAINS_DIR)
61 |
62 | asyncio.run(download_archs())
63 |
64 | updated_path = "$PATH"
65 | for musl_arch_dir in os.scandir(MUSL_TOOLCHAINS_DIR):
66 | updated_path = add_to_path(updated_path, Path(musl_arch_dir.path))
67 |
68 | # Fix the x86_64 dynamic loader if needed:
69 | # Unfortunately, the internal gdb build scripts build some binaries (that generate documentation)
70 | # in a dynamic manner.
71 | #
72 | # Because we may use a musl-based toolchain, this means that we need to set up the dynamic loader.
73 | # The fix may seem a little hacky, but it is simple, and is the best we can do.
74 | if "x86_64" in ARCHS:
75 | x86_toolchain_name = ARCHS["x86_64"].split("/")[-1].removesuffix(".tgz")
76 | x86_toolchain_path = MUSL_TOOLCHAINS_DIR / x86_toolchain_name
77 | x86_loader_path = x86_toolchain_path / "x86_64-linux-musl" / "lib" / "libc.so"
78 | shutil.copy2(x86_loader_path, "/lib/ld-musl-x86_64.so.1")
79 |
80 | # Create the entrypoint with the updated path.
81 | with open(ENTRYPOINT, mode="w") as f:
82 | f.write(
83 | f"""#!/usr/bin/env bash
84 | export PATH="$PATH:{updated_path}"
85 | exec "$@"
86 | """)
87 |
88 | # Make sure we can execute the entrypoint.
89 | os.chmod(ENTRYPOINT, 0o755)
90 |
91 | # Append the path to bash.bashrc so that other users will have these paths.
92 | with open("/etc/bash.bashrc", mode="a") as f:
93 | f.write(f"\nexport PATH=\"$PATH:{updated_path}\"")
94 |
95 |
96 | if __name__ == "__main__":
97 | main()
98 |
--------------------------------------------------------------------------------