9 Commits

Author SHA1 Message Date
89f092efb7 Compiling Pygments & dependencies in GDB
Added Pygments to build

This enables syntax highlighting in GDB.
2025-01-09 21:20:36 +02:00
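As a quick sanity check that a Python-enabled build picked up Pygments (a sketch; it assumes the unpacked static binary is ./gdb):

./gdb -batch \
    -ex 'python import pygments; print("pygments", pygments.__version__)' \
    -ex 'show style sources'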
f7e97cac7f Merge pull request #25 from guyush1/allow-build-with-and-without-python
build: Allow building gdb with and without python
2024-12-30 23:55:08 +02:00
6738cedefc automation: build python targets in pipeline ci-cd
Done using a 2D matrix over the build type (regular or with Python) and the target architecture.
2024-12-30 23:21:17 +02:00
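The matrix below expands to 12 jobs (2 build types x 6 architectures); a rough local equivalent, as a sketch only:

for build_type in build build-with-python; do
    for arch in x86_64 arm aarch64 powerpc mips mipsel; do
        make "${build_type}-${arch}" -j"$(($(nproc) + 1))"   # same invocation the CI job runs
    done
done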
5359ff1116 build: Allow building gdb with and without python 2024-12-30 23:21:17 +02:00
17346caf10 Merge pull request #23 from guyush1/reduce-static-python-size
reduce static gdb python size
2024-12-25 23:46:36 +02:00
aa49ade8d4 Strip the executables in order to reduce their size 2024-12-25 21:35:03 +02:00
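The stripping happens at link time via LDFLAGS="-s" (see the build.sh diff below). A minimal way to verify the effect on an installed artifact, assuming the build/artifacts/<arch>/ layout used by install_gdb:

ls -lh build/artifacts/x86_64/gdb    # noticeably smaller than an unstripped build
file build/artifacts/x86_64/gdb      # should report "stripped"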
1dfe3fa6ca Reduce static-gdb size by reducing python size
Updated the python submodule.
The newer submodule will create smaller static python libraries.
2024-12-25 21:35:03 +02:00
c44e67540a Added X64 build prefix
There's no real reason to assume the host machine is X64.
2024-12-21 13:50:39 +02:00
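A sketch of what the explicit prefix gives us: tools are resolved through the cross prefix rather than whatever the host happens to provide (the exact tool derivation in build.sh is not shown here; this is illustrative):

CROSS=x86_64-linux-gnu-
"${CROSS}gcc" --version      # x86_64-linux-gnu-gcc, regardless of the host architecture
"${CROSS}strip" --version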
a0ceeff014 Added parallel build to PR workflow
Using a matrix and job separation we can make the architectures compile
in parallel with each other, hopefully reducing the time required for builds
and also simplifying the process of building a single architecture.

A problem we encountered is that with Python the resulting packed
tars are very large: each release is on the order of tens of megabytes.
Using artifacts in our pipeline could easily push us past the maximum
size limit for free GitHub accounts (500 MB).
Because of this, we use the regular non-parallel pipeline for release
builds. Releasing the version from the same job the build was performed
in allows us to access the build files directly instead of going through
artifacts.

Separated the release and PR pipelines.
2024-12-21 13:50:39 +02:00
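As a rough illustration of the quota concern (paths follow the pack targets below; 500 MB is the free-account artifact limit mentioned above):

du -ch build/artifacts/gdb-static*.tar.gz | tail -n 1    # total size of all packed tarballs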
9 changed files with 282 additions and 44 deletions

.github/workflows/pr-pipeline.yaml (new file)

@ -0,0 +1,25 @@
name: gdb-static-pr-pipeline
on:
pull_request:
branches:
- '*'
jobs:
build:
strategy:
matrix:
build_type: ["build", "build-with-python"]
architecture: ["x86_64", "arm", "aarch64", "powerpc", "mips", "mipsel"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install dependencies
run: sudo apt-get install -y wget
- name: Build
run: make ${{ matrix.build_type }}-${{ matrix.architecture }} -j$((`nproc`+1))

Release pipeline workflow (modified)

@ -1,17 +1,15 @@
name: gdb-static-pipeline
name: gdb-static-release-pipeline
on:
pull_request:
branches:
- '*'
push:
tags:
- 'v*'
# Use a non-parallel, single-job pipeline because artifacts weigh too much. Instead,
# simply build the files in the same job that releases them.
jobs:
build:
build_and_publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
@ -26,14 +24,7 @@ jobs:
- name: Pack
run: make pack
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: gdb-static
path: build/artifacts/gdb-static*.tar.gz
- name: Publish release
if: github.event_name == 'push'
uses: softprops/action-gh-release@v2
with:
files: build/artifacts/gdb-static*.tar.gz
files: build/artifacts/gdb-static*.tar.gz
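With the push/tags 'v*' trigger above, a release build is started by pushing a version tag, e.g.:

git tag v1.2.3            # hypothetical version
git push origin v1.2.3    # kicks off gdb-static-release-pipeline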

.gitmodules (modified)

@ -6,3 +6,6 @@
path = src/submodule_packages/binutils-gdb
url = git@github.com:guyush1/binutils-gdb.git
branch = gdb-static
[submodule "src/submodule_packages/pygments"]
path = src/submodule_packages/pygments
url = git@github.com:pygments/pygments.git
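After pulling this change, the new submodule needs to be fetched, e.g.:

git submodule update --init --recursive src/submodule_packages/pygments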

Makefile (modified)

@ -1,17 +1,24 @@
ARCHS := x86_64 arm aarch64 powerpc mips mipsel
TARGETS := $(addprefix build-, $(ARCHS))
PYTHON_TARGETS := $(addprefix build-with-python-, $(ARCHS))
ALL_TARGETS := $(TARGETS) $(PYTHON_TARGETS)
PACK_TARGETS := $(addprefix pack-, $(ARCHS))
PYTHON_PACK_TARGETS := $(addprefix pack-with-python-, $(ARCHS))
ALL_PACK_TARGETS := $(PACK_TARGETS) $(PYTHON_PACK_TARGETS)
SUBMODULE_PACKAGES := $(wildcard src/submodule_packages/*)
BUILD_PACKAGES_DIR := "build/packages"
.PHONY: clean help download_packages build build-docker-image $(TARGETS) $(PACK_TARGETS)
.PHONY: clean help download_packages build build-docker-image $(ALL_TARGETS) $(ALL_PACK_TARGETS)
help:
@echo "Usage:"
@echo " make build"
@echo ""
@for target in $(TARGETS); do \
@for target in $(ALL_TARGETS); do \
echo " $$target"; \
done
@ -20,7 +27,7 @@ help:
build/build-docker-image.stamp: Dockerfile
mkdir -p build
docker build -t gdb-static .
docker buildx build --tag gdb-static .
touch build/build-docker-image.stamp
build-docker-image: build/build-docker-image.stamp
@ -40,19 +47,31 @@ symlink-git-packages: build/symlink-git-packages.stamp
download-packages: build/download-packages.stamp
build: $(TARGETS)
build: $(ALL_TARGETS)
$(TARGETS): build-%: symlink-git-packages download-packages build-docker-image
$(TARGETS): build-%:
@$(MAKE) _build-$*
$(PYTHON_TARGETS): build-with-python-%:
@WITH_PYTHON="--with-python" $(MAKE) _build-$*
_build-%: symlink-git-packages download-packages build-docker-image
mkdir -p build
docker run --user $(shell id -u):$(shell id -g) \
--rm --volume .:/app/gdb gdb-static env TERM=xterm-256color \
/app/gdb/src/compilation/build.sh $* /app/gdb/build/ /app/gdb/src
/app/gdb/src/compilation/build.sh $* /app/gdb/build/ /app/gdb/src $(WITH_PYTHON)
pack: $(PACK_TARGETS)
pack: $(ALL_PACK_TARGETS)
$(PACK_TARGETS): pack-%: build-%
if [ ! -f "build/artifacts/gdb-static-$*.tar.gz" ]; then \
tar -czf "build/artifacts/gdb-static-$*.tar.gz" -C "build/artifacts/$*" .; \
$(PACK_TARGETS): pack-%:
@$(MAKE) _pack-$*
$(PYTHON_PACK_TARGETS): pack-with-python-%:
@TAR_EXT="with-python-" ARTIFACT_EXT="_with_python" $(MAKE) _pack-$*
_pack-%: build-%
if [ ! -f "build/artifacts/gdb-static-$(TAR_EXT)$*.tar.gz" ]; then \
tar -czf "build/artifacts/gdb-static-$(TAR_EXT)$*.tar.gz" -C "build/artifacts/$*$(ARTIFACT_EXT)" .; \
fi
clean-git-packages:
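Usage example for the new pack targets; the output name follows the TAR_EXT/ARTIFACT_EXT scheme above:

make pack-with-python-aarch64
ls build/artifacts/gdb-static-with-python-aarch64.tar.gz    # packed from build/artifacts/aarch64_with_python/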

src/compilation/build.sh (modified)

@ -43,7 +43,7 @@ function set_compliation_variables() {
CROSS=mipsel-linux-gnu-
export HOST=mipsel-linux-gnu
elif [[ "$target_arch" == "x86_64" ]]; then
CROSS=""
CROSS=x86_64-linux-gnu-
export HOST=x86_64-linux-gnu
fi
@ -52,6 +52,9 @@ function set_compliation_variables() {
export CFLAGS="-O2"
export CXXFLAGS="-O2"
# Strip the binary to reduce its size.
export LDFLAGS="-s"
}
function set_ncurses_link_variables() {
@ -217,6 +220,8 @@ function build_python() {
# Parameters:
# $1: python package directory
# $2: target architecture
# $3: gdb's python module directory parent
# $4: pygments' top-level source dir.
#
# Echoes:
# The python build directory
@ -226,6 +231,8 @@ function build_python() {
# 1: failure
local python_dir="$1"
local target_arch="$2"
local gdb_python_parent="$3"
local pygments_source_dir="$4"
local python_lib_dir="$(realpath "$python_dir/build-$target_arch")"
echo "$python_lib_dir"
@ -254,6 +261,17 @@ function build_python() {
--disable-ipv6 \
--disable-shared
# Extract the regular standard library modules that are to be frozen and include the gdb and pygments custom libraries.
export EXTRA_FROZEN_MODULES="$(printf "%s" "$(< ${script_dir}/frozen_python_modules.txt)" | tr $'\n' ";")"
export EXTRA_FROZEN_MODULES="${EXTRA_FROZEN_MODULES};<gdb.**.*>: gdb = ${gdb_python_parent};<pygments.**.*>: pygments = ${pygments_source_dir}"
>&2 echo "Frozen Modules: ${EXTRA_FROZEN_MODULES}"
# Regenerate the frozen modules with the EXTRA_FROZEN_MODULES env variable set. Do it after
# configure because we need the generated Makefile's `regen-frozen` target.
>&2 python3.12 ../Tools/build/freeze_modules.py
>&2 make regen-frozen
# Build python after configuring the project and regenerating the frozen files.
>&2 make -j $(nproc)
if [[ $? -ne 0 ]]; then
return 1
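For reference, a standalone sketch of assembling the same frozen-module spec outside the container (repo-relative paths assumed; the in-tree build uses the /app/gdb/... equivalents):

script_dir=src/compilation
gdb_python_parent=src/submodule_packages/binutils-gdb/gdb/python/lib
pygments_source_dir=src/submodule_packages/pygments
EXTRA_FROZEN_MODULES="$(printf "%s" "$(< "${script_dir}/frozen_python_modules.txt")" | tr '\n' ';')"
EXTRA_FROZEN_MODULES="${EXTRA_FROZEN_MODULES};<gdb.**.*>: gdb = ${gdb_python_parent};<pygments.**.*>: pygments = ${pygments_source_dir}"
export EXTRA_FROZEN_MODULES
echo "${EXTRA_FROZEN_MODULES}" | tr ';' '\n' | tail -n 3    # show the gdb and pygments entries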
@ -332,6 +350,7 @@ function build_gdb() {
# $3: libiconv prefix
# $4: libgmp prefix
# $5: libmpfr prefix
# $6: whether to build with python or not
#
# Echoes:
# The gdb build directory
@ -345,7 +364,15 @@ function build_gdb() {
local libiconv_prefix="$3"
local libgmp_prefix="$4"
local libmpfr_prefix="$5"
local gdb_build_dir="$(realpath "$gdb_dir/build-$target_arch")"
local with_python="$6"
if [[ "$with_python" == "yes" ]]; then
local python_flag="--with-python=/app/gdb/build/packages/cpython-static/build-$target_arch/bin/python3-config"
local gdb_build_dir="$(realpath "$gdb_dir/build-${target_arch}_with_python")"
else
local python_flag="--without-python"
local gdb_build_dir="$(realpath "$gdb_dir/build-${target_arch}")"
fi
echo "$gdb_build_dir"
mkdir -p "$gdb_build_dir"
@ -360,7 +387,7 @@ function build_gdb() {
>&2 fancy_title "Building gdb for $target_arch"
../configure -C --enable-static --with-static-standard-libraries --disable-inprocess-agent \
--enable-tui --with-python=/app/gdb/build/packages/cpython-static/build-$target_arch/bin/python3-config \
--enable-tui "$python_flag" \
"--with-libiconv-prefix=$libiconv_prefix" --with-libiconv-type=static \
"--with-gmp=$libgmp_prefix" \
"--with-mpfr=$libmpfr_prefix" \
@ -387,6 +414,7 @@ function install_gdb() {
# $1: gdb build directory
# $2: artifacts directory
# $3: target architecture
# $4: whether gdb was built with or without python
#
# Returns:
# 0: success
@ -395,15 +423,22 @@ function install_gdb() {
local gdb_build_dir="$1"
local artifacts_dir="$2"
local target_arch="$3"
local with_python="$4"
if [[ -d "$artifacts_dir/$target_arch" && -n "$(ls -A "$artifacts_dir/$target_arch")" ]]; then
if [[ "$with_python" == "yes" ]]; then
local artifacts_location="$artifacts_dir/${target_arch}_with_python"
else
local artifacts_location="$artifacts_dir/${target_arch}"
fi
if [[ -d "$artifacts_location" && -n "$(ls -A "$artifacts_location")" ]]; then
>&2 echo "Skipping install: gdb already installed for $target_arch"
return 0
fi
temp_artifacts_dir="$(mktemp -d)"
mkdir -p "$artifacts_dir/$target_arch"
mkdir -p "$artifacts_location"
make -C "$gdb_build_dir" install "DESTDIR=$temp_artifacts_dir" 1>&2
if [[ $? -ne 0 ]]; then
@ -412,7 +447,7 @@ function install_gdb() {
fi
while read file; do
cp "$file" "$artifacts_dir/$target_arch/"
cp "$file" "$artifacts_location/"
done < <(find "$temp_artifacts_dir/usr/local/bin" -type f -executable)
rm -rf "$temp_artifacts_dir"
@ -426,8 +461,9 @@ function build_and_install_gdb() {
# $2: libiconv prefix
# $3: libgmp prefix
# $4: libmpfr prefix
# $5: install directory
# $6: target architecture
# $5: whether to build with python or not
# $6: install directory
# $7: target architecture
#
# Returns:
# 0: success
@ -437,15 +473,16 @@ function build_and_install_gdb() {
local libiconv_prefix="$2"
local libgmp_prefix="$3"
local libmpfr_prefix="$4"
local artifacts_dir="$5"
local target_arch="$6"
local with_python="$5"
local artifacts_dir="$6"
local target_arch="$7"
gdb_build_dir="$(build_gdb "$gdb_dir" "$target_arch" "$libiconv_prefix" "$libgmp_prefix" "$libmpfr_prefix")"
gdb_build_dir="$(build_gdb "$gdb_dir" "$target_arch" "$libiconv_prefix" "$libgmp_prefix" "$libmpfr_prefix" "$with_python")"
if [[ $? -ne 0 ]]; then
return 1
fi
install_gdb "$gdb_build_dir" "$artifacts_dir" "$target_arch"
install_gdb "$gdb_build_dir" "$artifacts_dir" "$target_arch" "$with_python"
if [[ $? -ne 0 ]]; then
return 1
fi
@ -458,10 +495,12 @@ function build_gdb_with_dependencies() {
# $1: target architecture
# $2: build directory
# $3: src directory
# $4: whether to build gdb with python or not
local target_arch="$1"
local build_dir="$2"
local source_dir="$3"
local with_python="$4"
local packages_dir="$build_dir/packages"
local artifacts_dir="$build_dir/artifacts"
@ -493,15 +532,20 @@ function build_gdb_with_dependencies() {
fi
set_ncurses_link_variables "$ncursesw_build_dir"
python_build_dir="$(build_python "$packages_dir/cpython-static" "$target_arch")"
if [[ $? -ne 0 ]]; then
return 1
if [[ "$with_python" == "yes" ]]; then
local gdb_python_dir="$packages_dir/binutils-gdb/gdb/python/lib/"
local pygments_source_dir="$packages_dir/pygments/"
local python_build_dir="$(build_python "$packages_dir/cpython-static" "$target_arch" "$gdb_python_dir" "$pygments_source_dir")"
if [[ $? -ne 0 ]]; then
return 1
fi
fi
build_and_install_gdb "$packages_dir/binutils-gdb" \
"$iconv_build_dir/lib/.libs/" \
"$gmp_build_dir/.libs/" \
"$mpfr_build_dir/src/.libs/" \
"$with_python" \
"$artifacts_dir" \
"$target_arch"
if [[ $? -ne 0 ]]; then
@ -510,12 +554,17 @@ function build_gdb_with_dependencies() {
}
function main() {
if [[ $# -ne 3 ]]; then
>&2 echo "Usage: $0 <target_arch> <build_dir> <src_dir>"
if [[ $# -lt 3 ]]; then
>&2 echo "Usage: $0 <target_arch> <build_dir> <src_dir> [--with-python]"
exit 1
fi
build_gdb_with_dependencies "$1" "$2" "$3"
local with_python="no"
if [[ "$4" == "--with-python" ]]; then
with_python="yes"
fi
build_gdb_with_dependencies "$1" "$2" "$3" "$with_python"
if [[ $? -ne 0 ]]; then
>&2 echo "Error: failed to build gdb with dependencies"
exit 1
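Example invocations matching the updated usage string (paths are the in-container ones from the Makefile's docker run line):

/app/gdb/src/compilation/build.sh aarch64 /app/gdb/build/ /app/gdb/src                  # without Python
/app/gdb/src/compilation/build.sh aarch64 /app/gdb/build/ /app/gdb/src --with-python    # with Python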

src/compilation/frozen_python_modules.txt (new file)

@ -0,0 +1,150 @@
abc
_aix_support
antigravity
argparse
ast
base64
bdb
bisect
calendar
cmd
codecs
codeop
code
<collections.**.*>
_collections_abc
colorsys
_compat_pickle
compileall
_compression
<concurrent.**.*>
configparser
contextlib
contextvars
copy
copyreg
cProfile
csv
dataclasses
datetime
<dbm.**.*>
decimal
difflib
dis
<encodings.**.*>
<ensurepip.**.*>
enum
filecmp
fileinput
fnmatch
fractions
ftplib
functools
__future__
genericpath
getopt
getpass
gettext
glob
graphlib
gzip
hashlib
heapq
hmac
imaplib
<importlib.**.*>
inspect
io
ipaddress
<json.**.*>
keyword
linecache
locale
<logging.**.*>
lzma
_markupbase
mimetypes
modulefinder
<multiprocessing.**.*>
netrc
ntpath
nturl2path
numbers
opcode
operator
optparse
os
_osx_support
pathlib
pdb
<__phello__.**.*>
pickle
pickletools
pkgutil
platform
plistlib
poplib
posixpath
pprint
profile
pstats
pty
_py_abc
pyclbr
py_compile
_pydatetime
_pydecimal
_pyio
_pylong
queue
quopri
random
<re.**.*>
reprlib
rlcompleter
sched
selectors
shelve
shlex
shutil
signal
smtplib
socket
socketserver
statistics
stat
stringprep
string
_strptime
struct
subprocess
symtable
sysconfig
tabnanny
tempfile
textwrap
this
_threading_local
threading
timeit
tokenize
token
<tomllib.**.*>
traceback
tracemalloc
trace
tty
types
typing
uuid
warnings
wave
weakref
_weakrefset
webbrowser
<wsgiref.**.*>
zipapp
<zipfile.**.*>
<zoneinfo.**.*>
<email.**.*>
<urllib.**.*>
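A hypothetical spot check that a listed module really got frozen into the static interpreter (the interpreter path is an assumption based on the build-<arch> prefix used for python3-config above):

build/packages/cpython-static/build-x86_64/bin/python3 -c \
    'import argparse; print(argparse.__spec__.origin)'    # prints "frozen" for a frozen module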