From a20aee9866c21dfb29fd3d5796a72819a8cbab23 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Wed, 20 Aug 2025 21:32:55 -0700 Subject: [PATCH 01/14] Fix clear PPA input Signed-off-by: Simon Beaudoin --- .../qcom-build-debian-package-reusable-workflow.yml | 6 +++--- ubuntu/deb_abi_checker.py | 12 ++++++++---- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml index 3e4392c..b01428b 100644 --- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml +++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml @@ -29,12 +29,12 @@ env: # version of the package in the PPA INITIAL_UPLOAD_TO_PPA: false jobs: - qcom-build-debian-package: + build-debian-package-reusable-workflow: runs-on: [self-hosted, Linux, ARM64] steps: - name: Clear PPA if requested - if: ${{ inputs.clear_ppa == 'true' }} + if: ${{ inputs.clear_ppa == true }} run: | rm -rf ${{ env.PPA_HTTP_DIRECTORY }}/* echo "Directory ${{ env.PPA_HTTP_DIRECTORY }} removed" @@ -120,7 +120,7 @@ jobs: # Bit 4 (16): RETURN_PPA_ERROR if (( RET == 0 )); then - echo "ABI check passed" + echo "ABI check returned NO_DIFF" fi if (( RET & 1 )); then diff --git a/ubuntu/deb_abi_checker.py b/ubuntu/deb_abi_checker.py index 4e98ad2..1a826ce 100755 --- a/ubuntu/deb_abi_checker.py +++ b/ubuntu/deb_abi_checker.py @@ -512,6 +512,8 @@ def single_package_abi_checker(repo_package_dir, new_deb_path, new_dev_path, new_ddeb_path, report_dir, include_non_reachable_types=True) + return_value = 0 + # The return value between abidiff and abipkgdiff has the same meaning, so we can use the same analysis if abidiff_result != 0: @@ -539,10 +541,12 @@ def single_package_abi_checker(repo_package_dir, if bit3: result.abi_pkg_diff_result = "COMPATIBLE-DIFF" logger.warning(f"[ABI_CHECKER]: abipkgdiff detected ABI changes") + return_value = RETURN_ABI_COMPATIBLE_DIFF if bit4: # if bit 4 is set, bit 3 must be too, so this fallthrough is ok result.abi_pkg_diff_result = "INCOMPATIBLE-DIFF" logger.warning(f"[ABI_CHECKER]: abipkgdiff detected ABI ***INCOMPATIBLE*** changes.") + return_value = RETURN_ABI_INCOMPATIBLE_DIFF # Print the content of all the files in 'report_dir' for filename in os.listdir(report_dir): @@ -552,11 +556,9 @@ def single_package_abi_checker(repo_package_dir, logger.debug(f"Content of {filename}:") logger.warning(file.read()) else: - # No ABI DIFF result.abi_pkg_diff_result = "NO-DIFF" - logger.info(f"[ABI_CHECKER]/{package_name}: abipkgdiff did not find any differences between old and new packages") - + return_value = RETURN_ABI_NO_DIFF msg = "[ABI_CHECKER]/{package_name}: Although, no {pkg} was found for the {version} package, interpret the results with caution" @@ -582,7 +584,9 @@ def single_package_abi_checker(repo_package_dir, logger.debug(f"[ABI_CHECKER]: Removing temporary directory {abi_check_temp_dir}") shutil.rmtree(abi_check_temp_dir) - return analyze_abi_diff_result(old_version, new_version, abidiff_result) + analyze_abi_diff_result(old_version, new_version, abidiff_result) + + return return_value def extract_deb(deb_path, dev_path, ddeb_path, extract_dir): """Extract the content of a .deb package and its .ddeb to a specified directory.""" From 859f10ba454c88adfcc6b4f2cca39037f16ae795 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Wed, 20 Aug 2025 22:24:54 -0700 Subject: [PATCH 02/14] Remove function to update permission This is an artefact from when it was necessary to tun 
build.py with root permission because a chroot was created each time. Chroots are now reused, so root shall never be used anymore, therefore the output folders won't ever need to be chmod'ed anymore. Signed-off-by: Simon Beaudoin --- ubuntu/build.py | 19 +---------------- ubuntu/helpers.py | 54 ----------------------------------------------- 2 files changed, 1 insertion(+), 72 deletions(-) diff --git a/ubuntu/build.py b/ubuntu/build.py index 02b8c60..562c4c2 100755 --- a/ubuntu/build.py +++ b/ubuntu/build.py @@ -32,7 +32,7 @@ from build_deb import PackageBuilder, PackageNotFoundError, PackageBuildError from constants import * from datetime import date -from helpers import create_new_directory, umount_dir, check_if_root, check_and_append_line_in_file, cleanup_file, cleanup_directory, change_folder_perm_read_write, print_build_logs, start_local_apt_server, build_deb_package_gz, pull_debs_wget +from helpers import create_new_directory, umount_dir, check_if_root, check_and_append_line_in_file, cleanup_file, cleanup_directory, print_build_logs, start_local_apt_server, build_deb_package_gz, pull_debs_wget from deb_organize import generate_manifest_map from pack_deb import PackagePacker from flat_meta import create_flat_meta @@ -391,22 +391,5 @@ def parse_arguments(): logger.error(e) ERROR_EXIT_BUILD = True -# Change permissions for output directories if cleanup is enabled -if IS_CLEANUP_ENABLED: - error_during_cleanup = False - - try: - change_folder_perm_read_write(OSS_DEB_OUT_DIR) - change_folder_perm_read_write(PROP_DEB_OUT_DIR) - change_folder_perm_read_write(DEB_OUT_DIR) - change_folder_perm_read_write(OUT_DIR) - except Exception: - error_during_cleanup = True - - finally: - if error_during_cleanup: - logger.critical("Cleanup failed. Exiting.") - exit(1) - logger.info("Script execution sucessful") exit(0) \ No newline at end of file diff --git a/ubuntu/helpers.py b/ubuntu/helpers.py index a9ec028..c5e6cc7 100644 --- a/ubuntu/helpers.py +++ b/ubuntu/helpers.py @@ -301,60 +301,6 @@ def umount_dir(MOUNT_DIR, UMOUNT_HOST_FS=False): if result.returncode != 0 and result.returncode != 32: logger.error(f"Failed to unmount {MOUNT_DIR}: {result.stderr}") -def change_folder_perm_read_write(DIR): - """ - Changes permissions of a directory and its contents to allow read and write access. - - Args: - ----- - - DIR (str): The path to the directory whose permissions are to be changed. - - Raises: - ------- - - Exception: If an error occurs while changing permissions. 
- """ - try: - # Change permissions for the root folder itself - current_permissions = os.stat(DIR).st_mode - new_permissions = current_permissions - - if current_permissions & stat.S_IWUSR: - new_permissions |= stat.S_IWOTH - - if current_permissions & stat.S_IXUSR: - new_permissions |= stat.S_IXOTH - - os.chmod(DIR, new_permissions) - - for root, dirs, files in os.walk(DIR): - for dir_ in dirs: - dir_path = os.path.join(root, dir_) - current_permissions = os.stat(dir_path).st_mode - new_permissions = current_permissions - - if current_permissions & stat.S_IWUSR: - new_permissions |= stat.S_IWOTH - if current_permissions & stat.S_IXUSR: - new_permissions |= stat.S_IXOTH - - os.chmod(dir_path, new_permissions) - - for file in files: - file_path = os.path.join(root, file) - current_permissions = os.stat(file_path).st_mode - new_permissions = current_permissions - - if current_permissions & stat.S_IWUSR: - new_permissions |= stat.S_IWOTH - if current_permissions & stat.S_IXUSR: - new_permissions |= stat.S_IXOTH - - os.chmod(file_path, new_permissions) - - logger.info(f"Permissions updated conditionally for all folders and files in {DIR}.") - except Exception as e: - logger.error(f"Error while changing permissions: {e}") - def print_build_logs(directory): """ Prints the contents of build log files in a specified directory. From 7e600ffd7e915d85c61c93a511ed7dabe75690a6 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Wed, 20 Aug 2025 23:40:14 -0700 Subject: [PATCH 03/14] helpers.py : Remove check_if_root() Remove check_if_root() altogether. Root was only necessary when creating the chroot, and now the chroot is re-used every run so no need to create a new one each time. Also, the step of creating a chroot initially will be split into a separate script Signed-off-by: Simon Beaudoin --- ubuntu/build.py | 7 +------ ubuntu/helpers.py | 10 ---------- 2 files changed, 1 insertion(+), 16 deletions(-) diff --git a/ubuntu/build.py b/ubuntu/build.py index 562c4c2..fa95b1d 100755 --- a/ubuntu/build.py +++ b/ubuntu/build.py @@ -32,18 +32,13 @@ from build_deb import PackageBuilder, PackageNotFoundError, PackageBuildError from constants import * from datetime import date -from helpers import create_new_directory, umount_dir, check_if_root, check_and_append_line_in_file, cleanup_file, cleanup_directory, print_build_logs, start_local_apt_server, build_deb_package_gz, pull_debs_wget +from helpers import create_new_directory, umount_dir, check_and_append_line_in_file, cleanup_file, cleanup_directory, print_build_logs, start_local_apt_server, build_deb_package_gz, pull_debs_wget from deb_organize import generate_manifest_map from pack_deb import PackagePacker from flat_meta import create_flat_meta from deb_abi_checker import multiple_repo_deb_abi_checker from color_logger import logger -# Check for root privileges -if not check_if_root(): - logger.critical('Please run this script as root user.') - #exit(1) - DIST = "noble" ARCH = "arm64" CHROOT_SUFFIX = "ubuntu" diff --git a/ubuntu/helpers.py b/ubuntu/helpers.py index c5e6cc7..eff2380 100644 --- a/ubuntu/helpers.py +++ b/ubuntu/helpers.py @@ -24,16 +24,6 @@ from constants import TERMINAL, HOST_FS_MOUNT from color_logger import logger -def check_if_root() -> bool: - """ - Checks if the script is being run with root privileges. - - Returns: - -------- - - bool: True if the script is run as root, False otherwise. 
- """ - return os.geteuid() == 0 - def check_and_append_line_in_file(file_path, line_to_check, append_if_missing=False): """ Checks if a specific line exists in a file and appends it if it is missing. From 7be4a3d27de7ed2c364304def19e117a18fad8f5 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Wed, 20 Aug 2025 23:49:32 -0700 Subject: [PATCH 04/14] prep_chroot_env.py : Separate the chroot creation into another script Chroot creation is now a dedicated command-line invokable script. This removes the need for ever having to use root to run the package creation If, and only if, the chroot does not already exist will root be necessary. It will also only be necessary for THIS script, and nothing else, which is much safer and prevents everything created to be owned by root which is anoying. Signed-off-by: Simon Beaudoin --- ...build-debian-package-reusable-workflow.yml | 7 +- ubuntu/build.py | 3 +- ubuntu/build_deb.py | 54 +--------- ubuntu/build_dtb.py | 8 +- ubuntu/build_kernel.py | 8 +- ubuntu/deb_abi_checker.py | 1 + ubuntu/pack_deb.py | 5 +- ubuntu/prep_chroot_env.py | 100 ++++++++++++++++++ 8 files changed, 114 insertions(+), 72 deletions(-) create mode 100755 ubuntu/prep_chroot_env.py diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml index b01428b..4c35a12 100644 --- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml +++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml @@ -77,7 +77,7 @@ jobs: gzip -9 < ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages > ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages.gz fi - - name : Checkout Repository With Submodules + - name: Checkout Repository With Submodules uses: actions/checkout@v4 # Using public GitHub action to checkout repo, see https://github.com/actions/checkout with: clean: true # Make sure the workspace is cleaned up from previous runs @@ -98,8 +98,11 @@ jobs: cp -r debian/ WORKSPACE/sources/${{ github.event.repository.name }} cp -r qcom-example-package-source WORKSPACE/sources/${{ github.event.repository.name }} + - name: Validate Or Create Chroot Environment + run: ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/prep_chroot_env.py --arch arm64 --os-codename noble --suffix ubuntu + - name: Build Debian Package - run : ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/build.py --workspace ./WORKSPACE --gen-debians --no-abi-check + run: ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/build.py --workspace ./WORKSPACE --gen-debians --no-abi-check - name: ABI Check run: | diff --git a/ubuntu/build.py b/ubuntu/build.py index fa95b1d..282e6e7 100755 --- a/ubuntu/build.py +++ b/ubuntu/build.py @@ -43,7 +43,6 @@ ARCH = "arm64" CHROOT_SUFFIX = "ubuntu" CHROOT_NAME = DIST + "-" + ARCH + "-" + CHROOT_SUFFIX -CHROOT_DIR = "/srv/chroot" def parse_arguments(): @@ -259,7 +258,7 @@ def parse_arguments(): DEBIAN_INSTALL_DIR_APT = build_deb_package_gz(DEBIAN_INSTALL_DIR, start_server=True) # Initialize the PackageBuilder to load packages - builder = PackageBuilder(CHROOT_NAME, CHROOT_DIR, SOURCES_DIR, APT_SERVER_CONFIG, MANIFEST_MAP, DEB_OUT_TEMP_DIR, DEB_OUT_DIR, DEB_OUT_DIR_APT, DEBIAN_INSTALL_DIR_APT, IS_CLEANUP_ENABLED, IS_PREPARE_SOURCE) + builder = PackageBuilder(CHROOT_NAME, SOURCES_DIR, APT_SERVER_CONFIG, MANIFEST_MAP, DEB_OUT_TEMP_DIR, DEB_OUT_DIR, DEB_OUT_DIR_APT, DEBIAN_INSTALL_DIR_APT, IS_CLEANUP_ENABLED, IS_PREPARE_SOURCE) builder.load_packages() # Build a specific 
package if provided, otherwise build all packages diff --git a/ubuntu/build_deb.py b/ubuntu/build_deb.py index 1581b11..251deea 100644 --- a/ubuntu/build_deb.py +++ b/ubuntu/build_deb.py @@ -19,7 +19,7 @@ from queue import Queue from collections import defaultdict, deque from constants import * -from helpers import check_if_root, run_command, check_and_append_line_in_file, create_new_directory, build_deb_package_gz, run_command_for_result, print_build_logs +from helpers import run_command, check_and_append_line_in_file, create_new_directory, build_deb_package_gz, run_command_for_result, print_build_logs from deb_organize import search_manifest_map_for_path from color_logger import logger @@ -36,16 +36,15 @@ class PackageBuildError(Exception): pass class PackageBuilder: - def __init__(self, CHROOT_NAME, CHROOT_DIR, SOURCE_DIR, APT_SERVER_CONFIG, \ + def __init__(self, CHROOT_NAME, SOURCE_DIR, APT_SERVER_CONFIG, \ MANIFEST_MAP=None, DEB_OUT_TEMP_DIR=None, DEB_OUT_DIR=None, DEB_OUT_DIR_APT=None, \ - DEBIAN_INSTALL_DIR_APT=None, IS_CLEANUP_ENABLED=True, IS_PREPARE_SOURCE=False, DIST= "noble", ARCH="arm64", CHROOT_SUFFIX="ubuntu"): + DEBIAN_INSTALL_DIR_APT=None, IS_CLEANUP_ENABLED=True, IS_PREPARE_SOURCE=False): """ Initializes the PackageBuilder instance. Args: ----- - CHROOT_NAME (str): The name of the chroot environment. - - CHROOT_DIR (str): The directory where the chroot environment is found, or created if it doesnt already exist. - SOURCE_DIR (str): The source directory containing the packages to build. - APT_SERVER_CONFIG (list): Configuration for the APT server. - MANIFEST_MAP (dict, optional): A mapping of package paths to their properties. @@ -57,14 +56,9 @@ def __init__(self, CHROOT_NAME, CHROOT_DIR, SOURCE_DIR, APT_SERVER_CONFIG, \ - IS_PREPARE_SOURCE (bool, optional): If True, prepares the source directory before building. Defaults to False. """ self.CHROOT_NAME = CHROOT_NAME - self.CHROOT_DIR = CHROOT_DIR - self.DIST = DIST - self.ARCH = ARCH - self.CHROOT_SUFFIX = CHROOT_SUFFIX self.SOURCE_DIR = SOURCE_DIR self.DEB_OUT_DIR = DEB_OUT_DIR self.APT_SERVER_CONFIG = APT_SERVER_CONFIG - self.CHROOT_NAME = CHROOT_NAME self.MANIFEST_MAP = MANIFEST_MAP self.DEB_OUT_TEMP_DIR = DEB_OUT_TEMP_DIR self.IS_CLEANUP_ENABLED = IS_CLEANUP_ENABLED @@ -72,50 +66,8 @@ def __init__(self, CHROOT_NAME, CHROOT_DIR, SOURCE_DIR, APT_SERVER_CONFIG, \ self.DEB_OUT_DIR_APT = DEB_OUT_DIR_APT self.DEBIAN_INSTALL_DIR_APT = DEBIAN_INSTALL_DIR_APT self.IS_PREPARE_SOURCE = IS_PREPARE_SOURCE - self.DEBIAN_MIRROR = "http://ports.ubuntu.com" self.packages = {} - self.generate_schroot_config() - - def generate_schroot_config(self): - """ - Generates the schroot configuration for the specified chroot environment. - - Raises: - ------- - - Exception: If there is an error creating the schroot environment. - """ - - logger.debug(f"Checking if chroot container '{self.CHROOT_NAME}' is already registered") - - cmd = f"schroot -l | grep chroot:{self.CHROOT_NAME}" - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - - if result.returncode == 0: - logger.info(f"Schroot container {self.CHROOT_NAME} already exists. 
Skipping creation.") - return - - logger.warning(f"Schroot container '{self.CHROOT_NAME}' does not exist, creating it for the first time.") - logger.warning(f"The chroot will be created in {self.CHROOT_DIR}/{self.CHROOT_NAME}") - logger.warning(f"Its config will be stored as /etc/schroot/chroot.d/{self.CHROOT_NAME}.conf") - - # this command creates a chroot environment that will be named "{DIST}-{ARCH}-{SUFFIX}" - # We supply our own suffix, otherwise sbuild will use 'sbuild' - cmd = f"sbuild-createchroot --arch={self.ARCH}" \ - f" --chroot-suffix=-{self.CHROOT_SUFFIX}" \ - f" --components=main,universe" \ - f" {self.DIST}" \ - f" {self.CHROOT_DIR}/{self.CHROOT_NAME}" \ - f" {self.DEBIAN_MIRROR}" - - logger.debug(f"Creating schroot environment with command: {cmd}") - - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - - if result.returncode != 0: - raise Exception(f"Error creating schroot environment: {result.stderr}") - else: - logger.info(f"Schroot environment {self.CHROOT_NAME} created successfully.") def load_packages(self): """Load package metadata from build_config.py and fetch dependencies from control files.""" diff --git a/ubuntu/build_dtb.py b/ubuntu/build_dtb.py index b97607b..2aa755b 100644 --- a/ubuntu/build_dtb.py +++ b/ubuntu/build_dtb.py @@ -16,7 +16,7 @@ import shlex import tempfile import subprocess -from helpers import cleanup_directory, check_if_root +from helpers import cleanup_directory from color_logger import logger def build_dtb(deb_dir, deb_file_regex, combined_dtb_filename, out_dir): @@ -35,12 +35,8 @@ def build_dtb(deb_dir, deb_file_regex, combined_dtb_filename, out_dir): Raises: ------- - - SystemExit: If the script is not run as root, if no matching .deb files are found, - or if there are errors during extraction or processing. + - SystemExit: If no matching .deb files are found, or if there are errors during extraction or processing. """ - if not check_if_root(): - logger.error('Please run this script as root user.') - exit(1) combined_dtb_bin_path = os.path.join(out_dir, 'dtb.bin') if os.path.exists(combined_dtb_bin_path): diff --git a/ubuntu/build_kernel.py b/ubuntu/build_kernel.py index 4dacf3f..da3f9fb 100644 --- a/ubuntu/build_kernel.py +++ b/ubuntu/build_kernel.py @@ -10,7 +10,7 @@ logging, and file management. """ -from helpers import check_if_root, check_and_append_line_in_file, set_env, create_new_directory +from helpers import check_and_append_line_in_file, set_env, create_new_directory from color_logger import logger import os import shutil @@ -28,13 +28,7 @@ def build_kernel(source_dir: str): _______ - SystemExit: If not run as root or if any build step fails - Note: - _____ - - This function must be executed with root privileges """ - if not check_if_root(): - logger.error('Please run this script as root user.') - exit(1) set_env('ARCH', 'arm64') set_env('CROSS_COMPILE', 'aarch64-linux-gnu-') diff --git a/ubuntu/deb_abi_checker.py b/ubuntu/deb_abi_checker.py index 1a826ce..fdd278a 100755 --- a/ubuntu/deb_abi_checker.py +++ b/ubuntu/deb_abi_checker.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 + # Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. 
# # SPDX-License-Identifier: BSD-3-Clause-Clear diff --git a/ubuntu/pack_deb.py b/ubuntu/pack_deb.py index 54907dd..71a77b9 100644 --- a/ubuntu/pack_deb.py +++ b/ubuntu/pack_deb.py @@ -21,7 +21,7 @@ from queue import Queue from collections import defaultdict, deque from constants import * -from helpers import create_new_file, check_if_root, run_command, create_new_directory, run_command_for_result, mount_img, umount_dir, cleanup_file, build_deb_package_gz, parse_debs_manifest +from helpers import create_new_file, run_command, create_new_directory, run_command_for_result, mount_img, umount_dir, cleanup_file, build_deb_package_gz, parse_debs_manifest from deb_organize import search_manifest_map_for_path from color_logger import logger @@ -44,9 +44,6 @@ def __init__(self, MOUNT_DIR, IMAGE_TYPE, VARIANT, OUT_DIR, OUT_SYSTEM_IMG, APT_ - IS_CLEANUP_ENABLED (bool): Flag to enable cleanup of temporary files. """ - if not check_if_root(): - logger.error('Please run this script as root user.') - exit(1) self.cur_file = os.path.dirname(os.path.realpath(__file__)) if not len(os.listdir(MOUNT_DIR)) == 0: diff --git a/ubuntu/prep_chroot_env.py b/ubuntu/prep_chroot_env.py new file mode 100755 index 0000000..d35a37b --- /dev/null +++ b/ubuntu/prep_chroot_env.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python3 + +# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. +# +# SPDX-License-Identifier: BSD-3-Clause-Clear + +""" +prep_chroot_env.py + +Checks that the resulting chroot is present. Prepares it otherwise. + +Running as root is necessary to create the chroot. +""" + +import os +import sys +import argparse +import subprocess + +from color_logger import logger + +def parse_arguments(): + parser = argparse.ArgumentParser(description="Prepares a chroot environment") + parser.add_argument("--arch", + required=False, + default="arm64", + help="The architecture of the chroot environment. (default: arm64)") + + parser.add_argument("--os-codename", + required=True, + help="The codename of the OS, e.g. noble, bionic, focal, etc.") + + parser.add_argument("--suffix", + required=False, + default="ubuntu", + help="The suffix for the chroot name. (default: ubuntu)") + + args = parser.parse_args() + + return args + +def main(): + + args = parse_arguments() + + logger.debug(f"args: {args}") + + OS_CODENAME = args.os_codename + ARCH = args.arch + SUFFIX = args.suffix + CHROOT_NAME = OS_CODENAME + "-" + ARCH + "-" + SUFFIX + + CHROOT_DIR = "/srv/chroot" + DEBIAN_MIRROR = "http://ports.ubuntu.com" + + logger.debug(f"Checking if chroot container '{CHROOT_NAME}' is already registered") + + cmd = f"schroot -l | grep chroot:{CHROOT_NAME}" + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + + if result.returncode == 0: + logger.info(f"Schroot container {CHROOT_NAME} already exists. Skipping creation.") + sys.exit(0) + + logger.warning(f"Schroot container '{CHROOT_NAME}' does not exist, creating it for the first time.") + + if os.geteuid() != 0: + logger.critical("Creating a schroot environment requires root privileges") + logger.critical("Please use sudo. 
Aborting.") + sys.exit(1) + + logger.warning(f"The chroot will be created in {CHROOT_DIR}/{CHROOT_NAME}") + logger.warning(f"Its config will be stored as /etc/schroot/chroot.d/{CHROOT_NAME}-xxxx") + + # this command creates a chroot environment that will be named "{DIST}-{ARCH}-{SUFFIX}" + # We supply our own suffix, otherwise sbuild will use 'sbuild' + cmd = f"sbuild-createchroot --arch={ARCH}" \ + f" --chroot-suffix=-{SUFFIX}" \ + f" --components=main,universe" \ + f" {OS_CODENAME}" \ + f" {CHROOT_DIR}/{CHROOT_NAME}" \ + f" {DEBIAN_MIRROR}" + + logger.debug(f"Creating schroot environment with command: {cmd}") + + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + + if result.returncode != 0: + logger.critical("Error creating schroot environment!") + logger.critical(f"stderr: {result.stderr}") + logger.critical(f"stdout: {result.stdout}") + sys.exit(1) + + + logger.info(f"Schroot environment {CHROOT_NAME} created successfully.") + + sys.exit(0) + +if __name__ == "__main__": + main() From d6e7c109e3de7120704d0eb7c20fb79b092938a1 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Thu, 21 Aug 2025 00:31:33 -0700 Subject: [PATCH 05/14] deb_abi_checker.py : Remove abi checker invokation from build.py Separating elements from the monolithic build.py Signed-off-by: Simon Beaudoin --- ...build-debian-package-reusable-workflow.yml | 2 +- ubuntu/build.py | 44 ------------------- ubuntu/deb_abi_checker.py | 22 +++------- 3 files changed, 8 insertions(+), 60 deletions(-) diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml index 4c35a12..9023764 100644 --- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml +++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml @@ -102,7 +102,7 @@ jobs: run: ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/prep_chroot_env.py --arch arm64 --os-codename noble --suffix ubuntu - name: Build Debian Package - run: ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/build.py --workspace ./WORKSPACE --gen-debians --no-abi-check + run: ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/build.py --workspace ./WORKSPACE --gen-debians - name: ABI Check run: | diff --git a/ubuntu/build.py b/ubuntu/build.py index 282e6e7..8fb9959 100755 --- a/ubuntu/build.py +++ b/ubuntu/build.py @@ -101,10 +101,6 @@ def parse_arguments(): help="Cleanup workspace after build", default=False) parser.add_argument("--prepare-sources", action="store_true", help="Prepares sources, does not build", default=False) - parser.add_argument("--no-abi-check", action="store_true", - help="Skip ABI compatibility check", default=False) - parser.add_argument("--force-abi-check", action="store_true", - help="Skip ABI compatibility check", default=False) # Deprecated parser.add_argument('--chroot-name', type=str, required=False, @@ -180,8 +176,6 @@ def parse_arguments(): PACK_VARIANT = args.pack_variant TARGET_HW = args.flat_meta -NO_ABI_CHECK = args.no_abi_check -FORCE_ABI_CHECK = args.force_abi_check # Define kernel and output directories KERNEL_DIR = args.kernel_src_dir @@ -284,44 +278,6 @@ def parse_arguments(): logger.critical("Debian package generation error. Exiting.") exit(1) - -if NO_ABI_CHECK: - logger.warning("ABI check is explicitely disabled. 
Skipping ABI check.") -elif (not IF_GEN_DEBIANS and not IS_PREPARE_SOURCE) and not FORCE_ABI_CHECK: - logger.debug("Skipping ABI check since no debian packages generated") -else: - if FORCE_ABI_CHECK and (not IF_GEN_DEBIANS and not IS_PREPARE_SOURCE): - logger.info("Forcing ABI check even if no debian package were built") - - error_during_abi_check = False - - logger.info("Running the ABI checking phase") - - try: - if not APT_SERVER_CONFIG: - raise Exception("No apt server config provided") - - if len(APT_SERVER_CONFIG) > 1: - logger.warning("Multiple apt server configs are not supported yet, picking the first one in the list") - - logger.debug("Running the package ABI checker over the temp folder containing all the repo outputs") - check_passed = multiple_repo_deb_abi_checker(DEB_OUT_TEMP_DIR, APT_SERVER_CONFIG[0]) - - if check_passed: - logger.info("ABI check passed.") - else: - logger.critical("ABI check failed.") - - except Exception as e: - logger.critical(f"Exception during the ABI checking : {e}") - traceback.print_exc() - error_during_abi_check = True - - finally: - if error_during_abi_check: - logger.critical("ABI check failed. Exiting.") - exit(1) - # Pack the image if specified if IF_PACK_IMAGE: error_during_image_packing = False diff --git a/ubuntu/deb_abi_checker.py b/ubuntu/deb_abi_checker.py index fdd278a..6509572 100755 --- a/ubuntu/deb_abi_checker.py +++ b/ubuntu/deb_abi_checker.py @@ -33,13 +33,10 @@ import os import sys import subprocess -import tempfile import shutil import argparse import glob import re -import urllib.request -import urllib.parse import traceback from helpers import create_new_directory from color_logger import logger @@ -155,7 +152,7 @@ def main(): sys.exit(ret) -def multiple_repo_deb_abi_checker(package_dir, apt_server_config, keep_temp=True, specific_apt_version=None) -> bool: +def multiple_repo_deb_abi_checker(package_dir, apt_server_config, keep_temp=True, specific_apt_version=None) -> int: """ Runs the ABI check in a folder containing multiple package folders. @@ -181,33 +178,28 @@ def multiple_repo_deb_abi_checker(package_dir, apt_server_config, keep_temp=True Returns: -------- - - bool: True if the package ABI diff was performed sucessfully, False otherwise. - Note that this does not mean that the ABI diff passed, only that it was performed successfully. 
+ - bool: Aglomeration of bitwise return value of every repo """ - all_repos_successful = True + final_ret = 0 for folder in os.listdir(package_dir): folder_path = os.path.join(package_dir, folder) if os.path.isdir(folder_path): try: - success = single_repo_deb_abi_checker(folder_path, apt_server_config, keep_temp, specific_apt_version) + final_ret |= single_repo_deb_abi_checker(folder_path, apt_server_config, keep_temp, specific_apt_version) except Exception as e: logger.critical(f"Function single_repo_deb_abi_checker threw an exception: {e}") - success = False traceback.print_exc() - - finally: - if not success: - all_repos_successful = False - + sys.exit(-1) + log_file = os.path.join(package_dir, "abi_checker.log") print_results(log_file) - return all_repos_successful + return final_ret def single_repo_deb_abi_checker(repo_package_dir, apt_server_config, keep_temp=True, specific_apt_version=None, print_debug_tree=False) -> int: """ From f6e5d2bcdf8213cce143d6b7eab6fa8cf18c3f01 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Thu, 21 Aug 2025 00:39:51 -0700 Subject: [PATCH 06/14] Add step to clean workspace from previous runs Signed-off-by: Simon Beaudoin --- .../workflows/qcom-build-debian-package-reusable-workflow.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml index 9023764..e3f4bd8 100644 --- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml +++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml @@ -77,6 +77,9 @@ jobs: gzip -9 < ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages > ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages.gz fi + - name: Ensure Workspace Is Clean + run: rm -rf * # Ensure workspace is clean from previous runs + - name: Checkout Repository With Submodules uses: actions/checkout@v4 # Using public GitHub action to checkout repo, see https://github.com/actions/checkout with: From 3fb02bf3b4ca24d9c0b3ef3c887df00319757a77 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Thu, 21 Aug 2025 00:59:34 -0700 Subject: [PATCH 07/14] Extracting params from qcom-product.conf Signed-off-by: Simon Beaudoin --- ...build-debian-package-reusable-workflow.yml | 116 ++++++++++++------ ubuntu/deb_abi_checker.py | 4 +- 2 files changed, 79 insertions(+), 41 deletions(-) diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml index e3f4bd8..bf8ecc9 100644 --- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml +++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml @@ -11,10 +11,10 @@ on: description: 'Whether to clear the PPA' type: boolean required: false - default: false + default: false secrets: ACTIONS_SSH_KEY: - required: true + required: true permissions: contents: read @@ -28,18 +28,53 @@ env: # This variable is set to true below if the ABI check is not able to find an initial # version of the package in the PPA INITIAL_UPLOAD_TO_PPA: false + ABI_CHECK_RETURN_VALUE: 0 + + PRODUCT_DISTRO: null + PRODUCT_CODENAME: null + PRODUCT_ARCH: null + jobs: build-debian-package-reusable-workflow: + runs-on: [self-hosted, Linux, ARM64] steps: - - name: Clear PPA if requested + + - name: Ensure Workspace Is Clean + run: rm -rf * # Ensure workspace is clean from previous runs + + - name: Checkout Repository And Submodules Recursively + uses: 
actions/checkout@v4 # Using public GitHub action to checkout repo, see https://github.com/actions/checkout + with: + clean: true # Make sure the workspace is cleaned up from previous runs + submodules: 'recursive' # Make sure all submodules are recursively checked out + ssh-key: ${{ secrets.ACTIONS_SSH_KEY }} # Add SSH key for cloning private repos + fetch-depth: 1 # Speedup things since full history isn't needed + + - name: Exctract Product Configuration From qcom-product.conf + run: | + CONFIG_FILE="qcom-distro-ubuntu/qcom-product.conf" + + DISTRO=$(grep '^Distro:' "$CONFIG_FILE" | cut -d':' -f2 | xargs) + CODENAME=$(grep '^Codename:' "$CONFIG_FILE" | cut -d':' -f2 | xargs) + ARCH=$(grep '^Arch:' "$CONFIG_FILE" | cut -d':' -f2 | xargs) + + echo "Distro: $DISTRO" + echo "Codename: $CODENAME" + echo "Arch: $ARCH" + + echo "PRODUCT_DISTRO=${DISTRO}" >> $GITHUB_ENV + echo "PRODUCT_CODENAME=${CODENAME}" >> $GITHUB_ENV + echo "PRODUCT_ARCH=${ARCH}" >> $GITHUB_ENV + + - name: Clear PPA If Requested if: ${{ inputs.clear_ppa == true }} run: | rm -rf ${{ env.PPA_HTTP_DIRECTORY }}/* echo "Directory ${{ env.PPA_HTTP_DIRECTORY }} removed" - - name: Setup HTTP PPA server + - name: Verify Local Test HTTP PPA Server Is Running run : | echo "Check if PPA serving folder exists ${{ env.PPA_HTTP_DIRECTORY }}" if [ -d "${{ env.PPA_HTTP_DIRECTORY }}" ]; then @@ -50,7 +85,7 @@ jobs: fi echo "Check if PPA html server is running" - + if screen -list | grep -q "${{ env.PPA_HTTP_SERVER_SCREEN_NAME }}"; then echo "Screen session ${{ env.PPA_HTTP_SERVER_SCREEN_NAME }} already exists. Skipping server start." else @@ -67,34 +102,23 @@ jobs: fi echo "Check if PPA server is populated" - if [ -f "${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages.gz" ]; then + if [ -f "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages.gz" ]; then echo "PPA server is populated, listing the content of Packages:" - cat ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages + cat ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages else echo "Error: PPA server is not populated. 
Populating" - mkdir -p ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/ - dpkg-scanpackages ${{ env.PPA_HTTP_DIRECTORY }} > ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages - gzip -9 < ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages > ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages.gz + mkdir -p ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/ + dpkg-scanpackages ${{ env.PPA_HTTP_DIRECTORY }} > ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages + gzip -9 < ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages > ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages.gz fi - - name: Ensure Workspace Is Clean - run: rm -rf * # Ensure workspace is clean from previous runs - - - name: Checkout Repository With Submodules - uses: actions/checkout@v4 # Using public GitHub action to checkout repo, see https://github.com/actions/checkout - with: - clean: true # Make sure the workspace is cleaned up from previous runs - submodules: 'recursive' # Make sure all submodules are recursively checked out - ssh-key: ${{ secrets.ACTIONS_SSH_KEY }} # Add SSH key for cloning private repos - fetch-depth: 1 # Speedup things since full history isn't needed - - - name: Prepare Workspace Structure + - name: Prepare Workspace Structure For The Build # The build.py script expects a specific directory structure # This step creates the required directories and links the example package sources to the expected location # The build_deb script will go through the WORKSPACE/sources directory and find every "debian" folder and exctract the package names - # from the Control file and build a list of available packages to build. + # from the Control file and build a list of available packages to build. run: | - echo "Listing the content of what was checked out"; tree + echo "Listing the content of what was checked out, exclusing .md files : "; tree -I '*.md' mkdir WORKSPACE mkdir WORKSPACE/sources mkdir WORKSPACE/sources/${{ github.event.repository.name }} @@ -102,20 +126,29 @@ jobs: cp -r qcom-example-package-source WORKSPACE/sources/${{ github.event.repository.name }} - name: Validate Or Create Chroot Environment - run: ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/prep_chroot_env.py --arch arm64 --os-codename noble --suffix ubuntu + run: | + ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/prep_chroot_env.py \ + --arch ${{ env.PRODUCT_ARCH }} \ + --os-codename ${{ env.PRODUCT_CODENAME }} \ + --suffix ${{ env.PRODUCT_DISTRO }} - - name: Build Debian Package - run: ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/build.py --workspace ./WORKSPACE --gen-debians + - name: Build Debian Packages + run: | + ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/build.py \ + --workspace ./WORKSPACE \ + --gen-debians - name: ABI Check run: | set +e + ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/deb_abi_checker.py \ --new-package-dir ./WORKSPACE/debian_packages/temp/${{ github.event.repository.name }} \ - --apt-server-config "deb [arch=arm64 trusted=yes] http://localhost:${{ env.PPA_HTTP_SERVER_PORT }} noble/stable main" + --apt-server-config "deb [arch=${{ env.PRODUCT_ARCH }} trusted=yes] http://localhost:${{ env.PPA_HTTP_SERVER_PORT }} ${{ env.PRODUCT_CODENAME }}/stable main" + RET=$? 
- set -e - + set -e + echo "ABI check returned $RET" # (0): RETURN_ABI_NO_DIFF @@ -151,7 +184,12 @@ jobs: echo "ABI check failed because there was an error on the PPA" fi - - name: Upload Debian Package To PPA Server If First Build + echo "ABI_CHECK_RETURN_VALUE=${RET}" >> $GITHUB_ENV + + - name: Package Version Increment Check + run: echo "Run package version check here with ret value ${{ env.ABI_CHECK_RETURN_VALUE }}" + + - name: Upload Debian Packages To PPA Server If First Build if: ${{ env.INITIAL_UPLOAD_TO_PPA == 'true' }} run: | echo "Uploading Debian Package To PPA Server..." @@ -160,21 +198,21 @@ jobs: echo "Removing olf Packages/.gz files" - rm -f ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages || true - rm -f ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages.gz || true - + rm -f ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages || true + rm -f ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages.gz || true + echo "Updating the Packages/.gz files" - touch ${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages + touch ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages for dir in ${{ env.PPA_HTTP_DIRECTORY }}/oss/*/; do echo "Updating packages for dir $dir" - dpkg-scanpackages "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages" - dpkg-scanpackages --type ddeb "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages" + dpkg-scanpackages "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages" + dpkg-scanpackages --type ddeb "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages" done - gzip -9 < "${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages" > "${{ env.PPA_HTTP_DIRECTORY }}/dists/noble/stable/main/binary-arm64/Packages.gz" + gzip -9 < "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages" > "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages.gz" echo "Updated the packages.gz files" - echo "Updated the Packages files for suite: noble" \ No newline at end of file + echo "Updated the Packages files for suite: ${{ env.PRODUCT_CODENAME }}" \ No newline at end of file diff --git a/ubuntu/deb_abi_checker.py b/ubuntu/deb_abi_checker.py index 6509572..449f73c 100755 --- a/ubuntu/deb_abi_checker.py +++ b/ubuntu/deb_abi_checker.py @@ -194,7 +194,7 @@ def multiple_repo_deb_abi_checker(package_dir, apt_server_config, keep_temp=True traceback.print_exc() sys.exit(-1) - + log_file = os.path.join(package_dir, "abi_checker.log") print_results(log_file) @@ -578,7 +578,7 @@ def single_package_abi_checker(repo_package_dir, shutil.rmtree(abi_check_temp_dir) analyze_abi_diff_result(old_version, new_version, abidiff_result) - + return return_value def extract_deb(deb_path, dev_path, ddeb_path, extract_dir): From 09080d0faa6eb31ad257d18fd482b9268a5ab8f1 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin 
Date: Thu, 21 Aug 2025 12:11:57 -0700 Subject: [PATCH 08/14] Use variable for PPA Packages file path Signed-off-by: Simon Beaudoin --- ...build-debian-package-reusable-workflow.yml | 32 +++++++++++-------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml index bf8ecc9..50cdebc 100644 --- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml +++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml @@ -27,13 +27,15 @@ env: # This variable is set to true below if the ABI check is not able to find an initial # version of the package in the PPA - INITIAL_UPLOAD_TO_PPA: false + INITIAL_UPLOAD_TO_PPA: 'false' ABI_CHECK_RETURN_VALUE: 0 PRODUCT_DISTRO: null PRODUCT_CODENAME: null PRODUCT_ARCH: null + PPA_PACKAGES_FILE_DIR: null + jobs: build-debian-package-reusable-workflow: @@ -68,6 +70,8 @@ jobs: echo "PRODUCT_CODENAME=${CODENAME}" >> $GITHUB_ENV echo "PRODUCT_ARCH=${ARCH}" >> $GITHUB_ENV + echo "PPA_PACKAGES_FILE_DIR=${{env.PPA_HTTP_DIRECTORY}}/dists/$CODENAME/stable/main/binary-$ARCH" >> $GITHUB_ENV + - name: Clear PPA If Requested if: ${{ inputs.clear_ppa == true }} run: | @@ -102,14 +106,14 @@ jobs: fi echo "Check if PPA server is populated" - if [ -f "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages.gz" ]; then + if [ -f "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages.gz" ]; then echo "PPA server is populated, listing the content of Packages:" - cat ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages + cat ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages else echo "Error: PPA server is not populated. Populating" - mkdir -p ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/ - dpkg-scanpackages ${{ env.PPA_HTTP_DIRECTORY }} > ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages - gzip -9 < ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages > ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages.gz + mkdir -p ${{ env.PPA_PACKAGES_FILE_DIR }} + dpkg-scanpackages ${{ env.PPA_HTTP_DIRECTORY }} > ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages + gzip -9 < ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages > ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages.gz fi - name: Prepare Workspace Structure For The Build @@ -118,7 +122,7 @@ jobs: # The build_deb script will go through the WORKSPACE/sources directory and find every "debian" folder and exctract the package names # from the Control file and build a list of available packages to build. 
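The comment block above describes how build_deb.py discovers buildable packages by scanning WORKSPACE/sources for debian/ folders and reading package names out of each control file; the scan itself is not shown in this series. A minimal sketch of such a discovery pass, assuming a layout of <repo>/debian/control files under the sources directory (the function name and details are illustrative, not the actual load_packages() implementation):

    import os
    import re

    def discover_packages(sources_dir):
        # Walk the sources tree, find each debian/control file, and collect the
        # binary package names declared in its "Package:" stanzas.
        packages = {}
        for root, _dirs, files in os.walk(sources_dir):
            if os.path.basename(root) == "debian" and "control" in files:
                with open(os.path.join(root, "control")) as control:
                    names = re.findall(r"^Package:\s*(\S+)", control.read(), re.MULTILINE)
                packages[os.path.dirname(root)] = names
        return packages

For the workspace prepared above, discover_packages("WORKSPACE/sources") would map each checked-out repository to the package names listed in its control file.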
run: | - echo "Listing the content of what was checked out, exclusing .md files : "; tree -I '*.md' + echo "Listing the content of what was checked out, exclusing .md files : "; tree -I '*.md|LICENSE.txt' mkdir WORKSPACE mkdir WORKSPACE/sources mkdir WORKSPACE/sources/${{ github.event.repository.name }} @@ -172,6 +176,7 @@ jobs: if (( RET & 4 )); then echo "ABI check returned STRIPPED PACKAGE" + exit 1 fi if (( RET & 8 )); then @@ -187,6 +192,7 @@ jobs: echo "ABI_CHECK_RETURN_VALUE=${RET}" >> $GITHUB_ENV - name: Package Version Increment Check + if: ${{ env.INITIAL_UPLOAD_TO_PPA == 'false' }} run: echo "Run package version check here with ret value ${{ env.ABI_CHECK_RETURN_VALUE }}" - name: Upload Debian Packages To PPA Server If First Build @@ -198,20 +204,20 @@ jobs: echo "Removing olf Packages/.gz files" - rm -f ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages || true - rm -f ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages.gz || true + rm -f ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages || true + rm -f ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages.gz || true echo "Updating the Packages/.gz files" - touch ${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages + touch ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages for dir in ${{ env.PPA_HTTP_DIRECTORY }}/oss/*/; do echo "Updating packages for dir $dir" - dpkg-scanpackages "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages" - dpkg-scanpackages --type ddeb "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages" + dpkg-scanpackages "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages" + dpkg-scanpackages --type ddeb "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages" done - gzip -9 < "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages" > "${{ env.PPA_HTTP_DIRECTORY }}/dists/${{ env.PRODUCT_CODENAME }}/stable/main/binary-${{ env.PRODUCT_ARCH }}/Packages.gz" + gzip -9 < "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages" > "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages.gz" echo "Updated the packages.gz files" From 440460517e8b74ded133c43a5d96148c699e3920 Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Thu, 21 Aug 2025 11:41:47 -0700 Subject: [PATCH 09/14] Separating the version checking into a separate script Signed-off-by: Simon Beaudoin --- ...build-debian-package-reusable-workflow.yml | 27 ++++-- ubuntu/deb_abi_checker.py | 84 +++++++++++-------- ubuntu/deb_version_checker.py | 10 +++ 3 files changed, 80 insertions(+), 41 deletions(-) create mode 100644 ubuntu/deb_version_checker.py diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml index 50cdebc..77f9a11 100644 --- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml +++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml @@ -148,7 +148,8 @@ jobs: ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/deb_abi_checker.py \ --new-package-dir ./WORKSPACE/debian_packages/temp/${{ github.event.repository.name }} \ - 
--apt-server-config "deb [arch=${{ env.PRODUCT_ARCH }} trusted=yes] http://localhost:${{ env.PPA_HTTP_SERVER_PORT }} ${{ env.PRODUCT_CODENAME }}/stable main" + --apt-server-config "deb [arch=${{ env.PRODUCT_ARCH }} trusted=yes] http://localhost:${{ env.PPA_HTTP_SERVER_PORT }} ${{ env.PRODUCT_CODENAME }}/stable main" \ + --result-file ./results.txt RET=$? set -e @@ -163,37 +164,47 @@ jobs: # Bit 4 (16): RETURN_PPA_ERROR if (( RET == 0 )); then - echo "ABI check returned NO_DIFF" + echo "✅ ABI check returned NO_DIFF" fi if (( RET & 1 )); then - echo "ABI check returned COMPATIBLE DIFF" + echo "⚠️ABI check returned COMPATIBLE DIFF" fi if (( RET & 2 )); then - echo "ABI check returned INCOMPATIBLE DIFF" + echo "⚠️ABI check returned INCOMPATIBLE DIFF" fi if (( RET & 4 )); then - echo "ABI check returned STRIPPED PACKAGE" + echo "❌ ABI check returned STRIPPED PACKAGE" exit 1 fi if (( RET & 8 )); then - echo "ABI check failed because the PPA did not contained an old version for the package." + echo "⚠️ABI check failed because the PPA did not contained an old version for the package." echo "Assumption is that this is the first time the package was build." echo "INITIAL_UPLOAD_TO_PPA=true" >> $GITHUB_ENV fi if (( RET & 16 )); then - echo "ABI check failed because there was an error on the PPA" + echo "❌ ABI check failed because there was an error on the PPA" fi echo "ABI_CHECK_RETURN_VALUE=${RET}" >> $GITHUB_ENV - name: Package Version Increment Check if: ${{ env.INITIAL_UPLOAD_TO_PPA == 'false' }} - run: echo "Run package version check here with ret value ${{ env.ABI_CHECK_RETURN_VALUE }}" + run: | + echo "Run package version check here with ret value ${{ env.ABI_CHECK_RETURN_VALUE }}" + echo "Content of result file :" + cat ./results.txt + + if grep -qE '^- Version.*FAIL' ./results.txt; then + echo "❌ Test failed: At least one FAIL found in - Version: line" + exit 1 + else + echo "✅ Test passed: All versions are PASS" + fi - name: Upload Debian Packages To PPA Server If First Build if: ${{ env.INITIAL_UPLOAD_TO_PPA == 'true' }} diff --git a/ubuntu/deb_abi_checker.py b/ubuntu/deb_abi_checker.py index 449f73c..3695a8b 100755 --- a/ubuntu/deb_abi_checker.py +++ b/ubuntu/deb_abi_checker.py @@ -65,16 +65,15 @@ def __init__(self, package_name): self.abi_pkg_diff_result = None self.abi_pkg_diff_remark = None + self.abi_pkg_diff_version_check = None self.abi_pkg_diff_output = None # package_name - result global_checker_results: dict[str, ABI_DIFF_Result] = {} +def produce_report(log_file=None): - -def print_results(log_file=None): - - log = "ABI Check results\n" + log = "ABI Check results\n\n" log += ("-" * 100 + "\n") @@ -93,6 +92,7 @@ def print_results(log_file=None): log += f" - Version: {result.old_deb_version}\n" log += f"ABI Package Diff:\n" log += f" - Result: {result.abi_pkg_diff_result}\n" + log += f" - Version: {result.abi_pkg_diff_version_check}\n" log += f" - Remark: {result.abi_pkg_diff_remark}\n" log += f" - Output: {"" if result.abi_pkg_diff_output is not None else result.abi_pkg_diff_output}\n" if result.abi_pkg_diff_output is not None: @@ -126,6 +126,11 @@ def parse_arguments(): parser.add_argument("--old-version", required=False, help="Specific version of the old package to compare against. 
(optional)") + + parser.add_argument("--result-file", + required=False, + help="Path for the result file") + args = parser.parse_args() return args @@ -148,7 +153,11 @@ def main(): None if not args.old_version else args.old_version, print_debug_tree=print_debug_tree) - print_results(None) + if args.result_file is not None: + if not os.path.isabs(args.result_file): + args.result_file = os.path.abspath(args.result_file) + + produce_report(args.result_file) sys.exit(ret) @@ -197,7 +206,7 @@ def multiple_repo_deb_abi_checker(package_dir, apt_server_config, keep_temp=True log_file = os.path.join(package_dir, "abi_checker.log") - print_results(log_file) + produce_report(log_file) return final_ret @@ -577,7 +586,7 @@ def single_package_abi_checker(repo_package_dir, logger.debug(f"[ABI_CHECKER]: Removing temporary directory {abi_check_temp_dir}") shutil.rmtree(abi_check_temp_dir) - analyze_abi_diff_result(old_version, new_version, abidiff_result) + result.abi_pkg_diff_version_check = analyze_abi_diff_result(old_version, new_version, abidiff_result) return return_value @@ -676,7 +685,7 @@ def version_bumped(old_version, new_version, index): else: return False -def analyze_abi_diff_result(old_version, new_version, abidiff_result): +def analyze_abi_diff_result(old_version, new_version, abidiff_result) -> str: import re # Keep the first part of the version, before the first '-' or '+' @@ -718,62 +727,71 @@ def analyze_abi_diff_result(old_version, new_version, abidiff_result): logger.error(f"[ABI_CHECKER]/[RESULT]: INCOMPATIBLE change detected") if major_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: OK : major version bumbed") + result = "PASS : Major version increased" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.debug("[ABI_CHECKER]/[RESULT]: Increasing the major version for an incompatible ABI is what is required") - result_pass = True + elif minor_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: NOT-OK : minor version bumbed") + result = "FAIL : Minor version increased, needed major increase" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.debug(f"[ABI_CHECKER]/[RESULT]: Increasing only the minor version for an incompatible ABI change is not enough") - result_pass = False + elif patch_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: NOT-OK : patch version bumbed") + result = "FAIL : Patch version increased, needed major increase" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.debug(f"[ABI_CHECKER]/[RESULT]: Increasing only the patch version for an incompatible ABI change is not enough") - result_pass = False + else: - logger.debug(f"[ABI_CHECKER]/[RESULT]: NOT-OK : no version bumped") + result = "FAIL : No version increase" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.debug(f"[ABI_CHECKER]/[RESULT]: Increasing the version number is required for an ABI change") - result_pass = False elif abi_change: # Compatible change logger.warning(f"[ABI_CHECKER]/[RESULT]: COMPATIBLE change detected") if major_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: OK : major version bumbed") + result = "PASS : Major version increased" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.warning(f"[ABI_CHECKER]/[RESULT]: Increasing the major version for a compatible ABI change was probably overkill, but at least it respects version increase") - result_pass = True + elif minor_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: OK : minor version bumbed") + result = "PASS : Minor version increased" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") 
logger.debug(f"[ABI_CHECKER]/[RESULT]: Increasing the minor version for a compatible ABI change is what is required") - result_pass = True + elif patch_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: NOT-OK : patch version bumbed") + result = "FAIL : Patch version increased, needed minor increase" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.debug(f"[ABI_CHECKER]/[RESULT]: Increasing only the patch number while there is an ABI change, albeit compatible, is not enough") - result_pass = False + else: - logger.debug(f"[ABI_CHECKER]/[RESULT]: NOT-OK : no version bumbed") + result = "FAIL : No version increase" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.debug(f"[ABI_CHECKER]/[RESULT]: Increasing at least the minor version number is required for a compatible ABI change") - result_pass = False else: # No change logger.info(f"[ABI_CHECKER]/[RESULT]: No ABI change detected") if major_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: OK : major version bumbed") + result = "PASS : Major version increased" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.warning("[ABI_CHECKER]/[RESULT]: Increasing the major version when there is no ABI change is probably overkill, but at least it respects version increase") - result_pass = True + elif minor_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: OK : minor version bumbed") + result = "PASS : Minor version increased" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.warning(f"[ABI_CHECKER]/[RESULT]: Increasing the minor version for a compatible ABI change is probably overkill, but at least it respects version increase") - result_pass = True + elif patch_bumped: - logger.debug(f"[ABI_CHECKER]/[RESULT]: OK : patch version bumbed") + result = "PASS : Patch version increased" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") logger.debug(f"[ABI_CHECKER]/[RESULT]: Increasing only the patch number while there is no ABI change seems reasonable") - result_pass = True + else: - logger.debug(f"[ABI_CHECKER]/[RESULT]: OK : no version bump") - result_pass = True + result = "PASS : No version increase" + logger.debug(f"[ABI_CHECKER]/[RESULT]: {result}") - return result_pass + return result if __name__ == "__main__": main() diff --git a/ubuntu/deb_version_checker.py b/ubuntu/deb_version_checker.py new file mode 100644 index 0000000..dfbeab1 --- /dev/null +++ b/ubuntu/deb_version_checker.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + +# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. +# +# SPDX-License-Identifier: BSD-3-Clause-Clear + +""" +deb_version_checker.py: Package Version Comparison Tool + +""" \ No newline at end of file From 31283872c0d9d812f349925c37d0246e7390c5cc Mon Sep 17 00:00:00 2001 From: Simon Beaudoin Date: Thu, 21 Aug 2025 14:23:27 -0700 Subject: [PATCH 10/14] Git checkout head_ref Have the qcom-build-utils be cloned at the tip of the given ref during CI run to avoid having to propagate updates back up all the time. 
From 31283872c0d9d812f349925c37d0246e7390c5cc Mon Sep 17 00:00:00 2001
From: Simon Beaudoin
Date: Thu, 21 Aug 2025 14:23:27 -0700
Subject: [PATCH 10/14] Git checkout head_ref

Have qcom-build-utils be cloned at the tip of the given ref during CI runs,
to avoid having to propagate updates back up all the time.

Signed-off-by: Simon Beaudoin
---
 .../workflows/qcom-build-debian-package-reusable-workflow.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
index 77f9a11..25ad816 100644
--- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
+++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
@@ -49,6 +49,7 @@ jobs:
       - name: Checkout Repository And Submodules Recursively
         uses: actions/checkout@v4 # Using public GitHub action to checkout repo, see https://github.com/actions/checkout
         with:
+          ref: ${{ github.head_ref }}
           clean: true # Make sure the workspace is cleaned up from previous runs
           submodules: 'recursive' # Make sure all submodules are recursively checked out
           ssh-key: ${{ secrets.ACTIONS_SSH_KEY }} # Add SSH key for cloning private repos

From 689888f88bf1acd99ae7a0b1e8130bcc7973d020 Mon Sep 17 00:00:00 2001
From: Simon Beaudoin
Date: Thu, 21 Aug 2025 15:44:33 -0700
Subject: [PATCH 11/14] deb_abi_checker.py: Change the incompatibility criteria

Now, when a function signature is changed, mark the change as incompatible,
as it should be.

Signed-off-by: Simon Beaudoin
---
 .../qcom-build-debian-package-reusable-workflow.yml | 3 ++-
 ubuntu/deb_abi_checker.py | 13 +++++++++++++
 2 files changed, 15 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
index 25ad816..226caa1 100644
--- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
+++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
@@ -200,13 +200,14 @@ jobs:
           echo "Content of result file :"
           cat ./results.txt

-          if grep -qE '^- Version.*FAIL' ./results.txt; then
+          if grep -qE '^\s*-\s*Version:\s*.*FAIL' ./results.txt; then
             echo "❌ Test failed: At least one FAIL found in - Version: line"
             exit 1
           else
             echo "✅ Test passed: All versions are PASS"
           fi

+
       - name: Upload Debian Packages To PPA Server If First Build
         if: ${{ env.INITIAL_UPLOAD_TO_PPA == 'true' }}
         run: |
diff --git a/ubuntu/deb_abi_checker.py b/ubuntu/deb_abi_checker.py
index 3695a8b..ec09f8f 100755
--- a/ubuntu/deb_abi_checker.py
+++ b/ubuntu/deb_abi_checker.py
@@ -543,7 +543,18 @@ def single_package_abi_checker(repo_package_dir,
         if bit3:
             result.abi_pkg_diff_result = "COMPATIBLE-DIFF"
             logger.warning(f"[ABI_CHECKER]: abipkgdiff detected ABI changes")
+            return_value = RETURN_ABI_COMPATIBLE_DIFF
+
+            match = re.search(r"Functions changes summary:\s+(\d+)\s+Removed,\s+(\d+)\s+Changed,", result.abi_pkg_diff_output)
+            if match:
+                changed_count = int(match.group(2))
+                if changed_count > 0:
+                    abidiff_result |= 0b1000
+                    return_value = RETURN_ABI_INCOMPATIBLE_DIFF
+                    result.abi_pkg_diff_result = "INCOMPATIBLE-DIFF"
+                    logger.warning(f"[ABI_CHECKER]: Overriding to INCOMPATIBLE CHANGE since there are changed functions")
+
         if bit4:
             # if bit 4 is set, bit 3 must be too, so this fallthrough is ok
             result.abi_pkg_diff_result = "INCOMPATIBLE-DIFF"
@@ -557,6 +568,8 @@ def single_package_abi_checker(repo_package_dir,
             with open(file_path, 'r') as file:
                 logger.debug(f"Content of {filename}:")
                 logger.warning(file.read())
+
+
     else:
         result.abi_pkg_diff_result = "NO-DIFF"
         logger.info(f"[ABI_CHECKER]/{package_name}: abipkgdiff did not find any differences between old and new packages")
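The override added above keys off the "Changed" count in the functions-changes summary produced by abipkgdiff. Here is a hedged sketch of that parsing step, using an illustrative summary line; real abipkgdiff output may be formatted differently:

    import re

    # Illustrative abipkgdiff-style summary line; real output may differ.
    sample = "Functions changes summary: 0 Removed, 2 Changed, 1 Added functions"

    match = re.search(r"Functions changes summary:\s+(\d+)\s+Removed,\s+(\d+)\s+Changed,", sample)
    if match and int(match.group(2)) > 0:
        verdict = "INCOMPATIBLE-DIFF"  # any changed function signature is treated as incompatible
    else:
        verdict = "COMPATIBLE-DIFF"

    print(verdict)  # INCOMPATIBLE-DIFF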
From 71e0a875ffe174b1cf5eeeea3dab0940908f153b Mon Sep 17 00:00:00 2001
From: Simon Beaudoin
Date: Thu, 21 Aug 2025 20:04:56 -0700
Subject: [PATCH 12/14] Clone tip of ref of qcom-build-utils

Signed-off-by: Simon Beaudoin
---
 ...-build-debian-package-reusable-workflow.yml | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
index 226caa1..b0c49e6 100644
--- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
+++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
@@ -2,6 +2,10 @@ name: Qualcomm Build Debian Package Reusable Workflow
 on:
   workflow_call:
     inputs:
+      ref:
+        description: The ref name that was used to invoke this reusable workflow
+        type: string
+        required: true
       use_local_ppa:
         description: 'Whether to use a local PPA for testing'
         type: boolean
@@ -26,7 +30,7 @@ env:
   PPA_HTTP_SERVER_SCREEN_NAME: github-actions-ppa-http-server

   # This variable is set to true below if the ABI check is not able to find an initial
-  # version of the package in the PPA
+  # version of the package in the PPA.
   INITIAL_UPLOAD_TO_PPA: 'false'

   ABI_CHECK_RETURN_VALUE: 0
@@ -46,6 +50,17 @@ jobs:
       - name: Ensure Workspace Is Clean
         run: rm -rf * # Ensure workspace is clean from previous runs

+      # This checkout is done so that any change in qcom-build-utils doesn't need to be propagated up
+      # Simply relaunching a job that uses this reusable workflow for a given ref will automatically see the changes
+      - name: Checkout Tip of qcom-build-utils For The Specified Ref
+        uses: actions/checkout@v4
+        with:
+          repository: qualcomm-linux/qcom-build-utils
+          ref: ${{ inputs.ref }}
+          ssh-key: ${{ secrets.ACTIONS_SSH_KEY }}
+          path: qcom-build-utils
+          fetch-depth: 1
+
       - name: Checkout Repository And Submodules Recursively
         uses: actions/checkout@v4 # Using public GitHub action to checkout repo, see https://github.com/actions/checkout
         with:
@@ -189,6 +204,7 @@ jobs:

           if (( RET & 16 )); then
             echo "❌ ABI check failed because there was an error on the PPA"
+            exit 1
           fi

           echo "ABI_CHECK_RETURN_VALUE=${RET}" >> $GITHUB_ENV
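The workflow reads the checker's exit status as a bitmask; the new exit 1 above makes bit 4 (value 16, the PPA error flag) fatal. A small sketch of how such a bitmask can be decoded follows; apart from the PPA-error bit, the flag names below are placeholders, since the exact bit assignments live in the checker itself:

    PPA_ERROR = 16  # bit 4, the only flag value spelled out in this series

    # Placeholder map; the checker defines the real flag constants.
    FLAGS = {
        PPA_ERROR: "PPA error (fail the job)",
    }

    def decode(ret):
        if ret == 0:
            return ["NO_DIFF"]
        return [name for bit, name in FLAGS.items() if ret & bit] or ["other ABI/version flags set"]

    print(decode(0))   # ['NO_DIFF']
    print(decode(16))  # ['PPA error (fail the job)']
    print(decode(3))   # ['other ABI/version flags set']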
From ecc03befbc392dfa9f4b3e9b7ac0ba657907c591 Mon Sep 17 00:00:00 2001
From: Simon Beaudoin
Date: Thu, 21 Aug 2025 20:41:29 -0700
Subject: [PATCH 13/14] Rename directory ubuntu to scripts

Signed-off-by: Simon Beaudoin
---
 .../qcom-build-debian-package-reusable-workflow.yml | 10 +++++-----
 {ubuntu => scripts}/.gitignore | 0
 {ubuntu => scripts}/README.md | 0
 {ubuntu => scripts}/__init__.py | 0
 {ubuntu => scripts}/add-changelog | 0
 {ubuntu => scripts}/apt_server.py | 0
 {ubuntu => scripts}/build.py | 0
 {ubuntu => scripts}/build_deb.py | 0
 {ubuntu => scripts}/build_dtb.py | 0
 {ubuntu => scripts}/build_kernel.py | 0
 {ubuntu => scripts}/color_logger.py | 0
 {ubuntu => scripts}/constants.py | 0
 {ubuntu => scripts}/deb_abi_checker.py | 0
 {ubuntu => scripts}/deb_organize.py | 0
 {ubuntu => scripts}/deb_version_checker.py | 0
 {ubuntu => scripts}/flat_meta.py | 0
 .../generate_project_info_from_manifest.py | 0
 {ubuntu => scripts}/helpers.py | 0
 {ubuntu => scripts}/pack_deb.py | 0
 {ubuntu => scripts}/prep_chroot_env.py | 0
 {ubuntu => scripts}/qubuild | 0
 {ubuntu => scripts}/read_dsc.py | 0
 {ubuntu => scripts}/requirements.txt | 0
 {ubuntu => scripts}/setup-environment | 0
 24 files changed, 5 insertions(+), 5 deletions(-)
 rename {ubuntu => scripts}/.gitignore (100%)
 rename {ubuntu => scripts}/README.md (100%)
 rename {ubuntu => scripts}/__init__.py (100%)
 rename {ubuntu => scripts}/add-changelog (100%)
 rename {ubuntu => scripts}/apt_server.py (100%)
 rename {ubuntu => scripts}/build.py (100%)
 rename {ubuntu => scripts}/build_deb.py (100%)
 rename {ubuntu => scripts}/build_dtb.py (100%)
 rename {ubuntu => scripts}/build_kernel.py (100%)
 rename {ubuntu => scripts}/color_logger.py (100%)
 rename {ubuntu => scripts}/constants.py (100%)
 rename {ubuntu => scripts}/deb_abi_checker.py (100%)
 rename {ubuntu => scripts}/deb_organize.py (100%)
 rename {ubuntu => scripts}/deb_version_checker.py (100%)
 rename {ubuntu => scripts}/flat_meta.py (100%)
 rename {ubuntu => scripts}/generate_project_info_from_manifest.py (100%)
 rename {ubuntu => scripts}/helpers.py (100%)
 rename {ubuntu => scripts}/pack_deb.py (100%)
 rename {ubuntu => scripts}/prep_chroot_env.py (100%)
 rename {ubuntu => scripts}/qubuild (100%)
 rename {ubuntu => scripts}/read_dsc.py (100%)
 rename {ubuntu => scripts}/requirements.txt (100%)
 rename {ubuntu => scripts}/setup-environment (100%)

diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
index b0c49e6..09d7cef 100644
--- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
+++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
@@ -58,14 +58,14 @@ jobs:
           repository: qualcomm-linux/qcom-build-utils
           ref: ${{ inputs.ref }}
           ssh-key: ${{ secrets.ACTIONS_SSH_KEY }}
-          path: qcom-build-utils
+          path: ./qcom-build-utils
           fetch-depth: 1

       - name: Checkout Repository And Submodules Recursively
         uses: actions/checkout@v4 # Using public GitHub action to checkout repo, see https://github.com/actions/checkout
         with:
           ref: ${{ github.head_ref }}
-          clean: true # Make sure the workspace is cleaned up from previous runs
+          clean: false # A rm -rf * was done first, don't clean otherwise this would delete qcom-build-utils cloned above
           submodules: 'recursive' # Make sure all submodules are recursively checked out
           ssh-key: ${{ secrets.ACTIONS_SSH_KEY }} # Add SSH key for cloning private repos
           fetch-depth: 1 # Speedup things since full history isn't needed
@@ -147,14 +147,14 @@ jobs:

       - name: Validate Or Create Chroot Environment
         run: |
-          ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/prep_chroot_env.py \
+          ./qcom-build-utils/scripts/prep_chroot_env.py \
            --arch ${{ env.PRODUCT_ARCH }} \
            --os-codename ${{ env.PRODUCT_CODENAME }} \
            --suffix ${{ env.PRODUCT_DISTRO }}

       - name: Build Debian Packages
         run: |
-          ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/build.py \
+          ./qcom-build-utils/scripts/build.py \
            --workspace ./WORKSPACE \
            --gen-debians

@@ -162,7 +162,7 @@ jobs:
         run: |
           set +e

-          ./qcom-distro-ubuntu/qcom-build-utils/ubuntu/deb_abi_checker.py \
+          ./qcom-build-utils/scripts/deb_abi_checker.py \
            --new-package-dir ./WORKSPACE/debian_packages/temp/${{ github.event.repository.name }} \
            --apt-server-config "deb [arch=${{ env.PRODUCT_ARCH }} trusted=yes] http://localhost:${{ env.PPA_HTTP_SERVER_PORT }} ${{ env.PRODUCT_CODENAME }}/stable main" \
            --result-file ./results.txt
diff --git a/ubuntu/.gitignore b/scripts/.gitignore
similarity index 100%
rename from ubuntu/.gitignore
rename to scripts/.gitignore
diff --git a/ubuntu/README.md b/scripts/README.md
similarity index 100%
rename from ubuntu/README.md
rename to scripts/README.md
diff --git a/ubuntu/__init__.py b/scripts/__init__.py
similarity index 100%
rename from ubuntu/__init__.py
rename to scripts/__init__.py
diff --git a/ubuntu/add-changelog b/scripts/add-changelog
similarity index 100%
rename from ubuntu/add-changelog
rename to scripts/add-changelog
diff --git a/ubuntu/apt_server.py b/scripts/apt_server.py
similarity index 100%
rename from ubuntu/apt_server.py
rename to scripts/apt_server.py
diff --git a/ubuntu/build.py b/scripts/build.py
similarity index 100%
rename from ubuntu/build.py
rename to scripts/build.py
diff --git a/ubuntu/build_deb.py b/scripts/build_deb.py
similarity index 100%
rename from ubuntu/build_deb.py
rename to scripts/build_deb.py
diff --git a/ubuntu/build_dtb.py b/scripts/build_dtb.py
similarity index 100%
rename from ubuntu/build_dtb.py
rename to scripts/build_dtb.py
diff --git a/ubuntu/build_kernel.py b/scripts/build_kernel.py
similarity index 100%
rename from ubuntu/build_kernel.py
rename to scripts/build_kernel.py
diff --git a/ubuntu/color_logger.py b/scripts/color_logger.py
similarity index 100%
rename from ubuntu/color_logger.py
rename to scripts/color_logger.py
diff --git a/ubuntu/constants.py b/scripts/constants.py
similarity index 100%
rename from ubuntu/constants.py
rename to scripts/constants.py
diff --git a/ubuntu/deb_abi_checker.py b/scripts/deb_abi_checker.py
similarity index 100%
rename from ubuntu/deb_abi_checker.py
rename to scripts/deb_abi_checker.py
diff --git a/ubuntu/deb_organize.py b/scripts/deb_organize.py
similarity index 100%
rename from ubuntu/deb_organize.py
rename to scripts/deb_organize.py
diff --git a/ubuntu/deb_version_checker.py b/scripts/deb_version_checker.py
similarity index 100%
rename from ubuntu/deb_version_checker.py
rename to scripts/deb_version_checker.py
diff --git a/ubuntu/flat_meta.py b/scripts/flat_meta.py
similarity index 100%
rename from ubuntu/flat_meta.py
rename to scripts/flat_meta.py
diff --git a/ubuntu/generate_project_info_from_manifest.py b/scripts/generate_project_info_from_manifest.py
similarity index 100%
rename from ubuntu/generate_project_info_from_manifest.py
rename to scripts/generate_project_info_from_manifest.py
diff --git a/ubuntu/helpers.py b/scripts/helpers.py
similarity index 100%
rename from ubuntu/helpers.py
rename to scripts/helpers.py
diff --git a/ubuntu/pack_deb.py b/scripts/pack_deb.py
similarity index 100%
rename from ubuntu/pack_deb.py
rename to scripts/pack_deb.py
diff --git a/ubuntu/prep_chroot_env.py b/scripts/prep_chroot_env.py
similarity index 100%
rename from ubuntu/prep_chroot_env.py
rename to scripts/prep_chroot_env.py
diff --git a/ubuntu/qubuild b/scripts/qubuild
similarity index 100%
rename from ubuntu/qubuild
rename to scripts/qubuild
diff --git a/ubuntu/read_dsc.py b/scripts/read_dsc.py
similarity index 100%
rename from ubuntu/read_dsc.py
rename to scripts/read_dsc.py
diff --git a/ubuntu/requirements.txt b/scripts/requirements.txt
similarity index 100%
rename from ubuntu/requirements.txt
rename to scripts/requirements.txt
diff --git a/ubuntu/setup-environment b/scripts/setup-environment
similarity index 100%
rename from ubuntu/setup-environment
rename to scripts/setup-environment
From b68a67b38fd46ddbf4e4312dfbb8d9a9c84eb1d9 Mon Sep 17 00:00:00 2001
From: Simon Beaudoin
Date: Thu, 21 Aug 2025 21:46:13 -0700
Subject: [PATCH 14/14] Test whether the GitHub-hosted PPA repo works

Signed-off-by: Simon Beaudoin
---
 ...build-debian-package-reusable-workflow.yml | 100 ++++++------------
 1 file changed, 33 insertions(+), 67 deletions(-)

diff --git a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
index 09d7cef..086d6bc 100644
--- a/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
+++ b/.github/workflows/qcom-build-debian-package-reusable-workflow.yml
@@ -25,9 +25,7 @@ permissions:
   security-events: write

 env:
-  PPA_HTTP_DIRECTORY: /var/www/html/github-actions-ppa
-  PPA_HTTP_SERVER_PORT: 12345
-  PPA_HTTP_SERVER_SCREEN_NAME: github-actions-ppa-http-server
+  PPA_URL: https://qualcomm-linux.github.io/qcom-oss-staging-ppa/

   # This variable is set to true below if the ABI check is not able to find an initial
   # version of the package in the PPA.
@@ -38,7 +36,7 @@ env:
   PRODUCT_CODENAME: null
   PRODUCT_ARCH: null

-  PPA_PACKAGES_FILE_DIR: null
+  PPA_PACKAGES_FILE_REPO_PATH: null

 jobs:
   build-debian-package-reusable-workflow:
@@ -86,57 +84,13 @@ jobs:
           echo "PRODUCT_CODENAME=${CODENAME}" >> $GITHUB_ENV
           echo "PRODUCT_ARCH=${ARCH}" >> $GITHUB_ENV

-          echo "PPA_PACKAGES_FILE_DIR=${{env.PPA_HTTP_DIRECTORY}}/dists/$CODENAME/stable/main/binary-$ARCH" >> $GITHUB_ENV
+          echo "PPA_PACKAGES_FILE_REPO_PATH=dists/$CODENAME/stable/main/binary-$ARCH" >> $GITHUB_ENV

-      - name: Clear PPA If Requested
-        if: ${{ inputs.clear_ppa == true }}
-        run: |
-          rm -rf ${{ env.PPA_HTTP_DIRECTORY }}/*
-          echo "Directory ${{ env.PPA_HTTP_DIRECTORY }} removed"
-
-      - name: Verify Local Test HTTP PPA Server Is Running
-        run : |
-          echo "Check if PPA serving folder exists ${{ env.PPA_HTTP_DIRECTORY }}"
-          if [ -d "${{ env.PPA_HTTP_DIRECTORY }}" ]; then
-            echo "Directory exists"
-          else
-            echo "Directory does not exist"
-            mkdir ${{ env.PPA_HTTP_DIRECTORY }}
-          fi
-
-          echo "Check if PPA html server is running"
-
-          if screen -list | grep -q "${{ env.PPA_HTTP_SERVER_SCREEN_NAME }}"; then
-            echo "Screen session ${{ env.PPA_HTTP_SERVER_SCREEN_NAME }} already exists. Skipping server start."
-          else
-            screen -dmS ${{ env.PPA_HTTP_SERVER_SCREEN_NAME }} bash -c 'python3 -m http.server ${{ env.PPA_HTTP_SERVER_PORT }} --directory ${{ env.PPA_HTTP_DIRECTORY }}'
-            echo "Started HTTP server in screen session ${{ env.PPA_HTTP_SERVER_SCREEN_NAME }}"
-          fi
-
-          echo "Check if PPA server works"
-          if curl --silent --output /dev/null --fail http://localhost:${{ env.PPA_HTTP_SERVER_PORT }}/; then
-            echo "HTTP server works"
-          else
-            echo "Error: HTTP server is not responding. Exiting"
-            exit 1
-          fi
-
-          echo "Check if PPA server is populated"
-          if [ -f "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages.gz" ]; then
-            echo "PPA server is populated, listing the content of Packages:"
-            cat ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages
-          else
-            echo "Error: PPA server is not populated. Populating"
-            mkdir -p ${{ env.PPA_PACKAGES_FILE_DIR }}
-            dpkg-scanpackages ${{ env.PPA_HTTP_DIRECTORY }} > ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages
-            gzip -9 < ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages > ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages.gz
-          fi
-
-      - name: Prepare Workspace Structure For The Build
       # The build.py script expects a specific directory structure
       # This step creates the required directories and links the example package sources to the expected location
       # The build_deb script will go through the WORKSPACE/sources directory and find every "debian" folder and exctract the package names
       # from the Control file and build a list of available packages to build.
+      - name: Prepare Workspace Structure For The Build
         run: |
           echo "Listing the content of what was checked out, exclusing .md files : "; tree -I '*.md|LICENSE.txt'
           mkdir WORKSPACE
@@ -164,7 +118,7 @@ jobs:

           ./qcom-build-utils/scripts/deb_abi_checker.py \
             --new-package-dir ./WORKSPACE/debian_packages/temp/${{ github.event.repository.name }} \
-            --apt-server-config "deb [arch=${{ env.PRODUCT_ARCH }} trusted=yes] http://localhost:${{ env.PPA_HTTP_SERVER_PORT }} ${{ env.PRODUCT_CODENAME }}/stable main" \
+            --apt-server-config "deb [arch=${{ env.PRODUCT_ARCH }} trusted=yes] ${{ env.PPA_URL }} ${{ env.PRODUCT_CODENAME }}/stable main" \
             --result-file ./results.txt

           RET=$?
@@ -223,31 +177,43 @@ jobs:
             echo "✅ Test passed: All versions are PASS"
           fi

+      - name: Checkout PPA staging repo
+        if: ${{ env.INITIAL_UPLOAD_TO_PPA == 'true' }}
+        uses: actions/checkout@v4
+        with:
+          repository: qualcomm-linux/qcom-oss-staging-ppa
+          ref: main
+          ssh-key: ${{ secrets.ACTIONS_SSH_KEY }}
+          path: ./qcom-oss-staging-ppa
+          fetch-depth: 1

       - name: Upload Debian Packages To PPA Server If First Build
         if: ${{ env.INITIAL_UPLOAD_TO_PPA == 'true' }}
         run: |
-          echo "Uploading Debian Package To PPA Server..."
-          cp -r ./WORKSPACE/debian_packages/oss/ ${{ env.PPA_HTTP_DIRECTORY }}
-          echo "Copied, here is the new tree of the PPA :"; tree ${{ env.PPA_HTTP_DIRECTORY }}
+          echo "Uploading Debian Package To PPA Server : ${{ env.PPA_URL }}"
+
+          for dir in ./WORKSPACE/debian_packages/oss/*/; do
+            PACKAGE=$(basename $dir)
+            echo "Creating directory for package $PACKAGE in the PPA"
+            mkdir -p ./qcom-oss-staging-ppa/pool/${{env.PRODUCT_CODENAME}}/stable/main/$PACKAGE

-          echo "Removing olf Packages/.gz files"
+            echo "Packages to copy for $PACKAGE: "; ls ./WORKSPACE/debian_packages/oss/$PACKAGE

-          rm -f ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages || true
-          rm -f ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages.gz || true
+            cp ./WORKSPACE/debian_packages/oss/$PACKAGE/* ./qcom-oss-staging-ppa/pool/${{env.PRODUCT_CODENAME}}/stable/main/$PACKAGE
+          done

-          echo "Updating the Packages/.gz files"
+          cd ./qcom-oss-staging-ppa

-          touch ${{ env.PPA_PACKAGES_FILE_DIR }}/Packages
+          dpkg-scanpackages --multiversion pool/${{ env.PRODUCT_CODENAME }} > ${{ env.PPA_PACKAGES_FILE_REPO_PATH }}/Packages
+          dpkg-scanpackages --type ddeb --multiversion pool/${{ env.PRODUCT_CODENAME }} >> ${{ env.PPA_PACKAGES_FILE_REPO_PATH }}/Packages

-          for dir in ${{ env.PPA_HTTP_DIRECTORY }}/oss/*/; do
-            echo "Updating packages for dir $dir"
-            dpkg-scanpackages "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages"
-            dpkg-scanpackages --type ddeb "$dir" | sed 's|/var/www/html/github-actions-ppa/||' >> "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages"
-          done
+          gzip -k -f ${{ env.PPA_PACKAGES_FILE_REPO_PATH }}/Packages

-          gzip -9 < "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages" > "${{ env.PPA_PACKAGES_FILE_DIR }}/Packages.gz"
+          cat ${{ env.PPA_PACKAGES_FILE_REPO_PATH }}/Packages

-          echo "Updated the packages.gz files"
+          git add .
+
+          #TODO craft decent message
+          git commit -s -m "Uploaded Packages"

-          echo "Updated the Packages files for suite: ${{ env.PRODUCT_CODENAME }}"
\ No newline at end of file
+          git push
\ No newline at end of file
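The final patch swaps the runner-local HTTP PPA for the qcom-oss-staging-ppa GitHub repository: built packages are copied under pool/<codename>/stable/main/<package>/, the apt index is regenerated with dpkg-scanpackages and gzipped, and the result is committed and pushed. The sketch below only mirrors that directory layout; the codename, architecture, and package name are placeholders:

    from pathlib import Path

    def ppa_paths(repo_root, codename, arch, package):
        """Mirror the staging PPA layout used by the upload step above."""
        root = Path(repo_root)
        return {
            # binaries: pool/<codename>/stable/main/<package>/
            "pool_dir": root / "pool" / codename / "stable" / "main" / package,
            # apt index: dists/<codename>/stable/main/binary-<arch>/Packages
            "packages_index": root / "dists" / codename / "stable" / "main" / f"binary-{arch}" / "Packages",
        }

    paths = ppa_paths("./qcom-oss-staging-ppa", "noble", "arm64", "example-package")
    print(paths["pool_dir"])        # qcom-oss-staging-ppa/pool/noble/stable/main/example-package
    print(paths["packages_index"])  # qcom-oss-staging-ppa/dists/noble/stable/main/binary-arm64/Packages

With this layout, the apt source line passed to deb_abi_checker.py ("deb [arch=...] <PPA_URL> <codename>/stable main") resolves the Packages index published from the same repository.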