diff --git a/.github/actions/abi_checker/action.yml b/.github/actions/abi_checker/action.yml index 91935f3..00fba7c 100644 --- a/.github/actions/abi_checker/action.yml +++ b/.github/actions/abi_checker/action.yml @@ -18,7 +18,7 @@ runs: set +e ./qcom-build-utils/scripts/ppa_interface.py \ --operation list-versions \ - --apt-config "deb [arch=${{env.ARCH}} trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_CODENAME}}/stable main" \ + --apt-config "deb [arch=arm64 trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_DISTRIBUTION}}/stable main" \ --package-name ${{env.BUILT_PACKAGE_NAME}} RET=$? @@ -32,7 +32,7 @@ runs: set +e ./qcom-build-utils/scripts/ppa_interface.py \ --operation download \ - --apt-config "deb [arch=${{env.ARCH}} trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_CODENAME}}/stable main" \ + --apt-config "deb [arch=arm64 trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_DISTRIBUTION}}/stable main" \ --package-name ${{env.BUILT_PACKAGE_NAME}} RET=$? @@ -51,7 +51,7 @@ runs: ./qcom-build-utils/scripts/deb_abi_checker.py \ --new-package-dir ./build-area \ - --apt-server-config "deb [arch=${{env.ARCH}} trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_CODENAME}}/stable main" \ + --apt-server-config "deb [arch=arm64 trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_DISTRIBUTION}}/stable main" \ --result-file ./results.txt RET=$? diff --git a/.github/actions/build_package/action.yml b/.github/actions/build_package/action.yml index 08c5230..6142c0e 100644 --- a/.github/actions/build_package/action.yml +++ b/.github/actions/build_package/action.yml @@ -18,13 +18,20 @@ runs: steps: - - name: Validate Or Create Chroot Environment + # Normalize the name of the architecture + # Build Architecture: The architecture of the machine performing the build (arm64 when native or amd64 when cross compiling). 
+ # This depends on the runner executing the build + - name: Set Builder Arch variable shell: bash run: | - ./qcom-build-utils/scripts/prep_chroot_env.py \ - --arch ${{env.ARCH}} \ - --os-codename ${{env.UBUNTU_CODENAME}} \ - --suffix ${{env.DISTRO}} + if [ "${{ runner.arch }}" = "X64" ]; then + echo "BUILD_ARCH=amd64" >> $GITHUB_ENV + elif [ "${{ runner.arch }}" = "ARM64" ]; then + echo "BUILD_ARCH=arm64" >> $GITHUB_ENV + else + echo "Unsupported architecture: ${{ runner.arch }}" + exit 1 + fi - name: Prepare Workspace Structure For The Build shell: bash @@ -53,8 +60,8 @@ runs: lintian_flag="--no-run-lintian" fi - if curl -sfI "http://pkg.qualcomm.com/dists/${{env.UBUNTU_CODENAME}}/Release" > /dev/null; then - EXTRA_REPO="--extra-repository='deb [arch=${{env.ARCH}} trusted=yes] http://pkg.qualcomm.com ${{env.UBUNTU_CODENAME}}/stable main'" + if curl -sfI "http://pkg.qualcomm.com/dists/${{env.UBUNTU_DISTRIBUTION}}/Release" > /dev/null; then + EXTRA_REPO="--extra-repository='deb [arch=arm64 trusted=yes] http://pkg.qualcomm.com ${{env.UBUNTU_DISTRIBUTION}}/stable main'" else EXTRA_REPO="" fi @@ -62,10 +69,14 @@ runs: set +e # ℹ️ --git-ignore-branch is necessary because the debian branch actually checked out can be any (ex, debian/1.0.0) because we can build any previous tag + # ℹ️ chroot mode unshare is important to bypass privilege issues with the mounting + # Host Architecture: The architecture for which the binaries are being built (invariably arm64). 
+              gbp buildpackage \
                 --git-ignore-branch \
-                --git-builder="sbuild --arch=${{env.ARCH}} \
-                              --dist=${{env.UBUNTU_CODENAME}}-${{env.ARCH}}-${{env.DISTRO}} \
+                --git-builder="sbuild --host=arm64 \
+                              --build=${{env.BUILD_ARCH}} \
+                              --dist=${{env.UBUNTU_DISTRIBUTION}} \
                 $lintian_flag \
                 --build-dir ../${{inputs.build-dir}} \
                 --build-dep-resolver=apt \
diff --git a/.github/actions/push_to_repo/action.yml b/.github/actions/push_to_repo/action.yml
index 3281536..95fa3ce 100644
--- a/.github/actions/push_to_repo/action.yml
+++ b/.github/actions/push_to_repo/action.yml
@@ -40,7 +40,7 @@ runs:
         set +e
         ./qcom-build-utils/scripts/ppa_interface.py \
           --operation list-versions \
-          --apt-config "deb [arch=${{env.ARCH}} trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_CODENAME}}/stable main" \
+          --apt-config "deb [arch=arm64 trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_DISTRIBUTION}}/stable main" \
           --package-name ${{env.BUILT_PACKAGE_NAME}}
         RET=$?
@@ -55,7 +55,7 @@ runs:
         set +e
         ./qcom-build-utils/scripts/ppa_interface.py \
           --operation contains-version \
-          --apt-config "deb [arch=${{env.ARCH}} trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_CODENAME}}/stable main" \
+          --apt-config "deb [arch=arm64 trusted=yes] ${{env.REPO_URL}} ${{env.UBUNTU_DISTRIBUTION}}/stable main" \
           --package-name ${{env.BUILT_PACKAGE_NAME}} \
           --version ${{env.BUILT_PACKAGE_VERSION}}
@@ -88,14 +88,14 @@ runs:
       if: steps.check-version.outputs.do_upload == 'true'
       shell: bash
       run: |
-        ./qcom-build-utils/scripts/ppa_organizer.py --build-dir ./build-area --output-dir ./pkg-oss-staging-repo/pool/${{env.UBUNTU_CODENAME}}/stable/main
+        ./qcom-build-utils/scripts/ppa_organizer.py --build-dir ./build-area --output-dir ./pkg-oss-staging-repo/pool/${{env.UBUNTU_DISTRIBUTION}}/stable/main
         cd ./pkg-oss-staging-repo
-        PPA_PACKAGES_FILE_REPO_PATH=dists/${{env.UBUNTU_CODENAME}}/stable/main/binary-${{env.ARCH}}
+        PPA_PACKAGES_FILE_REPO_PATH=dists/${{env.UBUNTU_DISTRIBUTION}}/stable/main/binary-arm64
-        dpkg-scanpackages --multiversion 
pool/${{env.UBUNTU_CODENAME}} > $PPA_PACKAGES_FILE_REPO_PATH/Packages - dpkg-scanpackages --type ddeb --multiversion pool/${{env.UBUNTU_CODENAME}} >> $PPA_PACKAGES_FILE_REPO_PATH/Packages + dpkg-scanpackages --multiversion pool/${{env.UBUNTU_DISTRIBUTION}} > $PPA_PACKAGES_FILE_REPO_PATH/Packages + dpkg-scanpackages --type ddeb --multiversion pool/${{env.UBUNTU_DISTRIBUTION}} >> $PPA_PACKAGES_FILE_REPO_PATH/Packages gzip -k -f $PPA_PACKAGES_FILE_REPO_PATH/Packages @@ -103,10 +103,10 @@ runs: git add . - git config user.name "Github Service Bot" + git config user.name "GitHub Service Bot" git config user.email "githubservice@qti.qualcomm.com" - git commit -s -m "Uploaded Package ${{env.BUILT_PACKAGE_NAME}} at version ${{env.BUILT_PACKAGE_VERSION}} for distro ${{env.UBUNTU_CODENAME}}" + git commit -s -m "Uploaded Package ${{env.BUILT_PACKAGE_NAME}} at version ${{env.BUILT_PACKAGE_VERSION}} for distro ${{env.UBUNTU_DISTRIBUTION}}" git remote set-url origin https://x-access-token:${{inputs.token}}@github.com/${{env.REPO_NAME}}.git diff --git a/.github/workflows/qcom-build-pkg-reusable-workflow.yml b/.github/workflows/qcom-build-pkg-reusable-workflow.yml index 48d8960..9967be7 100644 --- a/.github/workflows/qcom-build-pkg-reusable-workflow.yml +++ b/.github/workflows/qcom-build-pkg-reusable-workflow.yml @@ -18,8 +18,8 @@ on: required: true default: debian/latest - ubuntu-codename: - description: The ubuntu codename to build for. Ex noble, jammy, etc + ubuntu-distribution: + description: The ubuntu distribution to build for. 
Ex noble, jammy, etc type: string default: noble @@ -43,6 +43,11 @@ on: type: boolean default: false + runner: + description: The runner to use for the build + type: string + default: ubuntu-latest + secrets: TOKEN: required: true @@ -55,29 +60,25 @@ env: REPO_URL: https://qualcomm-linux.github.io/pkg-oss-staging-repo/ REPO_NAME: qualcomm-linux/pkg-oss-staging-repo - UBUNTU_CODENAME: ${{inputs.ubuntu-codename}} - DISTRO: ubuntu - ARCH: arm64 + UBUNTU_DISTRIBUTION: ${{inputs.ubuntu-distribution}} jobs: build-debian-package: - runs-on: [self-hosted, Linux, ARM64] -# runs-on: [self-hosted, lecore-stg-u2404-arm64-xlrg-od-ephem] - -# container: -# image: ubuntu:noble -# options: --volume /srv/chroot:/srv/chroot + runs-on: ${{inputs.runner}} - steps: + defaults: + run: + shell: bash -# - name: Install dependencies -# run: | -# apt-get update -# apt-get install -y git git-buildpackage sbuild debootstrap tree + container: + image: ghcr.io/qualcomm-linux/pkg-builder:${{inputs.runner == 'ubuntu-latest' && 'amd64' || 'arm64'}}-latest + options: --privileged + credentials: + username: ${{vars.DEB_PKG_BOT_CI_USERNAME}} + password: ${{secrets.TOKEN}} - - name: Ensure Workspace Is Clean - run: rm -rf * + steps: - name: Checkout qcom-build-utils uses: actions/checkout@v4 @@ -106,7 +107,7 @@ jobs: pkg-dir: package-repo build-dir: build-area run-lintian: ${{inputs.run-lintian}} - + - name: Run ABI (Application Binary Interface) Check if: ${{inputs.run-abi-checker == true}} uses: ./qcom-build-utils/.github/actions/abi_checker diff --git a/.github/workflows/qcom-container-build-and-upload.yml b/.github/workflows/qcom-container-build-and-upload.yml index 524864f..8a13c1f 100644 --- a/.github/workflows/qcom-container-build-and-upload.yml +++ b/.github/workflows/qcom-container-build-and-upload.yml @@ -1,35 +1,68 @@ name: Container Build And Upload description: | - Builds and uploads to GHCR (GitHub Container Registry) the container used to build the packages - + Builds and uploads to 
GHCR (GitHub Container Registry) the container used to build the Qualcomm debian packages.
+  This workflow assumes the build architecture is amd64 (x86_64) since the github's 'ubuntu-latest' runs-on tag
+  is used. Using docker's buildx, the Dockerfile in this repo's docker/ folder will be built for amd64 and cross-compiled
+  for arm64.
+
 on:
   workflow_dispatch:
     inputs:
-      test:
-        description: no description
-        type: boolean
-        default: false
+      version:
+        description: The version name to be appended to the image name, default is 'latest'
+        type: string
+        default: latest
 permissions:
   contents: read
   security-events: write
 env:
-  GITHUB_SERVICE_BOT_USERNAME: "qcom-service-bot"
-  QCOM_ORG_NAME: "qualcomm-linux"
   IMAGE_NAME: "pkg-builder"
 jobs:
-  container-build-and-upload:
+  build-image-amd64:
     runs-on: ubuntu-latest
     steps:
+      - name: Checkout Dockerfile
+        uses: actions/checkout@v4
+        with:
+          ref: ${{github.head_ref}}
+          sparse-checkout: docker/Dockerfile.amd64
+          sparse-checkout-cone-mode: false # single file -> false
+
+      - name: Authenticate to GHCR
+        run: echo ${{ secrets.DEB_PKG_BOT_CI_TOKEN }} | docker login ghcr.io -u ${{ vars.DEB_PKG_BOT_CI_USERNAME }} --password-stdin
+
+      - name: Build and Tag the Image
+        run: docker build -f docker/Dockerfile.amd64 -t ghcr.io/qualcomm-linux/${{env.IMAGE_NAME}}:amd64-${{inputs.version}} ./docker
+
+      - name: Push
+        run: docker push ghcr.io/qualcomm-linux/${{env.IMAGE_NAME}}:amd64-${{inputs.version}}
+
+  build-image-arm64:
+
+    runs-on: ["self-hosted", "lecore-prd-u2404-arm64-xlrg-od-ephem"]
+
+    steps:
+
+      - name: Checkout Dockerfile
+        uses: actions/checkout@v4
+        with:
+          ref: ${{github.head_ref}}
+          sparse-checkout: docker/Dockerfile.arm64
+          sparse-checkout-cone-mode: false # single file -> false
+
     - name: Authenticate to GHCR
-      run: echo ${{ secrets.DEB_PKG_BOT_CI_TOKEN }} | docker login ghcr.io -u ${{env.GITHUB_SERVICE_BOT_USERNAME}} --password-stdin
+      run: echo ${{ secrets.DEB_PKG_BOT_CI_TOKEN }} | docker login ghcr.io 
-u ${{ vars.DEB_PKG_BOT_CI_USERNAME }} --password-stdin
     - name: Build and Tag the Image
-      run: docker build -t ghcr.io/${{env.QCOM_ORG_NAME}}/${{env.IMAGE_NAME}}:latest .
\ No newline at end of file
+      run: docker build -f docker/Dockerfile.arm64 -t ghcr.io/qualcomm-linux/${{env.IMAGE_NAME}}:arm64-${{inputs.version}} ./docker
+
+      - name: Push
+      run: docker push ghcr.io/qualcomm-linux/${{env.IMAGE_NAME}}:arm64-${{inputs.version}}
diff --git a/.github/workflows/qcom-promote-upstream-reusable-workflow.yml b/.github/workflows/qcom-promote-upstream-reusable-workflow.yml
index 3d8e0cf..de1980c 100644
--- a/.github/workflows/qcom-promote-upstream-reusable-workflow.yml
+++ b/.github/workflows/qcom-promote-upstream-reusable-workflow.yml
@@ -41,30 +41,25 @@ env:
   NORMALIZED_VERSION: ""
   DISTRIBUTION: noble
-  #TODOOOOOOO : Check if a PR branch already exist. This wwould mean that someone or something triggered the
+  #TODO : Check if a PR branch already exist. This would mean that someone or something triggered the
   # promotion more than once before it was merged
 jobs:
   promote-upstream-version:
-    runs-on: [self-hosted, Linux, ARM64]
-#    runs-on: [self-hosted, lecore-stg-u2404-arm64-xlrg-od-ephem]
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash
-#    container:
-#      image: ubuntu:noble
+    container:
+      image: ghcr.io/qualcomm-linux/pkg-builder:amd64-latest
+      credentials:
+        username: ${{ vars.DEB_PKG_BOT_CI_USERNAME }}
+        password: ${{ secrets.TOKEN }}
-    steps:
-
-#    - name: Install dependencies
-#      run: |
-#        apt-get update
-#        apt-get install -y git git-buildpackage
+    steps:
-    - name: Check Dependencies
-      run: |
-        git --version
-        gbp --version
-        gh --version
     - name: Ensure Workspace Is Clean
       run: rm -rf *
@@ -100,7 +95,7 @@ jobs:
         path: ./package-repo
         fetch-depth: 0
-    - name: Fetch All Branches and Tags
+    - name: Show branches/tags and checkout debian/upstream latest
       run: |
         cd ./package-repo
@@ -139,7 +134,7 @@ jobs:
 
-    - name: Add Upstream Link As A Remote And Fetch Tags 
run: | cd ./package-repo - git remote add upstream-source git@github.com:${{inputs.upstream-repo}}.git + git remote add upstream-source https://x-access-token:${{secrets.TOKEN}}@github.com/${{inputs.upstream-repo}}.git git fetch upstream-source "+refs/tags/*:refs/tags/*" - name: Clone Upstream Repo At Specified Tag @@ -174,8 +169,8 @@ jobs: run: | cd ./package-repo - git config user.name "Github Service Bot" - git config user.email "githubservice@qti.qualcomm.com" + git config user.name "${{vars.DEB_PKG_BOT_CI_NAME}}" + git config user.email "${{vars.DEB_PKG_BOT_CI_EMAIL}}" git checkout -b debian/pr/${{env.NORMALIZED_VERSION}}-1 @@ -206,13 +201,12 @@ jobs: -m "Filtered out .git, .github and debian from upstram, and preserved .github debian/latest" \ upstream/latest - #TODO : Think about how to promote the distro -1 version, and if we push the tag or not - name: Promote Changelog run: | cd ./package-repo - export DEBFULLNAME="Github Service Bot" - export DEBEMAIL=githubservice@qti.qualcomm.com + export DEBFULLNAME="${{vars.DEB_PKG_BOT_CI_NAME}}" + export DEBEMAIL="${{vars.DEB_PKG_BOT_CI_EMAIL}}" # use ignore branch because we are not on default debian branch gbp dch \ diff --git a/.github/workflows/qcom-upstream-pr-pkg-build-reusable-workflow.yml b/.github/workflows/qcom-upstream-pr-pkg-build-reusable-workflow.yml index dcd1d16..33c22cc 100644 --- a/.github/workflows/qcom-upstream-pr-pkg-build-reusable-workflow.yml +++ b/.github/workflows/qcom-upstream-pr-pkg-build-reusable-workflow.yml @@ -34,6 +34,11 @@ on: type: boolean default: false + runner: + description: The runner to use for the build + type: string + default: ubuntu-latest + secrets: TOKEN: required: true @@ -46,9 +51,7 @@ env: # For ABI checker REPO_URL: https://qualcomm-linux.github.io/pkg-oss-staging-repo/ - UBUNTU_CODENAME: noble #TODO change this to a param - DISTRO: ubuntu - ARCH: arm64 + UBUNTU_DISTRIBUTION: noble #TODO change this to a param upstream_version: "" distro_revision: "" @@ -56,22 +59,20 
@@ env: jobs: pkg-build-pr-check: - runs-on: [self-hosted, Linux, ARM64] -# runs-on: [self-hosted, lecore-stg-u2404-arm64-xlrg-od-ephem] + runs-on: ${{inputs.runner}} -# container: -# image: ubuntu:noble -# options: --volume /srv/chroot:/srv/chroot - - steps: + defaults: + run: + shell: bash -# - name: Install dependencies -# run: | -# apt-get update -# apt-get install -y git git-buildpackage sbuild debootstrap tree + container: + image: ghcr.io/qualcomm-linux/pkg-builder:${{inputs.runner == 'ubuntu-latest' && 'amd64' || 'arm64'}}-latest + options: --privileged + credentials: + username: ${{vars.DEB_PKG_BOT_CI_USERNAME}} + password: ${{secrets.TOKEN}} - - name: Ensure Workspace Is Clean - run: rm -rf * + steps: - name: Print caller info run: | @@ -126,15 +127,15 @@ jobs: run: | cd ./package-repo - git config user.name "Github Service Bot" - git config user.email "githubservice@qti.qualcomm.com" + git config user.name "${{vars.DEB_PKG_BOT_CI_NAME}}" + git config user.email "${{vars.DEB_PKG_BOT_CI_EMAIL}}" git checkout upstream/latest git checkout debian/latest git checkout -b debian/upstream-pr version=$(dpkg-parsechangelog --show-field Version) - + # Split into upstream and distro revision upstream_version="${version%%-*}" # Everything before the first dash distro_revision="${version#*-}" # Everything after the first dash @@ -164,7 +165,7 @@ jobs: --filter=debian \ --no-merge \ ../upstream-repo - + git merge \ --allow-unrelated-histories \ --signoff \ @@ -176,14 +177,14 @@ jobs: run: | cd ./package-repo - export DEBFULLNAME="Github Service Bot" - export DEBEMAIL=githubservice@qti.qualcomm.com + export DEBFULLNAME="${{vars.DEB_PKG_BOT_CI_NAME}}" + export DEBEMAIL="${{vars.DEB_PKG_BOT_CI_EMAIL}}" # use ignore branch because we are not on default debian branch # use -b to ignore new version is less than current version. 
This happens because of the ~pr# gbp dch \ --ignore-branch \ - --distribution=${{env.UBUNTU_CODENAME}} \ + --distribution=${{env.UBUNTU_DISTRIBUTION}} \ --new-version=${{env.upstream_version}}~pr${{inputs.pr-number}}-${{env.distro_revision}} \ --dch-opt="-b" diff --git a/docker/Dockerfile.amd64 b/docker/Dockerfile.amd64 new file mode 100644 index 0000000..bbdd9e6 --- /dev/null +++ b/docker/Dockerfile.amd64 @@ -0,0 +1,49 @@ +# This dockerfile builds is assumed to be built by an amd64 machine FOR an amd64 machine, which will contain +# a two chroots (noble and questing) to cross compile debian packages for arm64. + +# Use an official Ubuntu base image +FROM ubuntu:24.04 + +ARG TARGETARCH +ENV TARGET_ARCH=${TARGETARCH} + +# Prevent interactive prompts during package installation +ENV DEBIAN_FRONTEND=noninteractive + +# Update package list and install dependenciesquesting +RUN apt-get update && apt-get install -y \ + curl \ + git \ + git-buildpackage \ + sbuild \ + schroot \ + ubuntu-dev-tools \ + crossbuild-essential-arm64 \ + apt-cacher-ng \ + gh \ + debootstrap \ + tree \ + build-essential \ + debhelper \ + abigail-tools \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +RUN sbuild-createchroot --arch=amd64 --components=main,universe noble /srv/chroot/noble http://archive.ubuntu.com/ubuntu +RUN sbuild-createchroot --arch=amd64 --components=main,universe questing /srv/chroot/questing http://archive.ubuntu.com/ubuntu + +RUN chroot /srv/chroot/noble /bin/bash -c "echo 'deb [arch=amd64] http://archive.ubuntu.com/ubuntu noble main restricted universe multiverse' > /etc/apt/sources.list" +RUN chroot /srv/chroot/noble /bin/bash -c "echo 'deb [arch=amd64] http://archive.ubuntu.com/ubuntu noble-updates main restricted universe multiverse' >> /etc/apt/sources.list" +RUN chroot /srv/chroot/noble /bin/bash -c "echo 'deb [arch=amd64] http://archive.ubuntu.com/ubuntu noble-security main restricted universe multiverse' >> /etc/apt/sources.list" + +RUN chroot 
/srv/chroot/noble /bin/bash -c "echo 'deb [arch=arm64] http://ports.ubuntu.com/ubuntu-ports noble main restricted universe multiverse' > /etc/apt/sources.list.d/arm64-ports.list"
+RUN chroot /srv/chroot/noble /bin/bash -c "echo 'deb [arch=arm64] http://ports.ubuntu.com/ubuntu-ports noble-updates main restricted universe multiverse' >> /etc/apt/sources.list.d/arm64-ports.list"
+RUN chroot /srv/chroot/noble /bin/bash -c "echo 'deb [arch=arm64] http://ports.ubuntu.com/ubuntu-ports noble-security main restricted universe multiverse' >> /etc/apt/sources.list.d/arm64-ports.list"
+
+RUN chroot /srv/chroot/noble /bin/bash -c 'dpkg --add-architecture arm64 && apt-get update && apt-get install -y crossbuild-essential-arm64'
+
+# Set working directory
+WORKDIR /workspace
+
+# Default command
+CMD [ "bash" ]
\ No newline at end of file
diff --git a/docker/Dockerfile.arm64 b/docker/Dockerfile.arm64
new file mode 100644
index 0000000..aa79888
--- /dev/null
+++ b/docker/Dockerfile.arm64
@@ -0,0 +1,37 @@
+# This Dockerfile is assumed to be built by an arm64 machine FOR an arm64 machine, and will contain
+# two chroots (noble and questing) to natively build for arm64. 
+ +# Use an official Ubuntu base image +FROM ubuntu:24.04 + +ARG TARGETARCH +ENV TARGET_ARCH=${TARGETARCH} + +# Prevent interactive prompts during package installation +ENV DEBIAN_FRONTEND=noninteractive + +# Update package list and install dependenciesquesting +RUN apt-get update && apt-get install -y \ + curl \ + git \ + git-buildpackage \ + sbuild \ + schroot \ + apt-cacher-ng \ + gh \ + debootstrap \ + tree \ + build-essential \ + debhelper \ + abigail-tools \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +RUN sbuild-createchroot --arch=arm64 --components=main,universe noble /srv/chroot/noble http://ports.ubuntu.com +RUN sbuild-createchroot --arch=arm64 --components=main,universe questing /srv/chroot/questing http://ports.ubuntu.com + +# Set working directory +WORKDIR /workspace + +# Default command +CMD [ "bash" ] \ No newline at end of file diff --git a/scripts/helpers.py b/scripts/helpers.py index 807edb1..40869cf 100644 --- a/scripts/helpers.py +++ b/scripts/helpers.py @@ -18,136 +18,8 @@ import subprocess import glob from pathlib import Path -from git import Repo -from color_logger import logger - -def check_and_append_line_in_file(file_path, line_to_check, append_if_missing=False): - """ - Checks if a specific line exists in a file and appends it if it is missing. - - Args: - ----- - - file_path (str): The path to the file to check. - - line_to_check (str): The line to check for in the file. - - append_if_missing (bool): If True, appends the line to the file if it is missing. - - Returns: - -------- - - bool: True if the line exists or was appended, False if the line does not exist and append_if_missing is False. 
- """ - if not os.path.exists(file_path): - logger.error(f"{file_path} does not exist.") - exit(1) - - with open(file_path, "r") as file: - lines = file.readlines() - - for line in lines: - if line.strip() == line_to_check.strip(): - return True - - if append_if_missing: - with open(file_path, "a") as file: - file.write(f"\n{line_to_check}\n") - return True - - return False - -def parse_debs_manifest(manifest_path): - """ - Parses a manifest file and returns a dictionary of module names and their corresponding versions. - """ - DEBS = [] - user_manifest = Path(manifest_path) - if not user_manifest.is_file() or not user_manifest.name.endswith('.manifest'): - raise ValueError(f"Provided manifest path '{user_manifest}' is not a valid '.manifest' file.") - if os.path.isfile(manifest_path): - with open(manifest_path, 'r') as f: - for line in f: - line = line.strip() - if line and not line.startswith('#'): - parts = list(line.split('\t')) - DEBS.append({ - 'package': parts[0], - 'version': parts[1] if len(parts) > 1 else None, - }) - return DEBS - else: - print(f"Manifest file {manifest_path} not found.") - return None - -def run_command(command, check=True, get_object=False, cwd=None): - """ - Executes a shell command and returns the output, logging any errors. - - Args: - ----- - - command (str): The shell command to execute. - - check (bool): If True, raises an exception on a non-zero exit code. - - get_object (bool): If True, returns the result object instead of the output string. - - cwd (str): The working directory to execute the command in. - - Returns: - -------- - - str: The standard output of the command. - - Raises: - ------- - - Exception: If the command fails and check is True. 
- """ - - logger.debug(f'Running command: {command}') - - try: - result = subprocess.run(command, shell=True, check=check, capture_output=True, text=True, cwd=cwd) - - except subprocess.CalledProcessError as e: - logger.error(f"Command failed with return value: {e.returncode}") - logger.error(f"stderr: {e.stderr.strip() if e.stderr else str(e)}") - logger.error(f"stdout: {e.stdout.strip()}") - raise Exception(e) - - stderr = result.stderr.strip() - if stderr: - if result.returncode == 0: - logger.debug(f"Successful return value, yet there is content in stderr: {stderr}") - else: - logger.error(f"Error: {stderr}") - - return result.stdout.strip() - -def run_command_for_result(command): - """ - Executes a shell command and returns the output and return code in a dictionary. - - Args: - ----- - - command (str): The shell command to execute. - - Returns: - -------- - - dict: A dictionary containing: - - "output" (str): The standard output of the command. - - "returncode" (int): The return code of the command. - """ - command = command.strip() - logger.debug(f'Running for result: {command}') - try: - result = subprocess.check_output(command, shell=True, stderr=subprocess.sys.stdout) - return {"output": result.decode("utf-8").strip(), "returncode": 0} - except subprocess.CalledProcessError as e: - return {"output": e.output.decode("utf-8", errors="ignore").strip(), "returncode": e.returncode} - -def set_env(key, value): - """ - Sets an environment variable. - Args: - ----- - - key (str): The name of the environment variable. - - value (str): The value to set for the environment variable. - """ - os.environ[str(key)] = str(value) +from color_logger import logger def cleanup_directory(dirname): """ @@ -168,28 +40,6 @@ def cleanup_directory(dirname): logger.error(f"Error cleaning directory {dirname}: {e}") raise Exception(e) -def cleanup_file(file_path): - """ - Deletes a specified file. - - Args: - ----- - - file_path (str): The path to the file to delete. 
- - Raises: - ------- - - Exception: If an error occurs while trying to delete the file. - """ - - logger.debug(f"Cleaning file {file_path}") - - try: - if os.path.exists(file_path): - os.remove(file_path) - except Exception as e: - logger.error(f"Error cleaning file {file_path}: {e}") - raise Exception(e) - def create_new_directory(dirname, delete_if_exists=True): """ Creates a new directory, optionally deleting it if it already exists. @@ -215,231 +65,3 @@ def create_new_directory(dirname, delete_if_exists=True): logger.error(f"Error creating directory {dirname}: {e}") exit(1) -def create_new_file(filepath, delete_if_exists=True) -> str: - """ - Creates a new file, optionally deleting it if it already exists. - - Args: - ----- - - filepath (str): The path to the file to create. - - delete_if_exists (bool): If True, deletes the file if it already exists. - - Returns: - -------- - - str: The path to the created file. - - Raises: - ------- - - SystemExit: If an error occurs while creating the file. - """ - try: - if os.path.exists(filepath): - # Check if the file exists, if so don't do anything - return filepath - # Create the destination directory - with open(filepath, 'w') as f: pass - return filepath - except Exception as e: - logger.error(f"Error creating file {filepath}: {e}") - exit(1) - -def mount_img(IMG_PATH, MOUNT_DIR, MOUNT_HOST_FS=False, MOUNT_IMG=True): - """ - Mounts an image file to a specified directory, with optional host filesystem mounts. - - Args: - ----- - - IMG_PATH (str): The path to the image file to mount. - - MOUNT_DIR (str): The directory to mount the image to. - - MOUNT_HOST_FS (bool): If True, mounts the host filesystem directories. - - MOUNT_IMG (bool): If True, mounts the image file. 
- """ - if MOUNT_IMG: - create_new_directory(MOUNT_DIR) - run_command(f"mount {IMG_PATH} {MOUNT_DIR}") - if MOUNT_HOST_FS: - for direc in HOST_FS_MOUNT: - run_command(f"mount --bind /{direc} {MOUNT_DIR}/{direc}") - -def umount_dir(MOUNT_DIR, UMOUNT_HOST_FS=False): - """ - Unmounts a specified directory and optionally unmounts host filesystem mounts. - - If the directory is not mounted, (ie, return code 32 from umount) then it is - silently ignored. - - Args: - ----- - - MOUNT_DIR (str): The directory to unmount. - - UMOUNT_HOST_FS (bool): If True, unmounts the host filesystem directories. - """ - - logger.debug(f"umount dir {MOUNT_DIR}") - - if UMOUNT_HOST_FS: - for direc in HOST_FS_MOUNT: - result = subprocess.run(f"umount -l {MOUNT_DIR}/{direc}", - shell=True, capture_output=True, text=True) - - if result.returncode != 0 and result.returncode != 32: - logger.error(f"Failed to unmount {MOUNT_DIR}/{direc}: {result.stderr}") - - result = subprocess.run(f"umount -l {MOUNT_DIR}", - shell=True, capture_output=True, text=True) - if result.returncode != 0 and result.returncode != 32: - logger.error(f"Failed to unmount {MOUNT_DIR}: {result.stderr}") - -def print_build_logs(directory): - """ - Prints the contents of build log files in a specified directory. - - Args: - ----- - - directory (str): The path to the directory containing build logs. 
- """ - logger.info("===== Build Logs Start ======") - build_logs = [] - for entry in os.listdir(directory): - full_path = os.path.join(directory, entry) - if (os.path.islink(full_path) and entry.endswith(".build")) or entry.endswith(".mmdebstrap.build"): - build_logs.append(entry) - for entry in build_logs: - full_path = os.path.join(directory, entry) - logger.info(f"===== {full_path} =====") - content = None - with open(full_path, 'r') as log_file: - content = log_file.read() - logger.error(content) - logger.info("===== Build Logs End ======") - -def start_local_apt_server(dir): - """ - Starts a local APT server in the specified directory and returns the APT repository line. - - Args: - ----- - - dir (str): The directory to serve as the APT repository. - - Returns: - -------- - - str: The APT repository line to add to sources.list. - """ - - server = AptServer(directory=dir, port=random.randint(7500, 8500)) - server.start() - - return f"deb [trusted=yes arch=arm64] http://localhost:{server.port} stable main" - -def build_deb_package_gz(dir, start_server=True) -> str: - """ - Builds a Debian package and creates a compressed Packages file, optionally starting a local APT server. - - Args: - ----- - - dir (str): The directory where the package is built. - - start_server (bool): If True, starts a local APT server after building the package. - - Returns: - -------- - - str: The APT repository line if a server is started, None otherwise. - - Raises: - ------- - - Exception: If an error occurs while creating the Packages file. - """ - - packages_dir = os.path.join(dir, 'dists', 'stable', 'main', 'binary-arm64') - packages_path = os.path.join(packages_dir, "Packages") - - try: - os.makedirs(packages_dir, exist_ok=True) - - cmd = f'dpkg-scanpackages -m . 
> {packages_path}' - - result = subprocess.run(cmd, shell=True, cwd=dir, check=False, capture_output=True, text=True) - - if result.returncode != 0: - logger.error(f"Error running : {cmd}") - logger.error(f"stdout : {result.stdout}") - logger.error(f"stderr : {result.stderr}") - - raise Exception(result.stderr) - - # Even with a successful exit code, dpkg-scanpackages still outputs the number of entries written to stderr logger.debug(result.stderr.strip()) - - - cmd = f"gzip -k -f {packages_path}" - result = subprocess.run(cmd, shell=True, cwd=dir, check=False, capture_output=True, text=True) - - if result.returncode != 0: - logger.error(f"Error running : {cmd}") - logger.error(f"stdout : {result.stdout}") - logger.error(f"stderr : {result.stderr}") - - raise Exception(result.stderr) - - logger.debug(f"Packages file created at {packages_path}.gz") - - except Exception as e: - logger.error(f"Error creating Packages file in {dir} : {e}") - raise Exception(e) - - if start_server: - return start_local_apt_server(dir) - return None - - -def pull_debs_wget(manifest_file_path, out_dir,DEBS_to_download_list,base_url): - """ - Downloads Debian packages from a remote repository using wget. - - Args: - ----- - - manifest_file_path (str): Path to the manifest file containing package versions. - - out_dir (str): Directory where downloaded packages will be saved. - - DEBS_to_download_list (list): List of package name prefixes to download. - - base_url (str): Base URL of the repository to download packages from. - - Returns: - -------- - - int: Number of packages successfully downloaded. - - Raises: - ------- - - Exception: If an error occurs while downloading packages. 
- """ - # Read manifest file - # Parse manifest into a dictionary - with open(manifest_file_path, 'r') as f: - manifest_text = f.read() - - # Parse manifest into a dictionary - version_map = {} - for line in manifest_text.strip().splitlines(): - if not line.strip(): - continue - parts = line.split() - if len(parts) >= 2: - name, version = parts[0], parts[1] - version_map[name] = version - - - # Generate wget links and download - os.makedirs(out_dir, exist_ok=True) - for module in DEBS_to_download_list: - for name, version in version_map.items(): - if name.startswith(module): - first_letter = name[0] - deb_name = f"{name}_{version}_arm64.deb" - url = f"{base_url}/{first_letter}/{name}/{deb_name}" - output_path = os.path.join(out_dir,name,deb_name) - create_new_directory(os.path.join(out_dir,name)) - # Construct wget command - wget_cmd = ["wget", "--no-check-certificate", url, "-O", output_path] - try: - logger.info(f"Downloading {url}...") - subprocess.run(wget_cmd, check=True) - logger.info(f"Saved to {output_path}") - except subprocess.CalledProcessError as e: - logger.error(f"error: Failed to download {url}: {e}") - break # Stop after first match diff --git a/scripts/prep_chroot_env.py b/scripts/prep_chroot_env.py deleted file mode 100755 index 142bddc..0000000 --- a/scripts/prep_chroot_env.py +++ /dev/null @@ -1,100 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# -# SPDX-License-Identifier: BSD-3-Clause-Clear - -""" -prep_chroot_env.py - -Checks that the resulting chroot is present. Prepares it otherwise. - -Running as root is necessary to create the chroot. -""" - -import os -import sys -import argparse -import subprocess - -from color_logger import logger - -def parse_arguments(): - parser = argparse.ArgumentParser(description="Prepares a chroot environment") - parser.add_argument("--arch", - required=False, - default="arm64", - help="The architecture of the chroot environment. 
(default: arm64)") - - parser.add_argument("--os-codename", - required=True, - help="The codename of the OS, e.g. noble, bionic, focal, etc.") - - parser.add_argument("--suffix", - required=False, - default="ubuntu", - help="The suffix for the chroot name. (default: ubuntu)") - - args = parser.parse_args() - - return args - -def main(): - - args = parse_arguments() - - logger.debug(f"args: {args}") - - OS_CODENAME = args.os_codename - ARCH = args.arch - SUFFIX = args.suffix - CHROOT_NAME = OS_CODENAME + "-" + ARCH + "-" + SUFFIX - - CHROOT_DIR = "/srv/chroot" - DEBIAN_MIRROR = "http://ports.ubuntu.com" - - logger.debug(f"Checking if chroot container '{CHROOT_NAME}' is already registered") - - cmd = f"schroot -l | grep chroot:{CHROOT_NAME}" - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - - if result.returncode == 0: - logger.info(f"Schroot container {CHROOT_NAME} already exists. Skipping creation.") - sys.exit(0) - - logger.warning(f"Schroot container '{CHROOT_NAME}' does not exist, creating it for the first time.") - - if os.geteuid() != 0: - logger.critical("Creating a schroot environment requires root privileges") - logger.critical("Please use sudo. 
Aborting.") - sys.exit(1) - - logger.warning(f"The chroot will be created in {CHROOT_DIR}/{CHROOT_NAME}") - logger.warning(f"Its config will be stored as /etc/schroot/chroot.d/{CHROOT_NAME}-xxxx") - - # this command creates a chroot environment that will be named "{DIST}-{ARCH}-{SUFFIX}" (note the explicit '-' in --chroot-suffix) - # We supply our own suffix, otherwise sbuild will use 'sbuild' - cmd = f"sbuild-createchroot --arch={ARCH}" \ - f" --chroot-suffix=-{SUFFIX}" \ - f" --components=main,universe" \ - f" {OS_CODENAME}" \ - f" {CHROOT_DIR}/{CHROOT_NAME}" \ - f" {DEBIAN_MIRROR}" - - logger.debug(f"Creating schroot environment with command: {cmd}") - - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - - if result.returncode != 0: - logger.critical("Error creating schroot environment!") - logger.critical(f"stderr: {result.stderr}") - logger.critical(f"stdout: {result.stdout}") - sys.exit(1) - - - logger.info(f"Schroot environment {CHROOT_NAME} created successfully.") - - sys.exit(0) - -if __name__ == "__main__": - main()