Skip to content

Commit

Permalink
Add pre-upload Python runtime tests
Browse files Browse the repository at this point in the history
We already test the Python runtimes via the buildpack's own tests,
however, these are run once the compiled Python runtime has already
been uploaded to S3.

This adds tests as part of the compile/package/upload workflow itself.

In addition, these tests now also ensure that all of the optional stdlib
modules have been built - since otherwise the upstream CPython
configure scripts will happily skip modules where libraries/headers
are missing, with only a small warning printed to the logs.
  • Loading branch information
edmorley committed May 9, 2024
1 parent c233dbd commit dbcfbeb
Show file tree
Hide file tree
Showing 2 changed files with 63 additions and 3 deletions.
21 changes: 18 additions & 3 deletions .github/workflows/build_python_runtime.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,13 @@ jobs:
uses: actions/checkout@v4
- name: Build Docker image
run: docker build --platform="linux/amd64" --pull --tag buildenv --build-arg=STACK_VERSION builds/
- name: Build and package Python runtime
- name: Compile and package Python runtime
run: docker run --rm --volume="${PWD}/upload:/tmp/upload" buildenv ./build_python_runtime.sh "${{ inputs.python_version }}"
- name: Test Python runtime
run: |
RUN_IMAGE='heroku/heroku:${{ env.STACK_VERSION }}'
ARCHIVE_FILENAME='python-${{ inputs.python_version }}-ubuntu-${{ env.STACK_VERSION }}.04-amd64.tar.zst'
docker run --rm --volume="${PWD}/upload:/upload:ro" --volume="${PWD}/builds:/builds:ro" "${RUN_IMAGE}" /builds/test_python_runtime.sh "/upload/${ARCHIVE_FILENAME}"
- name: Upload Python runtime archive to S3
if: (!inputs.dry_run)
run: aws s3 sync ./upload "s3://${S3_BUCKET}"
Expand All @@ -63,8 +68,13 @@ jobs:
uses: actions/checkout@v4
- name: Build Docker image
run: docker build --platform="linux/amd64" --pull --tag buildenv --build-arg=STACK_VERSION builds/
- name: Build and package Python runtime
- name: Compile and package Python runtime
run: docker run --rm --volume="${PWD}/upload:/tmp/upload" buildenv ./build_python_runtime.sh "${{ inputs.python_version }}"
- name: Test Python runtime
run: |
RUN_IMAGE='heroku/heroku:${{ env.STACK_VERSION }}'
ARCHIVE_FILENAME='python-${{ inputs.python_version }}-ubuntu-${{ env.STACK_VERSION }}.04-amd64.tar.zst'
docker run --rm --volume="${PWD}/upload:/upload:ro" --volume="${PWD}/builds:/builds:ro" "${RUN_IMAGE}" /builds/test_python_runtime.sh "/upload/${ARCHIVE_FILENAME}"
- name: Upload Python runtime archive to S3
if: (!inputs.dry_run)
run: aws s3 sync ./upload "s3://${S3_BUCKET}"
Expand Down Expand Up @@ -96,8 +106,13 @@ jobs:
rm -rf awscliv2.zip ./aws/
- name: Build Docker image
run: docker build --platform="linux/${{ matrix.arch }}" --pull --tag buildenv --build-arg=STACK_VERSION builds/
- name: Build and package Python runtime
- name: Compile and package Python runtime
run: docker run --rm --volume="${PWD}/upload:/tmp/upload" buildenv ./build_python_runtime.sh "${{ inputs.python_version }}"
- name: Test Python runtime
run: |
RUN_IMAGE='heroku/heroku:${{ env.STACK_VERSION }}'
ARCHIVE_FILENAME='python-${{ inputs.python_version }}-ubuntu-${{ env.STACK_VERSION }}.04-${{ matrix.arch }}.tar.zst'
docker run --rm --volume="${PWD}/upload:/upload:ro" --volume="${PWD}/builds:/builds:ro" "${RUN_IMAGE}" /builds/test_python_runtime.sh "/upload/${ARCHIVE_FILENAME}"
- name: Upload Python runtime archive to S3
if: (!inputs.dry_run)
run: aws s3 sync ./upload "s3://${S3_BUCKET}"
45 changes: 45 additions & 0 deletions builds/test_python_runtime.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
#!/usr/bin/env bash

# Smoke-tests a compiled/packaged Python runtime archive before it is uploaded:
# extracts it into a fresh location, then verifies relocation, dynamic-library
# resolution, and optional stdlib module coverage.

set -euo pipefail

archive_path="${1:?"Error: The filepath of the Python runtime archive must be specified as the first argument."}"

# Extract into a directory other than the prefix the runtime was originally
# installed to, proving the runtime is relocatable (the buildpacks rely on
# this). Python is built in shared mode, so the relocated binary (which itself
# contains very little) needs LD_LIBRARY_PATH set to locate `libpython`.
install_dir="$(mktemp -d)"
python_bin="${install_dir}/bin/python"
export LD_LIBRARY_PATH="${install_dir}/lib/"

tar --zstd --extract --verbose --file "${archive_path}" --directory "${install_dir}"

# The `python` alias (not just `python3`) must exist and be runnable.
"${python_bin}" --version

# Every bundled shared object must resolve all of its dynamically linked
# libraries in the run image (which ships fewer packages than the build image).
# `grep` succeeding here means unresolved libraries were printed above.
if find "${install_dir}" -name '*.so' -exec ldd '{}' + | grep 'not found'; then
	echo "The above dynamically linked libraries were not found!"
	exit 1
fi

# CPython's configure step silently skips optional stdlib modules whose system
# libraries/headers are missing (emitting only a small log warning), so check
# that each of them actually imports in the packaged runtime.
optional_stdlib_modules=(
	_uuid
	bz2
	ctypes
	curses
	dbm.gnu
	dbm.ndbm
	decimal
	lzma
	readline
	sqlite3
	ssl
	xml.parsers.expat
	zlib
)
# Join the module names with commas to form a single `import a,b,c` statement.
import_list="$(IFS=, ; echo "${optional_stdlib_modules[*]}")"
if ! "${python_bin}" -c "import ${import_list}"; then
	echo "The above optional stdlib module failed to import! Check the compile logs to see if it was skipped due to missing libraries/headers."
	exit 1
fi

0 comments on commit dbcfbeb

Please sign in to comment.