diff --git a/.ci/docker/build.sh b/.ci/docker/build.sh
index ad2f713466c..59d5daa8b8f 100755
--- a/.ci/docker/build.sh
+++ b/.ci/docker/build.sh
@@ -37,6 +37,10 @@ case "${IMAGE_NAME}" in
     ARM_SDK=yes
     CLANG_VERSION=12
     ;;
+  executorch-ubuntu-22.04-qnn-sdk)
+    QNN_SDK=yes
+    CLANG_VERSION=12
+    ;;
   executorch-ubuntu-22.04-clang12-android)
     LINTRUNNER=""
     CLANG_VERSION=12
@@ -59,6 +63,9 @@ cp ../../requirements-lintrunner.txt ./
 # with a new image hash when the content here is updated
 cp -r ../../examples/arm/ ./arm
 
+# Copy qnn setup script from root to here
+cp -r ../../backends/qualcomm/ ./qualcomm
+
 docker build \
   --no-cache \
   --progress=plain \
@@ -72,6 +79,7 @@ docker build \
   --build-arg "LINTRUNNER=${LINTRUNNER:-}" \
   --build-arg "BUILD_DOCS=${BUILD_DOCS}" \
   --build-arg "ARM_SDK=${ARM_SDK:-}" \
+  --build-arg "QNN_SDK=${QNN_SDK:-}" \
   --build-arg "ANDROID_NDK_VERSION=${ANDROID_NDK_VERSION:-}" \
   -f "${OS}"/Dockerfile \
   "$@" \
diff --git a/.ci/docker/ubuntu/Dockerfile b/.ci/docker/ubuntu/Dockerfile
index 449cd14b6b4..2aa9f24b677 100644
--- a/.ci/docker/ubuntu/Dockerfile
+++ b/.ci/docker/ubuntu/Dockerfile
@@ -82,5 +82,10 @@ COPY --chown=ci-user:ci-user ./arm /opt/arm
 # Set up ARM SDK if needed
 RUN if [ -n "${ARM_SDK}" ]; then git config --global user.email "ossci@example.com"; git config --global user.name "OSS CI"; bash /opt/arm/setup.sh --i-agree-to-the-contained-eula /opt/arm-sdk; chown -R ci-user:ci-user /opt/arm-sdk; fi
 
+ARG QNN_SDK
+COPY --chown=ci-user:ci-user ./qualcomm /opt/qualcomm
+# Set up QNN SDK if needed
+RUN if [ -n "${QNN_SDK}" ]; then git config --global user.email "ossci@example.com"; git config --global user.name "OSS CI"; fi
+
 USER ci-user
 CMD ["bash"]
diff --git a/.ci/scripts/build-qnn-sdk.sh b/.ci/scripts/build-qnn-sdk.sh
new file mode 100644
index 00000000000..d912069b06a
--- /dev/null
+++ b/.ci/scripts/build-qnn-sdk.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+set -ex
+
+build_qnn_backend() {
+  echo "Start building qnn backend."
+  export ANDROID_NDK_ROOT=/opt/ndk
+  export QNN_SDK_ROOT=/tmp/qnn/2.23.0.240531
+  export EXECUTORCH_ROOT="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.." && pwd)"
+
+  bash backends/qualcomm/scripts/build.sh --skip_aarch64 --job_number 2 --release
+}
+
+build_qnn_backend
diff --git a/.ci/scripts/setup-qnn-deps.sh b/.ci/scripts/setup-qnn-deps.sh
new file mode 100644
index 00000000000..3b39e1aafe3
--- /dev/null
+++ b/.ci/scripts/setup-qnn-deps.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+set -ex
+
+install_qnn() {
+  echo "Start installing qnn."
+  QNN_INSTALLATION_DIR=/tmp/qnn
+  mkdir -p "${QNN_INSTALLATION_DIR}"
+
+  curl -Lo /tmp/v2.23.0.24.06.24.zip "https://softwarecenter.qualcomm.com/api/download/software/qualcomm_neural_processing_sdk/v2.23.0.24.06.24.zip"
+  echo "Finishing downloading qnn sdk."
+  unzip -qo /tmp/v2.23.0.24.06.24.zip -d /tmp
+  echo "Finishing unzip qnn sdk."
+
+
+  # Print the content for manual verification
+  ls -lah "/tmp/qairt"
+  mv "/tmp/qairt"/* "${QNN_INSTALLATION_DIR}"
+  echo "Finishing installing qnn '${QNN_INSTALLATION_DIR}' ."
+
+  ls -lah "${QNN_INSTALLATION_DIR}"
+}
+
+install_qnn
diff --git a/.ci/scripts/test_llama.sh b/.ci/scripts/test_llama.sh
index ae795b12ab2..455c1f323fd 100644
--- a/.ci/scripts/test_llama.sh
+++ b/.ci/scripts/test_llama.sh
@@ -72,6 +72,25 @@ fi
 
 echo "COREML option ${COREML}"
 
+if [[ "${MODE}" =~ .*qnn.* ]]; then
+  QNN=ON
+  export EXECUTORCH_ROOT="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.." && pwd)"
+  export QNN_SDK_ROOT=/tmp/qnn/2.23.0.240531
+  export LD_LIBRARY_PATH="${QNN_SDK_ROOT}/lib/x86_64-linux-clang"
+  export PYTHONPATH=".."
+  cp schema/program.fbs exir/_serialize/program.fbs
+  cp schema/scalar_type.fbs exir/_serialize/scalar_type.fbs
+  cp -f build-x86/backends/qualcomm/PyQnnManagerAdaptor.cpython-310-x86_64-linux-gnu.so backends/qualcomm/python
+  cp -f build-x86/backends/qualcomm/PyQnnWrapperAdaptor.cpython-310-x86_64-linux-gnu.so backends/qualcomm/python
+
+else
+  QNN=OFF
+  QNN_SDK_ROOT=""
+fi
+
+echo "QNN option ${QNN}"
+echo "QNN_SDK_ROOT: ${QNN_SDK_ROOT}"
+
 if [[ -z "${BUCK:-}" ]]; then
   BUCK=buck2
 fi
@@ -96,6 +115,8 @@ cmake_install_executorch_libraries() {
         -DEXECUTORCH_BUILD_XNNPACK="$XNNPACK" \
         -DEXECUTORCH_BUILD_MPS="$MPS" \
         -DEXECUTORCH_BUILD_COREML="$COREML" \
+        -DEXECUTORCH_BUILD_QNN="$QNN" \
+        -DQNN_SDK_ROOT="$QNN_SDK_ROOT" \
         -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
         -Bcmake-out .
     cmake --build cmake-out -j9 --target install --config Debug
@@ -176,6 +197,9 @@ fi
 if [[ "${COREML}" == "ON" ]]; then
   EXPORT_ARGS="${EXPORT_ARGS} -kv -v --coreml --disable_dynamic_shape"
 fi
+if [[ "${QNN}" == "ON" ]]; then
+  EXPORT_ARGS="${EXPORT_ARGS} -kv -v --qnn --disable_dynamic_shape"
+fi
 
 # Add dynamically linked library location
 $PYTHON_EXECUTABLE -m examples.models.llama2.export_llama ${EXPORT_ARGS}
diff --git a/.github/workflows/docker-builds.yml b/.github/workflows/docker-builds.yml
index f773f3aca88..d256af7fcda 100644
--- a/.github/workflows/docker-builds.yml
+++ b/.github/workflows/docker-builds.yml
@@ -38,6 +38,7 @@ jobs:
           - docker-image-name: executorch-ubuntu-22.04-clang12
           - docker-image-name: executorch-ubuntu-22.04-linter
           - docker-image-name: executorch-ubuntu-22.04-arm-sdk
+          - docker-image-name: executorch-ubuntu-22.04-qnn-sdk
           - docker-image-name: executorch-ubuntu-22.04-clang12-android
     env:
       DOCKER_IMAGE: 308535385114.dkr.ecr.us-east-1.amazonaws.com/executorch/${{ matrix.docker-image-name }}
diff --git a/.github/workflows/trunk.yml b/.github/workflows/trunk.yml
index 9b28d26048b..86e44e647da 100644
--- a/.github/workflows/trunk.yml
+++ b/.github/workflows/trunk.yml
@@ -270,3 +270,38 @@ jobs:
       PYTHON_EXECUTABLE=python ${CONDA_RUN} bash examples/models/llama2/install_requirements.sh
       # Test llama2
       PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llama.sh stories110M.pt "${BUILD_TOOL}" "${DTYPE}" "${MODE}"
+
+
+  test-llama-runner-qnn-linux:
+    name: test-llama-runner-qnn-linux
+    uses: pytorch/test-infra/.github/workflows/linux_job.yml@main
+    strategy:
+      matrix:
+        dtype: [fp32]
+        build-tool: [cmake]
+        mode: [qnn]
+      fail-fast: false
+    with:
+      runner: linux.2xlarge
+      docker-image: executorch-ubuntu-22.04-qnn-sdk
+      submodules: 'true'
+      ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+      timeout: 900
+      script: |
+        # The generic Linux job chooses to use base env, not the one setup by the image
+        CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
+        conda activate "${CONDA_ENV}"
+
+        DTYPE=${{ matrix.dtype }}
+        BUILD_TOOL=${{ matrix.build-tool }}
+        MODE=${{ matrix.mode }}
+
+        PYTHON_EXECUTABLE=python bash .ci/scripts/setup-qnn-deps.sh
+        PYTHON_EXECUTABLE=python bash .ci/scripts/build-qnn-sdk.sh
+
+        # Setup executorch
+        PYTHON_EXECUTABLE=python bash .ci/scripts/setup-linux.sh buck2
+        # Install requirements for export_llama
+        PYTHON_EXECUTABLE=python bash examples/models/llama2/install_requirements.sh
+        # Test llama2
+        PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh stories110M.pt "${BUILD_TOOL}" "${DTYPE}" "${MODE}"
diff --git a/backends/qualcomm/aot/python/PyQnnWrapperAdaptor.h b/backends/qualcomm/aot/python/PyQnnWrapperAdaptor.h
index f13b5962b79..98219d97635 100644
--- a/backends/qualcomm/aot/python/PyQnnWrapperAdaptor.h
+++ b/backends/qualcomm/aot/python/PyQnnWrapperAdaptor.h
@@ -86,7 +86,7 @@ class PyQnnOpWrapper {
         break;
       default:
         QNN_EXECUTORCH_LOG_ERROR(
-            "%s has invalid data type: %d", name, data_type);
+            "%s has invalid data type: %d", name.c_str(), data_type);
         break;
     }
   }
diff --git a/backends/qualcomm/scripts/build.sh b/backends/qualcomm/scripts/build.sh
index be317a2d64b..aafd6252e79 100755
--- a/backends/qualcomm/scripts/build.sh
+++ b/backends/qualcomm/scripts/build.sh
@@ -25,9 +25,9 @@ usage() {
 [ "$1" = -h ] && usage
 
 BUILD_X86_64="true"
-CMAKE_X86_64="cmake-out"
+CMAKE_X86_64="build-x86"
 BUILD_AARCH64="true"
-CMAKE_AARCH64="cmake-out-android"
+CMAKE_AARCH64="build-android"
 CLEAN="true"
 BUILD_TYPE="Debug"
 BUILD_JOB_NUMBER="16"
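
For reference when reading the CI wiring above: the new trunk.yml job runs the two helper scripts before the usual ExecuTorch setup, and test_llama.sh switches into QNN mode based on its last argument. A minimal sketch of the same sequence for local reproduction, assuming a Linux x86_64 machine, the ExecuTorch repo root as the working directory, and that the Qualcomm download URL in setup-qnn-deps.sh is still reachable:

    # Download and unpack the QNN SDK into /tmp/qnn
    PYTHON_EXECUTABLE=python bash .ci/scripts/setup-qnn-deps.sh
    # Build the Qualcomm backend for x86_64 only (wraps backends/qualcomm/scripts/build.sh --skip_aarch64)
    PYTHON_EXECUTABLE=python bash .ci/scripts/build-qnn-sdk.sh
    # Set up ExecuTorch and the export_llama requirements, then run the llama2 smoke test in qnn mode
    PYTHON_EXECUTABLE=python bash .ci/scripts/setup-linux.sh buck2
    PYTHON_EXECUTABLE=python bash examples/models/llama2/install_requirements.sh
    PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh stories110M.pt cmake fp32 qnn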