From 0fe9981f7aea95614f1b3e821a28c6e6996d5ab6 Mon Sep 17 00:00:00 2001
From: Chen Lai
Date: Fri, 18 Oct 2024 13:53:51 -0700
Subject: [PATCH] Optionally add qnn backend to llama runner buck file (#6355)

Summary: Include qnn backend as part of the llama runner dependency, and
it is controlled by a build flag. Defaults to false.

Differential Revision: D64334713
---
 examples/models/llama/runner/targets.bzl | 15 ++++++++++++---
 1 file changed, 12 insertions(+), 3 deletions(-)

diff --git a/examples/models/llama/runner/targets.bzl b/examples/models/llama/runner/targets.bzl
index de12dc4d106..64621be0138 100644
--- a/examples/models/llama/runner/targets.bzl
+++ b/examples/models/llama/runner/targets.bzl
@@ -9,10 +9,20 @@ def _get_operator_lib(aten = False):
     else:
         return ["//executorch/configurations:optimized_native_cpu_ops", "//executorch/extension/llm/custom_ops:custom_ops"]
 
+def get_qnn_dependency():
+    # buck build -c executorch.enable_qnn=true //executorch/examples/models/llama/runner:runner
+    # Check if QNN is enabled before including the dependency
+    if native.read_config("executorch", "enable_qnn", "false") == "true":
+        # //executorch/backends/qualcomm:qnn_executorch_backend doesn't work,
+        # likely due to it's an empty library with dependency only
+        return [
+            "//executorch/backends/qualcomm/runtime:runtime",
+        ]
+    return []
+
 def define_common_targets():
     for aten in (True, False):
         aten_suffix = "_aten" if aten else ""
-
         runtime.cxx_library(
             name = "runner" + aten_suffix,
             srcs = [
@@ -27,7 +37,6 @@ def define_common_targets():
             visibility = [
                 "@EXECUTORCH_CLIENTS",
             ],
-            # qnn_executorch_backend can be added below //executorch/backends/qualcomm:qnn_executorch_backend
             exported_deps = [
                 "//executorch/backends/xnnpack:xnnpack_backend",
                 "//executorch/extension/llm/runner:stats",
@@ -46,7 +55,7 @@ def define_common_targets():
                 # Vulkan API currently cannot build on some platforms (e.g. Apple, FBCODE)
                 # Therefore enable it explicitly for now to avoid failing tests
                 "//executorch/backends/vulkan:vulkan_backend_lib",
-            ] if native.read_config("llama", "use_vulkan", "0") == "1" else []),
+            ] if native.read_config("llama", "use_vulkan", "0") == "1" else []) + get_qnn_dependency(),
             external_deps = [
                 "libtorch",
             ] if aten else [],