From 8852002e6d1448d056ae50d3cac607b3045dc866 Mon Sep 17 00:00:00 2001 From: winskuo-quic <143469905+winskuo-quic@users.noreply.github.com> Date: Wed, 17 Sep 2025 02:45:12 +0800 Subject: [PATCH] Qualcomm AI Engine Direct - Rope Fix (#14328) ### Summary Fix the `use_rope` computation in static_llama: when `config.no_rope_layer_interval` is unset (`None`), every layer should apply RoPE. Previously the expression `config.no_rope_layer_interval and (layer_idx + 1) % config.no_rope_layer_interval` evaluated to a falsy value in that case, so RoPE was skipped for all layers. ### Test plan CI passes cc: @haowhsu-quic (cherry picked from commit dd7f4d2a5a77b0f290b03f3f2a2560f4051ccd72) --- examples/qualcomm/oss_scripts/llama/model/static_llama.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/examples/qualcomm/oss_scripts/llama/model/static_llama.py b/examples/qualcomm/oss_scripts/llama/model/static_llama.py index 8dcfced95fb..7406b13ee8c 100755 --- a/examples/qualcomm/oss_scripts/llama/model/static_llama.py +++ b/examples/qualcomm/oss_scripts/llama/model/static_llama.py @@ -75,9 +75,10 @@ def __init__(self, layer_idx: int, config: ModelArgs, output_new_cache_only=Fals self.enable_masked_softmax = getattr(config, "enable_masked_softmax", False) self.use_qk_norm = config.use_qk_norm self.qk_norm_before_rope = config.qk_norm_before_rope + # If None, assume each layer uses rope self.use_rope = ( - config.no_rope_layer_interval - and (layer_idx + 1) % config.no_rope_layer_interval + config.no_rope_layer_interval is None + or (layer_idx + 1) % config.no_rope_layer_interval ) if self.use_qk_norm: