From fd9875b37c872b1e5799902e17f251e285b25838 Mon Sep 17 00:00:00 2001
From: Hansong Zhang
Date: Thu, 19 Sep 2024 16:14:17 -0700
Subject: [PATCH] Fix javadoc for LlamaModule.java

---
 .../src/main/java/org/pytorch/executorch/LlamaModule.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/extension/android/src/main/java/org/pytorch/executorch/LlamaModule.java b/extension/android/src/main/java/org/pytorch/executorch/LlamaModule.java
index 2fe826fc0af..6de26bc7fe8 100644
--- a/extension/android/src/main/java/org/pytorch/executorch/LlamaModule.java
+++ b/extension/android/src/main/java/org/pytorch/executorch/LlamaModule.java
@@ -173,7 +173,7 @@ public long prefillPrompt(String prompt, long startPos, int bos, int eos) {
    * @param prompt The text prompt to LLaVA.
    * @param seqLen The total sequence length, including the prompt tokens and new tokens.
    * @param startPos The starting position in KV cache of the input in the LLM.
-   * @param llamaCallback callback object to receive results.
+   * @param callback callback object to receive results.
    * @param echo indicate whether to echo the input prompt or not.
    * @return The error code.
    */