From e32370d6fe7caf8f539c98ec80239faf593f3027 Mon Sep 17 00:00:00 2001
From: Thiago Castro Ferreira
Date: Mon, 24 Jun 2024 17:17:09 -0300
Subject: [PATCH] Solving bug when LLM parameters are set on data

---
 aixplain/modules/model/llm_model.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/aixplain/modules/model/llm_model.py b/aixplain/modules/model/llm_model.py
index 349ea595..14b9c7f4 100644
--- a/aixplain/modules/model/llm_model.py
+++ b/aixplain/modules/model/llm_model.py
@@ -196,12 +196,12 @@ def run_async(
         payload = {"data": data}
         parameters.update(
             {
-                "context": context,
-                "prompt": prompt,
-                "history": history,
-                "temperature": temperature,
-                "max_tokens": max_tokens,
-                "top_p": top_p,
+                "context": payload["context"] if "context" in payload else context,
+                "prompt": payload["prompt"] if "prompt" in payload else prompt,
+                "history": payload["history"] if "history" in payload else history,
+                "temperature": payload["temperature"] if "temperature" in payload else temperature,
+                "max_tokens": payload["max_tokens"] if "max_tokens" in payload else max_tokens,
+                "top_p": payload["top_p"] if "top_p" in payload else top_p,
             }
         )
         payload.update(parameters)
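
Note: the sketch below isolates the merge logic this patch introduces, so the intended behaviour can be checked outside the SDK. When `data` arrives as a dict and becomes the payload, any LLM parameters it carries (context, prompt, history, temperature, max_tokens, top_p) now take precedence over the corresponding keyword arguments. The `payload`/`parameters` names come from the diff; the surrounding `build_payload` function, the `isinstance(data, dict)` branch, and the default argument values are assumptions for illustration, not the actual `run_async` implementation.

from typing import Dict, List, Optional, Text, Union


def build_payload(
    data: Union[Text, Dict],
    context: Optional[Text] = None,
    prompt: Optional[Text] = None,
    history: Optional[List[Dict]] = None,
    temperature: float = 0.001,   # illustrative defaults, not taken from the patch
    max_tokens: int = 128,
    top_p: float = 1.0,
    parameters: Optional[Dict] = None,
) -> Dict:
    """Sketch of the patched merge: values already present in a dict `data`
    win over the keyword arguments instead of being silently overwritten."""
    parameters = dict(parameters or {})
    # Assumed pre-existing behaviour: a dict `data` is used as the payload directly,
    # anything else is wrapped under the "data" key (as in the hunk's context line).
    payload = data if isinstance(data, dict) else {"data": data}
    parameters.update(
        {
            "context": payload["context"] if "context" in payload else context,
            "prompt": payload["prompt"] if "prompt" in payload else prompt,
            "history": payload["history"] if "history" in payload else history,
            "temperature": payload["temperature"] if "temperature" in payload else temperature,
            "max_tokens": payload["max_tokens"] if "max_tokens" in payload else max_tokens,
            "top_p": payload["top_p"] if "top_p" in payload else top_p,
        }
    )
    payload.update(parameters)
    return payload


# Before the patch, a temperature set on `data` was clobbered by the argument default;
# with the patch it survives the merge.
print(build_payload({"data": "Hello", "temperature": 0.7})["temperature"])  # 0.7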