From 908a7db5323b0fbeac68ee30951a46420e258402 Mon Sep 17 00:00:00 2001
From: MoeAD99
Date: Fri, 24 Nov 2023 02:54:04 -0500
Subject: [PATCH] made _resolve_llm method more readable

---
 agent_utils.py | 31 ++++++++++++++++---------------
 1 file changed, 16 insertions(+), 15 deletions(-)

diff --git a/agent_utils.py b/agent_utils.py
index a61e409..667448c 100644
--- a/agent_utils.py
+++ b/agent_utils.py
@@ -30,28 +30,29 @@ def _resolve_llm(llm: str) -> LLM:
     """Resolve LLM."""
-    # TODO: make this less hardcoded with if-else statements
+    # Done: make this less hardcoded with if-else statements
     # see if there's a prefix
     # - if there isn't, assume it's an OpenAI model
     # - if there is, resolve it
     tokens = llm.split(":")
     if len(tokens) == 1:
         os.environ["OPENAI_API_KEY"] = st.secrets.openai_key
-        llm = OpenAI(model=llm)
-    elif tokens[0] == "local":
-        llm = resolve_llm(llm)
-    elif tokens[0] == "openai":
-        os.environ["OPENAI_API_KEY"] = st.secrets.openai_key
-        llm = OpenAI(model=tokens[1])
-    elif tokens[0] == "anthropic":
-        os.environ["ANTHROPIC_API_KEY"] = st.secrets.anthropic_key
-        llm = Anthropic(model=tokens[1])
-    elif tokens[0] == "replicate":
-        os.environ["REPLICATE_API_KEY"] = st.secrets.replicate_key
-        llm = Replicate(model=tokens[1])
+        return OpenAI(model=llm)
     else:
-        raise ValueError(f"LLM {llm} not recognized.")
-    return llm
+        match tokens[0]:
+            case "local":
+                return resolve_llm(llm)
+            case "openai":
+                os.environ["OPENAI_API_KEY"] = st.secrets.openai_key
+                return OpenAI(model=tokens[1])
+            case "anthropic":
+                os.environ["ANTHROPIC_API_KEY"] = st.secrets.anthropic_key
+                return Anthropic(model=tokens[1])
+            case "replicate":
+                os.environ["REPLICATE_API_KEY"] = st.secrets.replicate_key
+                return Replicate(model=tokens[1])
+            case _:
+                raise ValueError(f"LLM {llm} not recognized.")
 ####################
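
Note: the refactor keeps the original dispatch behavior (early returns instead of rebinding llm), and match/case requires Python 3.10 or newer. A minimal usage sketch of the expected inputs follows; the model identifiers are illustrative placeholders, not versions pinned by this patch, and it assumes the Streamlit secrets (openai_key, anthropic_key, replicate_key) referenced in the existing code are configured.

    # Hypothetical calls to the refactored helper; model names are examples only.
    llm = _resolve_llm("gpt-4")                   # no prefix -> treated as an OpenAI model
    llm = _resolve_llm("openai:gpt-4")            # explicit OpenAI prefix
    llm = _resolve_llm("anthropic:claude-2")      # Anthropic backend
    llm = _resolve_llm("replicate:<model-path>")  # Replicate backend
    llm = _resolve_llm("local:<model-path>")      # full string passed through to resolve_llm()
    llm = _resolve_llm("foo:bar")                 # unrecognized prefix -> raises ValueError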