Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 13 additions & 5 deletions scrapegraphai/graphs/abstract_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def _create_llm(self, llm_config: dict):
try:
self.model_token = models_tokens["openai"][llm_params["model"]]
except KeyError:
raise ValueError("Model not supported")
raise KeyError("Model not supported")
return OpenAI(llm_params)

elif "azure" in llm_params["model"]:
Expand All @@ -50,14 +50,14 @@ def _create_llm(self, llm_config: dict):
try:
self.model_token = models_tokens["azure"][llm_params["model"]]
except KeyError:
raise ValueError("Model not supported")
raise KeyError("Model not supported")
return AzureOpenAI(llm_params)

elif "gemini" in llm_params["model"]:
try:
self.model_token = models_tokens["gemini"][llm_params["model"]]
except KeyError:
raise ValueError("Model not supported")
raise KeyError("Model not supported")
return Gemini(llm_params)

elif "ollama" in llm_params["model"]:
Expand All @@ -70,19 +70,27 @@ def _create_llm(self, llm_config: dict):
try:
self.model_token = models_tokens["ollama"][llm_params["model"]]
except KeyError:
raise ValueError("Model not supported")
raise KeyError("Model not supported")

return Ollama(llm_params)
elif "hugging_face" in llm_params["model"]:
try:
self.model_token = models_tokens["hugging_face"][llm_params["model"]]
except KeyError:
raise ValueError("Model not supported")
raise KeyError("Model not supported")
return HuggingFace(llm_params)
else:
raise ValueError(
"Model provided by the configuration not supported")

def get_state(self, key=None) -> dict:
    """
    Return the final state produced by the graph execution.

    Args:
        key: Optional key to look up in the final state. When given,
            only the value stored under that key is returned instead
            of the whole state mapping.

    Returns:
        dict: The complete final-state mapping when ``key`` is None;
        otherwise the value stored under ``key``.

    Raises:
        KeyError: If ``key`` is provided but not present in the state.
    """
    # NOTE(review): self.final_state is presumably populated by a prior
    # graph run elsewhere in the class — confirm against the full file.
    if key is not None:
        return self.final_state[key]
    return self.final_state

def get_execution_info(self):
"""
Returns the execution information of the graph.
Expand Down