Merge pull request #601 from openchatai/feat/parsing

Feat/parsing

codebanesr committed Feb 3, 2024
2 parents a60d4c1 + 743ebbc commit 4acc362

Showing 4 changed files with 63 additions and 34 deletions.
54 changes: 34 additions & 20 deletions llm-server/routes/chat/implementation/chain_strategy.py
@@ -49,20 +49,26 @@ async def handle_request(

         top_documents = select_top_documents(actions + flows + knowledgebase)
 
-        emit(
-            f"{session_id}_info", "Checking if actionable ... \n"
-        ) if is_streaming else None
+        (
+            emit(f"{session_id}_info", "Checking if actionable ... \n")
+            if is_streaming
+            else None
+        )
         next_step = get_next_response_type(
             user_message=text,
             session_id=session_id,
             chat_history=conversations_history,
             top_documents=top_documents,
         )
 
-        emit(
-            f"{session_id}_info",
-            f"Is next step actionable: {next_step.actionable}... \n",
-        ) if is_streaming else None
+        (
+            emit(
+                f"{session_id}_info",
+                f"Is next step actionable: {next_step.actionable}... \n",
+            )
+            if is_streaming
+            else None
+        )
         if next_step.actionable and next_step.api:
             # if the LLM given operationID is actually exist, then use it, otherwise fallback to the highest vector space document
             llm_predicted_operation_id = (
@@ -78,9 +84,11 @@ async def handle_request(
                 [VectorCollections.actions, VectorCollections.flows],
             )
             # now run it
-            emit(
-                f"{session_id}_info", "Executing the actionable item... \n"
-            ) if is_streaming else None
+            (
+                emit(f"{session_id}_info", "Executing the actionable item... \n")
+                if is_streaming
+                else None
+            )
             response = await run_actionable_item(
                 bot_id=bot_id,
                 actionable_item=actionable_item,
@@ -92,20 +100,26 @@
             )
 
             response.api_called = True
-            add_action_call(
-                operation_id=actionable_item["actions"][0].document.metadata.get(
-                    "operation_id", ""
-                ),
-                session_id=session_id,
-                bot_id=bot_id,
-            )
+            if (
+                actionable_item
+                and "actions" in actionable_item
+                and actionable_item["actions"]
+            ):
+                action = actionable_item["actions"][0]
+                operation_id = action.document.metadata.get("operation_id", "")
+                add_action_call(
+                    operation_id=operation_id, session_id=session_id, bot_id=bot_id
+                )
             return response
 
         else:
             # it means that the user query is "informative" and can be answered using text only
             # get the top knowledgeable documents (if any)
-            emit(
-                f"{session_id}_info", "Running informative action... \n"
-            ) if is_streaming else None
+            (
+                emit(f"{session_id}_info", "Running informative action... \n")
+                if is_streaming
+                else None
+            )
             response = await run_informative_item(
                 informative_item=top_documents,
                 base_prompt=base_prompt,
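Beyond the cosmetic parenthesizing of the `emit(...) if is_streaming else None` statements (a formatter-style change), the substantive fix in this file is the guard around `add_action_call`: previously an empty or missing `actions` list would raise a KeyError/IndexError before the response was returned. A minimal, self-contained sketch of that guard pattern, using hypothetical stand-in classes for the vector-store hit (the real types live elsewhere in the repo):

from typing import Dict, List


class _Doc:
    """Hypothetical stand-in for the retrieved document used in chain_strategy.py."""

    def __init__(self, metadata: Dict[str, str]):
        self.metadata = metadata


class _Action:
    """Hypothetical stand-in for a vector-store action hit."""

    def __init__(self, metadata: Dict[str, str]):
        self.document = _Doc(metadata)


def record_action_call(actionable_item: Dict[str, List[_Action]]) -> None:
    # Guard the dict, the "actions" key, and a non-empty list before indexing,
    # so a result without actions is skipped instead of raising KeyError/IndexError.
    if actionable_item and "actions" in actionable_item and actionable_item["actions"]:
        operation_id = actionable_item["actions"][0].document.metadata.get("operation_id", "")
        print(f"add_action_call(operation_id={operation_id!r})")


record_action_call({"actions": [_Action({"operation_id": "getPetById"})]})  # logged
record_action_call({"actions": []})  # silently skipped, no crash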
2 changes: 1 addition & 1 deletion llm-server/routes/flow/utils/run_workflow.py
@@ -60,5 +60,5 @@ async def run_flow(
         error=output["error"],
         message=output["response"],
         api_called=True,
-        action_ids=flow.get_all_action_ids(),
+        operation_ids=flow.get_all_action_ids(),
     )
2 changes: 1 addition & 1 deletion llm-server/utils/llm_consts.py
@@ -80,5 +80,5 @@ def get_mysql_uri():
 max_pages_to_crawl = int(os.getenv("MAX_PAGES_TO_CRAWL", "15"))
 
 enable_followup_questions = (
-    True if os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "YES") == "YES" else False
+    True if os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "NO") == "YES" else False
 )
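This flips the default so follow-up questions are opt-in: unless ENABLE_FOLLOWUP_QUESTIONS is explicitly set to "YES", the feature is now off. A quick sketch of how the expression resolves (the `True if ... else False` wrapper is equivalent to the bare comparison):

import os


def followups_enabled() -> bool:
    # Mirrors the updated default: enabled only when the variable is explicitly "YES".
    return os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "NO") == "YES"


print(followups_enabled())  # False when the variable is unset (new default)
os.environ["ENABLE_FOLLOWUP_QUESTIONS"] = "YES"
print(followups_enabled())  # True only after an explicit opt-in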
39 changes: 27 additions & 12 deletions llm-server/utils/swagger_parser.py
@@ -18,15 +18,15 @@

 class Endpoint:
     def __init__(
-            self,
-            operation_id,
-            endpoint_type,
-            name,
-            description,
-            request_body,
-            parameters,
-            response,
-            path,
+        self,
+        operation_id,
+        endpoint_type,
+        name,
+        description,
+        request_body,
+        parameters,
+        response,
+        path,
     ):
         self.operation_id = operation_id
         self.type = endpoint_type
@@ -233,9 +233,24 @@ def get_all_actions(self, bot_id: str):
                 # Process the payload to resolve any $ref references
                 processed_payload = self.process_payload(payload)
 
+                name = method_data.get(
+                    "operation_id",
+                    method_data.get(
+                        "name",
+                        method_data.get("summary", method_data.get("description")),
+                    ),
+                )
+                if name is None:
+                    logger.error(
+                        "operation_id_not_found",
+                        bot_id=bot_id,
+                        path=path,
+                        method=method,
+                    )
+
                 action_dto = ActionDTO(
                     api_endpoint=base_uri + path,
-                    name=method_data.get("name", method_data.get("summary", method_data.get('description'))),
+                    name=name,
                     description=method_data.get("description"),
                     request_type=method.upper(),
                     payload=processed_payload,
@@ -263,8 +278,8 @@ def gather_metadata(self, api_data: dict) -> DefaultDict[str, Dict[str, str]]:

         for path, path_item in api_data["paths"].items():
             for http_verb, http_details in path_item.items():
-                summary = http_details.get("summary") or ""
-                description = http_details.get("description") or ""
+                summary = http_details.get("summary", "")
+                description = http_details.get("description", "")
                 # inconsistent tag behaviour..
                 # tags = (
                 #     ", ".join([t["name"] for t in http_details.get("tags", [])])
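The heart of the parsing change is the name fallback for an action: prefer `operation_id`, then `name`, then `summary`, then `description`, and log an error when none of them is present. A minimal sketch of that chain against toy OpenAPI operation data (the `method_data` dict shape is assumed from the diff):

import logging
from typing import Optional

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger("swagger_parser_sketch")


def resolve_action_name(method_data: dict) -> Optional[str]:
    # Same nested-get fallback as the diff: operation_id -> name -> summary -> description.
    return method_data.get(
        "operation_id",
        method_data.get(
            "name",
            method_data.get("summary", method_data.get("description")),
        ),
    )


print(resolve_action_name({"operation_id": "getPetById"}))  # 'getPetById'
print(resolve_action_name({"summary": "List pets"}))        # 'List pets'
if resolve_action_name({}) is None:
    logger.error("operation_id_not_found path=/pets method=get")

Note that `dict.get` only falls through when a key is absent; an explicit `"operation_id": None` still yields None, which is why the diff keeps the `if name is None` logging branch rather than assuming the chain always produces a string.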
