Add exception handling for bad json response and identify UUID issue #23

Closed
wants to merge 10 commits into from
12 changes: 8 additions & 4 deletions remote_llm.py
@@ -1,6 +1,7 @@
"""RESTful API calls to remote LLMs."""
# Standard library imports.
import os
import json
from ast import List
from logging import getLogger
from typing import Dict
@@ -50,7 +51,6 @@ def generate_text(idempotency_uuid, preprompt, prompt, model, history= None):
pass

if raw_response.status_code == 200:
json_response = raw_response.json()
if json_response.get('error') is not None:
log.error(f"Error generating: {json_response['error']}")
raise HTTPError(
@@ -62,6 +62,7 @@ def generate_text(idempotency_uuid, preprompt, prompt, model, history= None):
# ... raise an error.
raise HTTPError(f'LLM Router API returned error status code {raw_response.status_code}: '
f'Response: {raw_response.json()}')

# ... Otherwise, if it's an unrecognized HTTP status code, then...
else:
raise HTTPError(f'LLM Router API returned unrecognized status code {raw_response.status_code}: '
@@ -77,6 +78,7 @@ def get_models():
if token is None:
raise ValueError('LLM Verification Router token is not set')
log.info(f"Getting models from {route}")

raw_response = requests.get(url=route,
headers={'Authorization': f'Bearer {token}'})

@@ -87,8 +89,10 @@ def get_models():
raise HTTPError(
"Model Error", headers={"Retry-After": str(60000)}
)

return json_response['models']

models = json_response['models']
log.info(f"Got models {models}")
return models

elif 400 <= raw_response.status_code <= 599:
# ... raise an error.
@@ -99,4 +103,4 @@ def get_models():
raise HTTPError(f'LLM Router API returned unrecognized status code {raw_response.status}: '
f'Response: {raw_response.json()}')
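
Note: the try/except that replaces the bare `raw_response.json()` call sits in the collapsed part of the diff, so only the added `import json` and the removed line are visible above. Below is a minimal sketch of what the guarded parsing in `generate_text` might look like. The helper name, error messages, and the `HTTPError` import are assumptions, not copied from the PR; `log` and `HTTPError` are taken from the surrounding diff context.

```python
# Sketch only: reconstructs the JSON-decoding guard implied by the added
# `import json` and the removed bare `raw_response.json()` call.
import json
import logging

from requests.exceptions import HTTPError  # assumption: module's HTTPError may come from elsewhere

log = logging.getLogger(__name__)


def parse_json_response(raw_response):
    """Decode the LLM Router body, raising a clear error when it is not valid JSON."""
    try:
        json_response = raw_response.json()
    except json.JSONDecodeError as exc:
        # Raised when the router returns a non-JSON body, e.g. an HTML error
        # page from a proxy; log a snippet instead of letting the decode
        # error propagate unexplained.
        log.error(f"LLM Router returned a non-JSON body: {raw_response.text[:200]}")
        raise HTTPError(
            f"LLM Router API returned an unparseable response "
            f"(status {raw_response.status_code})"
        ) from exc

    if json_response.get('error') is not None:
        log.error(f"Error generating: {json_response['error']}")
        raise HTTPError(f"LLM Router API returned error: {json_response['error']}")

    return json_response
```

If the module's `HTTPError` is a framework-style exception that accepts `headers` (as the `Retry-After` raise in `get_models` suggests), the raise calls would need adjusting, but the structure of the guard stays the same.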