
Commit

feat(health): added endpoint for services (#989)
StanGirard committed Aug 20, 2023
1 parent 0b2e3bc commit ae7852e
Showing 5 changed files with 42 additions and 8 deletions.
2 changes: 2 additions & 0 deletions backend/chat_service.py
@@ -33,6 +33,8 @@



+
+
 @app.exception_handler(HTTPException)
 async def http_exception_handler(_, exc):
     return JSONResponse(
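
The hunk above is cut off inside the JSONResponse call. As a sketch of the common FastAPI pattern for this kind of handler (the exact body is hidden below the fold, and the status_code/detail fields are assumed, not taken from this commit):

from fastapi import FastAPI, HTTPException
from fastapi.responses import JSONResponse

app = FastAPI()


@app.exception_handler(HTTPException)
async def http_exception_handler(_, exc):
    # Convert the raised HTTPException into a plain JSON error payload.
    return JSONResponse(
        status_code=exc.status_code,
        content={"detail": exc.detail},
    )
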
33 changes: 25 additions & 8 deletions backend/routes/chat_routes.py
@@ -7,10 +7,10 @@
 from auth import AuthBearer, get_current_user
 from fastapi import APIRouter, Depends, HTTPException, Query, Request
 from fastapi.responses import StreamingResponse
-from llm.qa_headless import HeadlessQA
 from llm.openai import OpenAIBrainPicking
-from models.brains import Brain
+from llm.qa_headless import HeadlessQA
 from models.brain_entity import BrainEntity
+from models.brains import Brain
 from models.chat import Chat
 from models.chats import ChatQuestion
 from models.databases.supabase.supabase import SupabaseDB
@@ -72,6 +72,11 @@ def check_user_limit(
     pass


+@chat_router.get("/chat/healthz", tags=["Health"])
+async def healthz():
+    return {"status": "ok"}
+
+
 # get all chats
 @chat_router.get("/chat", dependencies=[Depends(AuthBearer())], tags=["Chat"])
 async def get_chats(current_user: User = Depends(get_current_user)):
@@ -268,18 +273,30 @@ async def create_stream_question_handler(
         if brain_id:
             gpt_answer_generator = OpenAIBrainPicking(
                 chat_id=str(chat_id),
-                model=(brain_details or chat_question).model if current_user.user_openai_api_key else "gpt-3.5-turbo",
-                max_tokens=(brain_details or chat_question).max_tokens if current_user.user_openai_api_key else 0,
-                temperature=(brain_details or chat_question).temperature if current_user.user_openai_api_key else 256,
+                model=(brain_details or chat_question).model
+                if current_user.user_openai_api_key
+                else "gpt-3.5-turbo",
+                max_tokens=(brain_details or chat_question).max_tokens
+                if current_user.user_openai_api_key
+                else 0,
+                temperature=(brain_details or chat_question).temperature
+                if current_user.user_openai_api_key
+                else 256,
                 brain_id=str(brain_id),
                 user_openai_api_key=current_user.user_openai_api_key,  # pyright: ignore reportPrivateUsage=none
                 streaming=True,
             )
         else:
             gpt_answer_generator = HeadlessQA(
-                model=chat_question.model if current_user.user_openai_api_key else "gpt-3.5-turbo",
-                temperature=chat_question.temperature if current_user.user_openai_api_key else 256,
-                max_tokens=chat_question.max_tokens if current_user.user_openai_api_key else 0,
+                model=chat_question.model
+                if current_user.user_openai_api_key
+                else "gpt-3.5-turbo",
+                temperature=chat_question.temperature
+                if current_user.user_openai_api_key
+                else 256,
+                max_tokens=chat_question.max_tokens
+                if current_user.user_openai_api_key
+                else 0,
                 user_openai_api_key=current_user.user_openai_api_key,  # pyright: ignore reportPrivateUsage=none
                 chat_id=str(chat_id),
                 streaming=True,
5 changes: 5 additions & 0 deletions backend/routes/crawl_routes.py
@@ -16,6 +16,11 @@
 crawl_router = APIRouter()


+@crawl_router.get("/crawl/healthz", tags=["Health"])
+async def healthz():
+    return {"status": "ok"}
+
+
 @crawl_router.post("/crawl", dependencies=[Depends(AuthBearer())], tags=["Crawl"])
 async def crawl_endpoint(
     request: Request,
5 changes: 5 additions & 0 deletions backend/routes/misc_routes.py
@@ -9,3 +9,8 @@ async def root():
     Root endpoint to check the status of the API.
     """
     return {"status": "OK"}
+
+
+@misc_router.get("/healthz", tags=["Health"])
+async def healthz():
+    return {"status": "ok"}
5 changes: 5 additions & 0 deletions backend/routes/upload_routes.py
@@ -18,6 +18,11 @@
 upload_router = APIRouter()


+@upload_router.get("/upload/healthz", tags=["Health"])
+async def healthz():
+    return {"status": "ok"}
+
+
 @upload_router.post("/upload", dependencies=[Depends(AuthBearer())], tags=["Upload"])
 async def upload_file(
     request: Request,
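
All four routers now expose an unauthenticated GET liveness probe returning {"status": "ok"}. A minimal sketch of how they could be exercised with FastAPI's TestClient, assuming the backend directory is on the import path and the routers are mounted on an app without a prefix, as the paths above suggest (the wiring below is illustrative, not the repository's actual service setup):

from fastapi import FastAPI
from fastapi.testclient import TestClient

from routes.chat_routes import chat_router
from routes.crawl_routes import crawl_router
from routes.misc_routes import misc_router
from routes.upload_routes import upload_router

app = FastAPI()
for router in (chat_router, crawl_router, misc_router, upload_router):
    app.include_router(router)

client = TestClient(app)

# Each health endpoint should answer without authentication.
for path in ("/chat/healthz", "/crawl/healthz", "/healthz", "/upload/healthz"):
    response = client.get(path)
    assert response.status_code == 200
    assert response.json() == {"status": "ok"}
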
