OpenAI SSE demo (#103)
samuelcolvin committed Dec 17, 2023
1 parent 4575a5b commit 6b7c7cb
Showing 3 changed files with 403 additions and 22 deletions.
2 changes: 2 additions & 0 deletions demo/__init__.py
@@ -14,6 +14,7 @@
 from .db import create_db
 from .forms import router as forms_router
 from .main import router as main_router
+from .sse import router as sse_router
 from .tables import router as table_router


@@ -33,6 +34,7 @@ async def lifespan(app_: FastAPI):
 app = FastAPI(lifespan=lifespan)

 app.include_router(components_router, prefix='/api/components')
+app.include_router(sse_router, prefix='/api/components')
 app.include_router(table_router, prefix='/api/table')
 app.include_router(forms_router, prefix='/api/forms')
 app.include_router(auth_router, prefix='/api/auth')
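
Because `sse_router` is mounted under the same `/api/components` prefix as `components_router`, the new stream is served from `/api/components/sse`, the same path used by the endpoint removed from `components_list.py` below. A minimal, hypothetical way to watch the raw event stream (not part of this commit), assuming the demo app is running locally on uvicorn's default port:

# Illustrative only: stream the SSE endpoint and print each FastUI JSON payload.
# Assumes the demo is running at http://localhost:8000 (uvicorn's default).
import httpx

with httpx.stream('GET', 'http://localhost:8000/api/components/sse', timeout=None) as response:
    for line in response.iter_lines():
        if line.startswith('data: '):
            print(line[len('data: '):])  # one serialized FastUI component tree per SSE message
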
28 changes: 6 additions & 22 deletions demo/components_list.py
@@ -1,11 +1,8 @@
 from __future__ import annotations as _annotations

 import asyncio
-from datetime import datetime
-from typing import AsyncIterable

 from fastapi import APIRouter
-from fastapi.responses import StreamingResponse
 from fastui import AnyComponent, FastUI
 from fastui import components as c
 from fastui.events import GoToEvent, PageEvent
@@ -144,7 +141,12 @@ class Delivery(BaseModel):
         c.Div(
             components=[
                 c.Heading(text='Server Load SSE', level=2),
-                c.Markdown(text='`ServerLoad` can also be used to load content from an SSE stream.'),
+                c.Markdown(
+                    text=(
+                        '`ServerLoad` can also be used to load content from an SSE stream.\n\n'
+                        "Here the response is the streamed output from OpenAI's GPT-4 chat model."
+                    )
+                ),
                 c.Button(text='Load SSE content', on_click=PageEvent(name='server-load-sse')),
                 c.Div(
                     components=[
@@ -219,21 +221,3 @@ class Delivery(BaseModel):
 async def modal_view() -> list[AnyComponent]:
     await asyncio.sleep(0.5)
     return [c.Paragraph(text='This is some dynamic content. Open devtools to see me being fetched from the server.')]
-
-
-async def sse_generator() -> AsyncIterable[str]:
-    while True:
-        d = datetime.now()
-        m = FastUI(
-            root=[
-                c.Div(components=[c.Text(text=f'Time {d:%H:%M:%S}')], class_name='font-monospace'),
-                c.Paragraph(text='This content is updated every second using an SSE stream.'),
-            ]
-        )
-        yield f'data: {m.model_dump_json(by_alias=True)}\n\n'
-        await asyncio.sleep(1)
-
-
-@router.get('/sse')
-async def sse_experiment() -> StreamingResponse:
-    return StreamingResponse(sse_generator(), media_type='text/event-stream')
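
The third changed file (presumably the new `demo/sse.py`, whose diff is not expanded above) is where the GPT-4 stream itself lives. The following is only a sketch of how such a module could look, assuming the `openai>=1.0` async client and reusing the `data: ...\n\n` framing of the generator removed above; the prompt, function names, and other details are illustrative, not the actual contents of the commit:

from __future__ import annotations as _annotations

from typing import AsyncIterable

from fastapi import APIRouter
from fastapi.responses import StreamingResponse
from fastui import FastUI
from fastui import components as c
from openai import AsyncOpenAI

router = APIRouter()


async def ai_response_generator() -> AsyncIterable[str]:
    """Stream a GPT-4 chat completion, re-rendering the accumulated text as FastUI Markdown."""
    client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment
    chunks = await client.chat.completions.create(
        model='gpt-4',
        messages=[{'role': 'user', 'content': 'Tell me about server-sent events.'}],
        stream=True,
    )
    output = ''
    async for chunk in chunks:
        text = chunk.choices[0].delta.content
        if text:
            output += text
            m = FastUI(root=[c.Markdown(text=output)])
            # same SSE framing as the generator removed from components_list.py above
            yield f'data: {m.model_dump_json(by_alias=True)}\n\n'


@router.get('/sse')
async def sse_ai_response() -> StreamingResponse:
    return StreamingResponse(ai_response_generator(), media_type='text/event-stream')

Mounted via `app.include_router(sse_router, prefix='/api/components')` in `demo/__init__.py`, this serves the stream at `/api/components/sse`, which the `ServerLoad` component hinted at in the truncated `c.Div` above can presumably consume with `sse=True` once the `server-load-sse` page event fires.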
