feat: support stream mode
JimmyLv committed Mar 13, 2023
1 parent 783e928 commit 7374075
Showing 7 changed files with 76 additions and 30 deletions.
12 changes: 9 additions & 3 deletions components/SummaryResult.tsx
@@ -23,7 +23,14 @@ export function SummaryResult({
shouldShowTimestamp?: boolean;
}) {
const { toast } = useToast();
const { summaryArray, formattedSummary } = formatSummary(summary);
const formattedCachedSummary = summary?.startsWith('"')
? summary
.substring(1, summary.length - 1)
.split("\\n")
.join("\n")
: summary;

const { summaryArray, formattedSummary } = formatSummary(formattedCachedSummary);
const summaryNote =
formattedSummary +
"\n\n #BibiGPT自动总结 b.jimmylv.cn @吕立青_JimmyLv \nBV1fX4y1Q7Ux";
@@ -36,7 +43,6 @@ export function SummaryResult({
navigator.clipboard.writeText(summaryNote);
toast({ description: "复制成功 ✂️" });
};

return (
<div className="mb-8 px-4">
<h3 className="m-8 mx-auto max-w-3xl border-t-2 border-dashed pt-8 text-center text-2xl font-bold sm:text-4xl">
@@ -67,7 +73,7 @@ export function SummaryResult({
))
) : (
<div className="markdown-body">
<Markdown>{summary}</Markdown>
<Markdown>{formattedCachedSummary}</Markdown>
</div>
)}
</div>
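A minimal sketch of the unescaping step above, assuming the cached summary comes back JSON-stringified (wrapped in quotes, with literal \n escapes); the helper name is illustrative, not part of the commit:

// Hypothetical helper mirroring the logic above: a cached summary arrives as a
// JSON-stringified string, so strip the surrounding quotes and restore newlines.
function normalizeCachedSummary(summary?: string): string | undefined {
  if (!summary?.startsWith('"')) return summary;
  return summary
    .substring(1, summary.length - 1)
    .split("\\n")
    .join("\n");
}

With this normalization, a cached value such as "- point one\n- point two" renders as a proper two-line list inside the <Markdown> component below.
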
22 changes: 21 additions & 1 deletion hooks/useSummarize.ts
@@ -3,7 +3,7 @@ import { useToast } from "~/hooks/use-toast";
import { UserConfig, VideoConfig } from "~/lib/types";
import { RATE_LIMIT_COUNT } from "~/utils/constants";

export function useSummarize(showSingIn: (show: boolean) => void) {
export function useSummarize(showSingIn: (show: boolean) => void, enableStream: boolean = true) {
const [loading, setLoading] = useState(false);
const [summary, setSummary] = useState<string>("");
const { toast } = useToast();
@@ -70,6 +70,26 @@ export function useSummarize(showSingIn: (show: boolean) => void) {
return;
}

if (enableStream) {
// This data is a ReadableStream
const data = response.body;
if (!data) {
return;
}

const reader = data.getReader();
const decoder = new TextDecoder();
let done = false;

while (!done) {
const { value, done: doneReading } = await reader.read();
done = doneReading;
const chunkValue = decoder.decode(value);
setSummary((prev) => prev + chunkValue);
}
setLoading(false);
return;
}
// await readStream(response, setSummary);
const result = await response.json();
if (result.errorMessage) {
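The commented-out readStream(response, setSummary) line hints at the same logic packaged as a helper; here is a standalone sketch of that reader loop (a hypothetical helper, not part of this diff):

// Illustrative helper: drain a streamed fetch Response and forward each
// decoded chunk of text to a callback (e.g. a React state setter).
async function readStream(response: Response, onChunk: (text: string) => void) {
  const reader = response.body?.getReader();
  if (!reader) return;
  const decoder = new TextDecoder();
  let done = false;
  while (!done) {
    const { value, done: doneReading } = await reader.read();
    done = doneReading;
    onChunk(decoder.decode(value));
  }
}
// usage: await readStream(response, (chunk) => setSummary((prev) => prev + chunk));
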
26 changes: 21 additions & 5 deletions lib/openai/fetchOpenAIResult.ts
@@ -1,9 +1,11 @@
import { Redis } from "@upstash/redis";
import {
createParser,
ParsedEvent,
ReconnectInterval,
} from "eventsource-parser";
import { trimOpenAiResult } from "~/lib/openai/trimOpenAiResult";
import { VideoConfig } from "~/lib/types";
import { isDev } from "~/utils/env";

export enum ChatGPTAgent {
@@ -31,7 +33,8 @@ export interface OpenAIStreamPayload {

export async function fetchOpenAIResult(
payload: OpenAIStreamPayload,
apiKey: string
apiKey: string,
videoConfig: VideoConfig
) {
const encoder = new TextEncoder();
const decoder = new TextDecoder();
@@ -50,31 +53,44 @@
throw new Error("OpenAI API: " + res.statusText);
}

const { showTimestamp, videoId } = videoConfig;
const redis = Redis.fromEnv();
const cacheId = showTimestamp ? `timestamp-${videoId}` : videoId;

if (!payload.stream) {
const result = await res.json();
return trimOpenAiResult(result);
const betterResult = trimOpenAiResult(result);

const data = await redis.set(cacheId, betterResult);
console.info(`video ${cacheId} cached:`, data);
isDev && console.log("========betterResult========", betterResult);

return betterResult;
}

let counter = 0;
let tempData = "";
const stream = new ReadableStream({
async start(controller) {
// callback
function onParse(event: ParsedEvent | ReconnectInterval) {
async function onParse(event: ParsedEvent | ReconnectInterval) {
if (event.type === "event") {
const data = event.data;
// https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
if (data === "[DONE]") {
// active
controller.close();
const data = await redis.set(cacheId, tempData);
console.info(`video ${cacheId} cached:`, data);
isDev && console.log("========betterResult after streamed========", tempData);
return;
}
try {
const json = JSON.parse(data);
const text = trimOpenAiResult(json);
const text = json.choices[0].delta?.content || "";
// todo: add redis cache
tempData += text;
console.log("=====text====", text);
console.log("=====text====", text, tempData);
if (counter < 2 && (text.match(/\n/) || []).length) {
// this is a prefix character (i.e., "\n\n"), do nothing
return;
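The hunk ends before the unchanged code that attaches the parser to the response body, so the following is an assumption about what sits below it; with the eventsource-parser v1 API imported above, the wiring typically looks like this:

// Assumed continuation inside start(controller): feed each decoded chunk of
// the OpenAI SSE response into the parser, which calls onParse once per event.
const parser = createParser(onParse);
for await (const chunk of res.body as any) {
  parser.feed(decoder.decode(chunk));
}

Note that createParser treats the callback as synchronous, so the now-async onParse is not awaited: the redis.set in the [DONE] branch runs fire-and-forget after controller.close().
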
8 changes: 4 additions & 4 deletions middleware.ts
@@ -57,10 +57,10 @@ export async function middleware(req: NextRequest, context: NextFetchEvent) {
return redirectShop(req);
}
}

if (isDev) {
return NextResponse.next();
}
//
// if (isDev) {
// return NextResponse.next();
// }
// 👇 below only works for production

if (!userKey) {
21 changes: 13 additions & 8 deletions pages/[...slug].tsx
@@ -30,14 +30,6 @@ export const Home: NextPage<{
const searchParams = useSearchParams();
const licenseKey = searchParams.get("license_key");

// TODO: add mobx or state manager
const [currentVideoId, setCurrentVideoId] = useState<string>("");
const [currentVideoUrl, setCurrentVideoUrl] = useState<string>("");
const [userKey, setUserKey] = useLocalStorage<string>("user-openai-apikey");
const { loading, summary, resetSummary, summarize } =
useSummarize(showSingIn);
const { toast } = useToast();
const { analytics } = useAnalytics();
const {
register,
handleSubmit,
@@ -49,6 +41,7 @@
formState: { errors },
} = useForm<VideoConfigSchema>({
defaultValues: {
enableStream: true,
showTimestamp: false,
showEmoji: true,
detailLevel: 600,
@@ -58,6 +51,18 @@
},
resolver: zodResolver(videoConfigSchema),
});

// TODO: add mobx or state manager
const [currentVideoId, setCurrentVideoId] = useState<string>("");
const [currentVideoUrl, setCurrentVideoUrl] = useState<string>("");
const [userKey, setUserKey] = useLocalStorage<string>("user-openai-apikey");
const { loading, summary, resetSummary, summarize } = useSummarize(
showSingIn,
getValues("enableStream")
);
const { toast } = useToast();
const { analytics } = useAnalytics();

useFormPersist("video-config-storage", {
watch,
setValue,
16 changes: 7 additions & 9 deletions pages/api/sumup.ts
@@ -63,7 +63,8 @@ export default async function handler(
}

try {
const payload = {
const stream = true;
const openAiPayload = {
model: "gpt-3.5-turbo",
messages: [
// { role: ChatGPTAgent.system, content: systemPrompt },
@@ -76,7 +77,7 @@
// frequency_penalty: 0,
// presence_penalty: 0,
max_tokens: videoConfig.detailLevel || (userKey ? 800 : 600),
stream: false,
stream,
// n: 1,
};

@@ -85,13 +86,10 @@
userKey,
videoId
);
const result = await fetchOpenAIResult(payload, openaiApiKey);
// TODO: add better logging when dev or prod
const redis = Redis.fromEnv();
const cacheId = shouldShowTimestamp ? `timestamp-${videoId}` : videoId;
const data = await redis.set(cacheId, result);
console.info(`video ${cacheId} cached:`, data);
isDev && console.log("========result========", result);
const result = await fetchOpenAIResult(openAiPayload, openaiApiKey, videoConfig);
if (stream) {
return new Response(result);
}
return NextResponse.json(result);
} catch (error: any) {
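The return value of fetchOpenAIResult now depends on payload.stream; a hedged sketch of the contract the handler relies on (the type alias is illustrative, not in the commit):

// Assumed contract from the diffs above: a ReadableStream of encoded chunks in
// stream mode, the trimmed (and cached) summary string otherwise.
type SumupResult = ReadableStream<Uint8Array> | string;

const result = (await fetchOpenAIResult(openAiPayload, openaiApiKey, videoConfig)) as SumupResult;
return stream
  ? new Response(result as ReadableStream) // chunks reach the client as they are generated
  : NextResponse.json(result);             // the whole summary in one JSON payload
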
1 change: 1 addition & 0 deletions utils/schemas/video.ts
@@ -2,6 +2,7 @@ import { z } from "zod";

export const videoConfigSchema = z.object({
// videoId: z.string(),
enableStream: z.boolean().optional(),
showTimestamp: z.boolean().optional(),
showEmoji: z.boolean().optional(),
outputLanguage: z.string().optional(),
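A small usage sketch of the extended schema; the values are illustrative, and safeParse is used so nothing is assumed about the fields not shown in this hunk:

import { z } from "zod";
import { videoConfigSchema } from "~/utils/schemas/video";

// The inferred config type now carries the new flag.
type VideoConfigValues = z.infer<typeof videoConfigSchema>;

// enableStream is optional, so both of these pass as far as this field is concerned.
const withStream = videoConfigSchema.safeParse({ enableStream: true, showEmoji: true });
const withoutStream = videoConfigSchema.safeParse({ showTimestamp: false });
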

1 comment on commit 7374075

@vercel bot commented on 7374075, Mar 13, 2023