Update websearch prompting & summary prompting #503

Merged · 17 commits · Oct 12, 2023
Changes from 5 commits
17 changes: 14 additions & 3 deletions src/lib/buildPrompt.ts
@@ -23,22 +23,33 @@ export async function buildPrompt({
preprompt,
}: buildPromptOptions): Promise<string> {
if (webSearch && webSearch.context) {
const lastMsg = messages.slice(-1)[0];
const messagesWithoutLastUsrMsg = messages.slice(0, -1);
const lastUserMsg = messages.slice(-1)[0];
const previousUserMessages = messages.filter((el) => el.from === "user").slice(0, -1);

const previousQuestions =
previousUserMessages.length > 0
? `Previous questions: \n${previousUserMessages
.map(({ content }) => `- ${content}`)
.join("\n")}`
: "";
const currentDate = format(new Date(), "MMMM d, yyyy");
messages = [
...messagesWithoutLastUsrMsg,
{
from: "user",
content: `Please answer my question "${lastUserMsg.content}" using the supplied context below (paragraphs from various websites). For the context, today is ${currentDate}:
content: `I searched the web using the query: ${webSearch.searchQuery}. Today is ${currentDate} and here are the results:
=====================
${webSearch.context}
=====================
So my question is "${lastUserMsg.content}"`,
${previousQuestions}
Answer the question: ${lastMsg.content}
`,
},
];
}

console.log(messages);
return (
model
.chatPromptRender({ messages, preprompt })
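Taken together, the new web-search branch of `buildPrompt` reads roughly as below. This is a consolidated sketch of the added lines only; the `Msg` type and the standalone function are illustrative stand-ins for chat-ui's `Message` type and the inline code above, not part of the diff:

```ts
import { format } from "date-fns";

type Msg = { from: "user" | "assistant"; content: string };

// Fold the web search results and the previous user questions into a single
// rewritten last user message, leaving the earlier messages untouched.
function rewriteLastMessage(messages: Msg[], searchQuery: string, context: string): Msg[] {
	const lastMsg = messages.slice(-1)[0];
	const messagesWithoutLastUsrMsg = messages.slice(0, -1);
	const previousUserMessages = messages.filter((el) => el.from === "user").slice(0, -1);

	const previousQuestions =
		previousUserMessages.length > 0
			? `Previous questions: \n${previousUserMessages.map(({ content }) => `- ${content}`).join("\n")}`
			: "";
	const currentDate = format(new Date(), "MMMM d, yyyy");

	return [
		...messagesWithoutLastUsrMsg,
		{
			from: "user",
			content: `I searched the web using the query: ${searchQuery}. Today is ${currentDate} and here are the results:
=====================
${context}
=====================
${previousQuestions}
Answer the question: ${lastMsg.content}`,
		},
	];
}
```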
1 change: 1 addition & 0 deletions src/lib/server/generateFromDefaultEndpoint.ts
@@ -19,6 +19,7 @@ export async function generateFromDefaultEndpoint(
...smallModel.parameters,
...parameters,
return_full_text: false,
wait_for_model: true,
};

const randomEndpoint = modelEndpoint(smallModel);
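The only functional change here is `wait_for_model: true`. On the hosted Hugging Face Inference API this flag asks the server to wait for a cold model to finish loading instead of returning an immediate "model is currently loading" error. A minimal sketch of the documented request shape follows; the URL is a placeholder and chat-ui's own endpoint code (not shown in this diff) decides exactly where the flag lands in the request body:

```ts
// Sketch, not chat-ui code: calling the hosted Inference API directly.
const HF_API_URL = "https://api-inference.huggingface.co/models/<model-id>"; // placeholder

async function generateOnce(prompt: string, token: string): Promise<string> {
	const res = await fetch(HF_API_URL, {
		method: "POST",
		headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
		body: JSON.stringify({
			inputs: prompt,
			parameters: { return_full_text: false },
			options: { wait_for_model: true }, // block until the model is loaded instead of failing fast
		}),
	});
	if (!res.ok) throw new Error(`Inference API error: ${res.status}`);
	const json = await res.json();
	return json[0].generated_text;
}
```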
15 changes: 1 addition & 14 deletions src/lib/server/models.ts
@@ -1,5 +1,5 @@
import { HF_ACCESS_TOKEN, MODELS, OLD_MODELS, TASK_MODEL } from "$env/static/private";
import type { ChatTemplateInput, WebSearchQueryTemplateInput } from "$lib/types/Template";
import type { ChatTemplateInput } from "$lib/types/Template";
import { compileTemplate } from "$lib/utils/template";
import { z } from "zod";

@@ -67,15 +67,6 @@ const modelsRaw = z
"{{/each}}" +
"{{assistantMessageToken}}"
),
webSearchQueryPromptTemplate: z
.string()
.default(
"{{userMessageToken}}" +
'My question is: "{{message.content}}". ' +
"Based on the conversation history (my previous questions are: {{previousMessages}}), give me an appropriate query to answer my question for google search. You should not say more than query. You should not say any words except the query. For the context, today is {{currentDate}}" +
"{{userMessageEndToken}}" +
"{{assistantMessageToken}}"
),
promptExamples: z
.array(
z.object({
@@ -104,10 +95,6 @@ export const models = await Promise.all(
userMessageEndToken: m?.userMessageEndToken || m?.messageEndToken,
assistantMessageEndToken: m?.assistantMessageEndToken || m?.messageEndToken,
chatPromptRender: compileTemplate<ChatTemplateInput>(m.chatPromptTemplate, m),
webSearchQueryPromptRender: compileTemplate<WebSearchQueryTemplateInput>(
m.webSearchQueryPromptTemplate,
m
),
id: m.id || m.name,
displayName: m.displayName || m.name,
preprompt: m.prepromptUrl ? await fetch(m.prepromptUrl).then((r) => r.text()) : m.preprompt,
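With the dedicated `webSearchQueryPromptTemplate` removed, a model only has to expose its ordinary chat renderer; web-search query generation and summarization now feed it plain message arrays (see `generateQuery.ts` and `summarize.ts` below). An illustrative sketch of the narrowed surface, where `PromptCapableModel` is a made-up name and the real model objects carry many more fields:

```ts
// Minimal stand-in for chat-ui's Message type.
type Message = { from: "user" | "assistant"; content: string };

// Mirrors the ChatTemplateInput kept in src/lib/types/Template.ts.
type ChatTemplateInput = {
	messages: Pick<Message, "from" | "content">[];
	preprompt?: string;
};

interface PromptCapableModel {
	// The single prompt renderer a model needs after this PR: both the search-query
	// and the summary prompts are built as chat messages and rendered through it.
	chatPromptRender: (input: ChatTemplateInput) => string;
}
```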
35 changes: 16 additions & 19 deletions src/lib/server/summarize.ts
@@ -1,29 +1,26 @@
import { buildPrompt } from "$lib/buildPrompt";
import { generateFromDefaultEndpoint } from "$lib/server/generateFromDefaultEndpoint";
import { defaultModel } from "$lib/server/models";
import { LLM_SUMMERIZATION } from "$env/static/private";
import { generateFromDefaultEndpoint } from "$lib/server/generateFromDefaultEndpoint";
import { smallModel } from "$lib/server/models";
import type { Message } from "$lib/types/Message";

export async function summarize(prompt: string) {
if (!LLM_SUMMERIZATION) {
return prompt.split(/\s+/g).slice(0, 5).join(" ");
}
const userPrompt = `Please summarize the following message: \n` + prompt;

const summaryPrompt = await buildPrompt({
messages: [{ from: "user", content: userPrompt }],
preprompt: `
You are a summarization AI. Your task is to summarize user requests, in a single sentence of less than 5 words. Do not try to answer questions, just summarize the user's request. Start your answer with an emoji relevant to the summary."

Example: "Who is the president of France ?"
Summary: "🇫🇷 President of France request"

Example: "What are the latest news ?"
Summary: "📰 Latest news"

Example: "Can you debug this python code?"
Summary: "🐍 Python code debugging request"
`,
model: defaultModel,
const messages: Array<Omit<Message, "id">> = [
{ from: "user", content: "Who is the president of France ?" },
{ from: "assistant", content: "🇫🇷 President of France request" },
{ from: "user", content: "What are the latest news ?" },
{ from: "assistant", content: "📰 Latest news" },
{ from: "user", content: "Can you debug this python code?" },
{ from: "assistant", content: "🐍 Python code debugging request" },
{ from: "user", content: prompt },
];

const summaryPrompt = smallModel.chatPromptRender({
messages,
preprompt: `You are a summarization AI. Your task is to summarize user requests, in a single sentence of less than 5 words. Do not try to answer questions, just summarize the user's request. Start your answer with an emoji relevant to the summary.`,
});

const generated_text = await generateFromDefaultEndpoint(summaryPrompt).catch((e) => {
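The format examples that used to live inside the preprompt are now real few-shot chat turns, so they render through the same chat template as everything else. An illustrative usage note (the generated title is whatever the small model returns; the emoji-plus-short-phrase shape is encouraged by the examples, not guaranteed):

```ts
import { summarize } from "$lib/server/summarize";

// Hypothetical input and output, for illustration only.
const title = await summarize("Can you explain how HTTP/3 differs from HTTP/2?");
// e.g. "🌐 HTTP/3 vs HTTP/2 comparison" (model dependent)

// With LLM_SUMMERIZATION unset, no model call is made and the first five words are returned:
// await summarize("Can you explain how HTTP/3 differs from HTTP/2?") === "Can you explain how HTTP/3"
```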
59 changes: 53 additions & 6 deletions src/lib/server/websearch/generateQuery.ts
@@ -1,19 +1,66 @@
import type { Message } from "$lib/types/Message";
import { format } from "date-fns";
import { generateFromDefaultEndpoint } from "../generateFromDefaultEndpoint";
import { defaultModel } from "../models";
import { smallModel } from "../models";

export async function generateQuery(messages: Message[]) {
const currentDate = format(new Date(), "MMMM d, yyyy");
const userMessages = messages.filter(({ from }) => from === "user");
const previousUserMessages = userMessages.slice(0, -1);

const lastMessage = userMessages.slice(-1)[0];
const promptSearchQuery = defaultModel.webSearchQueryPromptRender({
message: lastMessage,
previousMessages: previousUserMessages.map(({ content }) => content).join(" "),
currentDate,

const convQuery: Array<Omit<Message, "id">> = [
{
from: "user",
content: `Previous Questions:
- Who is the president of France?

Current Question: What about Mexico?
`,
},
{
from: "assistant",
content: 'query: "President of Mexico"',
},
{
from: "user",
content: `Previous questions:
- When is the next formula 1 grand prix?

Current Question: Where is it being hosted ?`,
},
{
from: "assistant",
content: 'query: "location of next formula 1 grand prix"',
},
{
from: "user",
content: "Current Question: What type of printhead does the Epson F2270 DTG printer use?",
},
{
from: "assistant",
content: 'query: "Epson F2270 DTG printer printhead"',
},
{
from: "user",
content:
(previousUserMessages.length > 0
? `Previous questions: \n${previousUserMessages
.map(({ content }) => `- ${content}`)
.join("\n")}`
: "") +
"\n\nCurrent Question:" +
lastMessage.content,
},
];

const promptQuery = smallModel.chatPromptRender({
preprompt: `You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Your answer should follow the format \`query:"[query here]\`. Today is ${currentDate}`,
messages: convQuery,
});
const searchQuery = await generateFromDefaultEndpoint(promptSearchQuery).then((query) => {

const searchQuery = await generateFromDefaultEndpoint(promptQuery).then((query) => {
// example of generating google query:
// case 1
// user: tell me what happened yesterday
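Because the model is told to answer in the form `query:"[query here]"`, the raw generation still has to be parsed; the actual post-processing sits just below the visible portion of this diff. A hedged sketch of one straightforward way to extract the query (the function name and `fallback` argument are illustrative, not chat-ui code):

```ts
// Pull the quoted query out of a generation such as: query: "Epson F2270 DTG printer printhead"
function extractSearchQuery(generated: string, fallback: string): string {
	const match = generated.match(/query:\s*"?(.*?)"?\s*$/i);
	const query = match?.[1]?.trim();
	return query && query.length > 0 ? query : fallback; // fall back to the raw user question
}

// extractSearchQuery('query: "location of next formula 1 grand prix"', "Where is it being hosted ?")
//   -> "location of next formula 1 grand prix"
```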
6 changes: 0 additions & 6 deletions src/lib/types/Template.ts
@@ -12,9 +12,3 @@ export type ChatTemplateInput = {
messages: Pick<Message, "from" | "content">[];
preprompt?: string;
};

export type WebSearchQueryTemplateInput = {
message: Pick<Message, "from" | "content">;
previousMessages: string;
currentDate: string;
};