
Commit

add telegram handler
n4ze3m committed Jun 18, 2023
1 parent f86b85b commit d75d006
Showing 3 changed files with 121 additions and 18 deletions.
86 changes: 86 additions & 0 deletions server/src/integration/handlers/telegram.handler.ts
@@ -0,0 +1,86 @@
import { PrismaClient } from "@prisma/client";
import { embeddings } from "../../utils/embeddings";
import { DialoqbaseVectorStore } from "../../utils/store";
import { chatModelProvider } from "../../utils/models";
import { ConversationalRetrievalQAChain } from "langchain/chains";
const prisma = new PrismaClient();

export const telegramBotHandler = async (
  identifer: string,
  message: string,
  user_id: number,
) => {
  // the bot id is the third dash-separated segment of the integration identifier
  const bot_id = identifer.split("-")[2];

  const bot = await prisma.bot.findFirst({
    where: {
      id: bot_id,
    },
  });

  if (!bot) {
    return "Oops! Bot not found";
  }

  const chat_history = await prisma.botTelegramHistory.findMany({
    where: {
      chat_id: user_id,
      identifier: identifer,
    },
  });

  // keep only the 10 most recent exchanges
  if (chat_history.length > 10) {
    chat_history.splice(0, chat_history.length - 10);
  }

  let history = chat_history.map((chat) => {
    return `Human: ${chat.human}\nAssistant: ${chat.bot}`;
  }).join("\n");

  const temperature = bot.temperature;

  const sanitizedQuestion = message.trim().replaceAll("\n", " ");
  const embeddingModel = embeddings(bot.embedding);

  const vectorstore = await DialoqbaseVectorStore.fromExistingIndex(
    embeddingModel,
    {
      botId: bot.id,
      sourceId: null,
    },
  );

  const model = chatModelProvider(
    bot.provider,
    bot.model,
    temperature,
  );

  const chain = ConversationalRetrievalQAChain.fromLLM(
    model,
    vectorstore.asRetriever(),
    {
      qaTemplate: bot.qaPrompt,
      questionGeneratorTemplate: bot.questionGeneratorPrompt,
      returnSourceDocuments: true,
    },
  );

  const response = await chain.call({
    question: sanitizedQuestion,
    chat_history: history,
  });

  const bot_response = response["text"];

  await prisma.botTelegramHistory.create({
    data: {
      identifier: identifer,
      chat_id: user_id,
      human: message,
      bot: bot_response,
    },
  });

  return bot_response;
};
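
For orientation, a minimal sketch of calling the new handler directly, for instance from a throwaway script while developing. The identifier value and import path are illustrative assumptions; the handler itself only relies on the bot id being the third dash-separated segment, as the split("-")[2] above shows.

// Hypothetical direct invocation; identifier layout and import path are assumed.
import { telegramBotHandler } from "./handlers/telegram.handler";

const main = async () => {
  const identifier = "telegram-bot-clj0example"; // assumed "<prefix>-<type>-<botId>" layout
  const reply = await telegramBotHandler(identifier, "What can you help me with?", 123456789);
  console.log(reply); // the chain's answer, also written to botTelegramHistory
};

main().catch(console.error);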
27 changes: 20 additions & 7 deletions server/src/integration/telegram.ts
@@ -1,5 +1,6 @@
import { Bot } from "grammy";
import axios from "axios";
import { telegramBotHandler } from "./handlers/telegram.handler";

export default class TelegramBot {
  static get clients() {
@@ -12,7 +13,6 @@ export default class TelegramBot {
    return this._clients.size;
  }

  static async connect(identifier: string, token: string) {
    if (this._clients.has(identifier)) {
      await this.disconnect(identifier);
@@ -21,16 +21,29 @@ export default class TelegramBot {
    const bot = new Bot(token);
    await bot.api.setMyCommands([
      { command: "start", description: "Start the bot" },
-     { command: "help", description: "Show help" },
      { command: "ping", description: "Ping the bot" },
    ]);

-   bot.command("start", (ctx) => ctx.reply("Hello!"));
-   bot.command("help", (ctx) => ctx.reply("Help!"));
-   bot.on("message:text", (ctx) => {
-     return ctx.reply(
-       `Your identifier: ${identifier}`,
    bot.command("start", (ctx) => ctx.reply("Hey, How can I assist you?"));
    bot.command("ping", (ctx) => ctx.reply("pong"));
    bot.on("message:text", async (ctx) => {
      // only answer in private chats
      if (ctx.chat.type !== "private") {
        return ctx.reply("I can only work in private chats.");
      }
      await ctx.replyWithChatAction(
        "typing",
      );
      // the Telegram user id keys the per-user chat history
      const user_id = ctx.from.id;
      const message = await telegramBotHandler(
        identifier,
        ctx.message.text,
        user_id,
      );

      return await ctx.reply(message);
    });
    bot.start();
    bot.catch((err) => console.log(`${identifier} error: ${err}`));
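
As a usage note, a rough sketch of how this class might be driven from elsewhere in the server; the wrapper functions below are hypothetical, but connect and disconnect are both part of the class as shown above.

// Hypothetical callers; where the identifier and token come from is assumed.
const startTelegramIntegration = async (identifier: string, token: string) => {
  await TelegramBot.connect(identifier, token); // registers commands and starts polling
};

const stopTelegramIntegration = async (identifier: string) => {
  await TelegramBot.disconnect(identifier); // disconnect exists, per the check inside connect()
};

Because connect() does not await bot.start(), it returns once the handlers are registered while long polling keeps running in the background.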
26 changes: 15 additions & 11 deletions server/src/routes/bot/handlers/index.ts
@@ -72,19 +72,23 @@ export const chatRequestHandler = async (
    },
  );

  const chat_history = history
    .map((chatMessage) => {
      if (chatMessage.type === "human") {
        return `Human: ${chatMessage.text}`;
      } else if (chatMessage.type === "ai") {
        return `Assistant: ${chatMessage.text}`;
      } else {
        return `${chatMessage.text}`;
      }
    })
    .join("\n");

  console.log(chat_history);

  const response = await chain.call({
    question: sanitizedQuestion,
-   chat_history: history
-     .map((chatMessage) => {
-       if (chatMessage.type === "human") {
-         return `Human: ${chatMessage.text}`;
-       } else if (chatMessage.type === "ai") {
-         return `Assistant: ${chatMessage.text}`;
-       } else {
-         return `${chatMessage.text}`;
-       }
-     })
-     .join("\n"),
    chat_history: chat_history,
  });

  return {
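
A note on the chain result: the Telegram handler above reads the answer from response["text"], and because it passes returnSourceDocuments: true, the same result object should also carry the retrieved documents. A rough sketch of that shape, as an assumption about this LangChain version rather than a documented contract:

// Assumed shape of the ConversationalRetrievalQAChain call result in this setup.
interface RetrievalQAResult {
  text: string; // the generated answer, stored as bot_response above
  sourceDocuments?: Array<{
    pageContent: string;
    metadata: Record<string, unknown>;
  }>; // populated when returnSourceDocuments is true
}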
