Skip to content

Commit

Permalink
feat: add cache for chatglm conversation id (#1452)
Browse files Browse the repository at this point in the history
  • Loading branch information
yetone committed Apr 20, 2024
1 parent d31ec4b commit 4fda5b0
Show file tree
Hide file tree
Showing 2 changed files with 79 additions and 63 deletions.
80 changes: 40 additions & 40 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

62 changes: 39 additions & 23 deletions src/common/engines/chatglm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,12 @@ import { fetchSSE, getSettings, isDesktopApp } from '@/common/utils'
import { AbstractEngine } from '@/common/engines/abstract-engine'
import { IModel, IMessageRequest } from '@/common/engines/interfaces'
import qs from 'qs'
import { LRUCache } from 'lru-cache'

// Module-level LRU cache (string key -> ChatGLM conversation ID):
// holds at most 100 entries, each expiring one hour after being set.
const oneHourInMs = 60 * 60 * 1000
const cache = new LRUCache<string, string>({
    ttl: oneHourInMs,
    max: 100,
})

export const keyChatGLMAccessToken = 'chatglm-access-token'
export const keyChatGLMRefreshToken = 'chatglm-refresh-token'
Expand Down Expand Up @@ -43,39 +49,46 @@ export class ChatGLM extends AbstractEngine {

const assistantID = '65940acff94777010aa6b796'
const conversationTitle = 'OpenAI Translator'
let conversationID = ''
const conversationIDCacheKey = `chatglm-conversation-id-${assistantID}`
let conversationID = cache.get(conversationIDCacheKey) || ''

if (conversationID) {
console.log('Using cached conversation ID:', conversationID)
}

req.onStatusCode?.(200)

const headers = await this.getHeaders()

const conversationListResp = await fetcher(
`https://chatglm.cn/chatglm/backend-api/assistant/conversation/list?${qs.stringify({
assistant_id: assistantID,
page: 1,
page_size: 25,
})}`,
{
method: 'GET',
headers,
}
)
if (!conversationID) {
const conversationListResp = await fetcher(
`https://chatglm.cn/chatglm/backend-api/assistant/conversation/list?${qs.stringify({
assistant_id: assistantID,
page: 1,
page_size: 25,
})}`,
{
method: 'GET',
headers,
}
)

req.onStatusCode?.(conversationListResp.status)
req.onStatusCode?.(conversationListResp.status)

if (!conversationListResp.ok) {
const jsn = await conversationListResp.json()
req.onError?.(jsn.message ?? jsn.msg ?? 'unknown error')
return
}
if (!conversationListResp.ok) {
const jsn = await conversationListResp.json()
req.onError?.(jsn.message ?? jsn.msg ?? 'unknown error')
return
}

const conversationList = await conversationListResp.json()
const conversationList = await conversationListResp.json()

const conversation = conversationList.result.conversation_list.find(
(c: { id: string; title: string }) => c.title === conversationTitle
)
const conversation = conversationList.result.conversation_list.find(
(c: { id: string; title: string }) => c.title === conversationTitle
)

conversationID = conversation?.id
conversationID = conversation?.id
}

if (!conversationID) {
try {
Expand Down Expand Up @@ -128,6 +141,8 @@ export class ChatGLM extends AbstractEngine {
return
}

cache.set(conversationIDCacheKey, conversationID)

let hasError = false
let finished = false
let length = 0
Expand Down Expand Up @@ -185,6 +200,7 @@ export class ChatGLM extends AbstractEngine {
})
},
onError: (err) => {
console.error('err', err)
hasError = true
if (err instanceof Error) {
req.onError(err.message)
Expand Down

0 comments on commit 4fda5b0

Please sign in to comment.