-
Notifications
You must be signed in to change notification settings - Fork 2.2k
/
ioredis.ts
90 lines (83 loc) · 2.51 KB
/
ioredis.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import { Redis } from "ioredis";
import {
BaseCache,
getCacheKey,
serializeGeneration,
deserializeStoredGeneration,
} from "@langchain/core/caches";
import { Generation } from "@langchain/core/outputs";
/**
 * Cache LLM results using Redis.
 *
 * Generations for a single (prompt, llmKey) pair are stored under a
 * sequence of cache keys suffixed with "0", "1", ... so that a
 * variable-length array of generations can round-trip through plain
 * GET/SET calls.
 *
 * @example
 * ```typescript
 * const redisClient = new Redis();
 * const model = new ChatOpenAI({
 *   cache: new RedisCache(redisClient, { ttl: 60 }),
 * });
 *
 * // Invoke the model with a prompt
 * const response = await model.invoke("Do something random!");
 * console.log(response);
 *
 * // Remember to disconnect the Redis client when done
 * await redisClient.disconnect();
 * ```
 */
export class RedisCache extends BaseCache {
  protected redisClient: Redis;

  // Optional time-to-live, in seconds, applied to every cached entry.
  protected ttl?: number;

  /**
   * @param redisClient A connected ioredis client instance.
   * @param config Optional settings; `ttl` is the entry expiry in seconds.
   */
  constructor(
    redisClient: Redis,
    config?: {
      ttl?: number;
    }
  ) {
    super();
    this.redisClient = redisClient;
    this.ttl = config?.ttl;
  }

  /**
   * Retrieves data from the Redis server using a prompt and an LLM key. If
   * the data is not found, it returns null.
   * @param prompt The prompt used to find the data.
   * @param llmKey The LLM key used to find the data.
   * @returns The corresponding data as an array of Generation objects, or null if not found.
   */
  public async lookup(
    prompt: string,
    llmKey: string
  ): Promise<Generation[] | null> {
    const generations: Generation[] = [];
    // Entries are keyed by sequential index; probe until the first missing
    // (or empty) value. The reads are inherently sequential because the
    // number of stored generations is unknown until a miss occurs.
    for (let idx = 0; ; idx += 1) {
      const key = getCacheKey(prompt, llmKey, String(idx));
      const value = await this.redisClient.get(key);
      if (!value) break;
      // NOTE(review): JSON.parse on a corrupted cache entry will throw and
      // surface to the caller as a lookup failure — confirm that is the
      // desired contract before hardening with a try/catch.
      const storedGeneration = JSON.parse(value);
      generations.push(deserializeStoredGeneration(storedGeneration));
    }
    return generations.length > 0 ? generations : null;
  }

  /**
   * Updates the data in the Redis server using a prompt and an LLM key.
   * @param prompt The prompt used to store the data.
   * @param llmKey The LLM key used to store the data.
   * @param value The data to be stored, represented as an array of Generation objects.
   */
  public async update(
    prompt: string,
    llmKey: string,
    value: Generation[]
  ): Promise<void> {
    // Each generation is written under its own key and the writes are
    // independent, so issue them concurrently instead of awaiting one
    // round trip per entry.
    await Promise.all(
      value.map((generation, i) => {
        const key = getCacheKey(prompt, llmKey, String(i));
        const serialized = JSON.stringify(serializeGeneration(generation));
        // "EX" sets a per-key expiry in seconds (ioredis SET options).
        return this.ttl !== undefined
          ? this.redisClient.set(key, serialized, "EX", this.ttl)
          : this.redisClient.set(key, serialized);
      })
    );
  }
}