generated from johnlindquist/kenv
-
Notifications
You must be signed in to change notification settings - Fork 2
/
chatgpt.js
151 lines (134 loc) · 4.1 KB
/
chatgpt.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
/*
# Chat with ChatGPT
## <span class="text-primary">👉 Note: LangChain is still in development. This script will keep updating to use the latest APIs</span>
Use `Kit` -> `Manage npm Packages` -> `Update a Package` -> `langchain` to install the latest version.
- Opens the `chat` component
- Type a message and press `enter` to send
- The message is sent to the OpenAI API
- The response from OpenAI is displayed in the chat
- Repeat!
*/
// Name: ChatGPT
// Description: Have a Conversation with an AI
// Author: John Lindquist
// Twitter: @johnlindquist
import "@johnlindquist/kit"
import { ChatOpenAI } from "langchain/chat_models/openai"
import { ConversationChain } from "langchain/chains"
import { BufferWindowMemory } from "langchain/memory"
import {
ChatPromptTemplate,
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
MessagesPlaceholder,
} from "langchain/prompts"
// Prompt layout: system instructions first, then the prior turns from
// memory (the "history" placeholder), then the user's new input.
let systemTemplate = SystemMessagePromptTemplate.fromTemplate(
  `The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly.`
)
let historyPlaceholder = new MessagesPlaceholder("history")
let humanTemplate = HumanMessagePromptTemplate.fromTemplate("{input}")
let prompt = ChatPromptTemplate.fromPromptMessages([
  systemTemplate,
  historyPlaceholder,
  humanTemplate,
])
// Prompt for the API key on first run; Script Kit caches it in ~/.kenv/.env.
let openAIApiKey = await env("OPENAI_API_KEY", {
  hint: `Grab a key from <a href="https://platform.openai.com/account/api-keys">here</a>`,
})
// Shared mutable state, updated by the LLM callbacks and the chat handlers below.
let currentMessage = `` // AI response streamed so far for the current turn
let currentInput = `` // last user message submitted
let chatHistoryPreAbort = [] // NOTE(review): only referenced from commented-out code — confirm still needed
let id = -1 // timer handle for the slow-response warning
let running = false // true while a chain.call is in flight (gates abort on escape)
// Streaming chat model: tokens arrive via callbacks so the chat UI updates live.
let llm = new ChatOpenAI({
  openAIApiKey,
  streaming: true,
  callbacks: [
    {
      handleLLMStart: async () => {
        // Warn the user if no token has arrived within 3 seconds.
        id = setTimeout(() => {
          chat.setMessage(-1, md(`### Sorry, the AI is taking a long time to respond.`))
          setLoading(true)
        }, 3000)
        log(`handleLLMStart`)
        currentMessage = ``
        chat.addMessage("")
      },
      handleLLMNewToken: async token => {
        // First token cancels the slow-response warning.
        clearTimeout(id)
        setLoading(false)
        if (!token) return
        currentMessage += token
        let htmlMessage = md(currentMessage)
        chat.setMessage(-1, htmlMessage)
      },
      // Hitting escape aborts the request, which surfaces here as an error.
      // The chain never completes, so the partial exchange must be saved to
      // memory manually to keep the conversation history consistent.
      handleLLMError: async err => {
        warn(`error`, JSON.stringify(err))
        running = false
        // Await the writes (they are async) so memory is fully updated
        // before the user can submit the next message.
        await memory.chatHistory.addUserMessage(currentInput)
        await memory.chatHistory.addAIChatMessage(currentMessage)
      },
      handleLLMEnd: async () => {
        running = false
        log(`handleLLMEnd`)
      },
    },
  ],
})
// Keep only the last k exchanges in the prompt to bound token usage.
let memory = new BufferWindowMemory({
  k: 10,
  inputKey: "input", // required when using a signal to abort
  returnMessages: true, // return message objects (not a string) for the chat prompt
})
// Wire model + prompt + windowed memory into a single conversational chain.
let chain = new ConversationChain({
  llm,
  prompt,
  memory,
})
// Abort controller for the in-flight request; recreated on every submit.
let controller = null
// Open the chat UI. This resolves when the user continues or closes the window.
await chat({
  shortcuts: [
    {
      name: `Close`,
      key: `${cmd}+w`,
      onPress: () => {
        process.exit()
      },
      bar: "left",
    },
    {
      name: `Continue Script`,
      key: `${cmd}+enter`,
      onPress: () => {
        // Submitting an empty string ends the chat and falls through to the
        // transcript dump below.
        submit("")
      },
      bar: "right",
    },
  ],
  onEscape: async () => {
    // Escape cancels the current response; handleLLMError saves the partial
    // exchange to memory.
    if (running) controller.abort()
  },
  onSubmit: async input => {
    currentInput = input
    controller = new AbortController()
    running = true
    try {
      await chain.call({ input, signal: controller.signal })
    } catch (err) {
      // Aborting via escape rejects chain.call; without this catch the
      // rejection is unhandled. handleLLMError has already persisted the
      // partial exchange, so just log and keep the chat open.
      warn(`chain.call failed`, err?.message ?? JSON.stringify(err))
      running = false
    }
  },
})
// After the chat closes, dump the full transcript with Human/AI prefixes.
let messages = await memory.chatHistory.getMessages()
let lines = []
for (let m of messages) {
  let prefix = m.constructor.name.startsWith("Human") ? memory.humanPrefix : memory.aiPrefix
  lines.push(`${prefix}\n${m.text}`)
}
inspect(lines.join("\n\n"))