2 changes: 1 addition & 1 deletion package.json
@@ -3,7 +3,7 @@
   "displayName": "lowcode",
   "description": "lowcode tool, support ChatGPT and other LLM",
   "author": "wjkang <ruoxieme@gmail.com>",
-  "version": "1.7.8",
+  "version": "1.7.9",
   "icon": "asset/icon.png",
   "publisher": "wjkang",
   "repository": "https://github.com/lowcoding/lowcode-vscode",
2 changes: 1 addition & 1 deletion src/utils/emitter.ts
@@ -125,7 +125,7 @@ function mitt<Events extends Record<EventType, unknown>>(

 type Events = {
   clipboardImage: string;
-  chatGPTChunck: { text?: string; hasMore: boolean };
+  chatGPTChunck: { text?: string };
   chatGPTComplete: string;
 };

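
With `hasMore` gone from the `chatGPTChunck` payload, listeners react only to the streamed text and treat the separate `chatGPTComplete` event as the end-of-stream signal. A minimal sketch of a consumer, assuming the mitt-based `emitter` exported from `src/utils/emitter.ts` (the accumulation logic is illustrative):

```ts
import { emitter } from './emitter';

// Accumulate streamed chunks; the end of the stream is now signalled by
// the separate 'chatGPTComplete' event instead of a hasMore flag.
let streamed = '';

emitter.on('chatGPTChunck', (data: { text?: string }) => {
  if (data.text) {
    streamed += data.text;
    // e.g. push the partial answer to the ChatGPT webview here
  }
});

emitter.on('chatGPTComplete', (fullText: string) => {
  streamed = fullText; // final, authoritative result
});
```
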
11 changes: 8 additions & 3 deletions src/utils/llm.ts
@@ -4,11 +4,13 @@ import { createChatCompletion as openaiCreateChatCompletion } from './openai';
 import { emitter } from './emitter';
 import { getSyncFolder } from './config';
 import { showChatGPTView } from '../webview';
+import { getEnv } from './vscodeEnv';

 const LLMScript: {
   createChatCompletion?: (options: {
     messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
-    handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
+    handleChunk?: (data: { text?: string }) => void;
+    lowcodeContext: object;
   }) => Promise<string>;
 } = {};

@@ -27,7 +29,7 @@ if (syncFolder) {

 export const createChatCompletion = async (options: {
   messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
-  handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
+  handleChunk?: (data: { text?: string }) => void;
 }) => {
   if (LLMScript.createChatCompletion) {
     const res = await LLMScript.createChatCompletion({
@@ -38,6 +40,9 @@ export const createChatCompletion = async (options: {
           emitter.emit('chatGPTChunck', data);
         }
       },
+      lowcodeContext: {
+        env: getEnv(),
+      },
     });
     emitter.emit('chatGPTComplete', res);
     return res;
@@ -57,7 +62,7 @@

 export const createChatCompletionForScript = (options: {
   messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
-  handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
+  handleChunk?: (data: { text?: string }) => void;
   showWebview?: boolean;
 }) => {
   if (!options.showWebview) {
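
The new `lowcodeContext` parameter (currently `{ env: getEnv() }`) is forwarded to the user-supplied LLM script loaded from the sync folder. A rough sketch of what such a script's `createChatCompletion` could look like under the updated contract; the file name, the loading mechanism, and the echo "backend" below are placeholders, not part of this diff:

```ts
// Hypothetical custom LLM script kept in the sync folder.
export const createChatCompletion = async (options: {
  messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
  handleChunk?: (data: { text?: string }) => void;
  lowcodeContext: object;
}) => {
  // lowcodeContext.env exposes the values returned by getEnv(), e.g.
  // privateMaterialsPath and the newly added extensionContext.
  const { env } = options.lowcodeContext as { env: Record<string, unknown> };
  console.log('sync folder in use:', env.privateMaterialsPath);

  let combined = '';
  for (const message of options.messages) {
    // Call whatever LLM backend the script wraps; here we simply echo.
    const chunk = `[${message.role}] ${message.content}\n`;
    options.handleChunk?.({ text: chunk });
    combined += chunk;
  }
  return combined; // llm.ts emits this value as 'chatGPTComplete'
};
```
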
14 changes: 4 additions & 10 deletions src/utils/openai.ts
@@ -1,11 +1,10 @@
 import * as https from 'https';
 import { TextDecoder } from 'util';
 import { getChatGPTConfig } from './config';
-import { showChatGPTView } from '../webview';

 export const createChatCompletion = (options: {
   messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
-  handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
+  handleChunk?: (data: { text?: string }) => void;
 }) =>
   new Promise<string>((resolve) => {
     let combinedResult = '';
@@ -41,15 +40,15 @@ export const createChatCompletion = (options: {
           if (element.includes('data: ')) {
             if (element.includes('[DONE]')) {
               if (options.handleChunk) {
-                options.handleChunk({ hasMore: true, text: '' });
+                options.handleChunk({ text: '' });
               }
               return;
             }
             // remove 'data: '
             const data = JSON.parse(element.replace('data: ', ''));
             if (data.finish_reason === 'stop') {
               if (options.handleChunk) {
-                options.handleChunk({ hasMore: true, text: '' });
+                options.handleChunk({ text: '' });
               }
               return;
             }
@@ -58,15 +57,13 @@ export const createChatCompletion = (options: {
               if (options.handleChunk) {
                 options.handleChunk({
                   text: openaiRes.replaceAll('\\n', '\n'),
-                  hasMore: true,
                 });
               }
               combinedResult += openaiRes;
             }
           } else {
             if (options.handleChunk) {
               options.handleChunk({
-                hasMore: true,
                 text: element,
               });
             }
@@ -84,7 +81,6 @@ export const createChatCompletion = (options: {
       res.on('error', (e) => {
         if (options.handleChunk) {
           options.handleChunk({
-            hasMore: true,
             text: e.toString(),
           });
         }
@@ -95,7 +91,6 @@ export const createChatCompletion = (options: {
         if (error !== '发生错误:') {
           if (options.handleChunk) {
             options.handleChunk({
-              hasMore: true,
               text: error,
             });
           }
@@ -111,8 +106,7 @@ export const createChatCompletion = (options: {
       max_tokens: config.maxTokens,
     };
     request.on('error', (error) => {
-      options.handleChunk &&
-        options.handleChunk({ hasMore: true, text: error.toString() });
+      options.handleChunk && options.handleChunk({ text: error.toString() });
      resolve(error.toString());
     });
     request.write(JSON.stringify(body));
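
After the change, every `handleChunk` call from `src/utils/openai.ts` carries only an optional `text`; the `[DONE]` marker and a `finish_reason` of `stop` both arrive as `{ text: '' }`, and completion is ultimately signalled by the resolved promise. A minimal usage sketch against the exported signature (the prompt and the logging are illustrative):

```ts
import { createChatCompletion } from './openai';

const run = async () => {
  const answer = await createChatCompletion({
    messages: [{ role: 'user', content: 'Say hello' }],
    handleChunk: (data) => {
      if (data.text) {
        process.stdout.write(data.text); // stream partial output as it arrives
      }
    },
  });
  console.log('\nfull answer:', answer);
};

run();
```
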
2 changes: 2 additions & 0 deletions src/utils/vscodeEnv.ts
@@ -1,6 +1,7 @@
 import * as path from 'path';
 import { workspace } from 'vscode';
 import { getSyncFolder } from './config';
+import { getExtensionContext } from '../context';

 export const rootPath = path.join(workspace.rootPath || '');

@@ -39,6 +40,7 @@ export const getEnv = () => ({
   blockMaterialsPath,
   snippetMaterialsPath,
   privateMaterialsPath: getSyncFolder(),
+  extensionContext: getExtensionContext(),
 });

 export const checkRootPath = () => {
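
`getEnv()` now also returns the VS Code `ExtensionContext` via `getExtensionContext()` from `src/context.ts`, which is not shown in this diff. A sketch of the usual pattern such a module follows, assuming the context is captured once during activation (the `setExtensionContext` helper and the `activate` wiring below are assumptions):

```ts
// Hypothetical src/context.ts: store the ExtensionContext at activation
// time so utility modules such as vscodeEnv.ts can reach it later.
import { ExtensionContext } from 'vscode';

let extensionContext: ExtensionContext;

export const setExtensionContext = (context: ExtensionContext) => {
  extensionContext = context;
};

export const getExtensionContext = () => extensionContext;

// Assumed wiring in the extension entry point:
//   export function activate(context: vscode.ExtensionContext) {
//     setExtensionContext(context);
//   }
```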