From c44210adb18c1cbf78fe89b8f15dcbc990eb2c23 Mon Sep 17 00:00:00 2001
From: wjkang
Date: Mon, 29 Jan 2024 21:52:59 +0800
Subject: [PATCH 1/2] =?UTF-8?q?=E2=9C=A8=20feat:=20remove=20hasMore?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/utils/emitter.ts |  2 +-
 src/utils/llm.ts     |  6 +++---
 src/utils/openai.ts  | 14 ++++----------
 3 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/src/utils/emitter.ts b/src/utils/emitter.ts
index d1e69b8..862d6b1 100644
--- a/src/utils/emitter.ts
+++ b/src/utils/emitter.ts
@@ -125,7 +125,7 @@ function mitt>(
 
 type Events = {
   clipboardImage: string;
-  chatGPTChunck: { text?: string; hasMore: boolean };
+  chatGPTChunck: { text?: string };
   chatGPTComplete: string;
 };
 
diff --git a/src/utils/llm.ts b/src/utils/llm.ts
index c950b7e..7d890d6 100644
--- a/src/utils/llm.ts
+++ b/src/utils/llm.ts
@@ -8,7 +8,7 @@ import { showChatGPTView } from '../webview';
 const LLMScript: {
   createChatCompletion?: (options: {
     messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
-    handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
+    handleChunk?: (data: { text?: string }) => void;
   }) => Promise;
 } = {};
 
@@ -27,7 +27,7 @@ if (syncFolder) {
 
 export const createChatCompletion = async (options: {
   messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
-  handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
+  handleChunk?: (data: { text?: string }) => void;
 }) => {
   if (LLMScript.createChatCompletion) {
     const res = await LLMScript.createChatCompletion({
@@ -57,7 +57,7 @@ export const createChatCompletion = async (options: {
 
 export const createChatCompletionForScript = (options: {
   messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
-  handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
+  handleChunk?: (data: { text?: string }) => void;
   showWebview?: boolean;
 }) => {
   if (!options.showWebview) {
diff --git a/src/utils/openai.ts b/src/utils/openai.ts
index e6ef3e9..1851396 100644
--- a/src/utils/openai.ts
+++ b/src/utils/openai.ts
@@ -1,11 +1,10 @@
 import * as https from 'https';
 import { TextDecoder } from 'util';
 import { getChatGPTConfig } from './config';
-import { showChatGPTView } from '../webview';
 
 export const createChatCompletion = (options: {
   messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
-  handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
+  handleChunk?: (data: { text?: string }) => void;
 }) =>
   new Promise((resolve) => {
     let combinedResult = '';
@@ -41,7 +40,7 @@ export const createChatCompletion = (options: {
           if (element.includes('data: ')) {
             if (element.includes('[DONE]')) {
               if (options.handleChunk) {
-                options.handleChunk({ hasMore: true, text: '' });
+                options.handleChunk({ text: '' });
               }
               return;
             }
@@ -49,7 +48,7 @@ export const createChatCompletion = (options: {
             const data = JSON.parse(element.replace('data: ', ''));
             if (data.finish_reason === 'stop') {
               if (options.handleChunk) {
-                options.handleChunk({ hasMore: true, text: '' });
+                options.handleChunk({ text: '' });
               }
               return;
             }
@@ -58,7 +57,6 @@ export const createChatCompletion = (options: {
               if (options.handleChunk) {
                 options.handleChunk({
                   text: openaiRes.replaceAll('\\n', '\n'),
-                  hasMore: true,
                 });
               }
               combinedResult += openaiRes;
@@ -66,7 +64,6 @@ export const createChatCompletion = (options: {
           } else {
             if (options.handleChunk) {
               options.handleChunk({
-                hasMore: true,
                 text: element,
               });
             }
@@ -84,7 +81,6 @@ export const createChatCompletion = (options: {
       res.on('error', (e) => {
         if (options.handleChunk) {
           options.handleChunk({
-            hasMore: true,
             text: e.toString(),
           });
         }
@@ -95,7 +91,6 @@ export const createChatCompletion = (options: {
       if (error !== '发生错误:') {
         if (options.handleChunk) {
           options.handleChunk({
-            hasMore: true,
             text: error,
           });
         }
@@ -111,8 +106,7 @@ export const createChatCompletion = (options: {
       max_tokens: config.maxTokens,
     };
     request.on('error', (error) => {
-      options.handleChunk &&
-        options.handleChunk({ hasMore: true, text: error.toString() });
+      options.handleChunk && options.handleChunk({ text: error.toString() });
       resolve(error.toString());
     });
     request.write(JSON.stringify(body));

From 5153dac7afa9d6a732e39dee246b957d2362e8e0 Mon Sep 17 00:00:00 2001
From: wjkang
Date: Mon, 29 Jan 2024 22:16:56 +0800
Subject: [PATCH 2/2] =?UTF-8?q?=E2=9C=A8=20feat:=20add=20extensionContext?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 package.json           | 2 +-
 src/utils/llm.ts       | 5 +++++
 src/utils/vscodeEnv.ts | 2 ++
 3 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/package.json b/package.json
index a8ae3aa..60f670c 100644
--- a/package.json
+++ b/package.json
@@ -3,7 +3,7 @@
   "displayName": "lowcode",
   "description": "lowcode tool, support ChatGPT and other LLM",
   "author": "wjkang",
-  "version": "1.7.8",
+  "version": "1.7.9",
   "icon": "asset/icon.png",
   "publisher": "wjkang",
   "repository": "https://github.com/lowcoding/lowcode-vscode",
diff --git a/src/utils/llm.ts b/src/utils/llm.ts
index 7d890d6..e819961 100644
--- a/src/utils/llm.ts
+++ b/src/utils/llm.ts
@@ -4,11 +4,13 @@ import { createChatCompletion as openaiCreateChatCompletion } from './openai';
 import { emitter } from './emitter';
 import { getSyncFolder } from './config';
 import { showChatGPTView } from '../webview';
+import { getEnv } from './vscodeEnv';
 
 const LLMScript: {
   createChatCompletion?: (options: {
     messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
     handleChunk?: (data: { text?: string }) => void;
+    lowcodeContext: object;
   }) => Promise;
 } = {};
 
@@ -38,6 +40,9 @@ export const createChatCompletion = async (options: {
           emitter.emit('chatGPTChunck', data);
         }
       },
+      lowcodeContext: {
+        env: getEnv(),
+      },
     });
     emitter.emit('chatGPTComplete', res);
     return res;
diff --git a/src/utils/vscodeEnv.ts b/src/utils/vscodeEnv.ts
index d2e6efa..441a440 100644
--- a/src/utils/vscodeEnv.ts
+++ b/src/utils/vscodeEnv.ts
@@ -1,6 +1,7 @@
 import * as path from 'path';
 import { workspace } from 'vscode';
 import { getSyncFolder } from './config';
+import { getExtensionContext } from '../context';
 
 export const rootPath = path.join(workspace.rootPath || '');
 
@@ -39,6 +40,7 @@ export const getEnv = () => ({
   blockMaterialsPath,
   snippetMaterialsPath,
   privateMaterialsPath: getSyncFolder(),
+  extensionContext: getExtensionContext(),
 });
 
 export const checkRootPath = () => {
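
Note for script authors (a minimal sketch, not part of the patches): after PATCH 1/2, handleChunk receives only { text?: string } -- the hasMore flag is gone -- and after PATCH 2/2 a sync-folder LLM script is also handed a lowcodeContext argument whose env field is the object returned by getEnv(), which now includes the VS Code ExtensionContext. The TypeScript sketch below assumes the script simply exports createChatCompletion and uses a placeholder echo "model"; those details are illustrative assumptions, only the option names and chunk shape come from the diffs above.

    // Hypothetical sync-folder LLM script -- a sketch only. The module shape
    // and the echo "model" are assumptions; the option names (messages,
    // handleChunk, lowcodeContext) and the chunk shape come from the patches.
    type Message = { role: 'system' | 'user' | 'assistant'; content: string };

    export const createChatCompletion = async (options: {
      messages: Message[];
      handleChunk?: (data: { text?: string }) => void; // no hasMore any more
      lowcodeContext: { env: Record<string, unknown> }; // { env: getEnv() }
    }): Promise<string> => {
      // getEnv() now also carries the VS Code ExtensionContext (PATCH 2/2).
      const envKeys = Object.keys(options.lowcodeContext.env).join(', ');
      options.handleChunk?.({ text: `env provides: ${envKeys}\n` });

      let combined = '';
      for (const message of options.messages) {
        // Placeholder "model": echo each message back as a streamed chunk.
        const chunk = `[${message.role}] ${message.content}\n`;
        options.handleChunk?.({ text: chunk });
        combined += chunk;
      }
      // The resolved string is what llm.ts emits as the 'chatGPTComplete' event.
      return combined;
    };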