From afad8210fc96505f343f46b0fae49cec44579eba Mon Sep 17 00:00:00 2001
From: Jason Tarver
Date: Fri, 10 Mar 2023 07:41:10 -0600
Subject: [PATCH] feat: use new gpt-3.5-turbo model (#123)

Co-authored-by: Hiroki Osame
---
 src/commands/aicommits.ts |  4 ++++
 src/utils/openai.ts       | 25 +++++++++++++++----------
 tests/specs/cli.ts        | 14 +++++++-------
 3 files changed, 26 insertions(+), 17 deletions(-)

diff --git a/src/commands/aicommits.ts b/src/commands/aicommits.ts
index b30415bc..aa4c1906 100644
--- a/src/commands/aicommits.ts
+++ b/src/commands/aicommits.ts
@@ -49,6 +49,10 @@ export default async (
 	);
 	s.stop('Changes analyzed');
 
+	if (messages.length === 0) {
+		throw new KnownError('No commit messages were generated. Try again.');
+	}
+
 	let message: string;
 	if (messages.length === 1) {
 		[message] = messages;
diff --git a/src/utils/openai.ts b/src/utils/openai.ts
index 8661a3d7..16966ade 100644
--- a/src/utils/openai.ts
+++ b/src/utils/openai.ts
@@ -1,6 +1,6 @@
 import https from 'https';
 import type { ClientRequest, IncomingMessage } from 'http';
-import type { CreateCompletionRequest, CreateCompletionResponse } from 'openai';
+import type { CreateChatCompletionRequest, CreateChatCompletionResponse } from 'openai';
 import { encoding_for_model as encodingForModel } from '@dqbd/tiktoken';
 import { KnownError } from './error.js';
 
@@ -50,13 +50,13 @@
 	request.end();
 });
 
-const createCompletion = async (
+const createChatCompletion = async (
 	apiKey: string,
-	json: CreateCompletionRequest,
+	json: CreateChatCompletionRequest,
 ) => {
 	const { response, data } = await httpsPost(
 		'api.openai.com',
-		'/v1/completions',
+		'/v1/chat/completions',
 		{
 			Authorization: `Bearer ${apiKey}`,
 		},
@@ -81,7 +81,7 @@
 		throw new KnownError(errorMessage);
 	}
 
-	return JSON.parse(data) as CreateCompletionResponse;
+	return JSON.parse(data) as CreateChatCompletionResponse;
 };
 
 const sanitizeMessage = (message: string) => message.trim().replace(/[\n\r]/g, '').replace(/(\w)\.$/, '$1');
 
 const deduplicateMessages = (array: string[]) => Array.from(new Set(array));
 
 const getPrompt = (locale: string, diff: string) => `Write an insightful but concise Git commit message in a complete sentence in present tense for the following diff without prefacing it with anything, the response must be in the language ${locale}:\n${diff}`;
 
-const model = 'text-davinci-003';
-const encoder = encodingForModel(model);
+const model = 'gpt-3.5-turbo';
+// TODO: update for the new gpt-3.5 model
+const encoder = encodingForModel('text-davinci-003');
 
 export const generateCommitMessage = async (
 	apiKey: string,
@@ -110,9 +111,12 @@
 	}
 
 	try {
-		const completion = await createCompletion(apiKey, {
+		const completion = await createChatCompletion(apiKey, {
 			model,
-			prompt,
+			messages: [{
+				role: 'user',
+				content: prompt,
+			}],
 			temperature: 0.7,
 			top_p: 1,
 			frequency_penalty: 0,
@@ -124,7 +128,8 @@
 
 		return deduplicateMessages(
 			completion.choices
-				.map(choice => sanitizeMessage(choice.text!)),
+				.filter(choice => choice.message?.content)
+				.map(choice => sanitizeMessage(choice.message!.content)),
 		);
 	} catch (error) {
 		const errorAsAny = error as any;
diff --git a/tests/specs/cli.ts b/tests/specs/cli.ts
index cbe1ae9f..3d6dab69 100644
--- a/tests/specs/cli.ts
+++ b/tests/specs/cli.ts
@@ -3,9 +3,6 @@ import { createFixture } from 'fs-fixture';
 import { createAicommits, createGit } from '../utils.js';
 
 const { OPENAI_KEY } = process.env;
-if (!OPENAI_KEY) {
-	throw new Error('process.env.OPENAI_KEY is necessary to run these tests');
-}
 
 export default testSuite(({ describe }) => {
 	if (process.platform === 'win32') {
@@ -14,6 +11,11 @@ export default testSuite(({ describe }) => {
 		return;
 	}
 
+	if (!OPENAI_KEY) {
+		console.warn('⚠️ process.env.OPENAI_KEY is necessary to run these tests. Skipping...');
+		return;
+	}
+
 	describe('CLI', async ({ test }) => {
 		const data: Record = {
 			firstName: 'Hiroki',
@@ -94,10 +96,8 @@
 			if (stdout.match('└')) {
 				const countChoices = stdout.match(/ {2}[●○]/g)?.length ?? 0;
 
-				// 2 choices or less should be generated
-				// pretty common for it to return 2 results that are the same
-				// which gets de-duplicated
-				expect(countChoices <= 2).toBe(true);
+				// 2 choices should be generated
+				expect(countChoices).toBe(2);
 
 				committing.stdin!.write('\r');
 				committing.stdin!.end();
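
For reference, the request and response shapes that the new createChatCompletion helper relies on can be exercised directly against the /v1/chat/completions endpoint used in the diff. The sketch below is illustrative only: it assumes Node 18+ (global fetch) rather than the patch's httpsPost helper, the function name generateOneMessage is hypothetical, and the model and sampling values simply mirror the ones in the hunks above.

// Illustrative sketch (not part of the patch): the chat API returns text under
// choices[].message.content instead of the old completions choices[].text field.
const generateOneMessage = async (apiKey: string, diff: string): Promise<string | undefined> => {
	const response = await fetch('https://api.openai.com/v1/chat/completions', {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${apiKey}`,
			'Content-Type': 'application/json',
		},
		body: JSON.stringify({
			model: 'gpt-3.5-turbo',
			messages: [{ role: 'user', content: `Write a concise Git commit message for this diff:\n${diff}` }],
			temperature: 0.7,
			top_p: 1,
			frequency_penalty: 0,
			n: 1,
		}),
	});

	if (!response.ok) {
		throw new Error(`OpenAI API Error: ${response.status} ${response.statusText}`);
	}

	const completion = await response.json();
	// Trim the single generated choice, mirroring what sanitizeMessage does in the patch
	return completion.choices?.[0]?.message?.content?.trim();
};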