diff --git a/common/consts/OpenAIConst.ts b/common/consts/OpenAIConst.ts
index 6f162c0..7946f50 100644
--- a/common/consts/OpenAIConst.ts
+++ b/common/consts/OpenAIConst.ts
@@ -15,6 +15,7 @@ export type OpenAIMessageRole = typeof OPENAI_MESSAGE_ROLES[keyof typeof OPENAI_
 export const OPENAI_MODEL = {
   GPT_4_1_MINI: 'gpt-4.1-mini',
   GPT_4_1: 'gpt-4.1',
+  GPT_5: 'gpt-5',
 } as const;
 
 export type OpenAIModel = typeof OPENAI_MODEL[keyof typeof OPENAI_MODEL];
diff --git a/common/services/OpenAIService.ts b/common/services/OpenAIService.ts
index d099948..91db403 100644
--- a/common/services/OpenAIService.ts
+++ b/common/services/OpenAIService.ts
@@ -31,15 +31,8 @@ export default class OpenAIService implements OpenAIServiceType {
     }
 
     try {
-      const completion = await this.openaiClient.chat.completions.create({
-        model: options?.model || OPENAI_MODEL.GPT_4_1,
-        messages: messages.map(msg => ({
-          role: msg.role,
-          content: msg.content,
-        })),
-        max_completion_tokens: options?.maxTokens,
-        temperature: options?.temperature,
-      });
+      const completionParams = this.prepareCompletionParams(messages, options);
+      const completion = await this.openaiClient.chat.completions.create(completionParams);
 
       const response = completion.choices?.[0]?.message?.content;
 
@@ -65,6 +58,31 @@ export default class OpenAIService implements OpenAIServiceType {
     }
   }
 
+  /**
+   * Prepares Chat Completions parameters with model-specific adjustments.
+   * GPT-5 only accepts the default temperature, so the option is omitted for it.
+   * @private
+   */
+  private prepareCompletionParams(messages: OpenAIChatHistory, options?: OpenAIChatOptions) {
+    const model = options?.model || OPENAI_MODEL.GPT_4_1;
+
+    const baseParams = {
+      model,
+      messages: messages.map(msg => ({
+        role: msg.role,
+        content: msg.content,
+      })),
+      max_completion_tokens: options?.maxTokens,
+    };
+
+    // GPT-5 rejects non-default temperature values; pass temperature only for other models.
+    if (model === OPENAI_MODEL.GPT_5) {
+      return baseParams;
+    }
+
+    return { ...baseParams, temperature: options?.temperature };
+  }
+
   /**
    * Creates a new conversation with a system message.
    * @param systemPrompt The system message to set the AI's behavior
diff --git a/docs/OpenAIService.md b/docs/OpenAIService.md
index adca346..d55ea60 100644
--- a/docs/OpenAIService.md
+++ b/docs/OpenAIService.md
@@ -78,12 +78,28 @@
 
 ```typescript
 const response = await openaiService.chat(messages, {
-  model: 'gpt-4', // 使用するモデル(デフォルト: 'gpt-3.5-turbo')
+  model: 'gpt-5', // 使用するモデル(デフォルト: 'gpt-4.1')
   maxTokens: 1000, // 最大トークン数
   temperature: 0.7 // 応答のランダム性(0-2)
 });
 ```
 
+利用可能なモデル:
+- `OPENAI_MODEL.GPT_4_1_MINI`: 'gpt-4.1-mini'
+- `OPENAI_MODEL.GPT_4_1`: 'gpt-4.1'
+- `OPENAI_MODEL.GPT_5`: 'gpt-5'
+
+```typescript
+import { OPENAI_MODEL } from '@common/consts/OpenAIConst';
+
+// GPT-5を使用する例
+const response = await openaiService.chat(messages, {
+  model: OPENAI_MODEL.GPT_5,
+  maxTokens: 1000,
+  temperature: 0.7 // 応答のランダム性(GPT-5では無視され、デフォルト値が使用されます)
+});
+```
+
 ## 型定義
 
 ### OpenAIMessageType
diff --git a/tests/OpenAIService.test.ts b/tests/OpenAIService.test.ts
index 81d58fd..e8d4996 100644
--- a/tests/OpenAIService.test.ts
+++ b/tests/OpenAIService.test.ts
@@ -75,6 +75,16 @@ describe('OpenAIService', () => {
     expect(response).toBe('This is a mock response from OpenAI. (using gpt-4.1)');
   });
 
+  it('should support GPT-5 model', async () => {
+    const messages: OpenAIChatHistory = [
+      { role: OPENAI_MESSAGE_ROLES.USER, content: 'Random question' },
+    ];
+
+    const response = await mockService.chat(messages, { model: OPENAI_MODEL.GPT_5 });
+
+    expect(response).toBe('This is a mock response from OpenAI. (using gpt-5)');
+  });
+
   it('should allow custom default response', async () => {
     mockService.setDefaultResponse('Custom mock response');
 
@@ -276,5 +286,22 @@ describe('OpenAIService', () => {
       expect(typeof response).toBe('string');
       expect(response.length).toBeGreaterThan(0);
     }, 30000);
+
+    it('should handle GPT-5 model with real API', async () => {
+      const messages: OpenAIChatHistory = [
+        { role: OPENAI_MESSAGE_ROLES.USER, content: 'Say "test" and nothing else.' },
+      ];
+
+      const response = await service.chat(messages, {
+        model: OPENAI_MODEL.GPT_5,
+        maxTokens: 1000, // GPT-5 spends reasoning tokens before output, so allow headroom
+        temperature: 0 // unsupported by GPT-5; the service must strip it before the API call
+      });
+
+      console.log('OpenAI API GPT-5 response:', response);
+
+      expect(typeof response).toBe('string');
+      expect(response.length).toBeGreaterThan(0);
+    }, 30000);
   });
 });