Skip to content

Commit 73b90fd

Browse files
committed
refactor!: optimize type CodeReviewResult
1 parent 7246d55 commit 73b90fd

File tree

7 files changed

+186
-164
lines changed

7 files changed

+186
-164
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ Supported AI service providers:
3838
```javascript
3939
import { codeReview } from 'cr-asst';
4040

41-
const { content } = await codeReview({
41+
const { reviewReport } = await codeReview({
4242
headRef: 'branch-to-review',
4343
baseRef: 'main',
4444
model: 'gpt-4',

README.zh.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ npx cr-asst --head-ref branch-to-review --base-ref main --model gpt-4 --provider
3838
```javascript
3939
import { codeReview } from 'cr-asst';
4040

41-
const { content } = await codeReview({
41+
const { reviewReport } = await codeReview({
4242
headRef: 'branch-to-review',
4343
baseRef: 'main',
4444
model: 'gpt-4',

src/code_review/index.ts

Lines changed: 74 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,15 @@
11
import { writeFile } from 'node:fs/promises';
22
import chalk from 'chalk';
3-
import { stepCountIs, type LanguageModel, type ModelMessage } from 'ai';
3+
import { stepCountIs, streamText, type LanguageModel, type ModelMessage } from 'ai';
44
import type { CodeReviewOptions, CodeReviewResult } from '../types';
55
import {
6-
getUserPrompt,
7-
getSystemPrompt,
8-
getApprovalCheckPrompt,
9-
getApprovalCheckStatusPrompt,
6+
getReviewReportMessages,
7+
getApprovalCheckCommentMessages,
8+
getApprovalCheckStatusMessages,
109
} from './prompts/index';
1110
import { runCmd } from './utils';
1211
import { reviewReportTools } from './tools';
13-
import { initModel, callModel } from './model';
12+
import { initModel, handleStreamTextResult } from './model';
1413

1514
export async function codeReview(options: CodeReviewOptions): Promise<CodeReviewResult> {
1615
// init model
@@ -22,8 +21,8 @@ export async function codeReview(options: CodeReviewOptions): Promise<CodeReview
2221
model,
2322
});
2423

25-
// generate approval check
26-
const approvalCheck = options.approvalCheck
24+
// generate approval check comment
25+
const approvalCheckComment = options.approvalCheck
2726
? await generateApprovalCheck({
2827
...options,
2928
model,
@@ -33,30 +32,22 @@ export async function codeReview(options: CodeReviewOptions): Promise<CodeReview
3332

3433
// generate approval check status
3534
const approvalCheckStatus =
36-
options.approvalCheck && approvalCheck
35+
options.approvalCheck && approvalCheckComment
3736
? await generateApprovalCheckStatus({
3837
...options,
3938
model,
40-
prevMessages: approvalCheck.messages,
39+
prevMessages: approvalCheckComment.messages,
4140
})
4241
: undefined;
4342

4443
// return
4544
return {
46-
content: reviewReport.text,
47-
reasoningContent: reviewReport.reasoning,
48-
debug: {
49-
diffsCmd: reviewReport.diffsCmd,
50-
diffs: reviewReport.diffs,
51-
stats: reviewReport.stats,
52-
usage: reviewReport.usage,
53-
},
45+
reviewReport,
5446
approvalCheck:
55-
approvalCheck && approvalCheckStatus
47+
approvalCheckComment && approvalCheckStatus
5648
? {
57-
content: approvalCheck.text,
58-
reasoningContent: approvalCheck.reasoning,
59-
approved: approvalCheckStatus.approved,
49+
approvalCheckComment,
50+
approvalCheckStatus,
6051
}
6152
: undefined,
6253
};
@@ -100,29 +91,32 @@ async function generateReviewReport(
10091
}
10192
const diffs = await runCmd('git', diffArgs);
10293

103-
// generate review report
104-
const result = await callModel({
94+
// messages
95+
const messages = await getReviewReportMessages({
96+
systemPromptFile,
97+
promptFile,
98+
disableTools,
99+
diffs,
100+
baseRef,
101+
headRef,
102+
});
103+
104+
// call model
105+
const result = await handleStreamTextResult({
105106
title: 'Review Report',
106-
model,
107-
tools: disableTools ? undefined : reviewReportTools,
108-
stopWhen: stepCountIs(maxSteps),
109-
messages: [
110-
{
111-
role: 'system',
112-
content: await getSystemPrompt({
113-
systemPromptFile,
114-
disableTools,
115-
diffs,
116-
baseRef,
117-
headRef,
118-
}),
119-
},
120-
{ role: 'user', content: await getUserPrompt(promptFile) },
121-
],
122107
print,
123-
temperature,
124-
topP,
125-
topK,
108+
streamTextResult: streamText({
109+
model,
110+
messages,
111+
tools: disableTools ? undefined : reviewReportTools,
112+
stopWhen: stepCountIs(maxSteps),
113+
temperature,
114+
topP,
115+
topK,
116+
onError: ({ error }) => {
117+
throw new Error('failed to call the model', { cause: error });
118+
},
119+
}),
126120
});
127121

128122
// write output file
@@ -133,6 +127,7 @@ async function generateReviewReport(
133127
// return
134128
return {
135129
...result,
130+
messages: [...messages, ...result.messages],
136131
diffsCmd,
137132
diffs,
138133
};
@@ -147,26 +142,32 @@ async function generateApprovalCheck(
147142
// options
148143
const { model, prevMessages, approvalCheck, print, temperature, topP, topK } = options;
149144

150-
// generate approval check
151-
const result = await callModel({
152-
title: 'Approval Check',
153-
model,
154-
messages: [
155-
...prevMessages,
156-
{
157-
role: 'user',
158-
content: await getApprovalCheckPrompt(approvalCheck),
159-
},
160-
],
145+
// messages
146+
const messages = await getApprovalCheckCommentMessages({
147+
prevMessages,
148+
approvalCheck,
149+
});
150+
151+
// call model
152+
const result = await handleStreamTextResult({
153+
title: 'Approval Check Comment',
161154
print,
162-
temperature,
163-
topP,
164-
topK,
155+
streamTextResult: streamText({
156+
model,
157+
messages,
158+
temperature,
159+
topP,
160+
topK,
161+
onError: ({ error }) => {
162+
throw new Error('failed to call the model', { cause: error });
163+
},
164+
}),
165165
});
166166

167167
// return
168168
return {
169169
...result,
170+
messages: [...messages, ...result.messages],
170171
};
171172
}
172173

@@ -179,21 +180,23 @@ async function generateApprovalCheckStatus(
179180
// options
180181
const { model, prevMessages, print, temperature, topP, topK } = options;
181182

182-
// generate approval check
183-
const result = await callModel({
183+
// messages
184+
const messages = getApprovalCheckStatusMessages({ prevMessages });
185+
186+
// call model
187+
const result = await handleStreamTextResult({
184188
title: 'Approval Check Status',
185-
model,
186-
messages: [
187-
...prevMessages,
188-
{
189-
role: 'user',
190-
content: await getApprovalCheckStatusPrompt(),
191-
},
192-
],
193189
print,
194-
temperature,
195-
topP,
196-
topK,
190+
streamTextResult: streamText({
191+
model,
192+
messages,
193+
temperature,
194+
topP,
195+
topK,
196+
onError: ({ error }) => {
197+
throw new Error('failed to call the model', { cause: error });
198+
},
199+
}),
197200
});
198201

199202
// parse approved flag
@@ -202,6 +205,7 @@ async function generateApprovalCheckStatus(
202205
// return
203206
return {
204207
...result,
208+
messages: [...messages, ...result.messages],
205209
approved,
206210
};
207211
}

src/code_review/model.ts

Lines changed: 17 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -2,19 +2,18 @@ import { stdout } from 'node:process';
22
import { inspect } from 'node:util';
33
import chalk from 'chalk';
44
import { fetch, ProxyAgent } from 'undici';
5-
import {
6-
streamText,
7-
type LanguageModel,
8-
type ToolSet,
9-
type ToolChoice,
10-
type ModelMessage,
11-
} from 'ai';
5+
import { type StreamTextResult, type LanguageModel, type ToolSet } from 'ai';
126
import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai';
137
import { createDeepSeek } from '@ai-sdk/deepseek';
148
import { createXai } from '@ai-sdk/xai';
159
import { createAnthropic } from '@ai-sdk/anthropic';
1610
import { createGoogleGenerativeAI } from '@ai-sdk/google';
17-
import type { CodeReviewOptions, CompletionStats, CompletionUsage } from '../types';
11+
import type {
12+
CodeReviewOptions,
13+
LanguageModelCallResult,
14+
LanguageModelCallStats,
15+
LanguageModelCallUsage,
16+
} from '../types';
1817
import { usageToString, statsToString, getHttpProxyUrl } from './utils';
1918

2019
export function initModel(options: CodeReviewOptions): LanguageModel {
@@ -66,33 +65,13 @@ export function initModel(options: CodeReviewOptions): LanguageModel {
6665
return model;
6766
}
6867

69-
export async function callModel<TOOLS extends ToolSet>(options: {
68+
export async function handleStreamTextResult<TOOLS extends ToolSet>(options: {
7069
title: string;
71-
model: LanguageModel;
72-
messages: ModelMessage[];
7370
print?: boolean;
74-
tools?: TOOLS;
75-
toolChoice?: ToolChoice<TOOLS>;
76-
prepareStep?: Parameters<typeof streamText<TOOLS>>[0]['prepareStep'];
77-
stopWhen?: Parameters<typeof streamText<TOOLS>>[0]['stopWhen'];
78-
temperature?: number;
79-
topP?: number;
80-
topK?: number;
81-
}) {
71+
streamTextResult: StreamTextResult<TOOLS, string>;
72+
}): Promise<LanguageModelCallResult> {
8273
// options
83-
const {
84-
title,
85-
model,
86-
messages,
87-
print,
88-
tools,
89-
toolChoice,
90-
prepareStep,
91-
stopWhen,
92-
temperature,
93-
topP,
94-
topK,
95-
} = options;
74+
const { title, print, streamTextResult } = options;
9675

9776
// print title
9877
if (print) {
@@ -103,24 +82,8 @@ export async function callModel<TOOLS extends ToolSet>(options: {
10382
);
10483
}
10584

106-
// call model
107-
const result = streamText({
108-
model,
109-
messages,
110-
tools,
111-
toolChoice,
112-
prepareStep,
113-
stopWhen,
114-
temperature,
115-
topP,
116-
topK,
117-
onError: ({ error }) => {
118-
throw new Error('failed to call the model', { cause: error });
119-
},
120-
});
121-
12285
// init stats
123-
const stats: CompletionStats = {
86+
const stats: LanguageModelCallStats = {
12487
startedAt: Date.now(),
12588
firstTokenReceivedAt: 0,
12689
finishedAt: 0,
@@ -132,7 +95,7 @@ export async function callModel<TOOLS extends ToolSet>(options: {
13295
let stepCnt = 0;
13396
let textPartCnt = 0;
13497
let reasoningPartCnt = 0;
135-
for await (const streamPart of result.fullStream) {
98+
for await (const streamPart of streamTextResult.fullStream) {
13699
if (streamPart.type === 'start-step') {
137100
if (print) {
138101
console.log(chalk.blue(`[STEP_${stepCnt}]\n`));
@@ -174,11 +137,11 @@ export async function callModel<TOOLS extends ToolSet>(options: {
174137
}
175138

176139
// destructure result
177-
const steps = await result.steps;
140+
const steps = await streamTextResult.steps;
178141
const lastStep = steps[steps.length - 1];
179142
const text = lastStep.text;
180143
const reasoning = lastStep.reasoning;
181-
const usage: CompletionUsage = await result.usage;
144+
const usage: LanguageModelCallUsage = await streamTextResult.usage;
182145

183146
// update usage
184147
if (typeof usage.inputTokens === 'number' && typeof usage.cachedInputTokens === 'number') {
@@ -201,14 +164,11 @@ export async function callModel<TOOLS extends ToolSet>(options: {
201164
console.log();
202165
}
203166

204-
// get all messages
205-
const allMessages: ModelMessage[] = [...messages, ...(await result.response).messages];
206-
207167
// return
208168
return {
209169
text,
210-
reasoning: reasoning.map((r) => r.text).join('\n'),
211-
messages: allMessages,
170+
reasoning: reasoning.map((r) => r.text),
171+
messages: (await streamTextResult.response).messages,
212172
usage,
213173
stats,
214174
};

0 commit comments

Comments
 (0)