2 files changed: +15 −2 lines changed

@@ -150538,10 +150538,19 @@ exports.Chat = void 0;
 const openai_1 = __importDefault(__nccwpck_require__(60047));
 class Chat {
     openai;
+    isAzure;
+    apiVersion;
+    deployment;
     constructor(apikey) {
+        this.isAzure = Boolean(process.env.AZURE_API_VERSION && process.env.AZURE_DEPLOYMENT);
+        this.apiVersion = process.env.AZURE_API_VERSION || '';
+        this.deployment = process.env.AZURE_DEPLOYMENT || '';
+        const baseURL = this.isAzure
+            ? `${process.env.OPENAI_API_ENDPOINT}/openai/deployments/${this.deployment}/chat/completions?api-version=${this.apiVersion}`
+            : process.env.OPENAI_API_ENDPOINT || 'https://api.openai.com/v1';
         this.openai = new openai_1.default({
             apiKey: apikey,
-            baseURL: process.env.OPENAI_API_ENDPOINT || 'https://api.openai.com/v1',
+            baseURL,
         });
     }
     generatePrompt = (patch) => {
@@ -150567,7 +150576,8 @@ class Chat {
                 content: prompt,
             }
         ],
-        model: process.env.MODEL || 'gpt-4o-mini',
+        // Use model or deployment name based on the environment
+        model: (this.isAzure ? this.deployment : process.env.MODEL || 'gpt-4o-mini'),
         temperature: +(process.env.temperature || 0) || 1,
         top_p: +(process.env.top_p || 0) || 1,
         max_tokens: process.env.max_tokens
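
Read together, the two hunks above make the bundled Chat constructor detect an Azure OpenAI setup from AZURE_API_VERSION and AZURE_DEPLOYMENT, point the client at the deployment-specific endpoint, and pass the deployment name where a model name would normally go. A minimal sketch of that resolution logic in isolation (the example values in the comments are hypothetical, not taken from this PR):

// Sketch only: mirrors how the patched constructor resolves endpoint and model.
const endpoint = process.env.OPENAI_API_ENDPOINT;       // e.g. an Azure resource URL (hypothetical)
const apiVersion = process.env.AZURE_API_VERSION || '';
const deployment = process.env.AZURE_DEPLOYMENT || '';
const isAzure = Boolean(apiVersion && deployment);

// Azure OpenAI routes chat completions through a per-deployment path with an
// api-version query parameter; the standard API uses a single base URL.
const baseURL = isAzure
    ? `${endpoint}/openai/deployments/${deployment}/chat/completions?api-version=${apiVersion}`
    : endpoint || 'https://api.openai.com/v1';

// On Azure the deployment name stands in for the model parameter.
const model = isAzure ? deployment : process.env.MODEL || 'gpt-4o-mini';
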
@@ -1,5 +1,8 @@
 export declare class Chat {
     private openai;
+    private isAzure;
+    private apiVersion?;
+    private deployment?;
     constructor(apikey: string);
     private generatePrompt;
     codeReview: (patch: string) => Promise<string | null>;
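
For completeness, a hedged usage sketch of the declared surface above, assuming the same environment variables as the runtime change; the import path, key variable, and all values are placeholders rather than anything specified by this PR:

// Sketch only: exercising the Chat class against an Azure deployment.
import { Chat } from './chat';                                            // hypothetical import path

process.env.OPENAI_API_ENDPOINT = 'https://my-resource.openai.azure.com'; // placeholder
process.env.AZURE_API_VERSION = '2024-02-15-preview';                     // placeholder
process.env.AZURE_DEPLOYMENT = 'my-gpt-4o-mini';                          // placeholder

const chat = new Chat(process.env.OPENAI_API_KEY ?? '');                  // hypothetical key variable
const patch = 'diff --git a/file.ts b/file.ts\n...';                      // placeholder patch text
chat.codeReview(patch).then((review) => console.log(review));
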