Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions cortex-js/src/domain/abstracts/engine.abstract.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,10 @@ import { Extension } from './extension.abstract';
export abstract class EngineExtension extends Extension {
abstract onLoad(): void;

transformPayload?: Function;

transformResponse?: Function;

abstract inference(
dto: any,
headers: Record<string, string>,
Expand All @@ -17,4 +21,5 @@ export abstract class EngineExtension extends Extension {
): Promise<void> {}

async unloadModel(modelId: string): Promise<void> {}

}
38 changes: 32 additions & 6 deletions cortex-js/src/domain/abstracts/oai.abstract.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { HttpService } from '@nestjs/axios';
import { EngineExtension } from './engine.abstract';
import stream from 'stream';
import stream, { Transform } from 'stream';
import { firstValueFrom } from 'rxjs';
import _ from 'lodash';

export abstract class OAIEngineExtension extends EngineExtension {
abstract apiUrl: string;
Expand All @@ -17,22 +18,47 @@ export abstract class OAIEngineExtension extends EngineExtension {
createChatDto: any,
headers: Record<string, string>,
): Promise<stream.Readable | any> {
const { stream } = createChatDto;
const payload = this.transformPayload ? this.transformPayload(createChatDto) : createChatDto;
const { stream: isStream } = payload;
const additionalHeaders = _.omit(headers, ['content-type', 'authorization']);
const response = await firstValueFrom(
this.httpService.post(this.apiUrl, createChatDto, {
this.httpService.post(this.apiUrl, payload, {
headers: {
'Content-Type': headers['content-type'] ?? 'application/json',
Authorization: this.apiKey
? `Bearer ${this.apiKey}`
: headers['authorization'],
...additionalHeaders,
},
responseType: stream ? 'stream' : 'json',
responseType: isStream ? 'stream' : 'json',
}),
);

if (!response) {
throw new Error('No response');
}

return response.data;
if(!this.transformResponse) {
return response.data;
}
if (isStream) {
const transformResponse = this.transformResponse.bind(this);
const lineStream = new Transform({
transform(chunk, encoding, callback) {
const lines = chunk.toString().split('\n');
const transformedLines = [];
for (const line of lines) {
if (line.trim().length > 0) {
const transformedLine = transformResponse(line);
if (transformedLine) {
transformedLines.push(transformedLine);
}
}
}
callback(null, transformedLines.join(''));
}
});
return response.data.pipe(lineStream);
}
return this.transformResponse(response.data);
}
}
97 changes: 97 additions & 0 deletions cortex-js/src/extensions/anthropic.engine.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
import stream from 'stream';
import { HttpService } from '@nestjs/axios';
import { OAIEngineExtension } from '../domain/abstracts/oai.abstract';
import { ConfigsUsecases } from '@/usecases/configs/configs.usecase';
import { EventEmitter2 } from '@nestjs/event-emitter';
import _ from 'lodash';

/**
 * Inference engine extension for the Anthropic Messages API.
 * Adapts Anthropic request/response shapes to the OpenAI-compatible format
 * used by the rest of the application: payloads are trimmed to the fields
 * Anthropic accepts, and responses (both SSE stream lines and plain JSON
 * bodies) are rewritten to `{ choices: [{ delta: { content } }] }`.
 */
export default class AnthropicEngineExtension extends OAIEngineExtension {
  apiUrl = 'https://api.anthropic.com/v1/messages';
  name = 'anthropic';
  productName = 'Anthropic Inference Engine';
  description = 'This extension enables Anthropic chat completion API calls';
  version = '0.0.1';
  apiKey?: string;

  constructor(
    protected readonly httpService: HttpService,
    protected readonly configsUsecases: ConfigsUsecases,
    protected readonly eventEmmitter: EventEmitter2,
  ) {
    super(httpService);

    // Keep the API key in sync when the user updates it at runtime.
    eventEmmitter.on('config.updated', async (data) => {
      if (data.group === this.name) {
        this.apiKey = data.value;
      }
    });
  }

  /**
   * Loads the stored API key for this engine. If no key has been saved yet,
   * persists an empty placeholder so the setting appears in config listings.
   */
  async onLoad() {
    const configs = (await this.configsUsecases.getGroupConfigs(
      this.name,
    )) as unknown as { apiKey: string };
    this.apiKey = configs?.apiKey;
    if (!configs?.apiKey)
      await this.configsUsecases.saveConfig('apiKey', '', this.name);
  }

  /**
   * Performs an inference request against the Anthropic API.
   * Anthropic authenticates with an `x-api-key` header (not a Bearer token)
   * and requires an explicit `anthropic-version` header.
   */
  override async inference(
    dto: any,
    headers: Record<string, string>,
  ): Promise<stream.Readable | any> {
    headers['x-api-key'] = this.apiKey as string;
    headers['Content-Type'] = 'application/json';
    headers['anthropic-version'] = '2023-06-01';
    return super.inference(dto, headers);
  }

  // Anthropic rejects unknown request fields, so strip the DTO down to the
  // parameters its Messages API accepts.
  transformPayload = (data: any): any => {
    return _.pick(data, ['messages', 'model', 'stream', 'max_tokens']);
  };

  /**
   * Converts an Anthropic response into the OpenAI-compatible shape
   * `{ choices: [{ delta: { content } }] }`.
   * For streams, `data` is a single SSE line; for non-stream calls it is the
   * parsed JSON body. Lines carrying no text content map to ''.
   */
  transformResponse = (data: any): string => {
    // handling stream response
    if (typeof data === 'string' && data.trim().length === 0) {
      return '';
    }
    if (typeof data === 'string' && data.startsWith('event: ')) {
      return '';
    }
    if (typeof data === 'string' && data.startsWith('data: ')) {
      // Guard the parse: a malformed or truncated SSE line must not throw
      // inside the stream Transform and kill the whole response stream.
      try {
        const parsedData = JSON.parse(data.replace('data: ', ''));
        // Only content_block_delta events carry message text.
        if (parsedData.type !== 'content_block_delta') {
          return '';
        }
        const text = parsedData.delta?.text;
        //convert to have this format data.choices[0]?.delta?.content
        return JSON.stringify({
          choices: [
            {
              delta: {
                content: text,
              },
            },
          ],
        });
      } catch (e) {
        console.error('Failed to parse stream chunk:', data, e);
        return '';
      }
    }
    // non-stream response
    if (data.content && data.content.length > 0 && data.content[0].text) {
      return JSON.stringify({
        choices: [
          {
            delta: {
              content: data.content[0].text,
            },
          },
        ],
      });
    }

    console.error('Invalid response format:', data);
    return '';
  };
}
2 changes: 2 additions & 0 deletions cortex-js/src/extensions/extensions.module.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import { HttpModule, HttpService } from '@nestjs/axios';
import { ConfigsUsecases } from '@/usecases/configs/configs.usecase';
import { ConfigsModule } from '@/usecases/configs/configs.module';
import { EventEmitter2, EventEmitterModule } from '@nestjs/event-emitter';
import AnthropicEngineExtension from './anthropic.engine';

const provider = {
provide: 'EXTENSIONS_PROVIDER',
Expand All @@ -18,6 +19,7 @@ const provider = {
new OpenAIEngineExtension(httpService, configUsecases, eventEmitter),
new GroqEngineExtension(httpService, configUsecases, eventEmitter),
new MistralEngineExtension(httpService, configUsecases, eventEmitter),
new AnthropicEngineExtension(httpService, configUsecases, eventEmitter),
],
};

Expand Down
3 changes: 0 additions & 3 deletions cortex-js/src/infrastructure/commanders/chat.command.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@ export class ChatCommand extends CommandRunner {

async run(passedParams: string[], options: ChatOptions): Promise<void> {
let modelId = passedParams[0];
const checkingSpinner = ora('Checking model...').start();
// First attempt to get message from input or options
// Extract input from 1 to end of array
let message = options.message ?? passedParams.slice(1).join(' ');
Expand All @@ -68,11 +67,9 @@ export class ChatCommand extends CommandRunner {
} else if (models.length > 0) {
modelId = await this.modelInquiry(models);
} else {
checkingSpinner.fail('Model ID is required');
exit(1);
}
}
checkingSpinner.succeed(`Model found`);

if (!message) options.attach = true;
const result = await this.chatCliUsecases.chat(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,5 @@ export enum Engines {
groq = 'groq',
mistral = 'mistral',
openai = 'openai',
anthropic = 'anthropic',
}
1 change: 0 additions & 1 deletion cortex-js/src/usecases/chat/chat.usecases.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@ export class ChatUsecases {
const engine = (await this.extensionRepository.findOne(
model!.engine ?? Engines.llamaCPP,
)) as EngineExtension | undefined;

if (engine == null) {
throw new Error(`No engine found with name: ${model.engine}`);
}
Expand Down