diff --git a/backend/src/build-system/__tests__/db-requirement-document.md b/backend/src/build-system/__tests__/db-requirement-document.md
new file mode 100644
index 00000000..3b0d0881
--- /dev/null
+++ b/backend/src/build-system/__tests__/db-requirement-document.md
@@ -0,0 +1,230 @@
+### Database Requirements Document
+
+#### 1. Overview
+
+- **Project Scope**: Design and implement a database to support a Spotify-like music web application, facilitating personalized music streaming, content management, and user interaction.
+- **Database Purpose**: Store and manage user profiles, music content, playlists, playback states, and preferences to support dynamic, personalized user experiences.
+- **General Requirements**:
+  - Ensure high availability and scalability to handle concurrent user activity.
+  - Support real-time data updates for personalized recommendations and playback.
+  - Ensure data integrity and enforce business rules.
+
+---
+
+#### 2. Entity Definitions
+
+##### User
+
+- **Description**: Represents registered users of the application.
+- **Business Rules**:
+  - Each user must have a unique email.
+  - Users can manage their preferences and account details.
+- **Key Attributes**:
+  - `user_id` (Primary Key)
+  - `username` (Unique, required)
+  - `email` (Unique, required)
+  - `password_hash` (Required)
+  - `subscription_type` (e.g., Free, Premium)
+  - `preferences` (e.g., theme, audio quality)
+  - `created_at`, `updated_at`
+- **Relationships**:
+  - One-to-many with `Playlist`.
+  - Many-to-many with `Song` for liked songs.
+
+##### Song
+
+- **Description**: Represents individual songs available on the platform.
+- **Business Rules**:
+  - Each song must have an associated album and artist.
+  - Songs may belong to multiple playlists.
+- **Key Attributes**:
+  - `song_id` (Primary Key)
+  - `title` (Required)
+  - `artist_id` (Foreign Key)
+  - `album_id` (Foreign Key)
+  - `duration` (In seconds)
+  - `genre` (Category)
+  - `release_date`
+- **Relationships**:
+  - Many-to-one with `Album` and `Artist`.
+  - Many-to-many with `Playlist`.
+
+##### Artist
+
+- **Description**: Represents artists whose songs are on the platform.
+- **Key Attributes**:
+  - `artist_id` (Primary Key)
+  - `name` (Required)
+  - `bio`
+  - `profile_image`
+  - `created_at`, `updated_at`
+- **Relationships**:
+  - One-to-many with `Song` and `Album`.
+
+##### Album
+
+- **Description**: Represents music albums.
+- **Key Attributes**:
+  - `album_id` (Primary Key)
+  - `title` (Required)
+  - `artist_id` (Foreign Key)
+  - `release_date`
+  - `cover_image`
+- **Relationships**:
+  - One-to-many with `Song`.
+
+##### Playlist
+
+- **Description**: Represents user-created or curated playlists.
+- **Business Rules**:
+  - A playlist must belong to a user or be globally curated.
+- **Key Attributes**:
+  - `playlist_id` (Primary Key)
+  - `name` (Required)
+  - `user_id` (Foreign Key, nullable for curated playlists)
+  - `description`
+  - `is_curated` (Boolean)
+  - `created_at`, `updated_at`
+- **Relationships**:
+  - Many-to-many with `Song`.
+
+##### PlaybackState
+
+- **Description**: Tracks the playback state for a user.
+- **Key Attributes**:
+  - `playback_id` (Primary Key)
+  - `user_id` (Foreign Key)
+  - `current_song_id` (Foreign Key)
+  - `queue` (Array of `song_id`s)
+  - `playback_position` (Seconds)
+  - `volume`
+  - `created_at`, `updated_at`
+
+##### Recommendation
+
+- **Description**: Stores dynamic recommendations for users.
+- **Key Attributes**:
+  - `recommendation_id` (Primary Key)
+  - `user_id` (Foreign Key)
+  - `content` (JSON: list of recommended songs, albums, playlists)
+  - `generated_at`
+
+---
+
+#### 3. Data Requirements
+
+##### User
+
+- Fields:
+  - `user_id`: UUID
+  - `username`: String (max 50)
+  - `email`: String (unique, max 100)
+  - `password_hash`: String
+  - `subscription_type`: Enum (Free, Premium)
+  - `preferences`: JSON
+  - `created_at`, `updated_at`: Timestamps
+- Constraints:
+  - `email` and `username` must be unique.
+  - Enforce non-null constraints on required fields.
+- Indexing:
+  - Index on `email` and `user_id`.
+
+##### Song
+
+- Fields:
+  - `song_id`: UUID
+  - `title`: String (max 100)
+  - `artist_id`, `album_id`: Foreign Keys
+  - `duration`: Integer
+  - `genre`: String
+  - `release_date`: Date
+- Constraints:
+  - Non-null constraints on `title`, `artist_id`, and `album_id`.
+- Indexing:
+  - Index on `title` and `genre`.
+
+##### Playlist
+
+- Fields:
+  - `playlist_id`: UUID
+  - `name`: String (max 50)
+  - `user_id`: Foreign Key
+  - `description`: String
+  - `is_curated`: Boolean
+  - `created_at`, `updated_at`: Timestamps
+- Constraints:
+  - Enforce foreign key constraints for `user_id`.
+- Indexing:
+  - Index on `user_id`.
+
+##### PlaybackState
+
+- Fields:
+  - `playback_id`: UUID
+  - `user_id`: Foreign Key
+  - `current_song_id`: Foreign Key
+  - `queue`: JSON
+  - `playback_position`: Integer
+  - `volume`: Float
+  - `created_at`, `updated_at`: Timestamps
+- Constraints:
+  - Ensure valid `user_id` and `current_song_id`.
+
+---
+
+#### 4. Relationships
+
+- `User` to `Playlist`: One-to-many.
+- `User` to `Song` (liked songs): Many-to-many (junction table: `user_liked_song`).
+- `Playlist` to `Song`: Many-to-many (junction table: `playlist_song`).
+- `Song` to `Album`: Many-to-one.
+- `Song` to `Artist`: Many-to-one.
+- `User` to `PlaybackState`: One-to-one.
+- Referential Integrity:
+  - Cascade delete for dependent entities (e.g., playlists when a user is deleted).
+
+---
+
+#### 5. Data Access Patterns
+
+- Common Queries:
+  - Fetch user playlists, liked songs, and playback state.
+  - Search for songs, albums, or artists.
+  - Fetch recommended content dynamically.
+- Indexing:
+  - Full-text search for song titles and artist names.
+  - Index on foreign keys for join performance.
+
+---
+
+#### 6. Security Requirements
+
+- Access Control:
+  - Restrict user data to authenticated sessions.
+- Data Privacy:
+  - Hash sensitive data (e.g., passwords).
+- Audit:
+  - Log user activity and data changes.
+
+---
+
+#### 7. Performance Requirements
+
+- Expected Volume:
+  - Millions of songs and playlists.
+  - Thousands of concurrent users.
+- Growth:
+  - Plan for 10x growth in user and song data over 5 years.
+- Optimizations:
+  - Cache frequently accessed data (e.g., recommendations).
+  - Use partitioning for large tables.
+
+---
+
+#### 8. Additional Considerations
+
+- Backups:
+  - Automated daily backups.
+- Archiving:
+  - Move inactive playlists to archival storage after 1 year.
+- Integration:
+  - Support for third-party authentication and external APIs.
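+
+---
+
+#### 9. Illustrative Entity Sketch (Non-Normative)
+
+The following TypeORM-style sketch shows one possible mapping of the `User` and `Playlist` requirements above to code. It is an illustration only: the ORM choice, entity names, and column options are assumptions of this sketch, not prescriptions of this document.
+
+```typescript
+import {
+  Column,
+  CreateDateColumn,
+  Entity,
+  Index,
+  ManyToOne,
+  OneToMany,
+  PrimaryGeneratedColumn,
+  UpdateDateColumn,
+} from 'typeorm';
+
+@Entity('users')
+export class User {
+  // UUID primary key, per section 3.
+  @PrimaryGeneratedColumn('uuid')
+  user_id: string;
+
+  // Unique and indexed, max 50 characters.
+  @Index()
+  @Column({ length: 50, unique: true })
+  username: string;
+
+  // Unique and indexed, max 100 characters.
+  @Index()
+  @Column({ length: 100, unique: true })
+  email: string;
+
+  @Column()
+  password_hash: string;
+
+  @Column({ type: 'enum', enum: ['Free', 'Premium'], default: 'Free' })
+  subscription_type: string;
+
+  // JSON preferences (theme, audio quality, ...); jsonb assumes PostgreSQL.
+  @Column({ type: 'jsonb', nullable: true })
+  preferences: Record<string, unknown>;
+
+  @CreateDateColumn() created_at: Date;
+  @UpdateDateColumn() updated_at: Date;
+
+  @OneToMany(() => Playlist, (playlist) => playlist.user)
+  playlists: Playlist[];
+}
+
+@Entity('playlists')
+export class Playlist {
+  @PrimaryGeneratedColumn('uuid')
+  playlist_id: string;
+
+  @Column({ length: 50 })
+  name: string;
+
+  // Nullable owner so curated playlists need not belong to a user;
+  // cascade delete implements the referential-integrity rule in section 4.
+  @ManyToOne(() => User, (user) => user.playlists, {
+    nullable: true,
+    onDelete: 'CASCADE',
+  })
+  user: User | null;
+
+  @Column({ default: false })
+  is_curated: boolean;
+}
+```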
diff --git a/backend/src/build-system/__tests__/test-database-schemas.spec.ts b/backend/src/build-system/__tests__/test-database-schemas.spec.ts
new file mode 100644
index 00000000..a7224af9
--- /dev/null
+++ b/backend/src/build-system/__tests__/test-database-schemas.spec.ts
@@ -0,0 +1,49 @@
+import { BuilderContext } from 'src/build-system/context';
+import { DBSchemaHandler } from '../node/database-schemas/schemas';
+import { readFileSync } from 'fs';
+import * as path from 'path';
+import markdownToTxt from 'markdown-to-txt';
+
+const RUN_INTEGRATION_TESTS = process.env.RUN_INTEGRATION_TESTS === 'true';
+
+describe('DBSchemaHandler', () => {
+  describe('Integration Tests', () => {
+    (RUN_INTEGRATION_TESTS ? describe : describe.skip)(
+      'Schema Generation Tests',
+      () => {
+        it('should generate schema for music streaming system', async () => {
+          const handler = new DBSchemaHandler();
+          const context = new BuilderContext(
+            {
+              id: 'test',
+              name: 'test db schema',
+              version: '1.0.0',
+              description: 'test db schema',
+              steps: [],
+            },
+            'test-id-schema-1',
+          );
+
+          const mdFileContent = readFileSync(
+            path.join(__dirname, 'db-requirement-document.md'),
+            'utf-8',
+          );
+          const plainText = markdownToTxt(mdFileContent);
+          const result = await handler.run(context, plainText);
+          console.log(result);
+          expect(result.success).toBe(true);
+        }, 30000);
+      },
+    );
+  });
+
+  describe('Unit Tests', () => {
+    it('should initialize correctly', () => {
+      const handler = new DBSchemaHandler();
+      expect(handler).toBeDefined();
+      expect(handler.id).toBe('OP:DATABASE:SCHEMAS');
+    });
+  });
+});
diff --git a/backend/src/build-system/__tests__/test.spec.ts b/backend/src/build-system/__tests__/test.spec.ts
index 4f8dde5b..40ea0e69 100644
--- a/backend/src/build-system/__tests__/test.spec.ts
+++ b/backend/src/build-system/__tests__/test.spec.ts
@@ -4,7 +4,6 @@ import { BuildSequenceExecutor } from '../executor';
 import { BuildHandlerManager } from '../hanlder-manager';
 import { ProjectInitHandler } from '../node/project-init';
 import { BuildSequence } from '../types';
-
 describe('Project Init Handler Test', () => {
   let context: BuilderContext;
   let executor: BuildSequenceExecutor;
@@ -35,7 +34,7 @@ describe('Project Init Handler Test', () => {
     handlerManager = BuildHandlerManager.getInstance();
     handlerManager.clear();
 
-    context = new BuilderContext(testSequence);
+    context = new BuilderContext(testSequence, 'id');
     executor = new BuildSequenceExecutor(context);
   });
 
@@ -47,22 +46,6 @@ describe('Project Init Handler Test', () => {
     });
   });
 
-  describe('State Management', () => {
-    test('should update execution state correctly', async () => {
-      let state = context.getState();
-      expect(state.completed.size).toBe(0);
-      expect(state.pending.size).toBe(0);
-
-      await executor.executeSequence(testSequence);
-
-      state = context.getState();
-      expect(state.completed.size).toBe(1);
-      expect(state.completed.has('op:PROJECT::STATE:SETUP')).toBe(true);
-      expect(state.pending.size).toBe(0);
-      expect(state.failed.size).toBe(0);
-    });
-  });
-
   describe('Direct Handler Execution', () => {
     test('should be able to run handler directly', async () => {
       const handler = new ProjectInitHandler();
diff --git a/backend/src/build-system/node/database-requirements-document/index.ts b/backend/src/build-system/node/database-requirements-document/index.ts
new file mode 100644
index 00000000..9219df74
--- /dev/null
+++ b/backend/src/build-system/node/database-requirements-document/index.ts
@@ -0,0 +1,33 @@
+import { BuildHandler, BuildResult } from 'src/build-system/types';
+import { BuilderContext } from 'src/build-system/context';
+import { ModelProvider } from 'src/common/model-provider';
+import { prompts } from './prompt';
+import { Logger } from '@nestjs/common';
+
+export class DatabaseRequirementHandler implements BuildHandler {
+  readonly id = 'op:DATABASE_REQ::STATE:GENERATE';
+  readonly logger = new Logger('DatabaseRequirementHandler');
+
+  async run(context: BuilderContext, args: unknown): Promise<BuildResult> {
+    this.logger.log('Generating Database Requirements Document...');
+    const projectName =
+      context.getData('projectName') || 'Default Project Name';
+
+    const prompt = prompts.generateDatabaseRequirementPrompt(
+      projectName,
+      args as string,
+    );
+    const modelProvider = ModelProvider.getInstance();
+    const dbRequirementsContent = await modelProvider.chatSync(
+      {
+        content: prompt,
+      },
+      'gpt-4o-mini',
+    );
+    return {
+      success: true,
+      data: dbRequirementsContent,
+    };
+  }
+}
diff --git a/backend/src/build-system/node/database-requirements-document/prompt.ts b/backend/src/build-system/node/database-requirements-document/prompt.ts
new file mode 100644
index 00000000..4033bbfe
--- /dev/null
+++ b/backend/src/build-system/node/database-requirements-document/prompt.ts
@@ -0,0 +1,102 @@
+export const prompts = {
+  generateDatabaseRequirementPrompt: (
+    projectName: string,
+    uxDatamap: string,
+  ): string => {
+    return `You are a Database Architect and System Analyst. Your task is to analyze the provided UX Datamap document and generate a comprehensive Database Requirements Document that will support all the data needs identified in the UX design, based on the following inputs:
+ - Project name: ${projectName}
+ - UX Datamap Content: ${uxDatamap}
+
+Follow these guidelines to generate the database requirements:
+
+### Instructions and Rules:
+1. Analyze all data elements mentioned in the UX Datamap
+2. Identify entities and their relationships
+3. Determine data types and constraints
+4. Consider data persistence requirements
+5. Plan for scalability and performance
+
+### Database Requirements Structure:
+---
+### Database Requirements Document
+#### 1. Overview
+- Project scope
+- Database purpose
+- General requirements
+
+#### 2. Entity Definitions
+For each identified entity:
+- Entity name and description
+- Business rules and constraints
+- Key attributes
+- Relationships with other entities
+- Data retention requirements
+
+#### 3. Data Requirements
+For each entity:
+- Required fields
+- Field types and constraints
+- Validation rules
+- Default values
+- Indexing requirements
+
+#### 4. Relationships
+- Entity relationships
+- Cardinality
+- Referential integrity rules
+- Cascade rules
+
+#### 5. Data Access Patterns
+- Common query patterns
+- Search requirements
+- Sorting and filtering needs
+- Performance considerations
+
+#### 6. Security Requirements
+- Access control
+- Data privacy considerations
+- Audit requirements
+- Encryption needs
+
+#### 7. Performance Requirements
+- Expected data volume
+- Growth projections
+- Query performance requirements
+- Caching strategies
+
+#### 8. Additional Considerations
+- Backup requirements
+- Archive strategy
+- Data migration needs
+- Integration requirements
+
+Your reply must start with: "\`\`\`DatabaseRequirements" and end with "\`\`\`".
+
+Focus on creating practical, implementable database requirements that will effectively support the user experience described in the UX Datamap. Consider:
+
+1. Data Storage & Structure:
+   - Choose appropriate data types
+   - Define indexes for performance
+   - Plan for scalability
+
+2. Data Integrity:
+   - Define constraints
+   - Establish relationships
+   - Set validation rules
+
+3. Performance & Scalability:
+   - Query optimization
+   - Caching strategy
+   - Partitioning needs
+
+4. Security & Compliance:
+   - Access control
+   - Data encryption
+   - Audit requirements
+
+5. Maintenance & Operations:
+   - Backup strategy
+   - Archive policy
+   - Migration path`;
+  },
+};
diff --git a/backend/src/build-system/node/database-schemas/prompt.ts b/backend/src/build-system/node/database-schemas/prompt.ts
new file mode 100644
index 00000000..98411fc7
--- /dev/null
+++ b/backend/src/build-system/node/database-schemas/prompt.ts
@@ -0,0 +1,121 @@
+export const prompts = {
+  // Step 1: Analyze requirements and generate database tasks
+  analyzeDatabaseRequirements: (
+    projectName: string,
+    dbRequirements: string,
+    databaseType: string = 'PostgreSQL',
+  ): string => {
+    return `You are a Database Architect specializing in ${databaseType}. Your task is to analyze the provided database requirements document and create a clear plan for schema generation. Use the following requirements as input:
+
+${dbRequirements}
+
+Generate a structured analysis describing what needs to be created for each database entity. Your reply must start with "\`\`\`DBAnalysis" and end with "\`\`\`".
+
+For each entity in the requirements:
+1. What tables/collections need to be created
+2. What indexes are necessary
+3. What constraints must be enforced
+4. What relationships need to be established
+
+Example output format:
+
+\`\`\`DBAnalysis
+Database: ${projectName}
+Type: ${databaseType}
+
+Entity: Users
+- Primary table for user accounts
+- Required fields: id, email, username, password_hash, subscription_type
+- Unique constraints on email and username
+- Indexes needed on email and username for quick lookup
+- JSON/JSONB field for preferences
+- Timestamps for created_at and updated_at
+
+Entity: Playlists
+- Primary table for playlist management
+- Required fields: id, title, user_id, is_system_generated
+- Foreign key relationship to Users
+- Index on user_id for quick user playlist lookup
+- Composite index on (user_id, title) for unique user playlists
+
+[Continue for all entities...]
+
+Required Junction Tables:
+1. playlist_songs
+   - Manages N:M relationship between playlists and songs
+   - Needs position field for song order
+   - Indexes on both foreign keys
+
+2. song_genres
+   - Manages N:M relationship between songs and genres
+   - Indexes on both foreign keys
+
+Performance Considerations:
+1. User table needs hash indexes on email and username
+2. Playlist_songs needs index on position for queue management
+3. Songs table needs full text search capability
+\`\`\``;
+  },
+
+  // Step 2: Generate actual schema based on analysis
+  generateDatabaseSchema: (
+    dbAnalysis: string,
+    databaseType: string = 'PostgreSQL',
+  ): string => {
+    return `You are a Database Engineer specializing in ${databaseType}. Generate the complete database schema based on the following analysis, using appropriate ${databaseType} syntax and features.
+
+Here is the analysis to implement:
+
+${dbAnalysis}
+
+Rules for generation:
+1. Use ${databaseType}-specific data types and features
+2. Do not include any comments in the output
+3. Use standardized naming conventions
+4. Include all necessary constraints and indexes
+5. Generate schema in the correct creation order for dependencies
+
+Your reply must start with "\`\`\`${databaseType}" and end with "\`\`\`".
+
+For PostgreSQL, the output should look like:
+\`\`\`sql
+CREATE TYPE subscription_type_enum AS ENUM ('FREE', 'PREMIUM', 'FAMILY');
+
+CREATE TABLE users (
+  id UUID DEFAULT gen_random_uuid(),
+  email VARCHAR(255) NOT NULL,
+  username VARCHAR(50) NOT NULL,
+  password_hash VARCHAR(255) NOT NULL,
+  subscription_type subscription_type_enum NOT NULL DEFAULT 'FREE',
+  preferences JSONB DEFAULT '{"theme":"light","audioQuality":"high"}',
+  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (id),
+  UNIQUE (email),
+  UNIQUE (username)
+);
+
+CREATE INDEX idx_users_email ON users(email);
+[Continue with other tables...]
+\`\`\`
+
+For MongoDB, the output should look like:
+\`\`\`javascript
+db.createCollection("users", {
+  validator: {
+    $jsonSchema: {
+      bsonType: "object",
+      required: ["email", "username", "password_hash", "subscription_type"],
+      properties: {
+        email: { bsonType: "string" },
+        username: { bsonType: "string" },
+        password_hash: { bsonType: "string" },
+        subscription_type: { enum: ["FREE", "PREMIUM", "FAMILY"] }
+      }
+    }
+  }
+});
+
+db.users.createIndex({ "email": 1 }, { unique: true });
+[Continue with other collections...]
+\`\`\``;
+  },
+};
diff --git a/backend/src/build-system/node/database-schemas/schemas.ts b/backend/src/build-system/node/database-schemas/schemas.ts
new file mode 100644
index 00000000..44a1c7cd
--- /dev/null
+++ b/backend/src/build-system/node/database-schemas/schemas.ts
@@ -0,0 +1,48 @@
+import { BuildHandler, BuildResult } from 'src/build-system/types';
+import { BuilderContext } from 'src/build-system/context';
+import { prompts } from './prompt';
+import { Logger } from '@nestjs/common';
+
+export class DBSchemaHandler implements BuildHandler {
+  readonly id = 'OP:DATABASE:SCHEMAS';
+
+  readonly logger = new Logger('DBSchemaHandler');
+
+  async run(context: BuilderContext, args: unknown): Promise<BuildResult> {
+    const projectName =
+      context.getData('projectName') || 'Default Project Name';
+    const databaseType = context.getData('databaseType') || 'PostgreSQL';
+
+    const analysisPrompt = prompts.analyzeDatabaseRequirements(
+      projectName,
+      args as string,
+      databaseType,
+    );
+
+    const dbAnalysis = await context.model.chatSync(
+      {
+        content: analysisPrompt,
+      },
+      'gpt-4o-mini',
+    );
+
+    const schemaPrompt = prompts.generateDatabaseSchema(
+      dbAnalysis,
+      databaseType,
+    );
+
+    const schemaContent = await context.model.chatSync(
+      {
+        content: schemaPrompt,
+      },
+      'gpt-4o-mini',
+    );
+
+    return {
+      success: true,
+      data: {
+        schema: schemaContent,
+      },
+    };
+  }
+}
diff --git a/backend/src/build-system/node/product_requirements_document/prd.ts b/backend/src/build-system/node/product_requirements_document/prd.ts
index e65052f2..fc386f0c 100644
--- a/backend/src/build-system/node/product_requirements_document/prd.ts
+++ b/backend/src/build-system/node/product_requirements_document/prd.ts
@@ -34,7 +34,7 @@ export class PRDHandler implements BuildHandler {
 
   private async generatePRDFromLLM(prompt: string): Promise<string> {
     const modelProvider = ModelProvider.getInstance();
-    const model = 'gpt-3.5-turbo';
+    const model = 'gpt-4o-mini';
     const prdContent = await modelProvider.chatSync({ content: prompt }, model);
     this.logger.log('Received full PRD content from LLM server.');
     return prdContent;
diff --git a/backend/src/build-system/node/ux-datamap/index.ts b/backend/src/build-system/node/ux-datamap/index.ts
index 7f79d72a..7393a3b3 100644
--- a/backend/src/build-system/node/ux-datamap/index.ts
+++ b/backend/src/build-system/node/ux-datamap/index.ts
@@ -15,6 +15,7 @@ export class UXDatamapHandler implements BuildHandler {
     const uxGoals = context.getData('uxGoals') || 'Default UX Goals';
 
     // generate the UX Data Map prompt dynamically
+
     const prompt = prompts.generateUXDataMapPrompt(
       projectName,
       args as string,
@@ -22,16 +23,12 @@ export class UXDatamapHandler implements BuildHandler {
       'web',
     );
 
-    // Use ModelProsvider or another service to generate the document
     const uxDatamapContent = await context.model.chatSync(
       {
         content: prompt,
       },
-      'gpt-3.5-turbo',
+      'gpt-4o-mini',
     );
-
-    // Store the generated document in the context
-    context.setData('uxDatamapDocument', uxDatamapContent);
     return {
       success: true,
       data: uxDatamapContent,
diff --git a/backend/src/common/model-provider/index.ts b/backend/src/common/model-provider/index.ts
index 1f7f6b4b..b4f9135b 100644
--- a/backend/src/common/model-provider/index.ts
+++ b/backend/src/common/model-provider/index.ts
@@ -1,17 +1,5 @@
 import { Logger } from '@nestjs/common';
 import { HttpService } from '@nestjs/axios';
-import { ChatCompletionChunk, StreamStatus } from 'src/chat/chat.model';
-
-export interface ChatInput {
-  content: string;
-  attachments?: Array<{
-    type: string;
-    content: string | Buffer;
-    name?: string;
-  }>;
-  contextLength?: number;
-  temperature?: number;
-}
 
 export interface ModelProviderConfig {
   endpoint: string;
@@ -39,7 +27,6 @@ export class ModelProvider {
     }
 
     return new ModelProvider(new HttpService(), {
-      // TODO: adding into env
       endpoint: 'http://localhost:3001',
     });
   }
@@ -54,17 +41,43 @@ export class ModelProvider {
     model: string,
     chatId?: string,
   ): Promise<string> {
-    const chatStream = this.chat(input, model, chatId);
-    let content = '';
-    for await (const chunk of chatStream) {
-      if (chunk.status === StreamStatus.STREAMING) {
-        content += chunk.choices
-          .map((choice) => choice.delta?.content || '')
-          .join('');
+    this.logger.debug('Starting chatSync', { model, chatId });
+
+    this.resetState();
+
+    try {
+      const chatStream = this.chat(input, model, chatId);
+      let content = '';
+
+      this.logger.debug('Starting to process chat stream');
+      for await (const chunk of chatStream) {
+        if (chunk.status === StreamStatus.STREAMING) {
+          const newContent = chunk.choices
+            .map((choice) => choice.delta?.content || '')
+            .join('');
+          content += newContent;
+        }
       }
+
+      return content;
+    } catch (error) {
+      this.logger.error('Error in chatSync:', error);
+      throw error;
+    } finally {
+      this.cleanup();
+      this.logger.debug('ChatSync cleanup completed');
+    }
+  }
+
+  private resetState() {
+    this.logger.debug('Resetting provider state');
+    this.isDone = false;
+    this.chunkQueue = [];
+    this.resolveNextChunk = null;
+    if (this.responseSubscription) {
+      this.responseSubscription.unsubscribe();
+      this.responseSubscription = null;
     }
-    this.logger.log('Aggregated content from chat stream:', content);
-    return content;
   }
 
   chat(
     input: ChatInput | string,
     model?: string,
     chatId?: string,
   ): CustomAsyncIterableIterator<ChatCompletionChunk> {
     const chatInput = this.normalizeChatInput(input);
-    const selectedModel = model || this.config.defaultModel || undefined;
-    if (selectedModel === undefined) {
-      this.logger.error('No model selected for chat request');
-      return;
-    }
+    const selectedModel = model || this.config.defaultModel;
 
-    this.logger.debug(
-      `Chat request - Model: ${selectedModel}, ChatId: ${chatId || 'N/A'}`,
-      { input: chatInput },
-    );
+    if (!selectedModel) {
+      const error = new Error('No model selected for chat request');
+      this.logger.error(error.message);
+      throw error;
+    }
 
     const iterator: CustomAsyncIterableIterator<ChatCompletionChunk> = {
       next: () => this.handleNext(),
@@ -98,16 +108,14 @@ export class ModelProvider {
   }
 
   private normalizeChatInput(input: ChatInput | string): ChatInput {
-    if (typeof input === 'string') {
-      return { content: input };
-    }
-    return input;
+    return typeof input === 'string' ? { content: input } : input;
   }
 
-  private handleNext(): Promise<IteratorResult<ChatCompletionChunk>> {
+  private async handleNext(): Promise<IteratorResult<ChatCompletionChunk>> {
     return new Promise<IteratorResult<ChatCompletionChunk>>((resolve) => {
       if (this.chunkQueue.length > 0) {
-        resolve({ done: false, value: this.chunkQueue.shift()! });
+        const chunk = this.chunkQueue.shift()!;
+        resolve({ done: false, value: chunk });
       } else if (this.isDone) {
         resolve({ done: true, value: undefined });
       } else {
@@ -129,10 +137,14 @@ export class ModelProvider {
   }
 
   private cleanup() {
+    this.logger.debug('Cleaning up provider');
     this.isDone = true;
     if (this.responseSubscription) {
       this.responseSubscription.unsubscribe();
+      this.responseSubscription = null;
     }
+    this.chunkQueue = [];
+    this.resolveNextChunk = null;
   }
 
   private createRequestPayload(
@@ -178,9 +190,11 @@ export class ModelProvider {
   }
 
   private handleStreamEnd(model: string) {
-    this.logger.debug('Stream ended');
+    this.logger.debug('Stream ended, handling completion');
+
     if (!this.isDone) {
       const doneChunk = this.createDoneChunk(model);
+
       if (this.resolveNextChunk) {
         this.resolveNextChunk({ done: false, value: doneChunk });
         this.resolveNextChunk = null;
@@ -189,35 +203,42 @@ export class ModelProvider {
       }
     }
 
-    setTimeout(() => {
+    Promise.resolve().then(() => {
+      this.logger.debug('Setting done state');
       this.isDone = true;
       if (this.resolveNextChunk) {
         this.resolveNextChunk({ done: true, value: undefined });
         this.resolveNextChunk = null;
       }
-    }, 0);
+    });
   }
 
   private handleStreamError(error: any, model: string) {
-    this.logger.error('Error in stream:', error);
+    this.logger.error('Stream error occurred:', error);
     const doneChunk = this.createDoneChunk(model);
 
     if (this.resolveNextChunk) {
+      this.logger.debug('Resolving waiting promise with error done chunk');
       this.resolveNextChunk({ done: false, value: doneChunk });
-      setTimeout(() => {
+      Promise.resolve().then(() => {
         this.isDone = true;
-        this.resolveNextChunk?.({ done: true, value: undefined });
-        this.resolveNextChunk = null;
-      }, 0);
+        if (this.resolveNextChunk) {
+          this.resolveNextChunk({ done: true, value: undefined });
+          this.resolveNextChunk = null;
+        }
+      });
     } else {
+      this.logger.debug('Queueing error done chunk');
       this.chunkQueue.push(doneChunk);
-      setTimeout(() => {
+      Promise.resolve().then(() => {
         this.isDone = true;
-      }, 0);
+      });
     }
   }
 
   private startChat(input: ChatInput, model: string, chatId?: string) {
+    this.resetState();
+
     const payload = this.createRequestPayload(input, model, chatId);
 
     this.responseSubscription = this.httpService
@@ -230,9 +251,11 @@ export class ModelProvider {
       .subscribe({
         next: (response) => {
           let buffer = '';
+
          response.data.on('data', (chunk: Buffer) => {
            buffer += chunk.toString();
            let newlineIndex;
+
            while ((newlineIndex = buffer.indexOf('\n')) !== -1) {
              const line = buffer.slice(0, newlineIndex).trim();
              buffer = buffer.slice(newlineIndex + 1);

              if (line.startsWith('data: ')) {
                const jsonStr = line.slice(6);
                if (jsonStr === '[DONE]') {
+                  this.logger.debug('Received [DONE] signal');
+
                  this.handleStreamEnd(model);
                  return;
                }
                try {
@@ -251,31 +276,41 @@ export class ModelProvider {
              }
            }
          });
-          response.data.on('end', () => this.handleStreamEnd(model));
+
+          response.data.on('end', () => {
+            this.logger.debug('Response stream ended');
+            this.handleStreamEnd(model);
+          });
+        },
+        error: (error) => {
+          this.logger.error('Error in chat request:', error);
+          this.handleStreamError(error, model);
         },
-        error: (error) => this.handleStreamError(error, model),
       });
   }
 
   private isValidChunk(chunk: any): boolean {
-    return (
+    const isValid =
       chunk &&
       typeof chunk.id === 'string' &&
       typeof chunk.object === 'string' &&
      typeof chunk.created === 'number' &&
-      typeof chunk.model === 'string'
-    );
+      typeof chunk.model === 'string';
+
+    if (!isValid) {
+      this.logger.warn('Invalid chunk structure', chunk);
+    }
+
+    return isValid;
  }
 
  public async fetchModelsName() {
    try {
-      this.logger.debug('Requesting model tags from /tags endpoint.');
-
-      // Make a GET request to /tags
+      this.logger.debug('Fetching model tags');
      const response = await this.httpService
        .get(`${this.config.endpoint}/tags`, { responseType: 'json' })
        .toPromise();
-      this.logger.debug('Model tags received:', response.data);
+      this.logger.debug('Model tags received', response.data);
      return response.data;
    } catch (error) {
      this.logger.error('Error fetching model tags:', error);
@@ -283,3 +318,39 @@ export class ModelProvider {
    }
  }
}
+
+export enum StreamStatus {
+  STREAMING = 'streaming',
+  DONE = 'done',
+}
+
+export class ChatCompletionChunk {
+  id: string;
+  object: string;
+  created: number;
+  model: string;
+  systemFingerprint: string | null;
+  choices: ChatCompletionChoice[];
+  status: StreamStatus;
+}
+
+export interface ChatInput {
+  content: string;
+  attachments?: Array<{
+    type: string;
+    content: string | Buffer;
+    name?: string;
+  }>;
+  contextLength?: number;
+  temperature?: number;
+}
+
+class ChatCompletionDelta {
+  content?: string;
+}
+
+class ChatCompletionChoice {
+  index: number;
+  delta: ChatCompletionDelta;
+  finishReason: string | null;
+}
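Usage note (illustrative, not part of the patch): the two new handlers are designed to chain. `DatabaseRequirementHandler` turns a UX datamap into a requirements document, and `DBSchemaHandler` turns that document into a schema via its two internal LLM passes. Below is a minimal sketch, assuming a `BuilderContext` constructed as in the tests above; `generateSchemaFromUxDatamap` is a hypothetical helper, not code in this PR.

```typescript
import { BuilderContext } from 'src/build-system/context';
import { DatabaseRequirementHandler } from 'src/build-system/node/database-requirements-document';
import { DBSchemaHandler } from 'src/build-system/node/database-schemas/schemas';

// Hypothetical helper that chains the two handlers added in this PR.
async function generateSchemaFromUxDatamap(
  context: BuilderContext,
  uxDatamap: string,
): Promise<unknown> {
  // Step 1: UX datamap -> database requirements document (markdown/plain text).
  const requirementHandler = new DatabaseRequirementHandler();
  const requirements = await requirementHandler.run(context, uxDatamap);
  if (!requirements.success) {
    throw new Error('Database requirements generation failed');
  }

  // Step 2: requirements document -> analysis -> concrete schema.
  // DBSchemaHandler runs both prompts (analyze, then generate) internally.
  const schemaHandler = new DBSchemaHandler();
  const schema = await schemaHandler.run(context, requirements.data as string);
  return schema.data; // { schema: <generated DDL or collection setup> }
}
```

Keeping the handlers decoupled this way lets the build executor schedule them as separate sequence steps while still allowing direct composition, as in the integration test.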