diff --git a/prisma/migrate.ts b/prisma/migrate.ts
index e357cfb..c51dd16 100644
--- a/prisma/migrate.ts
+++ b/prisma/migrate.ts
@@ -43,6 +43,9 @@ const modelMappingKeys = [
     'review',
     'review_item',
     'review_item_comment',
+    'llm_provider',
+    'llm_model',
+    'ai_workflow',
 ];
 const subModelMappingKeys = {
     review_item_comment: ['reviewItemComment', 'appeal', 'appealResponse'],
@@ -102,6 +105,9 @@ const reviewItemCommentAppealResponseIdMap = readIdMap(
 );
 const uploadIdMap = readIdMap('uploadIdMap');
 const submissionIdMap = readIdMap('submissionIdMap');
+const llmProviderIdMap = readIdMap('llmProviderIdMap');
+const llmModelIdMap = readIdMap('llmModelIdMap');
+const aiWorkflowIdMap = readIdMap('aiWorkflowIdMap');
 
 // read resourceSubmissionSet
 const rsSetFile = '.tmp/resourceSubmissionSet.json';
@@ -808,7 +814,6 @@ async function processType(type: string, subtype?: string) {
         case 'scorecard': {
             console.log(`[${type}][${file}] Processing file`);
             const processedData = jsonData[key]
-                .filter((sc) => !scorecardIdMap.has(sc.scorecard_id))
                 .map((sc) => {
                     const id = nanoid(14);
                     scorecardIdMap.set(sc.scorecard_id, id);
@@ -1342,6 +1347,174 @@ async function processType(type: string, subtype?: string) {
             }
             break;
         }
+        case 'llm_provider': {
+            console.log(`[${type}][${subtype}][${file}] Processing file`);
+            const idToLegacyIdMap = {};
+            const processedData = jsonData[key]
+                .map((c) => {
+                    const id = nanoid(14);
+                    llmProviderIdMap.set(
+                        c.llm_provider_id,
+                        id,
+                    );
+                    idToLegacyIdMap[id] = c.llm_provider_id;
+                    return {
+                        id: id,
+                        name: c.name,
+                        createdAt: new Date(c.create_date),
+                        createdBy: c.create_user,
+                    };
+                });
+
+            const totalBatches = Math.ceil(processedData.length / batchSize);
+            for (let i = 0; i < processedData.length; i += batchSize) {
+                const batchIndex = i / batchSize + 1;
+                console.log(
+                    `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
+                );
+                const batch = processedData.slice(i, i + batchSize);
+                await prisma.llmProvider
+                    .createMany({
+                        data: batch,
+                    })
+                    .catch(async () => {
+                        console.error(
+                            `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
+                        );
+                        for (const item of batch) {
+                            await prisma.llmProvider
+                                .create({
+                                    data: item,
+                                })
+                                .catch((err) => {
+                                    llmProviderIdMap.delete(
+                                        idToLegacyIdMap[item.id],
+                                    );
+                                    console.error(
+                                        `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
+                                    );
+                                });
+                        }
+                    });
+            }
+            break;
+        }
+        case 'llm_model': {
+            console.log(`[${type}][${subtype}][${file}] Processing file`);
+            const idToLegacyIdMap = {};
+            const processedData = jsonData[key]
+                .map((c) => {
+                    const id = nanoid(14);
+                    llmModelIdMap.set(
+                        c.llm_model_id,
+                        id,
+                    );
+                    idToLegacyIdMap[id] = c.llm_model_id;
+                    return {
+                        id: id,
+                        providerId: llmProviderIdMap.get(c.provider_id),
+                        name: c.name,
+                        description: c.description,
+                        icon: c.icon,
+                        url: c.url,
+                        createdAt: new Date(c.create_date),
+                        createdBy: c.create_user,
+                    };
+                });
+
+            const totalBatches = Math.ceil(processedData.length / batchSize);
+            for (let i = 0; i < processedData.length; i += batchSize) {
+                const batchIndex = i / batchSize + 1;
+                console.log(
+                    `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
+                );
+                const batch = processedData.slice(i, i + batchSize);
+                await prisma.llmModel
+                    .createMany({
+                        data: batch,
+                    })
+                    .catch(async () => {
+                        console.error(
+                            `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
+                        );
+                        for (const item of batch) {
+                            await prisma.llmModel
+                                .create({
+                                    data: item,
+                                })
+                                .catch((err) => {
+                                    llmModelIdMap.delete(
+                                        idToLegacyIdMap[item.id],
+                                    );
+                                    console.error(
+                                        `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
+                                    );
+                                });
+                        }
+                    });
+            }
+            break;
+        }
+        case 'ai_workflow': {
+            console.log(`[${type}][${subtype}][${file}] Processing file`);
+            const idToLegacyIdMap = {};
+            const processedData = jsonData[key]
+                .map((c) => {
+                    const id = nanoid(14);
+                    aiWorkflowIdMap.set(
+                        c.ai_workflow_id,
+                        id,
+                    );
+                    idToLegacyIdMap[id] = c.ai_workflow_id;
+                    return {
+                        id: id,
+                        llmId: llmModelIdMap.get(c.llm_id),
+                        name: c.name,
+                        description: c.description,
+                        defUrl: c.def_url,
+                        gitId: c.git_id,
+                        gitOwner: c.git_owner,
+                        scorecardId: scorecardIdMap.get(c.scorecard_id),
+                        createdAt: new Date(c.create_date),
+                        createdBy: c.create_user,
+                        updatedAt: new Date(c.modify_date),
+                        updatedBy: c.modify_user,
+                    };
+                });
+
+            const totalBatches = Math.ceil(processedData.length / batchSize);
+            for (let i = 0; i < processedData.length; i += batchSize) {
+                const batchIndex = i / batchSize + 1;
+                console.log(
+                    `[${type}][${subtype}][${file}] Processing batch ${batchIndex}/${totalBatches}`,
+                );
+                const batch = processedData.slice(i, i + batchSize);
+                await prisma.aiWorkflow
+                    .createMany({
+                        data: batch,
+                    })
+                    .catch(async () => {
+                        console.error(
+                            `[${type}][${subtype}][${file}] An error occurred, retrying individually`,
+                        );
+                        for (const item of batch) {
+                            await prisma.aiWorkflow
+                                .create({
+                                    data: item,
+                                })
+                                .catch((err) => {
+                                    aiWorkflowIdMap.delete(
+                                        idToLegacyIdMap[item.id],
+                                    );
+                                    console.error(
+                                        `[${type}][${subtype}][${file}] Error code: ${err.code}, LegacyId: ${idToLegacyIdMap[item.id]}`,
+                                    );
+                                });
+                        }
+                    });
+            }
+            break;
+        }
         default:
             console.warn(`No processor defined for type: ${type}`);
             return;
@@ -1509,6 +1682,9 @@ migrate()
         },
         { key: 'uploadIdMap', value: uploadIdMap },
         { key: 'submissionIdMap', value: submissionIdMap },
+        { key: 'llmProviderIdMap', value: llmProviderIdMap },
+        { key: 'llmModelIdMap', value: llmModelIdMap },
+        { key: 'aiWorkflowIdMap', value: aiWorkflowIdMap },
     ].forEach((f) => {
         if (!fs.existsSync('.tmp')) {
             fs.mkdirSync('.tmp');
diff --git a/prisma/migrations/20250829225539_ai_workflows/migration.sql b/prisma/migrations/20250829225539_ai_workflows/migration.sql
new file mode 100644
index 0000000..c557635
--- /dev/null
+++ b/prisma/migrations/20250829225539_ai_workflows/migration.sql
@@ -0,0 +1,121 @@
+-- CreateTable
+CREATE TABLE "llmProvider" (
+    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
+    "name" VARCHAR NOT NULL,
+    "createdAt" TIMESTAMP(3) NOT NULL,
+    "createdBy" TEXT NOT NULL,
+
+    CONSTRAINT "llmProvider_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateTable
+CREATE TABLE "llmModel" (
+    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
+    "providerId" VARCHAR(14) NOT NULL,
+    "name" VARCHAR NOT NULL,
+    "description" TEXT NOT NULL,
+    "icon" VARCHAR,
+    "url" VARCHAR,
+    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    "createdBy" TEXT NOT NULL,
+
+    CONSTRAINT "llmModel_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateTable
+CREATE TABLE "aiWorkflow" (
+    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
+    "name" VARCHAR NOT NULL,
+    "llmId" VARCHAR(14) NOT NULL,
+    "description" TEXT NOT NULL,
+    "defUrl" VARCHAR NOT NULL,
+    "gitId" VARCHAR NOT NULL,
+    "gitOwner" VARCHAR NOT NULL,
+    "scorecardId" VARCHAR(14) NOT NULL,
+    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    "createdBy" TEXT NOT NULL,
+    "updatedAt" TIMESTAMP(3) NOT NULL,
+    "updatedBy" TEXT NOT NULL,
+
+    CONSTRAINT "aiWorkflow_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateTable
+CREATE TABLE "aiWorkflowRun" (
+    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
+    "workflowId" VARCHAR(14) NOT NULL,
+    "submissionId" VARCHAR(14) NOT NULL,
+    "startedAt" TIMESTAMP(3),
+    "completedAt" TIMESTAMP(3),
+    "gitRunId" VARCHAR NOT NULL,
+    "score" DOUBLE PRECISION,
+    "status" VARCHAR NOT NULL,
+
+    CONSTRAINT "aiWorkflowRun_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateTable
+CREATE TABLE "aiWorkflowRunItem" (
+    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
+    "workflowRunId" VARCHAR(14) NOT NULL,
+    "scorecardQuestionId" VARCHAR(14) NOT NULL,
+    "content" TEXT NOT NULL,
+    "upVotes" INTEGER NOT NULL DEFAULT 0,
+    "downVotes" INTEGER NOT NULL DEFAULT 0,
+    "questionScore" DOUBLE PRECISION,
+    "createdAt" TIMESTAMP(3) NOT NULL,
+    "createdBy" TEXT NOT NULL,
+
+    CONSTRAINT "aiWorkflowRunItem_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateTable
+CREATE TABLE "aiWorkflowRunItemComment" (
+    "id" VARCHAR(14) NOT NULL DEFAULT nanoid(),
+    "workflowRunItemId" VARCHAR(14) NOT NULL,
+    "userId" TEXT NOT NULL,
+    "content" TEXT NOT NULL,
+    "parentId" VARCHAR(14),
+    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    "createdBy" TEXT NOT NULL,
+    "updatedAt" TIMESTAMP(3) NOT NULL,
+    "updatedBy" TEXT NOT NULL,
+
+    CONSTRAINT "aiWorkflowRunItemComment_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateIndex
+CREATE UNIQUE INDEX "llmProvider_name_key" ON "llmProvider"("name");
+
+-- CreateIndex
+CREATE UNIQUE INDEX "llmModel_name_key" ON "llmModel"("name");
+
+-- CreateIndex
+CREATE UNIQUE INDEX "aiWorkflow_name_key" ON "aiWorkflow"("name");
+
+-- AddForeignKey
+ALTER TABLE "llmModel" ADD CONSTRAINT "llmModel_providerId_fkey" FOREIGN KEY ("providerId") REFERENCES "llmProvider"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "llmModel"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "aiWorkflow" ADD CONSTRAINT "aiWorkflow_scorecardId_fkey" FOREIGN KEY ("scorecardId") REFERENCES "scorecard"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "aiWorkflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "aiWorkflowRun" ADD CONSTRAINT "aiWorkflowRun_submissionId_fkey" FOREIGN KEY ("submissionId") REFERENCES "submission"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_workflowRunId_fkey" FOREIGN KEY ("workflowRunId") REFERENCES "aiWorkflowRun"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "aiWorkflowRunItem" ADD CONSTRAINT "aiWorkflowRunItem_scorecardQuestionId_fkey" FOREIGN KEY ("scorecardQuestionId") REFERENCES "scorecardQuestion"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_workflowRunItemId_fkey" FOREIGN KEY ("workflowRunItemId") REFERENCES "aiWorkflowRunItem"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "aiWorkflowRunItemComment" ADD CONSTRAINT "aiWorkflowRunItemComment_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES "aiWorkflowRunItemComment"("id") ON DELETE SET NULL ON UPDATE CASCADE;
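The three new cases in prisma/migrate.ts above repeat the same createMany-then-retry-individually pattern. A generic helper along the following lines could remove that duplication; this is only an illustrative sketch under that assumption, not part of the patch, and the name createManyWithFallback and its signature are invented here.

async function createManyWithFallback<T extends { id: string }>(
    label: string,
    delegate: {
        createMany(args: { data: T[] }): Promise<unknown>;
        create(args: { data: T }): Promise<unknown>;
    },
    batch: T[],
    onItemError: (item: T, err: unknown) => void,
): Promise<void> {
    try {
        // Fast path: insert the whole batch at once.
        await delegate.createMany({ data: batch });
    } catch {
        console.error(`[${label}] An error occurred, retrying individually`);
        for (const item of batch) {
            try {
                await delegate.create({ data: item });
            } catch (err) {
                // Let the caller roll back its id-map entry and log the legacy id.
                onItemError(item, err);
            }
        }
    }
}

Each case would then reduce to building processedData and calling the helper with its own delegate (for example prisma.llmProvider) and an onItemError callback that deletes the failed entry from the corresponding id map.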
diff --git a/prisma/schema.prisma b/prisma/schema.prisma
index 33c13be..f244e7a 100644
--- a/prisma/schema.prisma
+++ b/prisma/schema.prisma
@@ -32,6 +32,7 @@ model scorecard {
 
   scorecardGroups scorecardGroup[]
   reviews         review[]
+  aiWorkflow      aiWorkflow[]
 
   // Indexes for faster searches
   @@index([challengeTrack])
@@ -131,7 +132,8 @@ model scorecardQuestion {
   scaleMin Int? // Minimum value for scale (used when type is SCALE)
   scaleMax Int? // Maximum value for scale (used when type is SCALE)
 
-  section scorecardSection @relation(fields: [scorecardSectionId], references: [id], onDelete: Cascade)
+  section           scorecardSection    @relation(fields: [scorecardSectionId], references: [id], onDelete: Cascade)
+  aiWorkflowRunItem aiWorkflowRunItem[]
 
   @@index([id]) // Index for direct ID lookups
   @@index([scorecardSectionId]) // Index for joining with scorecardSection table
@@ -140,24 +142,24 @@
 model review {
-  id String @id @default(dbgenerated("gen_random_uuid()")) @db.VarChar(36)
-  legacyId String?
-  resourceId String
-  phaseId String
-  submissionId String? @db.VarChar(14) // Associated submission
-  legacySubmissionId String?
-  scorecardId String // Associated scorecard
-  committed Boolean @default(false)
-  finalScore Float?
-  initialScore Float?
-  typeId String?
-  metadata Json?
-  status String?
-  reviewDate DateTime?
-  createdAt DateTime @default(now())
-  createdBy String
-  updatedAt DateTime @updatedAt
-  updatedBy String
+  id                 String    @id @default(dbgenerated("gen_random_uuid()")) @db.VarChar(36)
+  legacyId           String?
+  resourceId         String
+  phaseId            String
+  submissionId       String?   @db.VarChar(14) // Associated submission
+  legacySubmissionId String?
+  scorecardId        String // Associated scorecard
+  committed          Boolean   @default(false)
+  finalScore         Float?
+  initialScore       Float?
+  typeId             String?
+  metadata           Json?
+  status             String?
+  reviewDate         DateTime?
+  createdAt          DateTime  @default(now())
+  createdBy          String
+  updatedAt          DateTime  @updatedAt
+  updatedBy          String
 
   scorecard  scorecard   @relation(fields: [scorecardId], references: [id], onDelete: Cascade)
   submission submission? @relation(fields: [submissionId], references: [id], onDelete: Cascade)
@@ -186,6 +188,7 @@ model reviewItem {
 
   review             review              @relation(fields: [reviewId], references: [id], onDelete: Cascade)
   reviewItemComments reviewItemComment[]
+
   @@index([reviewId]) // Index for joining with review table
   @@index([id]) // Index for direct ID lookups
   @@index([scorecardQuestionId]) // Index for joining with scorecardQuestion table
@@ -206,6 +209,7 @@ model reviewItemComment {
 
   reviewItem reviewItem @relation(fields: [reviewItemId], references: [id], onDelete: Cascade)
   appeal     appeal?
+
   @@index([reviewItemId]) // Index for joining with reviewItem table
   @@index([id]) // Index for direct ID lookups
   @@index([resourceId]) // Index for filtering by resource (commenter)
@@ -308,9 +312,9 @@ model contactRequest {
 }
 
 model reviewType {
-  id String @id @default(dbgenerated("gen_random_uuid()")) @db.VarChar(36)
-  name String
-  isActive Boolean
+  id       String  @id @default(dbgenerated("gen_random_uuid()")) @db.VarChar(36)
+  name     String
+  isActive Boolean
 
   @@index([id]) // Index for direct ID lookups
   // Indexes for faster searches
@@ -319,67 +323,68 @@
 }
 
 model reviewSummation {
-  id String @id @default(dbgenerated("gen_random_uuid()")) @db.VarChar(36)
-  submissionId String @db.VarChar(14) // Associated submission
-  legacySubmissionId String?
-  aggregateScore Float
-  scorecardId String?
-  scorecardLegacyId String?
-  isPassing Boolean
-  isFinal Boolean?
-  reviewedDate DateTime?
-  createdAt DateTime @default(now())
-  createdBy String
-  updatedAt DateTime @updatedAt
-  updatedBy String
-
-  submission submission @relation(fields: [submissionId], references: [id], onDelete: Cascade)
+  id                 String    @id @default(dbgenerated("gen_random_uuid()")) @db.VarChar(36)
+  submissionId       String    @db.VarChar(14) // Associated submission
+  legacySubmissionId String?
+  aggregateScore     Float
+  scorecardId        String?
+  scorecardLegacyId  String?
+  isPassing          Boolean
+  isFinal            Boolean?
+  reviewedDate       DateTime?
+  createdAt          DateTime  @default(now())
+  createdBy          String
+  updatedAt          DateTime  @updatedAt
+  updatedBy          String
+
+  submission submission @relation(fields: [submissionId], references: [id], onDelete: Cascade)
 
   @@index([submissionId]) // Index for joining with submission table
   @@index([scorecardId]) // Index for joining with scorecard table
 }
 
 model submission {
-  id String @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  id String @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
 
   // informix data
-  legacySubmissionId String?
-  type SubmissionType
-  status SubmissionStatus
-  screeningScore Decimal?
-  initialScore Decimal?
-  finalScore Decimal?
-  placement Int?
-  userRank Int?
-  markForPurchase Boolean?
-  prizeId BigInt?
-  fileSize Int?
-  viewCount Int?
-  systemFileName String?
-  thurgoodJobId String?
+  legacySubmissionId String?
+  type               SubmissionType
+  status             SubmissionStatus
+  screeningScore     Decimal?
+  initialScore       Decimal?
+  finalScore         Decimal?
+  placement          Int?
+  userRank           Int?
+  markForPurchase    Boolean?
+  prizeId            BigInt?
+  fileSize           Int?
+  viewCount          Int?
+  systemFileName     String?
+  thurgoodJobId      String?
 
   // ES data
-  url String?
-  memberId String?
-  challengeId String?
-  legacyChallengeId BigInt?
-  submissionPhaseId String?
-  fileType String?
-  esId String? @db.Uuid
-  submittedDate DateTime?
-
-  createdAt DateTime @default(now())
-  createdBy String
-  updatedAt DateTime? @updatedAt
-  updatedBy String?
+  url               String?
+  memberId          String?
+  challengeId       String?
+  legacyChallengeId BigInt?
+  submissionPhaseId String?
+  fileType          String?
+  esId              String?   @db.Uuid
+  submittedDate     DateTime?
+
+  createdAt DateTime  @default(now())
+  createdBy String
+  updatedAt DateTime? @updatedAt
+  updatedBy String?
 
   // relation
-  legacyUploadId String?
-  uploadId String? @db.VarChar(14)
-  upload upload? @relation(fields: [uploadId], references: [id])
+  legacyUploadId String?
+  uploadId       String? @db.VarChar(14)
+  upload         upload? @relation(fields: [uploadId], references: [id])
 
-  review review[]
-  reviewSummation reviewSummation[]
+  review              review[]
+  reviewSummation     reviewSummation[]
   resourceSubmissions resourceSubmission[]
+  aiWorkflowRun       aiWorkflowRun[]
 
   @@index([memberId])
   @@index([challengeId])
@@ -420,53 +425,49 @@ enum ReviewApplicationRole {
 }
 
 model reviewOpportunity {
-  id String @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
-  challengeId String
-  status ReviewOpportunityStatus
-  type ReviewOpportunityType
-  openPositions Int
-  startDate DateTime
-  duration Int
-  basePayment Float
-  incrementalPayment Float
+  id                 String                  @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  challengeId        String
+  status             ReviewOpportunityStatus
+  type               ReviewOpportunityType
+  openPositions      Int
+  startDate          DateTime
+  duration           Int
+  basePayment        Float
+  incrementalPayment Float
 
   applications reviewApplication[]
 
-  createdAt DateTime @default(now())
-  createdBy String
-  updatedAt DateTime @updatedAt
-  updatedBy String
+  createdAt DateTime @default(now())
+  createdBy String
+  updatedAt DateTime @updatedAt
+  updatedBy String
 
   @@unique([challengeId, type])
-  @@index([id]) // Index for direct ID lookups
   @@index([challengeId]) // Index for filtering by challenge
 }
-
 model reviewApplication {
-  id String @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
-  userId String
-  handle String
+  id            String @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  userId        String
+  handle        String
   opportunityId String
-  role ReviewApplicationRole
-  status ReviewApplicationStatus
+  role          ReviewApplicationRole
+  status        ReviewApplicationStatus
 
   opportunity reviewOpportunity @relation(fields: [opportunityId], references: [id], onDelete: Cascade)
 
-  createdAt DateTime @default(now())
-  createdBy String
-  updatedAt DateTime @updatedAt
-  updatedBy String
+  createdAt DateTime @default(now())
+  createdBy String
+  updatedAt DateTime @updatedAt
+  updatedBy String
 
   @@unique([opportunityId, userId, role])
-  @@index([id]) // Index for direct ID lookups
   @@index([userId])
   @@index([opportunityId])
 }
-
 enum UploadType {
   SUBMISSION
   TEST_CASE
@@ -496,23 +497,22 @@ enum SubmissionStatus {
   FAILED_CHECKPOINT_REVIEW
 }
-
 model upload {
-  id String @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
-  legacyId String?
-  projectId String
-  resourceId String
-  type UploadType
-  status UploadStatus
-  parameter String?
-  url String?
-  desc String?
-  projectPhaseId String?
-
-  createdAt DateTime @default(now())
-  createdBy String
-  updatedAt DateTime? @updatedAt
-  updatedBy String?
+  id             String       @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  legacyId       String?
+  projectId      String
+  resourceId     String
+  type           UploadType
+  status         UploadStatus
+  parameter      String?
+  url            String?
+  desc           String?
+  projectPhaseId String?
+
+  createdAt DateTime  @default(now())
+  createdBy String
+  updatedAt DateTime? @updatedAt
+  updatedBy String?
 
   submissions submission[]
@@ -520,29 +520,117 @@ model upload {
   @@index([legacyId])
 }
-
 model resourceSubmission {
-  id String @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
-  resourceId String
-  submissionId String?
-  legacySubmissionId String?
+  id                 String  @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  resourceId         String
+  submissionId       String?
+  legacySubmissionId String?
 
-  createdAt DateTime @default(now())
-  createdBy String
-  updatedAt DateTime? @updatedAt
-  updatedBy String?
+  createdAt DateTime  @default(now())
+  createdBy String
+  updatedAt DateTime? @updatedAt
+  updatedBy String?
 
   submissions submission? @relation(fields: [submissionId], references: [id])
 }
 
 model gitWebhookLog {
   id String @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
-  eventId String // X-GitHub-Delivery header
-  event String // X-GitHub-Event header
-  eventPayload Json // Complete webhook payload
+  eventId      String // X-GitHub-Delivery header
+  event        String // X-GitHub-Event header
+  eventPayload Json   // Complete webhook payload
 
   createdAt DateTime @default(now())
 
   @@index([eventId])
   @@index([event])
   @@index([createdAt])
 }
+
+model llmProvider {
+  id        String   @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  name      String   @unique @db.VarChar
+  createdAt DateTime @db.Timestamp(3)
+  createdBy String   @db.Text
+
+  models llmModel[]
+}
+
+model llmModel {
+  id          String   @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  providerId  String   @db.VarChar(14)
+  name        String   @unique @db.VarChar
+  description String   @db.Text
+  icon        String?  @db.VarChar
+  url         String?  @db.VarChar
+  createdAt   DateTime @default(now()) @db.Timestamp(3)
+  createdBy   String   @db.Text
+
+  provider  llmProvider  @relation(fields: [providerId], references: [id])
+  workflows aiWorkflow[]
+}
+
+model aiWorkflow {
+  id          String   @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  name        String   @unique @db.VarChar
+  llmId       String   @db.VarChar(14)
+  description String   @db.Text
+  defUrl      String   @db.VarChar
+  gitId       String   @db.VarChar
+  gitOwner    String   @db.VarChar
+  scorecardId String   @db.VarChar(14)
+  createdAt   DateTime @default(now()) @db.Timestamp(3)
+  createdBy   String   @db.Text
+  updatedAt   DateTime @db.Timestamp(3)
+  updatedBy   String   @db.Text
+
+  llm       llmModel        @relation(fields: [llmId], references: [id])
+  scorecard scorecard       @relation(fields: [scorecardId], references: [id])
+  runs      aiWorkflowRun[]
+}
+
+model aiWorkflowRun {
+  id           String    @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  workflowId   String    @db.VarChar(14)
+  submissionId String    @db.VarChar(14)
+  startedAt    DateTime? @db.Timestamp(3)
+  completedAt  DateTime? @db.Timestamp(3)
+  gitRunId     String    @db.VarChar
+  score        Float?    @db.DoublePrecision
+  status       String    @db.VarChar
+
+  workflow   aiWorkflow          @relation(fields: [workflowId], references: [id])
+  submission submission          @relation(fields: [submissionId], references: [id])
+  items      aiWorkflowRunItem[]
+}
+
+model aiWorkflowRunItem {
+  id                  String   @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  workflowRunId       String   @db.VarChar(14)
+  scorecardQuestionId String   @db.VarChar(14)
+  content             String   @db.Text
+  upVotes             Int      @default(0)
+  downVotes           Int      @default(0)
+  questionScore       Float?   @db.DoublePrecision
+  createdAt           DateTime @db.Timestamp(3)
+  createdBy           String   @db.Text
+
+  run      aiWorkflowRun              @relation(fields: [workflowRunId], references: [id])
+  question scorecardQuestion          @relation(fields: [scorecardQuestionId], references: [id])
+  comments aiWorkflowRunItemComment[]
+}
+
+model aiWorkflowRunItemComment {
+  id                String   @id @default(dbgenerated("nanoid()")) @db.VarChar(14)
+  workflowRunItemId String   @db.VarChar(14)
+  userId            String   @db.Text
+  content           String   @db.Text
+  parentId          String?  @db.VarChar(14)
+  createdAt         DateTime @default(now()) @db.Timestamp(3)
+  createdBy         String   @db.Text
+  updatedAt         DateTime @db.Timestamp(3)
+  updatedBy         String   @db.Text
+
+  item    aiWorkflowRunItem          @relation(fields: [workflowRunItemId], references: [id])
+  parent  aiWorkflowRunItemComment?  @relation("CommentHierarchy", fields: [parentId], references: [id])
+  replies aiWorkflowRunItemComment[] @relation("CommentHierarchy")
+}
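For reference, a minimal sketch of how the new models chain together through @prisma/client, assuming a generated client and an existing scorecard id; the literal values below are placeholders, not data from this patch.

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function seedExampleWorkflow(scorecardId: string) {
    // llmProvider.createdAt has no default in the schema, so it is set explicitly.
    const provider = await prisma.llmProvider.create({
        data: { name: 'example-provider', createdAt: new Date(), createdBy: 'system' },
    });

    // llmModel.createdAt defaults to now(); providerId is the required FK created above.
    const model = await prisma.llmModel.create({
        data: {
            providerId: provider.id,
            name: 'example-model',
            description: 'Placeholder model record',
            createdBy: 'system',
        },
    });

    // aiWorkflow needs both the model and an existing scorecard; updatedAt carries no
    // @updatedAt attribute in the schema, so it is supplied by hand here.
    return prisma.aiWorkflow.create({
        data: {
            name: 'example-workflow',
            llmId: model.id,
            description: 'Placeholder workflow record',
            defUrl: 'https://example.com/workflow.yml',
            gitId: 'example-repo',
            gitOwner: 'example-org',
            scorecardId,
            createdBy: 'system',
            updatedAt: new Date(),
            updatedBy: 'system',
        },
    });
}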