Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,14 @@ This file provides guidance to coding agents when working with code in this repo
- `.infra/common.ts` - Worker subscription definitions
- `.infra/index.ts` - Main Pulumi deployment configuration

## Best Practices & Lessons Learned

**Avoiding Code Duplication:**
- **Always check for existing implementations** before creating new helper functions. Use Grep or Glob tools to search for similar function names or logic patterns across the codebase.
- **Prefer extracting to common utilities** when logic needs to be shared. Place shared helpers in appropriate `src/common/` subdirectories (e.g., `src/common/opportunity/` for opportunity-related helpers).
- **Export and import, don't duplicate**: When you need the same logic in multiple places, export the function from its original location and import it where needed. This ensures a single source of truth and prevents maintenance issues.
- **Example lesson**: When implementing `handleOpportunityKeywordsUpdate`, the function was duplicated in both `src/common/opportunity/parse.ts` and `src/schema/opportunity.ts`. This caused lint failures and maintenance burden. The correct approach was to export it from `parse.ts` and import it in `opportunity.ts`.

## Pull Requests

Keep PR descriptions concise and to the point. Reviewers should not be exhausted by lengthy explanations.
Expand Down
161 changes: 161 additions & 0 deletions __tests__/schema/opportunity.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6399,3 +6399,164 @@ describe('query opportunityStats', () => {
expect(res.errors[0].extensions.code).toBe('FORBIDDEN');
});
});

// Tests for the reimportOpportunity GraphQL mutation: authentication,
// recruiter permission, input validation, and a full reimport round-trip
// against a mocked Brokkr parsing service.
// NOTE(review): depends on suite-level fixtures/helpers defined elsewhere in
// this file (client, loggedUser, con, opportunitiesFixture,
// createMockBrokkrTransport, createGarmrMock, fileTypeFromBuffer,
// RESUME_BUCKET_NAME) — confirm against the surrounding test setup.
describe('mutation reimportOpportunity', () => {
  // Mutation document; selects every field the reimport is expected to refresh.
  const MUTATION = /* GraphQL */ `
    mutation ReimportOpportunity($payload: ReimportOpportunityInput!) {
      reimportOpportunity(payload: $payload) {
        id
        title
        tldr
        content {
          overview {
            content
          }
          requirements {
            content
          }
          responsibilities {
            content
          }
        }
        keywords {
          keyword
        }
      }
    }
  `;

  beforeEach(async () => {
    jest.resetAllMocks();

    // Route all Brokkr client calls through an in-memory mock transport so no
    // network traffic happens during the tests.
    const transport = createMockBrokkrTransport();
    const serviceClient = {
      instance: createClient(BrokkrService, transport),
      garmr: createGarmrMock(),
    };

    jest
      .spyOn(brokkrCommon, 'getBrokkrClient')
      .mockImplementation((): ServiceClient<typeof BrokkrService> => {
        return serviceClient;
      });
  });

  it('should require authentication', async () => {
    loggedUser = null; // anonymous request

    await testMutationErrorCode(
      client,
      {
        mutation: MUTATION,
        variables: {
          payload: {
            opportunityId: opportunitiesFixture[0].id,
            url: 'https://example.com/job',
          },
        },
      },
      'UNAUTHENTICATED',
    );
  });

  it('should require recruiter permission', async () => {
    loggedUser = '3'; // User 3 is not a recruiter for opportunity 3

    await testMutationErrorCode(
      client,
      {
        mutation: MUTATION,
        variables: {
          payload: {
            opportunityId: opportunitiesFixture[3].id,
            url: 'https://example.com/job',
          },
        },
      },
      'FORBIDDEN',
    );
  });

  it('should fail when neither file nor URL is provided', async () => {
    loggedUser = '2'; // User 2 is a recruiter for opportunity 3

    // The input schema requires exactly one of url/file, so omitting both
    // must surface a validation error.
    const res = await client.mutate(MUTATION, {
      variables: {
        payload: {
          opportunityId: opportunitiesFixture[3].id,
        },
      },
    });

    expect(res.errors).toBeTruthy();
  });

  it('should reimport opportunity from URL and update all fields', async () => {
    loggedUser = '2'; // User 2 is a recruiter for opportunity 3 (which is in DRAFT state)

    // Mock the job-posting download: fetch resolves to a fake PDF response
    // whose body is an empty buffer.
    const fetchSpy = jest.spyOn(globalThis, 'fetch');
    const pdfResponse = new Response('Mocked PDF content', {
      status: 200,
      headers: { 'Content-Type': 'application/pdf' },
    });
    jest
      .spyOn(pdfResponse, 'arrayBuffer')
      .mockResolvedValue(new ArrayBuffer(0));
    fetchSpy.mockResolvedValueOnce(pdfResponse);

    // Content sniffing must agree that the downloaded bytes are a PDF.
    fileTypeFromBuffer.mockResolvedValue({
      ext: 'pdf',
      mime: 'application/pdf',
    });

    // Stub cloud-storage side effects (upload plus cleanup).
    const uploadResumeFromBufferSpy = jest.spyOn(
      googleCloud,
      'uploadResumeFromBuffer',
    );
    uploadResumeFromBufferSpy.mockResolvedValue(
      `https://storage.cloud.google.com/${RESUME_BUCKET_NAME}/file`,
    );

    const deleteFileFromBucketSpy = jest.spyOn(
      googleCloud,
      'deleteFileFromBucket',
    );
    deleteFileFromBucketSpy.mockResolvedValue(true);

    // Get original opportunity state
    const originalOpportunity = await con
      .getRepository(OpportunityJob)
      .findOneByOrFail({ id: opportunitiesFixture[3].id });

    const res = await client.mutate(MUTATION, {
      variables: {
        payload: {
          opportunityId: opportunitiesFixture[3].id,
          url: 'https://example.com/updated-job',
        },
      },
    });

    expect(res.errors).toBeFalsy();
    expect(res.data.reimportOpportunity.id).toBe(opportunitiesFixture[3].id);

    // Verify fields were updated with mocked Brokkr response
    expect(res.data.reimportOpportunity.title).toBe('Mocked Opportunity Title');
    expect(res.data.reimportOpportunity.tldr).toBe(
      'This is a mocked TL;DR of the opportunity.',
    );
    expect(res.data.reimportOpportunity.keywords).toEqual([
      { keyword: 'mock' },
      { keyword: 'opportunity' },
      { keyword: 'test' },
    ]);

    // Verify opportunity still exists and was updated
    const updatedOpportunity = await con
      .getRepository(OpportunityJob)
      .findOneByOrFail({ id: opportunitiesFixture[3].id });

    expect(updatedOpportunity.title).toBe('Mocked Opportunity Title');
    expect(updatedOpportunity.state).toBe(originalOpportunity.state); // State should be preserved
  });
});
110 changes: 110 additions & 0 deletions src/common/opportunity/parse.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ import { OpportunityUserRecruiter } from '../../entity/opportunities/user/Opport
import { findDatasetLocation } from '../../entity/dataset/utils';
import { addOpportunityDefaultQuestionFeedback } from './question';
import type { Opportunity } from '../../entity/opportunities/Opportunity';
import { EntityManager } from 'typeorm';

interface FileUpload {
filename: string;
Expand Down Expand Up @@ -357,3 +358,112 @@ export async function createOpportunityFromParsedData(
return opportunity;
});
}

/**
 * Dependencies required to update an opportunity from parsed data:
 * a TypeORM data source and a Fastify logger.
 */
export interface UpdateOpportunityContext {
  con: DataSource;
  log: FastifyBaseLogger;
}

/**
 * Handles opportunity keywords updates.
 * Replaces all existing keywords with the new set.
 *
 * @param entityManager - Transaction-scoped entity manager to run the queries on
 * @param opportunityId - Opportunity whose keywords are being replaced
 * @param keywords - New keyword set; a non-array value is a no-op, an empty
 *   array clears all keywords
 */
export async function handleOpportunityKeywordsUpdate(
  entityManager: EntityManager,
  opportunityId: string,
  keywords: Array<{ keyword: string }> | undefined,
): Promise<void> {
  if (!Array.isArray(keywords)) {
    return;
  }

  // Delete-then-insert keeps the stored set in exact sync with the input.
  await entityManager.getRepository(OpportunityKeyword).delete({
    opportunityId,
  });

  // Guard against an empty list: TypeORM's insert() with an empty values
  // array can emit invalid SQL on some drivers, and there is nothing to add.
  if (keywords.length === 0) {
    return;
  }

  await entityManager.getRepository(OpportunityKeyword).insert(
    keywords.map(({ keyword }) => ({
      opportunityId,
      keyword,
    })),
  );
}

/**
* Updates an existing opportunity with all parsed data.
*
* @param ctx - Context with database connection and logger
* @param opportunityId - ID of the opportunity to update
* @param parsedData - The parsed opportunity data from Brokkr
* @returns The opportunity ID
*/
export async function updateOpportunityFromParsedData(
ctx: UpdateOpportunityContext,
opportunityId: string,
parsedData: ParsedOpportunityResult,
): Promise<string> {
const { opportunity: parsedOpportunity, content } = parsedData;

return ctx.con.transaction(async (entityManager) => {
// Fetch the existing opportunity
const existingOpportunity = await entityManager
.getRepository(OpportunityJob)
.findOne({
where: { id: opportunityId },
});

if (!existingOpportunity) {
throw new ValidationError('Opportunity not found');
}

// Build update object with all parsed data
const updateData: Partial<OpportunityJob> = {};

if (parsedOpportunity.title) {
updateData.title = parsedOpportunity.title;
}

if (parsedOpportunity.tldr) {
updateData.tldr = parsedOpportunity.tldr;
}

// Update content - merge with existing to preserve any sections not in parsed data
// Explicitly list content block keys to avoid iterating over protobuf methods
const contentBlockKeys = [
'overview',
'responsibilities',
'requirements',
'whatYoullDo',
'interviewProcess',
] as const;
const mergedContent: Partial<OpportunityContent> = {};
for (const key of contentBlockKeys) {
if (content[key]) {
mergedContent[key] = content[key];
}
}
updateData.content = {
...existingOpportunity.content,
...mergedContent,
} as OpportunityContent;

// Update the opportunity
if (Object.keys(updateData).length > 0) {
await entityManager
.getRepository(OpportunityJob)
.update({ id: opportunityId }, updateData);
}

// Update keywords if present in parsed data
if (parsedOpportunity.keywords?.length) {
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we have handleOpportunityKeywordsUpdate, think it could work here?

await handleOpportunityKeywordsUpdate(
entityManager,
opportunityId,
parsedOpportunity.keywords,
);
}

// Return the opportunity ID
return opportunityId;
});
}
31 changes: 31 additions & 0 deletions src/common/schema/opportunities.ts
Original file line number Diff line number Diff line change
Expand Up @@ -242,6 +242,37 @@ export const parseOpportunitySchema = z
},
);

// Input schema for reimporting an opportunity: exactly one content source —
// either a URL or an uploaded file — must accompany the opportunity id.
export const reimportOpportunitySchema = z
  .object({
    opportunityId: z.uuid(),
    url: urlParseSchema.optional(),
    file: fileUploadSchema.optional(),
  })
  // At least one source must be present.
  .refine((data) => Boolean(data.url) || Boolean(data.file), {
    error: 'Either url or file must be provided.',
  })
  // But never both at once.
  .refine((data) => !(data.url && data.file), {
    error: 'Only one of url or file can be provided.',
  });

export const createSharedSlackChannelSchema = z.object({
organizationId: z.string().uuid('Organization ID must be a valid UUID'),
email: z.string().email('Email must be a valid email address'),
Expand Down
Loading
Loading