From 4b71d807ec9e93c5986dd6280e7c58c2c864b4a0 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Wed, 29 Oct 2025 18:05:25 +0000 Subject: [PATCH 1/3] feat!: Support invoke with structured output --- .../server-ai-langchain/package.json | 4 +-- .../src/LangChainProvider.ts | 33 +++++++++++++++++++ 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/packages/ai-providers/server-ai-langchain/package.json b/packages/ai-providers/server-ai-langchain/package.json index 9c7c975e4..e772f0d98 100644 --- a/packages/ai-providers/server-ai-langchain/package.json +++ b/packages/ai-providers/server-ai-langchain/package.json @@ -28,7 +28,7 @@ "license": "Apache-2.0", "devDependencies": { "@langchain/core": "^0.3.0", - "@launchdarkly/server-sdk-ai": "^0.12.3", + "@launchdarkly/server-sdk-ai": "^0.13.0", "@trivago/prettier-plugin-sort-imports": "^4.1.1", "@types/jest": "^29.5.3", "@typescript-eslint/eslint-plugin": "^6.20.0", @@ -48,7 +48,7 @@ }, "peerDependencies": { "@langchain/core": "^0.2.0 || ^0.3.0", - "@launchdarkly/server-sdk-ai": "^0.12.2", + "@launchdarkly/server-sdk-ai": "^0.13.0", "langchain": "^0.2.0 || ^0.3.0" } } diff --git a/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts b/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts index 01df29538..030df5dd6 100644 --- a/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts +++ b/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts @@ -10,6 +10,7 @@ import type { LDLogger, LDMessage, LDTokenUsage, + StructuredResponse, } from '@launchdarkly/server-sdk-ai'; /** @@ -79,6 +80,38 @@ export class LangChainProvider extends AIProvider { }; } + /** + * Invoke the LangChain model with structured output support. 
+ */ + async invokeStructuredModel( + messages: LDMessage[], + responseStructure: Record<string, unknown>, + ): Promise<StructuredResponse> { + // Convert LDMessage[] to LangChain messages + const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages); + + // Get the LangChain response + const response = await this._llm + .withStructuredOutput(responseStructure) + .invoke(langchainMessages); + + // Using structured output doesn't support metrics + const metrics = { + success: true, + usage: { + total: 0, + input: 0, + output: 0, + }, + }; + + return { + data: response, + rawResponse: JSON.stringify(response), + metrics, + }; + } + /** * Get the underlying LangChain model instance. */ From 99ea7bdf935892be8a5b59c70220dacd7c4ce068 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 30 Oct 2025 17:44:05 +0000 Subject: [PATCH 2/3] catch exceptions in langchain provider --- .../__tests__/LangChainProvider.test.ts | 60 ++++++++ .../src/LangChainProvider.ts | 137 +++++++++++------- 2 files changed, 144 insertions(+), 53 deletions(-) diff --git a/packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts b/packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts index 7568eff45..b6a6a5f8e 100644 --- a/packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts +++ b/packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts @@ -155,6 +155,66 @@ describe('LangChainProvider', () => { expect(result.message.content).toBe(''); expect(mockLogger.warn).toHaveBeenCalledTimes(1); }); + + it('returns success=false when model invocation throws an error', async () => { + const error = new Error('Model invocation failed'); + mockLLM.invoke.mockRejectedValue(error); + + const messages = [{ role: 'user' as const, content: 'Hello' }]; + const result = await provider.invokeModel(messages); + + expect(result.metrics.success).toBe(false); + expect(result.message.content).toBe(''); + expect(result.message.role).toBe('assistant'); + 
expect(mockLogger.error).toHaveBeenCalledWith('LangChain model invocation failed:', error); + }); + }); + + describe('invokeStructuredModel', () => { + let mockLLM: any; + let provider: LangChainProvider; + + beforeEach(() => { + mockLLM = { + withStructuredOutput: jest.fn(), + }; + provider = new LangChainProvider(mockLLM, mockLogger); + jest.clearAllMocks(); + }); + + it('returns success=true for successful invocation', async () => { + const mockResponse = { result: 'structured data' }; + const mockInvoke = jest.fn().mockResolvedValue(mockResponse); + mockLLM.withStructuredOutput.mockReturnValue({ invoke: mockInvoke }); + + const messages = [{ role: 'user' as const, content: 'Hello' }]; + const responseStructure = { type: 'object', properties: {} }; + const result = await provider.invokeStructuredModel(messages, responseStructure); + + expect(result.metrics.success).toBe(true); + expect(result.data).toEqual(mockResponse); + expect(result.rawResponse).toBe(JSON.stringify(mockResponse)); + expect(mockLogger.error).not.toHaveBeenCalled(); + }); + + it('returns success=false when structured model invocation throws an error', async () => { + const error = new Error('Structured invocation failed'); + const mockInvoke = jest.fn().mockRejectedValue(error); + mockLLM.withStructuredOutput.mockReturnValue({ invoke: mockInvoke }); + + const messages = [{ role: 'user' as const, content: 'Hello' }]; + const responseStructure = { type: 'object', properties: {} }; + const result = await provider.invokeStructuredModel(messages, responseStructure); + + expect(result.metrics.success).toBe(false); + expect(result.data).toEqual({}); + expect(result.rawResponse).toBe(''); + expect(result.metrics.usage).toEqual({ total: 0, input: 0, output: 0 }); + expect(mockLogger.error).toHaveBeenCalledWith( + 'LangChain structured model invocation failed:', + error, + ); + }); }); describe('mapProvider', () => { diff --git a/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts 
b/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts index 030df5dd6..20f7a929e 100644 --- a/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts +++ b/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts @@ -45,39 +45,53 @@ export class LangChainProvider extends AIProvider { * Invoke the LangChain model with an array of messages. */ async invokeModel(messages: LDMessage[]): Promise<ChatResponse> { - // Convert LDMessage[] to LangChain messages - const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages); - - // Get the LangChain response - const response: AIMessage = await this._llm.invoke(langchainMessages); - - // Generate metrics early (assumes success by default) - const metrics = LangChainProvider.createAIMetrics(response); - - // Extract text content from the response - let content: string = ''; - if (typeof response.content === 'string') { - content = response.content; - } else { - // Log warning for non-string content (likely multimodal) - this.logger?.warn( - `Multimodal response not supported, expecting a string. Content type: ${typeof response.content}, Content:`, - JSON.stringify(response.content, null, 2), - ); - // Update metrics to reflect content loss - metrics.success = false; - } + try { + // Convert LDMessage[] to LangChain messages + const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages); + + // Get the LangChain response + const response: AIMessage = await this._llm.invoke(langchainMessages); + + // Generate metrics early (assumes success by default) + const metrics = LangChainProvider.createAIMetrics(response); + + // Extract text content from the response + let content: string = ''; + if (typeof response.content === 'string') { + content = response.content; + } else { + // Log warning for non-string content (likely multimodal) + this.logger?.warn( + `Multimodal response not supported, expecting a string. 
Content type: ${typeof response.content}, Content:`, + JSON.stringify(response.content, null, 2), + ); + // Update metrics to reflect content loss + metrics.success = false; + } - // Create the assistant message - const assistantMessage: LDMessage = { - role: 'assistant', - content, - }; + // Create the assistant message + const assistantMessage: LDMessage = { + role: 'assistant', + content, + }; - return { - message: assistantMessage, - metrics, - }; + return { + message: assistantMessage, + metrics, + }; + } catch (error) { + this.logger?.error('LangChain model invocation failed:', error); + + return { + message: { + role: 'assistant', + content: '', + }, + metrics: { + success: false, + }, + }; + } } /** @@ -87,29 +101,46 @@ export class LangChainProvider extends AIProvider { messages: LDMessage[], responseStructure: Record<string, unknown>, ): Promise<StructuredResponse> { - // Convert LDMessage[] to LangChain messages - const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages); - - // Get the LangChain response - const response = await this._llm - .withStructuredOutput(responseStructure) - .invoke(langchainMessages); - - // Using structured output doesn't support metrics - const metrics = { - success: true, - usage: { - total: 0, - input: 0, - output: 0, - }, - }; + try { + // Convert LDMessage[] to LangChain messages + const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages); + + // Get the LangChain response + const response = await this._llm + .withStructuredOutput(responseStructure) + .invoke(langchainMessages); + + // Using structured output doesn't support metrics + const metrics = { + success: true, + usage: { + total: 0, + input: 0, + output: 0, + }, + }; - return { - data: response, - rawResponse: JSON.stringify(response), - metrics, - }; + return { + data: response, + rawResponse: JSON.stringify(response), + metrics, + }; + } catch (error) { + this.logger?.error('LangChain structured model invocation failed:', error); + + return { + data: {}, 
+ rawResponse: '', + metrics: { + success: false, + usage: { + total: 0, + input: 0, + output: 0, + }, + }, + }; + } } /** From 4c38dcc677c48d8fcdf18942c1f0ebc5ec1c21ae Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Fri, 31 Oct 2025 15:39:19 +0000 Subject: [PATCH 3/3] fix version in langchain ai package --- packages/ai-providers/server-ai-langchain/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/ai-providers/server-ai-langchain/package.json b/packages/ai-providers/server-ai-langchain/package.json index e772f0d98..5cb5425c8 100644 --- a/packages/ai-providers/server-ai-langchain/package.json +++ b/packages/ai-providers/server-ai-langchain/package.json @@ -28,7 +28,7 @@ "license": "Apache-2.0", "devDependencies": { "@langchain/core": "^0.3.0", - "@launchdarkly/server-sdk-ai": "^0.13.0", + "@launchdarkly/server-sdk-ai": "^0.12.0", "@trivago/prettier-plugin-sort-imports": "^4.1.1", "@types/jest": "^29.5.3", "@typescript-eslint/eslint-plugin": "^6.20.0", @@ -48,7 +48,7 @@ }, "peerDependencies": { "@langchain/core": "^0.2.0 || ^0.3.0", - "@launchdarkly/server-sdk-ai": "^0.13.0", + "@launchdarkly/server-sdk-ai": "^0.12.0", "langchain": "^0.2.0 || ^0.3.0" } }