diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/init.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/init.js
new file mode 100644
index 000000000000..d90a3acf6157
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/init.js
@@ -0,0 +1,9 @@
+import * as Sentry from '@sentry/browser';
+
+window.Sentry = Sentry;
+
+Sentry.init({
+  dsn: 'https://public@dsn.ingest.sentry.io/1337',
+  tracesSampleRate: 1,
+  debug: true,
+});
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/mocks.js
new file mode 100644
index 000000000000..1f56143e251f
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/mocks.js
@@ -0,0 +1,61 @@
+// Mock Anthropic client for browser testing
+export class MockAnthropic {
+  constructor(config) {
+    // eslint-disable-next-line no-console
+    console.log('[Mock Anthropic] Constructor called with config:', config);
+    this.apiKey = config.apiKey;
+
+    // Main focus: messages.create functionality
+    this.messages = {
+      create: async (...args) => {
+        // eslint-disable-next-line no-console
+        console.log('[Mock Anthropic] messages.create called with args:', args);
+        const params = args[0];
+        // Simulate processing time
+        await new Promise(resolve => setTimeout(resolve, 10));
+
+        if (params.model === 'error-model') {
+          const error = new Error('Model not found');
+          error.status = 404;
+          error.headers = { 'x-request-id': 'mock-request-123' };
+          throw error;
+        }
+
+        const response = {
+          id: 'msg_mock123',
+          type: 'message',
+          role: 'assistant',
+          model: params.model,
+          content: [
+            {
+              type: 'text',
+              text: 'Hello from Anthropic mock!',
+            },
+          ],
+          stop_reason: 'end_turn',
+          stop_sequence: null,
+          usage: {
+            input_tokens: 10,
+            output_tokens: 15,
+            cache_creation_input_tokens: 0,
+            cache_read_input_tokens: 0,
+          },
+        };
+        // eslint-disable-next-line no-console
+        console.log('[Mock Anthropic] Returning response:', response);
+        return response;
+      },
+      countTokens: async (..._args) => ({ id: 'mock', type: 'model', model: 'mock', input_tokens: 0 }),
+    };
+
+    // Minimal implementations for required interface compliance
+    this.models = {
+      list: async (..._args) => ({ id: 'mock', type: 'model', model: 'mock' }),
+      get: async (..._args) => ({ id: 'mock', type: 'model', model: 'mock' }),
+    };
+
+    this.completions = {
+      create: async (..._args) => ({ id: 'mock', type: 'completion', model: 'mock' }),
+    };
+  }
+}
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/subject.js
new file mode 100644
index 000000000000..ab0b8c2265b9
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/subject.js
@@ -0,0 +1,43 @@
+import * as Sentry from '@sentry/browser';
+import { MockAnthropic } from './mocks.js';
+
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Starting test...');
+
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Creating mock client...');
+const mockClient = new MockAnthropic({
+  apiKey: 'mock-api-key',
+});
+
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Mock client created:', mockClient);
+
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Instrumenting client with Sentry...');
+const client = Sentry.instrumentAnthropicAiClient(mockClient);
+
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Client instrumented:', client);
+
+// Test that manual instrumentation doesn't crash the browser
+// The instrumentation automatically creates spans
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Calling messages.create...');
+const response = await client.messages.create({
+  model: 'claude-3-haiku-20240307',
+  messages: [{ role: 'user', content: 'What is the capital of France?' }],
+  temperature: 0.7,
+  max_tokens: 100,
+});
+
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Response received:', JSON.stringify(response));
+
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Flushing Sentry...');
+// Ensure transaction is flushed in CI
+await Sentry.flush(2000);
+
+// eslint-disable-next-line no-console
+console.log('[Anthropic Test] Test completed!');
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/test.ts b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/test.ts
new file mode 100644
index 000000000000..4705f435dfdd
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/anthropic/test.ts
@@ -0,0 +1,58 @@
+import { expect } from '@playwright/test';
+import { sentryTest } from '../../../../utils/fixtures';
+import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers';
+
+// These tests are not exhaustive because the instrumentation is
+// already tested in the node integration tests and we merely
+// want to test that the instrumentation does not crash in the browser
+// and that gen_ai transactions are sent.
+
+sentryTest('manual Anthropic instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => {
+  // Listen for console logs
+  page.on('console', msg => {
+    // eslint-disable-next-line no-console
+    console.log(`[Browser Console ${msg.type()}]`, msg.text());
+  });
+
+  // Listen for page errors
+  page.on('pageerror', error => {
+    // eslint-disable-next-line no-console
+    console.error('[Browser Error]', error);
+  });
+
+  const transactionPromise = waitForTransactionRequest(page, event => {
+    // eslint-disable-next-line no-console
+    console.log('[Test] Received transaction event:', JSON.stringify(event, null, 2));
+    return !!event.transaction?.includes('claude-3-haiku-20240307');
+  });
+
+  const url = await getLocalTestUrl({ testDir: __dirname });
+  // eslint-disable-next-line no-console
+  console.log('[Test] Navigating to URL:', url);
+  await page.goto(url);
+
+  // eslint-disable-next-line no-console
+  console.log('[Test] Waiting for transaction...');
+  const req = await transactionPromise;
+  // eslint-disable-next-line no-console
+  console.log('[Test] Transaction received!');
+
+  const eventData = envelopeRequestParser(req);
+  // eslint-disable-next-line no-console
+  console.log('[Test] Parsed event data:', JSON.stringify(eventData, null, 2));
+
+  // Verify it's a gen_ai transaction
+  expect(eventData.transaction).toBe('messages claude-3-haiku-20240307');
+  expect(eventData.contexts?.trace?.op).toBe('gen_ai.messages');
+  expect(eventData.contexts?.trace?.origin).toBe('auto.ai.anthropic');
+  expect(eventData.contexts?.trace?.data).toMatchObject({
+    'gen_ai.operation.name': 'messages',
+    'gen_ai.system': 'anthropic',
+    'gen_ai.request.model': 'claude-3-haiku-20240307',
+    'gen_ai.request.temperature': 0.7,
+    'gen_ai.response.model': 'claude-3-haiku-20240307',
+    'gen_ai.response.id': 'msg_mock123',
+    'gen_ai.usage.input_tokens': 10,
+    'gen_ai.usage.output_tokens': 15,
+  });
+});
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/init.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/init.js
new file mode 100644
index 000000000000..d90a3acf6157
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/init.js
@@ -0,0 +1,9 @@
+import * as Sentry from '@sentry/browser';
+
+window.Sentry = Sentry;
+
+Sentry.init({
+  dsn: 'https://public@dsn.ingest.sentry.io/1337',
+  tracesSampleRate: 1,
+  debug: true,
+});
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/mocks.js
new file mode 100644
index 000000000000..d9374b64c990
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/mocks.js
@@ -0,0 +1,128 @@
+// Mock Google GenAI client for browser testing
+export class MockGoogleGenAI {
+  constructor(config) {
+    // eslint-disable-next-line no-console
+    console.log('[Mock Google GenAI] Constructor called with config:', config);
+    this.apiKey = config.apiKey;
+
+    // models.generateContent functionality
+    this.models = {
+      generateContent: async (...args) => {
+        // eslint-disable-next-line no-console
+        console.log('[Mock Google GenAI] models.generateContent called with args:', args);
+        const params = args[0];
+        // Simulate processing time
+        await new Promise(resolve => setTimeout(resolve, 10));
+
+        if (params.model === 'error-model') {
+          const error = new Error('Model not found');
+          error.status = 404;
+          error.headers = { 'x-request-id': 'mock-request-123' };
+          throw error;
+        }
+
+        return {
+          candidates: [
+            {
+              content: {
+                parts: [
+                  {
+                    text: 'Hello from Google GenAI mock!',
+                  },
+                ],
+                role: 'model',
+              },
+              finishReason: 'stop',
+              index: 0,
+            },
+          ],
+          usageMetadata: {
+            promptTokenCount: 8,
+            candidatesTokenCount: 12,
+            totalTokenCount: 20,
+          },
+        };
+      },
+      generateContentStream: async () => {
+        // Return a promise that resolves to an async generator
+        return (async function* () {
+          yield {
+            candidates: [
+              {
+                content: {
+                  parts: [{ text: 'Streaming response' }],
+                  role: 'model',
+                },
+                finishReason: 'stop',
+                index: 0,
+              },
+            ],
+          };
+        })();
+      },
+    };
+
+    // chats.create implementation
+    this.chats = {
+      create: (...args) => {
+        // eslint-disable-next-line no-console
+        console.log('[Mock Google GenAI] chats.create called with args:', args);
+        const params = args[0];
+        const model = params.model;
+
+        return {
+          modelVersion: model,
+          sendMessage: async (..._messageArgs) => {
+            // eslint-disable-next-line no-console
+            console.log('[Mock Google GenAI] chat.sendMessage called with args:', _messageArgs);
+            // Simulate processing time
+            await new Promise(resolve => setTimeout(resolve, 10));
+
+            const response = {
+              candidates: [
+                {
+                  content: {
+                    parts: [
+                      {
+                        text: 'This is a joke from the chat!',
+                      },
+                    ],
+                    role: 'model',
+                  },
+                  finishReason: 'stop',
+                  index: 0,
+                },
+              ],
+              usageMetadata: {
+                promptTokenCount: 8,
+                candidatesTokenCount: 12,
+                totalTokenCount: 20,
+              },
+              modelVersion: model, // Include model version in response
+            };
+            // eslint-disable-next-line no-console
+            console.log('[Mock Google GenAI] Returning response:', response);
+            return response;
+          },
+          sendMessageStream: async () => {
+            // Return a promise that resolves to an async generator
+            return (async function* () {
+              yield {
+                candidates: [
+                  {
+                    content: {
+                      parts: [{ text: 'Streaming chat response' }],
+                      role: 'model',
+                    },
+                    finishReason: 'stop',
+                    index: 0,
+                  },
+                ],
+              };
+            })();
+          },
+        };
+      },
+    };
+  }
+}
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/subject.js
new file mode 100644
index 000000000000..65123b1eb18d
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/subject.js
@@ -0,0 +1,58 @@
+import * as Sentry from '@sentry/browser';
+import { MockGoogleGenAI } from './mocks.js';
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Starting test...');
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Creating mock client...');
+const mockClient = new MockGoogleGenAI({
+  apiKey: 'mock-api-key',
+});
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Mock client created:', mockClient);
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Instrumenting client with Sentry...');
+const client = Sentry.instrumentGoogleGenAIClient(mockClient);
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Client instrumented:', client);
+
+// Test that manual instrumentation doesn't crash the browser
+// The instrumentation automatically creates spans
+// Test both chats and models APIs
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Creating chat...');
+const chat = client.chats.create({
+  model: 'gemini-1.5-pro',
+  config: {
+    temperature: 0.8,
+    topP: 0.9,
+    maxOutputTokens: 150,
+  },
+  history: [
+    {
+      role: 'user',
+      parts: [{ text: 'Hello, how are you?' }],
+    },
+  ],
+});
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Sending message...');
+const response = await chat.sendMessage({
+  message: 'Tell me a joke',
+});
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Response received:', JSON.stringify(response));
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Flushing Sentry...');
+// Ensure transaction is flushed in CI
+await Sentry.flush(2000);
+
+// eslint-disable-next-line no-console
+console.log('[Google GenAI Test] Test completed!');
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/test.ts b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/test.ts
new file mode 100644
index 000000000000..316b3fe509a0
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/google-genai/test.ts
@@ -0,0 +1,53 @@
+import { expect } from '@playwright/test';
+import { sentryTest } from '../../../../utils/fixtures';
+import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers';
+
+// These tests are not exhaustive because the instrumentation is
+// already tested in the node integration tests and we merely
+// want to test that the instrumentation does not crash in the browser
+// and that gen_ai transactions are sent.
+
+sentryTest('manual Google GenAI instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => {
+  // Listen for console logs
+  page.on('console', msg => {
+    // eslint-disable-next-line no-console
+    console.log(`[Browser Console ${msg.type()}]`, msg.text());
+  });
+
+  // Listen for page errors
+  page.on('pageerror', error => {
+    // eslint-disable-next-line no-console
+    console.error('[Browser Error]', error);
+  });
+
+  const transactionPromise = waitForTransactionRequest(page, event => {
+    // eslint-disable-next-line no-console
+    console.log('[Test] Received transaction event:', JSON.stringify(event, null, 2));
+    return !!event.transaction?.includes('gemini-1.5-pro');
+  });
+
+  const url = await getLocalTestUrl({ testDir: __dirname });
+  // eslint-disable-next-line no-console
+  console.log('[Test] Navigating to URL:', url);
+  await page.goto(url);
+
+  // eslint-disable-next-line no-console
+  console.log('[Test] Waiting for transaction...');
+  const req = await transactionPromise;
+  // eslint-disable-next-line no-console
+  console.log('[Test] Transaction received!');
+
+  const eventData = envelopeRequestParser(req);
+  // eslint-disable-next-line no-console
+  console.log('[Test] Parsed event data:', JSON.stringify(eventData, null, 2));
+
+  // Verify it's a gen_ai transaction
+  expect(eventData.transaction).toBe('chat gemini-1.5-pro create');
+  expect(eventData.contexts?.trace?.op).toBe('gen_ai.chat');
+  expect(eventData.contexts?.trace?.origin).toBe('auto.ai.google_genai');
+  expect(eventData.contexts?.trace?.data).toMatchObject({
+    'gen_ai.operation.name': 'chat',
+    'gen_ai.system': 'google_genai',
+    'gen_ai.request.model': 'gemini-1.5-pro',
+  });
+});
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/init.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/init.js
new file mode 100644
index 000000000000..d90a3acf6157
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/init.js
@@ -0,0 +1,9 @@
+import * as Sentry from '@sentry/browser';
+
+window.Sentry = Sentry;
+
+Sentry.init({
+  dsn: 'https://public@dsn.ingest.sentry.io/1337',
+  tracesSampleRate: 1,
+  debug: true,
+});
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/mocks.js
new file mode 100644
index 000000000000..fe7912a6ff72
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/mocks.js
@@ -0,0 +1,53 @@
+// Mock OpenAI client for browser testing
+export class MockOpenAi {
+  constructor(config) {
+    // eslint-disable-next-line no-console
+    console.log('[Mock OpenAI] Constructor called with config:', config);
+    this.apiKey = config.apiKey;
+
+    this.chat = {
+      completions: {
+        create: async (...args) => {
+          // eslint-disable-next-line no-console
+          console.log('[Mock OpenAI] chat.completions.create called with args:', args);
+          const params = args[0];
+          // Simulate processing time
+          await new Promise(resolve => setTimeout(resolve, 10));
+
+          if (params.model === 'error-model') {
+            const error = new Error('Model not found');
+            error.status = 404;
+            error.headers = { 'x-request-id': 'mock-request-123' };
+            throw error;
+          }
+
+          const response = {
+            id: 'chatcmpl-mock123',
+            object: 'chat.completion',
+            created: 1677652288,
+            model: params.model,
+            system_fingerprint: 'fp_44709d6fcb',
+            choices: [
+              {
+                index: 0,
+                message: {
+                  role: 'assistant',
+                  content: 'Hello from OpenAI mock!',
+                },
+                finish_reason: 'stop',
+              },
+            ],
+            usage: {
+              prompt_tokens: 10,
+              completion_tokens: 15,
+              total_tokens: 25,
+            },
+          };
+          // eslint-disable-next-line no-console
+          console.log('[Mock OpenAI] Returning response:', response);
+          return response;
+        },
+      },
+    };
+  }
+}
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/subject.js
new file mode 100644
index 000000000000..ce4580a0d274
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/subject.js
@@ -0,0 +1,46 @@
+import * as Sentry from '@sentry/browser';
+import { MockOpenAi } from './mocks.js';
+
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Starting test...');
+
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Creating mock client...');
+const mockClient = new MockOpenAi({
+  apiKey: 'mock-api-key',
+});
+
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Mock client created:', mockClient);
+
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Instrumenting client with Sentry...');
+const client = Sentry.instrumentOpenAiClient(mockClient);
+
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Client instrumented:', client);
+
+// Test that manual instrumentation doesn't crash the browser
+// The instrumentation automatically creates spans
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Calling chat.completions.create...');
+const response = await client.chat.completions.create({
+  model: 'gpt-3.5-turbo',
+  messages: [
+    { role: 'system', content: 'You are a helpful assistant.' },
+    { role: 'user', content: 'What is the capital of France?' },
+  ],
+  temperature: 0.7,
+  max_tokens: 100,
+});
+
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Response received:', JSON.stringify(response));
+
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Flushing Sentry...');
+// Ensure transaction is flushed in CI
+await Sentry.flush(2000);
+
+// eslint-disable-next-line no-console
+console.log('[OpenAI Test] Test completed!');
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/test.ts b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/test.ts
new file mode 100644
index 000000000000..bd92baa661f3
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/openai/test.ts
@@ -0,0 +1,59 @@
+import { expect } from '@playwright/test';
+import { sentryTest } from '../../../../utils/fixtures';
+import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers';
+
+// These tests are not exhaustive because the instrumentation is
+// already tested in the node integration tests and we merely
+// want to test that the instrumentation does not crash in the browser
+// and that gen_ai transactions are sent.
+
+sentryTest('manual OpenAI instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => {
+  // Listen for console logs
+  page.on('console', msg => {
+    // eslint-disable-next-line no-console
+    console.log(`[Browser Console ${msg.type()}]`, msg.text());
+  });
+
+  // Listen for page errors
+  page.on('pageerror', error => {
+    // eslint-disable-next-line no-console
+    console.error('[Browser Error]', error);
+  });
+
+  const transactionPromise = waitForTransactionRequest(page, event => {
+    // eslint-disable-next-line no-console
+    console.log('[Test] Received transaction event:', JSON.stringify(event, null, 2));
+    return !!event.transaction?.includes('gpt-3.5-turbo');
+  });
+
+  const url = await getLocalTestUrl({ testDir: __dirname });
+  // eslint-disable-next-line no-console
+  console.log('[Test] Navigating to URL:', url);
+  await page.goto(url);
+
+  // eslint-disable-next-line no-console
+  console.log('[Test] Waiting for transaction...');
+  const req = await transactionPromise;
+  // eslint-disable-next-line no-console
+  console.log('[Test] Transaction received!');
+
+  const eventData = envelopeRequestParser(req);
+  // eslint-disable-next-line no-console
+  console.log('[Test] Parsed event data:', JSON.stringify(eventData, null, 2));
+
+  // Verify it's a gen_ai transaction
+  expect(eventData.transaction).toBe('chat gpt-3.5-turbo');
+  expect(eventData.contexts?.trace?.op).toBe('gen_ai.chat');
+  expect(eventData.contexts?.trace?.origin).toBe('auto.ai.openai');
+  expect(eventData.contexts?.trace?.data).toMatchObject({
+    'gen_ai.operation.name': 'chat',
+    'gen_ai.system': 'openai',
+    'gen_ai.request.model': 'gpt-3.5-turbo',
+    'gen_ai.request.temperature': 0.7,
+    'gen_ai.response.model': 'gpt-3.5-turbo',
+    'gen_ai.response.id': 'chatcmpl-mock123',
+    'gen_ai.usage.input_tokens': 10,
+    'gen_ai.usage.output_tokens': 15,
+    'gen_ai.usage.total_tokens': 25,
+  });
+});
diff --git a/packages/browser/src/index.ts b/packages/browser/src/index.ts
index 5e9924fe6da5..ae13e984c85f 100644
--- a/packages/browser/src/index.ts
+++ b/packages/browser/src/index.ts
@@ -63,6 +63,9 @@ export {
   zodErrorsIntegration,
   thirdPartyErrorFilterIntegration,
   featureFlagsIntegration,
+  instrumentAnthropicAiClient,
+  instrumentOpenAiClient,
+  instrumentGoogleGenAIClient,
   logger,
 } from '@sentry/core';
 export type { Span, FeatureFlagsIntegration } from '@sentry/core';