Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
177 changes: 140 additions & 37 deletions dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ describe('Anthropic integration', () => {
spans: expect.arrayContaining([
// First span - basic message completion without PII
expect.objectContaining({
data: {
data: expect.objectContaining({
'gen_ai.operation.name': 'messages',
'sentry.op': 'gen_ai.messages',
'sentry.origin': 'auto.ai.anthropic',
Expand All @@ -24,43 +24,43 @@ describe('Anthropic integration', () => {
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.output_tokens': 15,
'gen_ai.usage.total_tokens': 25,
},
}),
description: 'messages claude-3-haiku-20240307',
op: 'gen_ai.messages',
origin: 'auto.ai.anthropic',
status: 'ok',
}),
// Second span - error handling
expect.objectContaining({
data: {
data: expect.objectContaining({
'gen_ai.operation.name': 'messages',
'sentry.op': 'gen_ai.messages',
'sentry.origin': 'auto.ai.anthropic',
'gen_ai.system': 'anthropic',
'gen_ai.request.model': 'error-model',
},
}),
description: 'messages error-model',
op: 'gen_ai.messages',
origin: 'auto.ai.anthropic',
status: 'internal_error',
}),
// Third span - token counting (no response.text because recordOutputs=false by default)
expect.objectContaining({
data: {
data: expect.objectContaining({
'gen_ai.operation.name': 'messages',
'sentry.op': 'gen_ai.messages',
'sentry.origin': 'auto.ai.anthropic',
'gen_ai.system': 'anthropic',
'gen_ai.request.model': 'claude-3-haiku-20240307',
},
}),
description: 'messages claude-3-haiku-20240307',
op: 'gen_ai.messages',
origin: 'auto.ai.anthropic',
status: 'ok',
}),
// Fourth span - models.retrieve
expect.objectContaining({
data: {
data: expect.objectContaining({
'anthropic.response.timestamp': '2024-05-08T05:20:00.000Z',
'gen_ai.operation.name': 'models',
'sentry.op': 'gen_ai.models',
Expand All @@ -69,7 +69,7 @@ describe('Anthropic integration', () => {
'gen_ai.request.model': 'claude-3-haiku-20240307',
'gen_ai.response.id': 'claude-3-haiku-20240307',
'gen_ai.response.model': 'claude-3-haiku-20240307',
},
}),
description: 'models claude-3-haiku-20240307',
op: 'gen_ai.models',
origin: 'auto.ai.anthropic',
Expand All @@ -83,88 +83,191 @@ describe('Anthropic integration', () => {
spans: expect.arrayContaining([
// First span - basic message completion with PII
expect.objectContaining({
data: {
data: expect.objectContaining({
'gen_ai.operation.name': 'messages',
'sentry.op': 'gen_ai.messages',
'sentry.origin': 'auto.ai.anthropic',
'gen_ai.system': 'anthropic',
'gen_ai.request.max_tokens': 100,
'gen_ai.request.messages':
'[{"role":"system","content":"You are a helpful assistant."},{"role":"user","content":"What is the capital of France?"}]',
'gen_ai.request.model': 'claude-3-haiku-20240307',
'gen_ai.request.temperature': 0.7,
'gen_ai.request.max_tokens': 100,
'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
'gen_ai.response.model': 'claude-3-haiku-20240307',
'gen_ai.response.id': 'msg_mock123',
'gen_ai.response.model': 'claude-3-haiku-20240307',
'gen_ai.response.text': 'Hello from Anthropic mock!',
'gen_ai.system': 'anthropic',
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.output_tokens': 15,
'gen_ai.usage.total_tokens': 25,
},
'sentry.op': 'gen_ai.messages',
'sentry.origin': 'auto.ai.anthropic',
}),
description: 'messages claude-3-haiku-20240307',
op: 'gen_ai.messages',
origin: 'auto.ai.anthropic',
status: 'ok',
}),
// Second span - error handling with PII
expect.objectContaining({
data: {
data: expect.objectContaining({
'http.request.method': 'POST',
'http.request.method_original': 'POST',
'http.response.header.content-length': 247,
'http.response.status_code': 200,
'otel.kind': 'CLIENT',
'sentry.op': 'http.client',
'sentry.origin': 'auto.http.otel.node_fetch',
'url.path': '/anthropic/v1/messages',
'url.query': '',
'url.scheme': 'http',
}),
op: 'http.client',
origin: 'auto.http.otel.node_fetch',
status: 'ok',
}),

// Second - error handling with PII
expect.objectContaining({
data: expect.objectContaining({
'gen_ai.operation.name': 'messages',
'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
'gen_ai.request.model': 'error-model',
'gen_ai.system': 'anthropic',
'sentry.op': 'gen_ai.messages',
'sentry.origin': 'auto.ai.anthropic',
'gen_ai.system': 'anthropic',
'gen_ai.request.model': 'error-model',
'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
},
}),
description: 'messages error-model',
op: 'gen_ai.messages',
origin: 'auto.ai.anthropic',
status: 'internal_error',
}),
// Third span - token counting with PII (response.text is present because sendDefaultPii=true enables recordOutputs)
expect.objectContaining({
data: {
data: expect.objectContaining({
'http.request.method': 'POST',
'http.request.method_original': 'POST',
'http.response.header.content-length': 15,
'http.response.status_code': 404,
'otel.kind': 'CLIENT',
'sentry.op': 'http.client',
'sentry.origin': 'auto.http.otel.node_fetch',
'url.path': '/anthropic/v1/messages',
'url.query': '',
'url.scheme': 'http',
}),
op: 'http.client',
origin: 'auto.http.otel.node_fetch',
status: 'not_found',
}),

// Third - token counting with PII (response.text is present because sendDefaultPii=true enables recordOutputs)
expect.objectContaining({
data: expect.objectContaining({
'gen_ai.operation.name': 'messages',
'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
'gen_ai.request.model': 'claude-3-haiku-20240307',
'gen_ai.response.text': '15',
'gen_ai.system': 'anthropic',
'sentry.op': 'gen_ai.messages',
'sentry.origin': 'auto.ai.anthropic',
'gen_ai.system': 'anthropic',
'gen_ai.request.model': 'claude-3-haiku-20240307',
'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
'gen_ai.response.text': '15', // Only present because recordOutputs=true when sendDefaultPii=true
},
}),
description: 'messages claude-3-haiku-20240307',
op: 'gen_ai.messages',
origin: 'auto.ai.anthropic',
status: 'ok',
}),
// Fourth span - models.retrieve with PII
expect.objectContaining({
data: {
data: expect.objectContaining({
'http.request.method': 'POST',
'http.request.method_original': 'POST',
'http.response.header.content-length': 19,
'http.response.status_code': 200,
'otel.kind': 'CLIENT',
'sentry.op': 'http.client',
'sentry.origin': 'auto.http.otel.node_fetch',
'url.path': '/anthropic/v1/messages/count_tokens',
'url.query': '',
'url.scheme': 'http',
}),
op: 'http.client',
origin: 'auto.http.otel.node_fetch',
status: 'ok',
}),

// Fourth - models.retrieve with PII
expect.objectContaining({
data: expect.objectContaining({
'anthropic.response.timestamp': '2024-05-08T05:20:00.000Z',
'gen_ai.operation.name': 'models',
'sentry.op': 'gen_ai.models',
'sentry.origin': 'auto.ai.anthropic',
'gen_ai.system': 'anthropic',
'gen_ai.request.model': 'claude-3-haiku-20240307',
'gen_ai.response.id': 'claude-3-haiku-20240307',
'gen_ai.response.model': 'claude-3-haiku-20240307',
},
'gen_ai.system': 'anthropic',
'sentry.op': 'gen_ai.models',
'sentry.origin': 'auto.ai.anthropic',
}),
description: 'models claude-3-haiku-20240307',
op: 'gen_ai.models',
origin: 'auto.ai.anthropic',
status: 'ok',
}),
// Fifth span - messages.create with stream: true
expect.objectContaining({
data: expect.objectContaining({
'http.request.method': 'GET',
'http.request.method_original': 'GET',
'http.response.header.content-length': 123,
'http.response.status_code': 200,
'otel.kind': 'CLIENT',
'sentry.op': 'http.client',
'sentry.origin': 'auto.http.otel.node_fetch',
'url.path': '/anthropic/v1/models/claude-3-haiku-20240307',
'url.query': '',
'url.scheme': 'http',
'user_agent.original': 'Anthropic/JS 0.63.0',
}),
op: 'http.client',
origin: 'auto.http.otel.node_fetch',
status: 'ok',
}),

// Fifth - messages.create with stream: true
expect.objectContaining({
data: expect.objectContaining({
'gen_ai.operation.name': 'messages',
'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
'gen_ai.request.model': 'claude-3-haiku-20240307',
'gen_ai.request.stream': true,
'gen_ai.response.id': 'msg_stream123',
'gen_ai.response.model': 'claude-3-haiku-20240307',
'gen_ai.response.streaming': true,
'gen_ai.response.text': 'Hello from stream!',
'gen_ai.system': 'anthropic',
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.output_tokens': 15,
'gen_ai.usage.total_tokens': 25,
'sentry.op': 'gen_ai.messages',
'sentry.origin': 'auto.ai.anthropic',
}),
description: 'messages claude-3-haiku-20240307 stream-response',
op: 'gen_ai.messages',
origin: 'auto.ai.anthropic',
status: 'ok',
}),
// Sixth span - messages.stream
expect.objectContaining({
data: expect.objectContaining({
'http.request.method': 'POST',
'http.request.method_original': 'POST',
'http.response.status_code': 200,
'otel.kind': 'CLIENT',
'sentry.op': 'http.client',
'sentry.origin': 'auto.http.otel.node_fetch',
'url.path': '/anthropic/v1/messages',
'url.query': '',
'url.scheme': 'http',
'user_agent.original': 'Anthropic/JS 0.63.0',
}),
op: 'http.client',
origin: 'auto.http.otel.node_fetch',
status: 'ok',
}),

// Sixth - messages.stream
expect.objectContaining({
data: expect.objectContaining({
'gen_ai.operation.name': 'messages',
Expand Down
8 changes: 5 additions & 3 deletions packages/core/src/tracing/anthropic-ai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ import type {
AnthropicAiStreamingEvent,
ContentBlock,
} from './types';
import { handleResponseError, shouldInstrument } from './utils';
import { handleResponseError, messagesFromParams, shouldInstrument } from './utils';

/**
* Extract request attributes from method arguments
Expand Down Expand Up @@ -82,10 +82,12 @@ function extractRequestAttributes(args: unknown[], methodPath: string): Record<s
* This is only recorded if recordInputs is true.
*/
function addPrivateRequestAttributes(span: Span, params: Record<string, unknown>): void {
if ('messages' in params) {
const truncatedMessages = getTruncatedJsonString(params.messages);
const messages = messagesFromParams(params);
if (messages.length) {
const truncatedMessages = getTruncatedJsonString(messages);
span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedMessages });
}

if ('input' in params) {
const truncatedInput = getTruncatedJsonString(params.input);
span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedInput });
Expand Down
13 changes: 13 additions & 0 deletions packages/core/src/tracing/anthropic-ai/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,3 +27,16 @@ export function handleResponseError(span: Span, response: AnthropicAiResponse):
});
}
}

/**
* Include the system prompt in the messages list, if available
*/
/**
 * Build the list of request messages, prepending the system prompt (if any)
 * so it is recorded alongside the user/assistant messages.
 *
 * @param params - Raw request parameters passed to the Anthropic SDK method.
 * @returns When `params.system` is a string, a `system`-role message followed
 *   by the request messages; otherwise just the request messages. A non-array
 *   `messages` value is wrapped in an array; `null`/`undefined` yields none.
 */
export function messagesFromParams(params: Record<string, unknown>): unknown[] {
  const { system, messages } = params;

  // Use the narrowed local `system` (typed `string` inside the ternary) rather
  // than re-reading the un-narrowed `params.system`, which is typed `unknown`.
  const systemMessages = typeof system === 'string' ? [{ role: 'system', content: system }] : [];

  // Normalize `messages` to an array: pass arrays through, wrap a single
  // message object, and drop null/undefined entirely.
  const userMessages = Array.isArray(messages) ? messages : messages != null ? [messages] : [];

  return [...systemMessages, ...userMessages];
}
56 changes: 56 additions & 0 deletions packages/core/test/lib/utils/anthropic-utils.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import { describe, expect, it } from 'vitest';
import { messagesFromParams, shouldInstrument } from '../../../src/tracing/anthropic-ai/utils';

describe('anthropic-ai-utils', () => {
  describe('shouldInstrument', () => {
    it('should instrument known methods', () => {
      expect(shouldInstrument('models.get')).toBe(true);
    });

    it('should not instrument unknown methods', () => {
      expect(shouldInstrument('models.unknown.thing')).toBe(false);
    });
  });

  describe('messagesFromParams', () => {
    // Shared fixtures so every assertion compares against the same literals.
    const systemPrompt = 'You are a friendly robot awaiting a greeting.';
    const systemMessage = { role: 'system', content: systemPrompt };
    const userMessage = { role: 'user', content: 'hello' };

    it('includes system message in messages list', () => {
      const result = messagesFromParams({ messages: [userMessage], system: systemPrompt });
      expect(result).toStrictEqual([systemMessage, userMessage]);
    });

    it('includes system message along with non-array messages', () => {
      const result = messagesFromParams({ messages: userMessage, system: systemPrompt });
      expect(result).toStrictEqual([systemMessage, userMessage]);
    });

    it('includes system message if no other messages', () => {
      const result = messagesFromParams({ system: systemPrompt });
      expect(result).toStrictEqual([systemMessage]);
    });

    it('returns messages if no system message', () => {
      const result = messagesFromParams({ messages: [userMessage] });
      expect(result).toStrictEqual([userMessage]);
    });
  });
});