
Commit 4df0056

fix(integrations): anthropic set GEN_AI_OPERATION_NAME (#5185)
#### Issues

Contributes to https://linear.app/getsentry/issue/TET-1524/ensure-all-sdks-report-gen-aioperationname
Parent: 6c6705a

File tree

2 files changed: +18 −0 lines


sentry_sdk/integrations/anthropic.py

Lines changed: 1 addition & 0 deletions
@@ -124,6 +124,7 @@ def _set_input_data(span, kwargs, integration):
     """
     Set input data for the span based on the provided keyword arguments for the anthropic message creation.
     """
+    set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "chat")
     system_prompt = kwargs.get("system")
     messages = kwargs.get("messages")
     if (
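
For context, a minimal end-to-end sketch of how this change surfaces to SDK users. This is an illustration under assumptions, not code from this commit: the DSN is a placeholder, and the actual Anthropic API call is elided (the tests below mock it).

```python
import sentry_sdk
from sentry_sdk.integrations.anthropic import AnthropicIntegration

sentry_sdk.init(
    dsn="...",  # placeholder DSN
    traces_sample_rate=1.0,
    send_default_pii=False,
    integrations=[AnthropicIntegration()],
)

with sentry_sdk.start_transaction(name="anthropic-demo"):
    # client.messages.create(...) would run here. With this commit, the
    # integration's _set_input_data tags the resulting gen_ai.chat span with
    # gen_ai.operation.name = "chat" alongside the request model and messages.
    ...
```

Because the new call sits at the top of `_set_input_data`, before any kwargs are read, the attribute is also present on error spans, which is what the new assertions in `test_span_status_error` below check.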

tests/integrations/anthropic/test_anthropic.py

Lines changed: 17 additions & 0 deletions
@@ -118,6 +118,7 @@ def test_nonstreaming_create_message(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -187,6 +188,7 @@ async def test_nonstreaming_create_message_async(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -287,6 +289,7 @@ def test_streaming_create_message(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -391,6 +394,7 @@ async def test_streaming_create_message_async(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -522,6 +526,7 @@ def test_streaming_create_message_with_input_json_delta(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -662,6 +667,7 @@ async def test_streaming_create_message_with_input_json_delta_async(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -725,6 +731,7 @@ def test_span_status_error(sentry_init, capture_events):
     assert transaction["spans"][0]["status"] == "internal_error"
     assert transaction["spans"][0]["tags"]["status"] == "internal_error"
     assert transaction["contexts"]["trace"]["status"] == "internal_error"
+    assert transaction["spans"][0]["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"


 @pytest.mark.asyncio

@@ -749,6 +756,7 @@ async def test_span_status_error_async(sentry_init, capture_events):
     assert transaction["spans"][0]["status"] == "internal_error"
     assert transaction["spans"][0]["tags"]["status"] == "internal_error"
     assert transaction["contexts"]["trace"]["status"] == "internal_error"
+    assert transaction["spans"][0]["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"


 @pytest.mark.asyncio

@@ -796,6 +804,7 @@ def test_span_origin(sentry_init, capture_events):

     assert event["contexts"]["trace"]["origin"] == "manual"
     assert event["spans"][0]["origin"] == "auto.ai.anthropic"
+    assert event["spans"][0]["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"


 @pytest.mark.asyncio

@@ -823,6 +832,7 @@ async def test_span_origin_async(sentry_init, capture_events):

     assert event["contexts"]["trace"]["origin"] == "manual"
     assert event["spans"][0]["origin"] == "auto.ai.anthropic"
+    assert event["spans"][0]["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"


 @pytest.mark.skipif(

@@ -926,6 +936,7 @@ def mock_messages_create(*args, **kwargs):

     # Verify that the span was created correctly
     assert span["op"] == "gen_ai.chat"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert SPANDATA.GEN_AI_REQUEST_MESSAGES in span["data"]

     # Parse the stored messages

@@ -985,6 +996,7 @@ def test_anthropic_message_truncation(sentry_init, capture_events):
     assert len(chat_spans) > 0

     chat_span = chat_spans[0]
+    assert chat_span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert SPANDATA.GEN_AI_REQUEST_MESSAGES in chat_span["data"]

     messages_data = chat_span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]

@@ -1052,6 +1064,7 @@ def test_nonstreaming_create_message_with_system_prompt(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -1130,6 +1143,7 @@ async def test_nonstreaming_create_message_with_system_prompt_async(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -1240,6 +1254,7 @@ def test_streaming_create_message_with_system_prompt(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -1354,6 +1369,7 @@ async def test_streaming_create_message_with_system_prompt_async(

     assert span["op"] == OP.GEN_AI_CHAT
     assert span["description"] == "chat model"
+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "model"

     if send_default_pii and include_prompts:

@@ -1414,6 +1430,7 @@ def test_system_prompt_with_complex_structure(sentry_init, capture_events):
     assert len(event["spans"]) == 1
     (span,) = event["spans"]

+    assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat"
     assert SPANDATA.GEN_AI_REQUEST_MESSAGES in span["data"]
     stored_messages = json.loads(span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES])
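
As a downstream illustration (not part of this commit), here is a hedged sketch of consuming the new attribute from a `before_send_transaction` hook. It assumes `SPANDATA.GEN_AI_OPERATION_NAME` resolves to the gen-AI semantic-convention string `"gen_ai.operation.name"`, and the DSN is a placeholder.

```python
import sentry_sdk


def tag_gen_ai_transactions(event, hint):
    # Serialized spans carry their attributes under "data"; spans emitted by the
    # Anthropic integration should now include the operation name set above.
    for span in event.get("spans") or []:
        if (span.get("data") or {}).get("gen_ai.operation.name") == "chat":
            # Tag the transaction so gen-AI traffic is easy to filter later.
            event.setdefault("tags", {})["has_gen_ai_chat"] = "true"
            break
    return event


sentry_sdk.init(
    dsn="...",  # placeholder DSN
    traces_sample_rate=1.0,
    before_send_transaction=tag_gen_ai_transactions,
)
```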
