From aec607a215f8b825e85ccc763135d345deca94a6 Mon Sep 17 00:00:00 2001
From: Bob Evans
Date: Thu, 21 Mar 2024 12:04:54 -0400
Subject: [PATCH] chore: Removed transaction_id and legacy token count
 attributes from llm events for openai and langchain

---
 lib/llm-events/langchain/event.js                 |  2 --
 lib/llm-events/openai/chat-completion-summary.js  |  1 -
 lib/llm-events/openai/event.js                    |  3 ---
 .../langchain/chat-completion-message.test.js     |  2 --
 .../langchain/chat-completion-summary.test.js     |  2 --
 test/unit/llm-events/langchain/event.test.js      |  2 --
 test/unit/llm-events/langchain/tool.test.js       |  2 --
 .../langchain/vector-search-result.test.js        |  2 --
 .../llm-events/langchain/vector-search.test.js    |  2 --
 test/unit/llm-events/openai/common.js             |  4 ----
 test/versioned/langchain/common.js                |  4 ----
 test/versioned/langchain/tools.tap.js             |  1 -
 test/versioned/openai/common.js                   | 15 ++-------------
 test/versioned/openai/embeddings.tap.js           |  3 ---
 14 files changed, 2 insertions(+), 43 deletions(-)

diff --git a/lib/llm-events/langchain/event.js b/lib/llm-events/langchain/event.js
index 52d5ae20f..e0e28ca7f 100644
--- a/lib/llm-events/langchain/event.js
+++ b/lib/llm-events/langchain/event.js
@@ -46,7 +46,6 @@ class LangChainEvent extends BaseEvent {
   appName
   span_id
   request_id
-  transaction_id
   trace_id
   ingest_source = 'Node'
   vendor = 'langchain'
@@ -60,7 +59,6 @@
     this.appName = agent.config.applications()[0]
     this.span_id = segment?.id
     this.request_id = params.runId
-    this.transaction_id = segment?.transaction?.id
     this.trace_id = segment?.transaction?.traceId
     this.langchainMeta = params.metadata
     this.metadata = agent
diff --git a/lib/llm-events/openai/chat-completion-summary.js b/lib/llm-events/openai/chat-completion-summary.js
index 2c2b9826d..0d84982bd 100644
--- a/lib/llm-events/openai/chat-completion-summary.js
+++ b/lib/llm-events/openai/chat-completion-summary.js
@@ -13,7 +13,6 @@ module.exports = class LlmChatCompletionSummary extends LlmEvent {
     this['request.max_tokens'] = request.max_tokens
     this['request.temperature'] = request.temperature
     this['response.number_of_messages'] = request?.messages?.length + response?.choices?.length
-    this['response.usage.completion_tokens'] = response?.usage?.completion_tokens
     this['response.choices.finish_reason'] = response?.choices?.[0]?.finish_reason
   }
 }
diff --git a/lib/llm-events/openai/event.js b/lib/llm-events/openai/event.js
index 90d9ebd6b..e31e491e2 100644
--- a/lib/llm-events/openai/event.js
+++ b/lib/llm-events/openai/event.js
@@ -17,7 +17,6 @@ module.exports = class LlmEvent extends BaseEvent {
     this.request_id = response?.headers?.['x-request-id']
     this.trace_id = segment?.transaction?.traceId
     this.span_id = segment?.id
-    this.transaction_id = segment?.transaction?.id
     this['response.model'] = response.model
     this.vendor = 'openai'
     this.ingest_source = 'Node'
@@ -38,8 +37,6 @@
 
   responseAttrs(response) {
     this['response.organization'] = response?.headers?.['openai-organization']
-    this['response.usage.total_tokens'] = response?.usage?.total_tokens
-    this['response.usage.prompt_tokens'] = response?.usage?.prompt_tokens
     this['response.headers.llmVersion'] = response?.headers?.['openai-version']
     this['response.headers.ratelimitLimitRequests'] =
       response?.headers?.['x-ratelimit-limit-requests']
diff --git a/test/unit/llm-events/langchain/chat-completion-message.test.js b/test/unit/llm-events/langchain/chat-completion-message.test.js
index f9ec68f51..b23866a06 100644
--- a/test/unit/llm-events/langchain/chat-completion-message.test.js
+++ b/test/unit/llm-events/langchain/chat-completion-message.test.js
@@ -42,7 +42,6 @@ tap.beforeEach((t) => {
   t.context.segment = {
     id: 'segment-1',
     transaction: {
-      id: 'tx-1',
       traceId: 'trace-1'
     }
   }
@@ -63,7 +62,6 @@ tap.test('creates entity', async (t) => {
     ['llm.conversation_id']: 'test-conversation',
     span_id: 'segment-1',
     request_id: 'run-1',
-    transaction_id: 'tx-1',
     trace_id: 'trace-1',
     ['metadata.foo']: 'foo',
     ingest_source: 'Node',
diff --git a/test/unit/llm-events/langchain/chat-completion-summary.test.js b/test/unit/llm-events/langchain/chat-completion-summary.test.js
index dab399fd9..5f8bb5d92 100644
--- a/test/unit/llm-events/langchain/chat-completion-summary.test.js
+++ b/test/unit/llm-events/langchain/chat-completion-summary.test.js
@@ -37,7 +37,6 @@ tap.beforeEach((t) => {
   t.context.segment = {
     id: 'segment-1',
     transaction: {
-      id: 'tx-1',
       traceId: 'trace-1'
     },
     getDurationInMillis() {
@@ -57,7 +56,6 @@ tap.test('creates entity', async (t) => {
     ['llm.conversation_id']: 'test-conversation',
     span_id: 'segment-1',
     request_id: 'run-1',
-    transaction_id: 'tx-1',
     trace_id: 'trace-1',
     ['metadata.foo']: 'foo',
     ingest_source: 'Node',
diff --git a/test/unit/llm-events/langchain/event.test.js b/test/unit/llm-events/langchain/event.test.js
index 9c870e135..7c07aab8d 100644
--- a/test/unit/llm-events/langchain/event.test.js
+++ b/test/unit/llm-events/langchain/event.test.js
@@ -40,7 +40,6 @@ tap.beforeEach((t) => {
   t.context.segment = {
     id: 'segment-1',
     transaction: {
-      id: 'tx-1',
       traceId: 'trace-1'
     }
   }
@@ -57,7 +56,6 @@ tap.test('constructs default instance', async (t) => {
     ['llm.conversation_id']: 'test-conversation',
     span_id: 'segment-1',
     request_id: 'run-1',
-    transaction_id: 'tx-1',
     trace_id: 'trace-1',
     ['metadata.foo']: 'foo',
     ingest_source: 'Node',
diff --git a/test/unit/llm-events/langchain/tool.test.js b/test/unit/llm-events/langchain/tool.test.js
index 872c563be..ca9d251e1 100644
--- a/test/unit/llm-events/langchain/tool.test.js
+++ b/test/unit/llm-events/langchain/tool.test.js
@@ -45,7 +45,6 @@ tap.beforeEach((t) => {
     },
     id: 'segment-1',
     transaction: {
-      id: 'tx-1',
       traceId: 'trace-1'
     }
   }
@@ -69,7 +68,6 @@ tap.test('constructs default instance', async (t) => {
     id: /[a-z0-9-]{36}/,
     appName: 'test-app',
     span_id: 'segment-1',
-    transaction_id: 'tx-1',
     trace_id: 'trace-1',
     duration: 1.01,
     ['metadata.foo']: 'foo',
diff --git a/test/unit/llm-events/langchain/vector-search-result.test.js b/test/unit/llm-events/langchain/vector-search-result.test.js
index 5e12046ea..8d0729cd9 100644
--- a/test/unit/llm-events/langchain/vector-search-result.test.js
+++ b/test/unit/llm-events/langchain/vector-search-result.test.js
@@ -43,7 +43,6 @@ tap.beforeEach((t) => {
   t.context.segment = {
     id: 'segment-1',
     transaction: {
-      id: 'tx-1',
       traceId: 'trace-1'
     },
     getDurationInMillis() {
@@ -74,7 +73,6 @@ tap.test('create entity', async (t) => {
     ['llm.conversation_id']: 'test-conversation',
     request_id: 'run-1',
     span_id: 'segment-1',
-    transaction_id: 'tx-1',
     trace_id: 'trace-1',
     ['metadata.foo']: 'foo',
     ingest_source: 'Node',
diff --git a/test/unit/llm-events/langchain/vector-search.test.js b/test/unit/llm-events/langchain/vector-search.test.js
index d0b4c8fc9..f1cf836b3 100644
--- a/test/unit/llm-events/langchain/vector-search.test.js
+++ b/test/unit/llm-events/langchain/vector-search.test.js
@@ -42,7 +42,6 @@ tap.beforeEach((t) => {
   t.context.segment = {
     id: 'segment-1',
     transaction: {
-      id: 'tx-1',
       traceId: 'trace-1'
     },
     getDurationInMillis() {
@@ -64,7 +63,6 @@ tap.test('create entity', async (t) => {
     ['llm.conversation_id']: 'test-conversation',
     'request_id': 'run-1',
     'span_id': 'segment-1',
-    'transaction_id': 'tx-1',
     'trace_id': 'trace-1',
     'ingest_source': 'Node',
     'vendor': 'langchain',
diff --git a/test/unit/llm-events/openai/common.js b/test/unit/llm-events/openai/common.js
index 07921fa50..1eab799cb 100644
--- a/test/unit/llm-events/openai/common.js
+++ b/test/unit/llm-events/openai/common.js
@@ -51,7 +51,6 @@ function getExpectedResult(tx, event, type, completionId) {
     'request_id': 'req-id',
     'trace_id': tx.traceId,
     'span_id': trace.children[0].id,
-    'transaction_id': tx.id,
     'response.model': 'gpt-3.5-turbo-0613',
     'vendor': 'openai',
     'ingest_source': 'Node'
@@ -60,8 +59,6 @@
       'duration': trace.children[0].getDurationInMillis(),
       'request.model': 'gpt-3.5-turbo-0613',
       'response.organization': 'new-relic',
-      'response.usage.total_tokens': '30',
-      'response.usage.prompt_tokens': '10',
       'response.headers.llmVersion': '1.0.0',
       'response.headers.ratelimitLimitRequests': '100',
       'response.headers.ratelimitLimitTokens': '100',
@@ -84,7 +81,6 @@
         ['request.max_tokens']: '1000000',
         ['request.temperature']: 'medium-rare',
         ['response.number_of_messages']: 3,
-        ['response.usage.completion_tokens']: 20,
         ['response.choices.finish_reason']: 'stop',
         error: false
       }
diff --git a/test/versioned/langchain/common.js b/test/versioned/langchain/common.js
index f72da9e27..f03ab6f8e 100644
--- a/test/versioned/langchain/common.js
+++ b/test/versioned/langchain/common.js
@@ -27,7 +27,6 @@ function assertLangChainVectorSearch({ tx, vectorSearch, responseDocumentSize })
     'appName': 'New Relic for Node.js tests',
     'span_id': tx.trace.root.children[0].id,
     'trace_id': tx.traceId,
-    'transaction_id': tx.id,
     'request.k': 1,
     'request.query': 'This is an embedding test.',
     'ingest_source': 'Node',
@@ -48,7 +47,6 @@ function assertLangChainVectorSearchResult({ tx, vectorSearchResult, vectorSearc
     'appName': 'New Relic for Node.js tests',
     'span_id': tx.trace.root.children[0].id,
     'trace_id': tx.traceId,
-    'transaction_id': tx.id,
     'ingest_source': 'Node',
     'vendor': 'langchain',
     'metadata.id': '2',
@@ -76,7 +74,6 @@ function assertLangChainChatCompletionSummary({ tx, chatSummary, withCallback })
     'appName': 'New Relic for Node.js tests',
     'span_id': tx.trace.root.children[0].id,
     'trace_id': tx.traceId,
-    'transaction_id': tx.id,
     'request_id': undefined,
     'ingest_source': 'Node',
     'vendor': 'langchain',
@@ -110,7 +107,6 @@ function assertLangChainChatCompletionMessages({
     appName: 'New Relic for Node.js tests',
     span_id: tx.trace.root.children[0].id,
     trace_id: tx.traceId,
-    transaction_id: tx.id,
     ingest_source: 'Node',
     vendor: 'langchain',
     completion_id: chatSummary.id,
diff --git a/test/versioned/langchain/tools.tap.js b/test/versioned/langchain/tools.tap.js
index 5f96f48fc..26490c38e 100644
--- a/test/versioned/langchain/tools.tap.js
+++ b/test/versioned/langchain/tools.tap.js
@@ -80,7 +80,6 @@ tap.test('Langchain instrumentation - tools', (t) => {
         'appName': 'New Relic for Node.js tests',
         'span_id': tx.trace.root.children[0].id,
         'trace_id': tx.traceId,
-        'transaction_id': tx.id,
         'ingest_source': 'Node',
         'vendor': 'langchain',
         'metadata.key': 'value',
diff --git a/test/versioned/openai/common.js b/test/versioned/openai/common.js
index 1f54535c9..935b4e64f 100644
--- a/test/versioned/openai/common.js
+++ b/test/versioned/openai/common.js
@@ -53,7 +53,6 @@ function assertChatCompletionMessages({
     'request_id': '49dbbffbd3c3f4612aa48def69059aad',
     'trace_id': tx.traceId,
     'span_id': tx.trace.root.children[0].id,
-    'transaction_id': tx.id,
     'response.model': model,
     'vendor': 'openai',
     'ingest_source': 'Node',
@@ -94,14 +93,13 @@ function assertChatCompletionMessages({
   })
 }
 
-function assertChatCompletionSummary({ tx, model, chatSummary, tokenUsage, error = false }) {
-  let expectedChatSummary = {
+function assertChatCompletionSummary({ tx, model, chatSummary, error = false }) {
+  const expectedChatSummary = {
     'id': /[a-f0-9]{36}/,
     'appName': 'New Relic for Node.js tests',
     'request_id': '49dbbffbd3c3f4612aa48def69059aad',
     'trace_id': tx.traceId,
     'span_id': tx.trace.root.children[0].id,
-    'transaction_id': tx.id,
     'response.model': model,
     'vendor': 'openai',
     'ingest_source': 'Node',
@@ -119,15 +117,6 @@ function assertChatCompletionSummary({ tx, model, chatSummary, tokenUsage, error
     'error': error
   }
 
-  if (tokenUsage) {
-    expectedChatSummary = {
-      ...expectedChatSummary,
-      'response.usage.total_tokens': 64,
-      'response.usage.prompt_tokens': 53,
-      'response.usage.completion_tokens': 11
-    }
-  }
-
   this.equal(chatSummary[0].type, 'LlmChatCompletionSummary')
   this.match(chatSummary[1], expectedChatSummary, 'should match chat summary message')
 }
diff --git a/test/versioned/openai/embeddings.tap.js b/test/versioned/openai/embeddings.tap.js
index 6ac1cd96a..e5e7c05ee 100644
--- a/test/versioned/openai/embeddings.tap.js
+++ b/test/versioned/openai/embeddings.tap.js
@@ -91,15 +91,12 @@ tap.test('OpenAI instrumentation - embedding', (t) => {
         'request_id': 'c70828b2293314366a76a2b1dcb20688',
         'trace_id': tx.traceId,
         'span_id': tx.trace.root.children[0].id,
-        'transaction_id': tx.id,
         'response.model': 'text-embedding-ada-002-v2',
         'vendor': 'openai',
         'ingest_source': 'Node',
         'request.model': 'text-embedding-ada-002',
         'duration': tx.trace.root.children[0].getDurationInMillis(),
         'response.organization': 'new-relic-nkmd8b',
-        'response.usage.total_tokens': 6,
-        'response.usage.prompt_tokens': 6,
         'token_count': undefined,
         'response.headers.llmVersion': '2020-10-01',
         'response.headers.ratelimitLimitRequests': '200',