Skip to content

Commit

Permalink
chore: Removed transaction_id and legacy token count attributes from llm events for openai and langchain (#2093)
Browse files Browse the repository at this point in the history
  • Loading branch information
bizob2828 committed Apr 1, 2024
1 parent cb0fa11 commit df2a0fd
Show file tree
Hide file tree
Showing 14 changed files with 2 additions and 43 deletions.
2 changes: 0 additions & 2 deletions lib/llm-events/langchain/event.js
Expand Up @@ -46,7 +46,6 @@ class LangChainEvent extends BaseEvent {
appName
span_id
request_id
transaction_id
trace_id
ingest_source = 'Node'
vendor = 'langchain'
Expand All @@ -60,7 +59,6 @@ class LangChainEvent extends BaseEvent {
this.appName = agent.config.applications()[0]
this.span_id = segment?.id
this.request_id = params.runId
this.transaction_id = segment?.transaction?.id
this.trace_id = segment?.transaction?.traceId
this.langchainMeta = params.metadata
this.metadata = agent
Expand Down
1 change: 0 additions & 1 deletion lib/llm-events/openai/chat-completion-summary.js
Expand Up @@ -13,7 +13,6 @@ module.exports = class LlmChatCompletionSummary extends LlmEvent {
this['request.max_tokens'] = request.max_tokens
this['request.temperature'] = request.temperature
this['response.number_of_messages'] = request?.messages?.length + response?.choices?.length
this['response.usage.completion_tokens'] = response?.usage?.completion_tokens
this['response.choices.finish_reason'] = response?.choices?.[0]?.finish_reason
}
}
3 changes: 0 additions & 3 deletions lib/llm-events/openai/event.js
Expand Up @@ -17,7 +17,6 @@ module.exports = class LlmEvent extends BaseEvent {
this.request_id = response?.headers?.['x-request-id']
this.trace_id = segment?.transaction?.traceId
this.span_id = segment?.id
this.transaction_id = segment?.transaction?.id
this['response.model'] = response.model
this.vendor = 'openai'
this.ingest_source = 'Node'
Expand All @@ -38,8 +37,6 @@ module.exports = class LlmEvent extends BaseEvent {

responseAttrs(response) {
this['response.organization'] = response?.headers?.['openai-organization']
this['response.usage.total_tokens'] = response?.usage?.total_tokens
this['response.usage.prompt_tokens'] = response?.usage?.prompt_tokens
this['response.headers.llmVersion'] = response?.headers?.['openai-version']
this['response.headers.ratelimitLimitRequests'] =
response?.headers?.['x-ratelimit-limit-requests']
Expand Down
Expand Up @@ -42,7 +42,6 @@ tap.beforeEach((t) => {
t.context.segment = {
id: 'segment-1',
transaction: {
id: 'tx-1',
traceId: 'trace-1'
}
}
Expand All @@ -63,7 +62,6 @@ tap.test('creates entity', async (t) => {
['llm.conversation_id']: 'test-conversation',
span_id: 'segment-1',
request_id: 'run-1',
transaction_id: 'tx-1',
trace_id: 'trace-1',
['metadata.foo']: 'foo',
ingest_source: 'Node',
Expand Down
Expand Up @@ -37,7 +37,6 @@ tap.beforeEach((t) => {
t.context.segment = {
id: 'segment-1',
transaction: {
id: 'tx-1',
traceId: 'trace-1'
},
getDurationInMillis() {
Expand All @@ -57,7 +56,6 @@ tap.test('creates entity', async (t) => {
['llm.conversation_id']: 'test-conversation',
span_id: 'segment-1',
request_id: 'run-1',
transaction_id: 'tx-1',
trace_id: 'trace-1',
['metadata.foo']: 'foo',
ingest_source: 'Node',
Expand Down
2 changes: 0 additions & 2 deletions test/unit/llm-events/langchain/event.test.js
Expand Up @@ -40,7 +40,6 @@ tap.beforeEach((t) => {
t.context.segment = {
id: 'segment-1',
transaction: {
id: 'tx-1',
traceId: 'trace-1'
}
}
Expand All @@ -57,7 +56,6 @@ tap.test('constructs default instance', async (t) => {
['llm.conversation_id']: 'test-conversation',
span_id: 'segment-1',
request_id: 'run-1',
transaction_id: 'tx-1',
trace_id: 'trace-1',
['metadata.foo']: 'foo',
ingest_source: 'Node',
Expand Down
2 changes: 0 additions & 2 deletions test/unit/llm-events/langchain/tool.test.js
Expand Up @@ -45,7 +45,6 @@ tap.beforeEach((t) => {
},
id: 'segment-1',
transaction: {
id: 'tx-1',
traceId: 'trace-1'
}
}
Expand All @@ -69,7 +68,6 @@ tap.test('constructs default instance', async (t) => {
id: /[a-z0-9-]{36}/,
appName: 'test-app',
span_id: 'segment-1',
transaction_id: 'tx-1',
trace_id: 'trace-1',
duration: 1.01,
['metadata.foo']: 'foo',
Expand Down
2 changes: 0 additions & 2 deletions test/unit/llm-events/langchain/vector-search-result.test.js
Expand Up @@ -43,7 +43,6 @@ tap.beforeEach((t) => {
t.context.segment = {
id: 'segment-1',
transaction: {
id: 'tx-1',
traceId: 'trace-1'
},
getDurationInMillis() {
Expand Down Expand Up @@ -74,7 +73,6 @@ tap.test('create entity', async (t) => {
['llm.conversation_id']: 'test-conversation',
request_id: 'run-1',
span_id: 'segment-1',
transaction_id: 'tx-1',
trace_id: 'trace-1',
['metadata.foo']: 'foo',
ingest_source: 'Node',
Expand Down
2 changes: 0 additions & 2 deletions test/unit/llm-events/langchain/vector-search.test.js
Expand Up @@ -42,7 +42,6 @@ tap.beforeEach((t) => {
t.context.segment = {
id: 'segment-1',
transaction: {
id: 'tx-1',
traceId: 'trace-1'
},
getDurationInMillis() {
Expand All @@ -64,7 +63,6 @@ tap.test('create entity', async (t) => {
['llm.conversation_id']: 'test-conversation',
'request_id': 'run-1',
'span_id': 'segment-1',
'transaction_id': 'tx-1',
'trace_id': 'trace-1',
'ingest_source': 'Node',
'vendor': 'langchain',
Expand Down
4 changes: 0 additions & 4 deletions test/unit/llm-events/openai/common.js
Expand Up @@ -51,7 +51,6 @@ function getExpectedResult(tx, event, type, completionId) {
'request_id': 'req-id',
'trace_id': tx.traceId,
'span_id': trace.children[0].id,
'transaction_id': tx.id,
'response.model': 'gpt-3.5-turbo-0613',
'vendor': 'openai',
'ingest_source': 'Node'
Expand All @@ -60,8 +59,6 @@ function getExpectedResult(tx, event, type, completionId) {
'duration': trace.children[0].getDurationInMillis(),
'request.model': 'gpt-3.5-turbo-0613',
'response.organization': 'new-relic',
'response.usage.total_tokens': '30',
'response.usage.prompt_tokens': '10',
'response.headers.llmVersion': '1.0.0',
'response.headers.ratelimitLimitRequests': '100',
'response.headers.ratelimitLimitTokens': '100',
Expand All @@ -84,7 +81,6 @@ function getExpectedResult(tx, event, type, completionId) {
['request.max_tokens']: '1000000',
['request.temperature']: 'medium-rare',
['response.number_of_messages']: 3,
['response.usage.completion_tokens']: 20,
['response.choices.finish_reason']: 'stop',
error: false
}
Expand Down
4 changes: 0 additions & 4 deletions test/versioned/langchain/common.js
Expand Up @@ -27,7 +27,6 @@ function assertLangChainVectorSearch({ tx, vectorSearch, responseDocumentSize })
'appName': 'New Relic for Node.js tests',
'span_id': tx.trace.root.children[0].id,
'trace_id': tx.traceId,
'transaction_id': tx.id,
'request.k': 1,
'request.query': 'This is an embedding test.',
'ingest_source': 'Node',
Expand All @@ -48,7 +47,6 @@ function assertLangChainVectorSearchResult({ tx, vectorSearchResult, vectorSearc
'appName': 'New Relic for Node.js tests',
'span_id': tx.trace.root.children[0].id,
'trace_id': tx.traceId,
'transaction_id': tx.id,
'ingest_source': 'Node',
'vendor': 'langchain',
'metadata.id': '2',
Expand Down Expand Up @@ -76,7 +74,6 @@ function assertLangChainChatCompletionSummary({ tx, chatSummary, withCallback })
'appName': 'New Relic for Node.js tests',
'span_id': tx.trace.root.children[0].id,
'trace_id': tx.traceId,
'transaction_id': tx.id,
'request_id': undefined,
'ingest_source': 'Node',
'vendor': 'langchain',
Expand Down Expand Up @@ -110,7 +107,6 @@ function assertLangChainChatCompletionMessages({
appName: 'New Relic for Node.js tests',
span_id: tx.trace.root.children[0].id,
trace_id: tx.traceId,
transaction_id: tx.id,
ingest_source: 'Node',
vendor: 'langchain',
completion_id: chatSummary.id,
Expand Down
1 change: 0 additions & 1 deletion test/versioned/langchain/tools.tap.js
Expand Up @@ -80,7 +80,6 @@ tap.test('Langchain instrumentation - tools', (t) => {
'appName': 'New Relic for Node.js tests',
'span_id': tx.trace.root.children[0].id,
'trace_id': tx.traceId,
'transaction_id': tx.id,
'ingest_source': 'Node',
'vendor': 'langchain',
'metadata.key': 'value',
Expand Down
15 changes: 2 additions & 13 deletions test/versioned/openai/common.js
Expand Up @@ -53,7 +53,6 @@ function assertChatCompletionMessages({
'request_id': '49dbbffbd3c3f4612aa48def69059aad',
'trace_id': tx.traceId,
'span_id': tx.trace.root.children[0].id,
'transaction_id': tx.id,
'response.model': model,
'vendor': 'openai',
'ingest_source': 'Node',
Expand Down Expand Up @@ -94,14 +93,13 @@ function assertChatCompletionMessages({
})
}

function assertChatCompletionSummary({ tx, model, chatSummary, tokenUsage, error = false }) {
let expectedChatSummary = {
function assertChatCompletionSummary({ tx, model, chatSummary, error = false }) {
const expectedChatSummary = {
'id': /[a-f0-9]{36}/,
'appName': 'New Relic for Node.js tests',
'request_id': '49dbbffbd3c3f4612aa48def69059aad',
'trace_id': tx.traceId,
'span_id': tx.trace.root.children[0].id,
'transaction_id': tx.id,
'response.model': model,
'vendor': 'openai',
'ingest_source': 'Node',
Expand All @@ -119,15 +117,6 @@ function assertChatCompletionSummary({ tx, model, chatSummary, tokenUsage, error
'error': error
}

if (tokenUsage) {
expectedChatSummary = {
...expectedChatSummary,
'response.usage.total_tokens': 64,
'response.usage.prompt_tokens': 53,
'response.usage.completion_tokens': 11
}
}

this.equal(chatSummary[0].type, 'LlmChatCompletionSummary')
this.match(chatSummary[1], expectedChatSummary, 'should match chat summary message')
}
Expand Down
3 changes: 0 additions & 3 deletions test/versioned/openai/embeddings.tap.js
Expand Up @@ -91,15 +91,12 @@ tap.test('OpenAI instrumentation - embedding', (t) => {
'request_id': 'c70828b2293314366a76a2b1dcb20688',
'trace_id': tx.traceId,
'span_id': tx.trace.root.children[0].id,
'transaction_id': tx.id,
'response.model': 'text-embedding-ada-002-v2',
'vendor': 'openai',
'ingest_source': 'Node',
'request.model': 'text-embedding-ada-002',
'duration': tx.trace.root.children[0].getDurationInMillis(),
'response.organization': 'new-relic-nkmd8b',
'response.usage.total_tokens': 6,
'response.usage.prompt_tokens': 6,
'token_count': undefined,
'response.headers.llmVersion': '2020-10-01',
'response.headers.ratelimitLimitRequests': '200',
Expand Down

0 comments on commit df2a0fd

Please sign in to comment.