Skip to content

Commit

Permalink
feat: Added ability to recordLlmFeedbackEvent by traceId (#2043)
Browse files Browse the repository at this point in the history
  • Loading branch information
bizob2828 committed Feb 26, 2024
1 parent 20c7d47 commit b0a4ed1
Show file tree
Hide file tree
Showing 11 changed files with 50 additions and 193 deletions.
71 changes: 22 additions & 49 deletions api.js
Expand Up @@ -23,7 +23,7 @@ const {
assignCLMSymbol,
addCLMAttributes: maybeAddCLMAttributes
} = require('./lib/util/code-level-metrics')
const { LlmFeedbackMessage } = require('./lib/llm-events/openai')
const LlmFeedbackMessage = require('./lib/llm-events/feedback-message')

const ATTR_DEST = require('./lib/config/attribute-filter').DESTINATIONS
const MODULE_TYPE = require('./lib/shim/constants').MODULE_TYPE
Expand Down Expand Up @@ -617,7 +617,7 @@ API.prototype.addIgnoringRule = function addIgnoringRule(pattern) {
* @private
* @see RUM_ISSUES
* @param {number} errorCode Error code from `RUM_ISSUES`.
* @param {boolean} [quiet=false] Be quiet about this failure.
* @param {boolean} [quiet] Be quiet about this failure.
* @returns {string} HTML comment for debugging purposes with specific error code
*/
function _gracefail(errorCode, quiet) {
Expand Down Expand Up @@ -1541,52 +1541,20 @@ API.prototype.getTraceMetadata = function getTraceMetadata() {
return metadata
}

/**
 * Look up the tracked LLM identifiers associated with a given response.
 *
 * Increments the supportability metric for this API call, then returns the
 * identifiers recorded on the active transaction, if any.
 *
 * @param {object} params Input parameters.
 * @param {string} params.responseId The LLM generated identifier for the
 * response.
 * @returns {LlmTrackedIds|undefined} The tracked identifiers, or `undefined`
 * when ai_monitoring is disabled, no transaction is active, or the id is
 * unknown.
 */
API.prototype.getLlmMessageIds = function getLlmMessageIds({ responseId } = {}) {
  const metricName = `${NAMES.SUPPORTABILITY.API}/getLlmMessageIds`
  this.agent.metrics.getOrCreateMetric(metricName).incrementCallCount()

  const aiMonitoringEnabled = this.agent.config?.ai_monitoring?.enabled === true
  if (!aiMonitoringEnabled) {
    logger.warn('getLlmMessageIds invoked but ai_monitoring is disabled.')
    return
  }

  const transaction = this.agent.tracer.getTransaction()
  if (!transaction) {
    logger.warn('getLlmMessageIds must be called within the scope of a transaction.')
    return
  }

  return transaction.llm.responses.get(responseId)
}

/**
* Record a LLM feedback event which can be viewed in New Relic API Monitoring.
*
* @param {object} params Input parameters.
* @param {string} [params.conversationId=""] If available, the unique
* identifier for the LLM conversation that triggered the event.
* @param {string} [params.requestId=""] If available, the request identifier
* from the remote service.
* @param {string} params.messageId Identifier for the message being rated.
* Obtained from {@link getLlmMessageIds}.
 * @param {string} params.traceId The trace id of the transaction the feedback
 * event applies to. Obtained via `API.prototype.getTraceMetadata`.
* @param {string} params.category A tag for the event.
 * @param {string} params.rating An indicator of how useful the message was.

Check warning on line 1551 in api.js

View workflow job for this annotation

GitHub Actions / lint (lts/*)

The type 'getTraceMetadata' is undefined
* @param {string} [params.message=""] The message that triggered the event.
* @param {object} [params.metadata={}] Additional key-value pairs to associate
* @param {string} [params.message] The message that triggered the event.
* @param {object} [params.metadata] Additional key-value pairs to associate
* with the recorded event.
*/
API.prototype.recordLlmFeedbackEvent = function recordLlmFeedbackEvent({
conversationId = '',
requestId = '',
messageId,
traceId,
category,
rating,
message = '',
Expand All @@ -1596,6 +1564,13 @@ API.prototype.recordLlmFeedbackEvent = function recordLlmFeedbackEvent({
.getOrCreateMetric(`${NAMES.SUPPORTABILITY.API}/recordLlmFeedbackEvent`)
.incrementCallCount()

if (!traceId) {
logger.warn(
'A feedback event will not be recorded. recordLlmFeedbackEvent must be called with a traceId.'
)
return
}

if (this.agent.config?.ai_monitoring?.enabled !== true) {
logger.warn('recordLlmFeedbackEvent invoked but ai_monitoring is disabled.')
return
Expand All @@ -1604,15 +1579,13 @@ API.prototype.recordLlmFeedbackEvent = function recordLlmFeedbackEvent({
const tx = this.agent.tracer.getTransaction()
if (!tx) {
logger.warn(
'No message feedback events will be recorded. recordLlmFeedbackEvent must be called within the scope of a transaction.'
      'A feedback event will not be recorded. recordLlmFeedbackEvent must be called within the scope of a transaction.'
)
return
}

const feedback = new LlmFeedbackMessage({
conversationId,
requestId,
messageId,
traceId,
category,
rating,
message
Expand All @@ -1625,12 +1598,12 @@ API.prototype.recordLlmFeedbackEvent = function recordLlmFeedbackEvent({
*
* @param {object} [options]
* Object with shut down options.
* @param {boolean} [options.collectPendingData=false]
* @param {boolean} [options.collectPendingData]
* If true, the agent will send any pending data to the collector before
* shutting down.
* @param {number} [options.timeout=0]
* @param {number} [options.timeout]
* Time in milliseconds to wait before shutting down.
* @param {boolean} [options.waitForIdle=false]
* @param {boolean} [options.waitForIdle]
* If true, the agent will not shut down until there are no active transactions.
* @param {Function} [cb]
* Callback function that runs when agent stops.
Expand Down Expand Up @@ -1675,12 +1648,12 @@ function _logErrorCallback(error, phase) {
* @private
* @param {object} api instantiation of this file
* @param {object} options shutdown options object
* @param {boolean} [options.collectPendingData=false]
* @param {boolean} [options.collectPendingData]
* If true, the agent will send any pending data to the collector before
* shutting down.
* @param {number} [options.timeout=0]
* @param {number} [options.timeout]
* Time in milliseconds to wait before shutting down.
* @param {boolean} [options.waitForIdle=false]
* @param {boolean} [options.waitForIdle]
* If true, the agent will not shut down until there are no active transactions.
* @param {Function} callback callback function to execute after shutdown process is complete (successful or not)
*/
Expand Down
25 changes: 0 additions & 25 deletions lib/instrumentation/openai.js
Expand Up @@ -11,7 +11,6 @@ const {
LlmEmbedding,
LlmErrorMessage
} = require('../../lib/llm-events/openai')
const LlmTrackedIds = require('../../lib/llm-events/tracked-ids')
const { RecorderSpec } = require('../../lib/shim/specs')

const MIN_VERSION = '4.0.0'
Expand Down Expand Up @@ -91,28 +90,6 @@ function addLlmMeta({ agent, segment }) {
segment.transaction.trace.attributes.addAttribute(DESTINATIONS.TRANS_EVENT, 'llm', true)
}

/**
 * Assigns requestId, conversationId and messageIds for a given
 * chat completion response on the active transaction.
 * This is used for generating LlmFeedbackEvent via `api.recordLlmFeedbackEvent`
 *
 * @param {object} params input params
 * @param {Transaction} params.tx active transaction
 * @param {LlmChatCompletionMessage} params.completionMsg chat completion message
 * @param {string} params.responseId id of response
 */
function assignIdsToTx({ tx, completionMsg, responseId }) {
  const responses = tx.llm.responses

  // Reuse the tracker already stored for this response, or start a new one.
  let tracked = responses.get(responseId)
  if (tracked == null) {
    tracked = new LlmTrackedIds({
      requestId: completionMsg.request_id,
      conversationId: completionMsg['llm.conversation_id']
    })
  }

  tracked.message_ids.push(completionMsg.id)
  responses.set(responseId, tracked)
}

/**
* Generates LlmChatCompletionSummary for a chat completion creation.
* Also iterates over both input messages and the first response message
Expand All @@ -135,7 +112,6 @@ function recordChatCompletionMessages({ agent, segment, request, response, err }
}

response.headers = segment[openAiHeaders]
const tx = segment.transaction
// explicitly end segment to consistent duration
// for both LLM events and the segment
segment.end()
Expand All @@ -160,7 +136,6 @@ function recordChatCompletionMessages({ agent, segment, request, response, err }
message
})

assignIdsToTx({ tx, completionMsg, responseId: response.id })
recordEvent({ agent, type: 'LlmChatCompletionMessage', msg: completionMsg })
})

Expand Down
Expand Up @@ -4,14 +4,12 @@
*/

'use strict'
const { makeId } = require('../../util/hashes')
const { makeId } = require('../util/hashes')

module.exports = class LlmFeedbackMessage {
constructor(opts) {
this.id = makeId(32)
this.conversation_id = opts.conversationId
this.request_id = opts.requestId
this.message_id = opts.messageId
this.trace_id = opts.traceId
this.category = opts.category
this.rating = opts.rating
this.message = opts.message
Expand Down
4 changes: 1 addition & 3 deletions lib/llm-events/openai/index.js
Expand Up @@ -8,13 +8,11 @@
const LlmChatCompletionSummary = require('./chat-completion-summary')
const LlmChatCompletionMessage = require('./chat-completion-message')
const LlmEmbedding = require('./embedding')
const LlmFeedbackMessage = require('./feedback-message')
const LlmErrorMessage = require('../error-message')

module.exports = {
LlmChatCompletionMessage,
LlmChatCompletionSummary,
LlmEmbedding,
LlmErrorMessage,
LlmFeedbackMessage
LlmErrorMessage
}
27 changes: 0 additions & 27 deletions lib/llm-events/tracked-ids.js

This file was deleted.

5 changes: 0 additions & 5 deletions lib/transaction/index.js
Expand Up @@ -152,11 +152,6 @@ function Transaction(agent) {
this.isDistributedTrace = null
this.acceptedDistributedTrace = null

// LLM fields.
this.llm = {
responses: new Map()
}

// Lazy evaluate the priority and sampling in case we end up accepting a payload.
this.priority = null
this.sampled = null
Expand Down
47 changes: 16 additions & 31 deletions test/unit/api/api-llm.test.js
Expand Up @@ -35,33 +35,20 @@ tap.test('Agent API LLM methods', (t) => {
helper.unloadAgent(t.context.api.agent)
})

t.test('getLlmMessageIds is no-op when ai_monitoring is disabled', async (t) => {
t.test('recordLlmFeedbackEvent is no-op when no traceId is provided', async (t) => {
const { api } = t.context
api.agent.config.ai_monitoring.enabled = false

const trackedIds = api.getLlmMessageIds({ responseId: 'test' })
t.equal(trackedIds, undefined)
t.equal(loggerMock.warn.callCount, 1)
t.equal(loggerMock.warn.args[0][0], 'getLlmMessageIds invoked but ai_monitoring is disabled.')
})

t.test('geLlmMessageIds is no-op when no transaction is available', async (t) => {
const { api } = t.context
const trackedIds = api.getLlmMessageIds({ responseId: 'test' })
t.equal(trackedIds, undefined)
t.equal(loggerMock.warn.callCount, 1)
t.equal(
loggerMock.warn.args[0][0],
'getLlmMessageIds must be called within the scope of a transaction.'
)
})

t.test('getLlmMessageIds returns undefined for unrecognized id', async (t) => {
const { api } = t.context
helper.runInTransaction(api.agent, () => {
const trackedIds = api.getLlmMessageIds({ responseId: 'test' })
t.equal(trackedIds, undefined)
t.equal(loggerMock.warn.callCount, 0)
const result = api.recordLlmFeedbackEvent({
category: 'test',
rating: 'test'
})
t.equal(result, undefined)
t.equal(loggerMock.warn.callCount, 1)
t.equal(
loggerMock.warn.args[0][0],
'A feedback event will not be recorded. recordLlmFeedbackEvent must be called with a traceId.'
)
})
})

Expand All @@ -70,7 +57,7 @@ tap.test('Agent API LLM methods', (t) => {
api.agent.config.ai_monitoring.enabled = false

const result = api.recordLlmFeedbackEvent({
messageId: 'test',
traceId: 'trace-id',
category: 'test',
rating: 'test'
})
Expand All @@ -86,15 +73,15 @@ tap.test('Agent API LLM methods', (t) => {
const { api } = t.context

const result = api.recordLlmFeedbackEvent({
messageId: 'test',
traceId: 'trace-id',
category: 'test',
rating: 'test'
})
t.equal(result, undefined)
t.equal(loggerMock.warn.callCount, 1)
t.equal(
loggerMock.warn.args[0][0],
'No message feedback events will be recorded. recordLlmFeedbackEvent must be called within the scope of a transaction.'
      'A feedback event will not be recorded. recordLlmFeedbackEvent must be called within the scope of a transaction.'
)
})

Expand All @@ -113,7 +100,7 @@ tap.test('Agent API LLM methods', (t) => {

helper.runInTransaction(api.agent, () => {
const result = api.recordLlmFeedbackEvent({
messageId: 'test',
traceId: 'trace-id',
category: 'test-cat',
rating: '5 star',
metadata: { foo: 'foo' }
Expand All @@ -123,9 +110,7 @@ tap.test('Agent API LLM methods', (t) => {
t.equal(event.name, 'LlmFeedbackMessage')
t.match(event.data, {
id: /[\w\d]{32}/,
conversation_id: '',
request_id: '',
message_id: 'test',
trace_id: 'trace-id',
category: 'test-cat',
rating: '5 star',
message: '',
Expand Down
3 changes: 1 addition & 2 deletions test/unit/api/stub.test.js
Expand Up @@ -8,7 +8,7 @@
const tap = require('tap')
const API = require('../../../stub_api')

const EXPECTED_API_COUNT = 35
const EXPECTED_API_COUNT = 34

tap.test('Agent API - Stubbed Agent API', (t) => {
t.autoend()
Expand Down Expand Up @@ -352,7 +352,6 @@ tap.test('Agent API - Stubbed Agent API', (t) => {
})

t.test('exports llm message api', (t) => {
t.type(api.getLlmMessageIds, 'function')
t.type(api.recordLlmFeedbackEvent, 'function')
t.end()
})
Expand Down

0 comments on commit b0a4ed1

Please sign in to comment.