4 changes: 2 additions & 2 deletions packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
@@ -33,7 +33,7 @@ it('returns config with interpolated messagess', async () => {
{ role: 'user', content: 'Score: {{score}}' },
],
_ldMeta: {
- versionKey: 'v1',
+ variationKey: 'v1',
enabled: true,
},
};
@@ -70,7 +70,7 @@ it('includes context in variables for messages interpolation', async () => {

const mockVariation = {
messages: [{ role: 'system', content: 'User key: {{ldctx.key}}' }],
- _ldMeta: { versionKey: 'v1', enabled: true },
+ _ldMeta: { variationKey: 'v1', enabled: true },
};

mockLdClient.variation.mockResolvedValue(mockVariation);
64 changes: 32 additions & 32 deletions packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts
@@ -13,26 +13,26 @@ const mockLdClient: LDClientMin = {

const testContext: LDContext = { kind: 'user', key: 'test-user' };
const configKey = 'test-config';
- const versionKey = 'v1';
+ const variationKey = 'v1';

beforeEach(() => {
jest.clearAllMocks();
});

it('tracks duration', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
tracker.trackDuration(1000);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:duration:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1000,
);
});

it('tracks duration of async function', async () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000);

const result = await tracker.trackDurationOf(async () => 'test-result');
@@ -41,49 +41,49 @@ it('tracks duration of async function', async () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:duration:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1000,
);
});

it('tracks positive feedback', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
tracker.trackFeedback({ kind: LDFeedbackKind.Positive });

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:feedback:user:positive',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);
});

it('tracks negative feedback', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
tracker.trackFeedback({ kind: LDFeedbackKind.Negative });

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:feedback:user:negative',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);
});

it('tracks success', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
tracker.trackSuccess();

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:generation',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);
});

it('tracks OpenAI usage', async () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000);

const TOTAL_TOKENS = 100;
@@ -101,41 +101,41 @@ it('tracks OpenAI usage', async () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:duration:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1000,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:generation',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
TOTAL_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:input',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
PROMPT_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:output',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
COMPLETION_TOKENS,
);
});

it('tracks Bedrock conversation with successful response', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

const TOTAL_TOKENS = 100;
const PROMPT_TOKENS = 49;
@@ -156,41 +156,41 @@ it('tracks Bedrock conversation with successful response', () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:generation',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:duration:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
500,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
TOTAL_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:input',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
PROMPT_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:output',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
COMPLETION_TOKENS,
);
});

it('tracks Bedrock conversation with error response', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

const response = {
$metadata: { httpStatusCode: 400 },
@@ -204,7 +204,7 @@ it('tracks Bedrock conversation with error response', () => {
});

it('tracks tokens', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

const TOTAL_TOKENS = 100;
const PROMPT_TOKENS = 49;
@@ -219,27 +219,27 @@ it('tracks tokens', () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
TOTAL_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:input',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
PROMPT_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:output',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
COMPLETION_TOKENS,
);
});

it('only tracks non-zero token counts', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

tracker.trackTokens({
total: 0,
@@ -257,7 +257,7 @@ it('only tracks non-zero token counts', () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:input',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
50,
);

@@ -270,15 +270,15 @@ });
});

it('returns empty summary when no metrics tracked', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

const summary = tracker.getSummary();

expect(summary).toEqual({});
});

it('summarizes tracked metrics', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

tracker.trackDuration(1000);
tracker.trackTokens({
4 changes: 2 additions & 2 deletions packages/sdk/server-ai/src/LDAIClientImpl.ts
@@ -11,7 +11,7 @@ import { LDClientMin } from './LDClientMin';
* Metadata assorted with a model configuration variation.
*/
interface LDMeta {
- versionKey: string;
+ variationKey: string;
enabled: boolean;
}

@@ -44,7 +44,7 @@ export class LDAIClientImpl implements LDAIClient {
this._ldClient,
key,
// eslint-disable-next-line no-underscore-dangle
- value._ldMeta?.versionKey ?? '',
+ value._ldMeta?.variationKey ?? '',
context,
);
// eslint-disable-next-line no-underscore-dangle
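
For reference, after this rename the variation payload read by LDAIClientImpl carries variationKey inside _ldMeta. A minimal TypeScript sketch with illustrative values (only the _ldMeta field names come from this diff):

// Illustrative variation object; values are made up, only the
// _ldMeta shape (variationKey, enabled) reflects this change.
const exampleVariation = {
  messages: [{ role: 'system', content: 'Hello {{name}}' }],
  _ldMeta: {
    variationKey: 'v1', // renamed from versionKey
    enabled: true,
  },
};
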
6 changes: 3 additions & 3 deletions packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts
@@ -12,13 +12,13 @@ export class LDAIConfigTrackerImpl implements LDAIConfigTracker {
constructor(
private _ldClient: LDClientMin,
private _configKey: string,
- private _versionKey: string,
+ private _variationKey: string,
private _context: LDContext,
) {}

- private _getTrackData(): { versionKey: string; configKey: string } {
+ private _getTrackData(): { variationKey: string; configKey: string } {
return {
- versionKey: this._versionKey,
+ variationKey: this._variationKey,
configKey: this._configKey,
};
}
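
For reference, _getTrackData() supplies the metadata attached to every tracked event, so each track call now reports variationKey alongside configKey. A minimal sketch of that event data, with illustrative values matching the tests above:

// Event metadata shape produced by _getTrackData() after this change;
// the concrete values here are illustrative.
const trackData: { variationKey: string; configKey: string } = {
  variationKey: 'v1',
  configKey: 'test-config',
};
// e.g. ldClient.track('$ld:ai:duration:total', context, trackData, 1000);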