
Commit

Merge branch 'main' into mikeshi/create-dashboard-from-chart-explorer
kodiakhq[bot] committed Feb 2, 2024
2 parents 167eabf + b83e51f commit 22a0165
Showing 12 changed files with 411 additions and 60 deletions.
6 changes: 6 additions & 0 deletions .changeset/warm-berries-wash.md
@@ -0,0 +1,6 @@
---
'@hyperdx/api': patch
'@hyperdx/app': patch
---

refactor + perf: decouple metrics tags endpoints and optimize their performance
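
For context (not part of this changeset): a rough sketch of how a caller might use the decoupled endpoints. The route shapes are inferred from the tests in this commit; the base URL, response handling, and helper name below are assumptions.

// Hypothetical client-side sketch (TypeScript); routes mirror the tests in this
// commit, while baseUrl and error handling are assumptions, not part of the PR.
type MetricName = {
  name: string; // e.g. 'test.cpu - Gauge' (name with data type appended)
  data_type: string;
  is_delta: boolean;
  is_monotonic: boolean;
  unit: string;
};

async function fetchTagsForSelection(baseUrl: string) {
  // Step 1: fetch the lightweight list of metric names (no tags attached).
  const namesRes = await fetch(`${baseUrl}/metrics/names`);
  const names: { data: MetricName[] } = await namesRes.json();

  // Step 2: request tags only for the metrics the user actually selected.
  const metrics = names.data.slice(0, 2).map(m => ({
    name: m.name.split(' - ')[0], // strip the appended data type
    dataType: m.data_type,
  }));
  const tagsRes = await fetch(`${baseUrl}/metrics/tags`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ metrics }),
  });
  return tagsRes.json();
}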
147 changes: 146 additions & 1 deletion packages/api/src/clickhouse/index.ts
@@ -593,7 +593,8 @@ export const getCHServerMetrics = async () => {
}, {});
};

export const getMetricsTags = async ({
// ONLY USED IN EXTERNAL API
export const getMetricsTagsDEPRECATED = async ({
teamId,
startTime,
endTime,
@@ -644,6 +645,150 @@ export const getMetricsTags = async ({
unit: string;
}>
>();
logger.info({
message: 'getMetricsTagsDEPRECATED',
query,
took: Date.now() - ts,
});
return result;
};

export const getMetricsNames = async ({
teamId,
startTime,
endTime,
}: {
teamId: string;
startTime: number; // unix in ms
endTime: number; // unix in ms
}) => {
const tableName = `default.${TableName.Metric}`;
// WARNING: _created_at is critical for the query to work efficiently (by using partitioning)
// TODO: remove 'data_type' from the name field
const query = SqlString.format(
`
SELECT
any(is_delta) as is_delta,
any(is_monotonic) as is_monotonic,
any(unit) as unit,
data_type,
format('{} - {}', name, data_type) as name
FROM ??
WHERE (_timestamp_sort_key >= ? AND _timestamp_sort_key < ?)
AND (_created_at >= fromUnixTimestamp64Milli(?) AND _created_at < fromUnixTimestamp64Milli(?))
GROUP BY name, data_type
ORDER BY name
`,
[
tableName,
msToBigIntNs(startTime),
msToBigIntNs(endTime),
startTime,
endTime,
],
);
const ts = Date.now();
const rows = await client.query({
query,
format: 'JSON',
clickhouse_settings: {
additional_table_filters: buildMetricStreamAdditionalFilters(
null,
teamId,
),
},
});
const result = await rows.json<
ResponseJSON<{
data_type: string;
is_delta: boolean;
is_monotonic: boolean;
name: string;
unit: string;
}>
>();
logger.info({
message: 'getMetricsNames',
query,
took: Date.now() - ts,
});
return result;
};

export const getMetricsTags = async ({
endTime,
metrics,
startTime,
teamId,
}: {
endTime: number; // unix in ms
metrics: {
name: string;
dataType: MetricsDataType;
}[];
startTime: number; // unix in ms
teamId: string;
}) => {
const tableName = `default.${TableName.Metric}`;
// TODO: theoretically, we should be able to traverse each tag's keys and values
// and intersect them to get the final result. Currently this is done on the client side.
const unions = metrics
.map(m =>
SqlString.format(
`
SELECT
groupUniqArray(_string_attributes) AS tags,
data_type,
format('{} - {}', name, data_type) AS combined_name
FROM ??
WHERE name = ?
AND data_type = ?
AND (_timestamp_sort_key >= ? AND _timestamp_sort_key < ?)
AND (_created_at >= fromUnixTimestamp64Milli(?) AND _created_at < fromUnixTimestamp64Milli(?))
GROUP BY name, data_type
`,
[
tableName,
m.name,
m.dataType,
msToBigIntNs(startTime),
msToBigIntNs(endTime),
startTime,
endTime,
],
),
)
.join(' UNION DISTINCT ');
const query = SqlString.format(
`
SELECT
combined_name AS name,
data_type,
tags
FROM (?)
ORDER BY name
`,
[SqlString.raw(unions)],
);

const ts = Date.now();
const rows = await client.query({
query,
format: 'JSON',
clickhouse_settings: {
additional_table_filters: buildMetricStreamAdditionalFilters(
null,
teamId,
),
},
});
const result = await rows.json<
ResponseJSON<{
name: string;
data_type: string;
tags: Record<string, string>[];
}>
>();
logger.info({
message: 'getMetricsTags',
query,
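For reference, a minimal sketch (assumed, not part of this diff) of the client-side intersection mentioned in the TODO above — one possible way to intersect the per-metric tag sets returned by getMetricsTags:

// Hypothetical helper: intersect tag objects across the selected metrics.
// This is one reading of the TODO above; the actual app-side logic may differ.
type Tags = Record<string, string>;

function intersectTagSets(perMetricTags: Tags[][]): Tags[] {
  if (perMetricTags.length === 0) return [];
  // Canonical key for a tag object so identical tag sets compare equal.
  const keyOf = (t: Tags) => JSON.stringify(Object.entries(t).sort());
  let result = new Map(perMetricTags[0].map(t => [keyOf(t), t]));
  for (const tags of perMetricTags.slice(1)) {
    const present = new Set(tags.map(keyOf));
    result = new Map([...result].filter(([k]) => present.has(k)));
  }
  return [...result.values()];
}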
135 changes: 116 additions & 19 deletions packages/api/src/routers/api/__tests__/metrics.test.ts
@@ -1,7 +1,13 @@
import ms from 'ms';

import * as clickhouse from '@/clickhouse';
import { buildMetricSeries, getLoggedInAgent, getServer } from '@/fixtures';

describe('metrics router', () => {
const now = Date.now();
let agent;
let teamId;

const server = getServer();

beforeAll(async () => {
@@ -16,10 +22,10 @@ describe('metrics router', () => {
await server.stop();
});

it('GET /metrics/tags', async () => {
const { agent, team } = await getLoggedInAgent(server);

const now = Date.now();
beforeEach(async () => {
const { agent: _agent, team } = await getLoggedInAgent(server);
agent = _agent;
teamId = team.id;
await clickhouse.bulkInsertTeamMetricStream(
buildMetricSeries({
name: 'test.cpu',
@@ -28,7 +34,7 @@ describe('metrics router', () => {
is_monotonic: false,
is_delta: false,
unit: 'Percent',
points: [{ value: 1, timestamp: now }],
points: [{ value: 1, timestamp: now - ms('1d') }],
team_id: team.id,
}),
);
@@ -40,30 +46,121 @@ describe('metrics router', () => {
is_monotonic: false,
is_delta: false,
unit: 'Percent',
points: [{ value: 1, timestamp: now }],
points: [{ value: 1, timestamp: now - ms('1d') }],
team_id: team.id,
}),
);

await clickhouse.bulkInsertTeamMetricStream(
buildMetricSeries({
name: 'test.cpu2',
tags: { host: 'host2', foo2: 'bar2' },
data_type: clickhouse.MetricsDataType.Gauge,
is_monotonic: false,
is_delta: false,
unit: 'Percent',
points: [{ value: 1, timestamp: now - ms('1d') }],
team_id: team.id,
}),
);
});

it('GET /metrics/names', async () => {
const names = await agent.get('/metrics/names').expect(200);
expect(names.body.data).toMatchInlineSnapshot(`
Array [
Object {
"data_type": "Gauge",
"is_delta": false,
"is_monotonic": false,
"name": "test.cpu - Gauge",
"unit": "Percent",
},
Object {
"data_type": "Gauge",
"is_delta": false,
"is_monotonic": false,
"name": "test.cpu2 - Gauge",
"unit": "Percent",
},
]
`);
});

it('GET /metrics/tags - single metric', async () => {
const tags = await agent
.post('/metrics/tags')
.send({
metrics: [
{
name: 'test.cpu',
dataType: clickhouse.MetricsDataType.Gauge,
},
],
})
.expect(200);
expect(tags.body.data).toMatchInlineSnapshot(`
Array [
Object {
"data_type": "Gauge",
"name": "test.cpu - Gauge",
"tags": Array [
Object {
"foo2": "bar2",
"host": "host2",
},
Object {
"foo": "bar",
"host": "host1",
},
],
},
]
`);
});

it('GET /metrics/tags - multi metrics', async () => {
const tags = await agent
.post('/metrics/tags')
.send({
metrics: [
{
name: 'test.cpu',
dataType: clickhouse.MetricsDataType.Gauge,
},
{
name: 'test.cpu2',
dataType: clickhouse.MetricsDataType.Gauge,
},
],
})
.expect(200);
expect(tags.body.data).toMatchInlineSnapshot(`
Array [
Object {
"data_type": "Gauge",
"name": "test.cpu - Gauge",
"tags": Array [
Object {
"foo2": "bar2",
"host": "host2",
},
Object {
"foo": "bar",
"host": "host1",
},
],
},
Object {
"data_type": "Gauge",
"name": "test.cpu2 - Gauge",
"tags": Array [
Object {
"foo2": "bar2",
"host": "host2",
},
],
},
]
`);
});
});
