upcoming: [DI-29393]: Utils and Hooks set up for supporting zoom-in inside the charts in CloudPulse graphs #13308
New changeset file (`@@ -0,0 +1,5 @@`):

```md
---
"@linode/manager": Upcoming Features
---

Utils and Hooks set up for supporting zoom in inside the charts in `CloudPulse metrics graphs` ([#13308](https://github.com/linode/manager/pull/13308))
```
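The changeset names the two main utils this PR exports. As a rough, hypothetical illustration of how a chart widget might call them when a zoom selection changes (the wiring, variable names, and the `'MB'` unit are assumptions for illustration, not code from this PR):

```ts
// Hypothetical usage sketch; only the imported utils and types come from
// this PR, everything else here is assumed for illustration.
import {
  computeLegendRowsBasedOnData,
  computeZoomedInData,
} from './CloudPulseZoomInUtils';

import type { ZoomState } from '../Widget/components/useZoomController';
import type { DataSet } from 'src/components/AreaChart/AreaChart';
import type { MetricsDisplayRow } from 'src/components/LineGraph/MetricsDisplay';

// Assumed inputs: the full time series and the legend rows built for it.
declare const data: DataSet[];
declare const legendRows: MetricsDisplayRow[];

// { left: 'dataMin', right: 'dataMax' } means "not zoomed";
// numeric values are the timestamp boundaries picked by the user.
const zoom: ZoomState = { left: 2000, right: 4000 };

// Points to plot for the selected range (returns the full series when not zoomed).
const zoomedData = computeZoomedInData({ data, zoom });

// Legend metrics (max, average, last, total) recomputed over the same range.
const zoomedLegendRows = computeLegendRowsBasedOnData({
  data,
  zoom,
  legendRows,
  unit: 'MB', // assumed unit, for illustration only
});
```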
New test file (`@@ -0,0 +1,225 @@`) covering the `CloudPulseZoomInUtils` helpers:

```ts
import { describe, expect, it } from 'vitest';

import {
  computeLegendRowsBasedOnData,
  computeZoomedInData,
  getMetricsFromDimensionData,
} from './CloudPulseZoomInUtils';
import { formatToolTip } from './unitConversion';

import type { ZoomState } from '../Widget/components/useZoomController';
import type { DataSet } from 'src/components/AreaChart/AreaChart';
import type { MetricsDisplayRow } from 'src/components/LineGraph/MetricsDisplay';

describe('computeZoomedInData', () => {
  const mockData: DataSet[] = [
    { timestamp: 1000, metric1: 10, metric2: 20 },
    { timestamp: 2000, metric1: 15, metric2: 25 },
    { timestamp: 3000, metric1: 20, metric2: 30 },
    { timestamp: 4000, metric1: 25, metric2: 35 },
    { timestamp: 5000, metric1: 30, metric2: 40 },
  ];

  it('should return original data when zoom is at default (dataMin/dataMax)', () => {
    const zoom: ZoomState = { left: 'dataMin', right: 'dataMax' };
    const result = computeZoomedInData({ data: mockData, zoom });
    expect(result).toBe(mockData);
  });

  it('should return empty array when data is empty', () => {
    const zoom: ZoomState = { left: 1000, right: 3000 };
    const result = computeZoomedInData({ data: [], zoom });
    expect(result).toEqual([]);
  });

  it('should filter data based on zoom range', () => {
    const zoom: ZoomState = { left: 2000, right: 4000 };
    const result = computeZoomedInData({ data: mockData, zoom });
    expect(result).toHaveLength(3);
    expect(result[0].timestamp).toBe(2000);
    expect(result[2].timestamp).toBe(4000);
  });

  it('should handle zoom with dataMin as left', () => {
    const zoom: ZoomState = { left: 'dataMin', right: 3000 };
    const result = computeZoomedInData({ data: mockData, zoom });
    expect(result).toHaveLength(3);
    expect(result[0].timestamp).toBe(1000);
    expect(result[2].timestamp).toBe(3000);
  });

  it('should handle zoom with dataMax as right', () => {
    const zoom: ZoomState = { left: 3000, right: 'dataMax' };
    const result = computeZoomedInData({ data: mockData, zoom });
    expect(result).toHaveLength(3);
    expect(result[0].timestamp).toBe(3000);
    expect(result[2].timestamp).toBe(5000);
  });

  it('should return empty array when left is greater than right', () => {
    const zoom: ZoomState = { left: 4000, right: 2000 };
    const result = computeZoomedInData({ data: mockData, zoom });
    expect(result).toEqual([]);
  });
});

describe('getMetricsFromDimensionData', () => {
  it('should return zeros for empty data', () => {
    const result = getMetricsFromDimensionData([]);
    expect(result).toEqual({
      average: 0,
      last: 0,
      length: 0,
      max: 0,
      total: 0,
    });
  });

  it('should calculate metrics correctly for valid data', () => {
    const data = [10, 20, 30, 40, 50];
    const result = getMetricsFromDimensionData(data);
    expect(result).toEqual({
      average: 30,
      last: 50,
      length: 5,
      max: 50,
      total: 150,
    });
  });

  it('should handle single value', () => {
    const data = [42];
    const result = getMetricsFromDimensionData(data);
    expect(result).toEqual({
      average: 42,
      last: 42,
      length: 1,
      max: 42,
      total: 42,
    });
  });

  it('should ignore NaN values', () => {
    const data = [10, NaN, 30, NaN, 50];
    const result = getMetricsFromDimensionData(data);
    expect(result).toBe(90);
    expect(result.total).toBe(90);
    expect(result.max).toBe(50);
  });

  it('should return 0 as last when last value is NaN', () => {
    const data = [10, 20, NaN];
    const result = getMetricsFromDimensionData(data);

    expect(result.last).toBe(0);
  });
});

describe('computeLegendRowsBasedOnData', () => {
  const mockData: DataSet[] = [
    { timestamp: 1000, cpu: 10, memory: 20 },
    { timestamp: 2000, cpu: 15, memory: 25 },
    { timestamp: 3000, cpu: 20, memory: 30 },
  ];
  const failMessage = 'Result should not be undefined';

  const mockLegendRows: MetricsDisplayRow[] = [
    {
      legendTitle: 'cpu',
      legendColor: 'blue',
      data: { average: 0, last: 0, length: 0, max: 0, total: 0 },
      format: (value: number) => formatToolTip(value, 'MB'),
    },
    {
      legendTitle: 'memory',
      legendColor: 'red',
      data: { average: 0, last: 0, length: 0, max: 0, total: 0 },
      format: (value: number) => formatToolTip(value, 'MB'),
    },
  ];

  it('should return undefined when legendRows is undefined', () => {
    const zoom: ZoomState = { left: 'dataMin', right: 'dataMax' };
    const result = computeLegendRowsBasedOnData({
      zoom,
      data: mockData,
    });
    expect(result).toBeUndefined();
  });

  it('should return undefined when data is empty', () => {
    const zoom: ZoomState = { left: 'dataMin', right: 'dataMax' };
    const result = computeLegendRowsBasedOnData({
      zoom,
      data: [],
    });
    expect(result).toBeUndefined();
  });

  it('should return original rows when not zoomed', () => {
    const zoom: ZoomState = { left: 'dataMin', right: 'dataMax' };
    const result = computeLegendRowsBasedOnData({
      zoom,
      data: mockData,
      legendRows: mockLegendRows,
    });
    expect(result).toEqual(mockLegendRows);
  });

  it('should compute metrics based on zoomed data', () => {
    const zoom: ZoomState = { left: 2000, right: 3000 };
    const result = computeLegendRowsBasedOnData({
      zoom,
      data: mockData,
      legendRows: mockLegendRows,
    });

    if (result) {
      expect(result).toHaveLength(2);
      expect(result[0].legendTitle).toBe('cpu');
      expect(result[0].data.total).toBe(35);
      expect(result[0].data.max).toBe(20);
      expect(result[0].data.last).toBe(20);
      expect(result[1].legendTitle).toBe('memory');
      expect(result[1].data.total).toBe(55);
      expect(result[1].data.average).toBe(27.5);
      expect(result[1].data.last).toBe(30);
    } else {
      expect.fail(failMessage);
    }
  });

  it('should preserve legend colors and titles', () => {
    const zoom: ZoomState = { left: 1000, right: 2000 };
    const result = computeLegendRowsBasedOnData({
      zoom,
      data: mockData,
      legendRows: mockLegendRows,
    });

    if (!result) {
      expect.fail(failMessage);
    }

    expect(result[0].legendColor).toBe('blue');
    expect(result[1].legendColor).toBe('red');
  });

  it('should handle missing values in data', () => {
    const dataWithMissing: DataSet[] = [
      { timestamp: 1000, cpu: 10 },
      { timestamp: 2000, memory: 25 },
    ];
    const zoom: ZoomState = { left: 1000, right: 2000 };
    const result = computeLegendRowsBasedOnData({
      zoom,
      data: dataWithMissing,
      legendRows: mockLegendRows,
    });

    if (!result) {
      expect.fail(failMessage);
    }

    expect(result[0].data.total).toBe(10);
    expect(result[1].data.total).toBe(25);
  });
});
```
New utils file, `CloudPulseZoomInUtils` (`@@ -0,0 +1,141 @@`):

```ts
import { type Metrics, roundTo } from '@linode/utilities';

import { humanizeLargeData } from './utils';

import type { ZoomState } from '../Widget/components/useZoomController';
import type { DataSet } from 'src/components/AreaChart/AreaChart';
import type { MetricsDisplayRow } from 'src/components/LineGraph/MetricsDisplay';

interface ZoomStateData {
  /**
   * The data to be processed according to the zoom state
   */
  data: DataSet[];
  /**
   * Indicates if the unit is humanizable
   */
  isHumanizableUnit?: boolean;
  /**
   * The legend rows to be processed according to the zoom state
   */
  legendRows?: MetricsDisplayRow[];

  /**
   * The unit of measurement for formatting
   */
  unit?: string;

  /**
   * The current zoom state
   */
  zoom: ZoomState;
}

/**
 * @param data The data for which to compute the zoomed-in subset
 * @param zoom The current zoom state
 * @returns The subset of data that falls within the zoomed-in range
 */
export const computeZoomedInData = ({
  data,
  zoom,
}: ZoomStateData): DataSet[] => {
  if (!data || data.length === 0) {
    return data;
  }
  if (zoom.left === 'dataMin' && zoom.right === 'dataMax') {
    return data;
  }

  const minZoom = zoom.left === 'dataMin' ? data[0].timestamp : zoom.left; // left zoom boundary
  const maxZoom =
    zoom.right === 'dataMax' ? data[data.length - 1].timestamp : zoom.right; // right zoom boundary
  return data.filter(
    ({ timestamp }) => timestamp >= minZoom && timestamp <= maxZoom
  );
};

/**
 * @param zoom The current zoom state
 * @param data The data to compute legend rows from
 * @param legendRows The original legend rows
 * @returns The computed legend rows based on the zoomed-in data
 */
export const computeLegendRowsBasedOnData = ({
  data,
  zoom,
  legendRows,
  unit,
  isHumanizableUnit,
}: ZoomStateData) => {
  if (!legendRows || !data || !data.length) return undefined;

  // If not zoomed, return original rows unchanged
  if (zoom.left === 'dataMin' && zoom.right === 'dataMax') {
    return legendRows;
  }

  const minZoom = zoom.left === 'dataMin' ? data[0].timestamp : zoom.left; // left zoom boundary
```
Contributor comment on the `minZoom` line above (truncated in the page capture): "This could throw error if data is …"
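The comment is cut off, but it appears to concern reading `data[0].timestamp` when `data` has no elements. The function already returns `undefined` when `!data || !data.length`, so as written the situation should not arise; if extra safety were wanted, a minimal defensive variant (an assumption, not code from this PR) might look like:

```ts
// Sketch only: fall back to 0 when there is no data point to anchor
// the left boundary on (assumed behavior, not from this PR).
const minZoom =
  zoom.left === 'dataMin' ? (data[0]?.timestamp ?? 0) : zoom.left;
```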
The file continues:

```ts
  const maxZoom =
    zoom.right === 'dataMax' ? data[data.length - 1].timestamp : zoom.right; // right zoom boundary

  return legendRows.map((legendRow) => {
    const values: number[] = [];

    for (const dataRow of data) {
      const value = dataRow[legendRow.legendTitle];
      if (
        typeof value === 'number' &&
        !Number.isNaN(value) &&
        dataRow.timestamp >= minZoom &&
        dataRow.timestamp <= maxZoom
      ) {
        values.push(value);
      }
    }

    return {
      ...legendRow,
      format: isHumanizableUnit
        ? (value: number) => `${humanizeLargeData(value)} ${unit}` // continue to humanize values
        : (value: number) => `${roundTo(value)} ${unit}`, // only round the values, units and values are already scaled up
      data: getMetricsFromDimensionData(values),
    };
  });
};

/**
 * @param data The data of the current dimension
 * @returns The max, avg, last, length, total from the data
 */
export const getMetricsFromDimensionData = (data: number[]): Metrics => {
  // If there's no data
  if (!data || !Array.isArray(data) || data.length < 1) {
    return { average: 0, last: 0, length: 0, max: 0, total: 0 };
  }

  let max = 0;
  let sum = 0;

  // The data is large, so we get everything we need in one iteration
  data.forEach((value): void => {
    if (value === null || value === undefined || Number.isNaN(value)) {
      return;
    }

    if (value > max) {
      max = value;
    }

    sum += value;
  });

  const length = data.length;

  // Safeguard against dividing by 0
  const average = length > 0 ? sum / length : 0;

  const last = data[length - 1] || 0;

  return { average, last, length, max, total: sum };
};
```
Review comment: The test checks `total` and `max` but does not verify `average` and `length` when NaN values are present. The current implementation includes NaN values in the length calculation (line 133, `const length = data.length;`), which would make the average calculation include them in the denominator despite excluding them from the sum. Add assertions for `average` and `length` to ensure the behavior matches expectations.
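A possible shape for the extra assertions the comment asks for; the expected numbers assume the implementation's current behavior (NaN excluded from the sum but still counted in `length`), which may itself be what the team decides to change:

```ts
it('should document how NaN values affect length and average', () => {
  const data = [10, NaN, 30, NaN, 50];
  const result = getMetricsFromDimensionData(data);

  // NaN entries are skipped in the sum (total = 90) but still counted in
  // length, so the average divides by all 5 entries: 90 / 5 = 18.
  expect(result.length).toBe(5);
  expect(result.average).toBe(18);
});
```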