MM-57878 Add PerformanceReporter for clientside performance metrics (#26800)

* Define rough code for PerformanceReporter

* Create a component to manage the PerformanceReporter

* Start adding tests for PerformanceReporter

* Add test for web vitals reporting

* Update schema to more closely match the API spec

* Collect marks as counters and further update structure of API payload

* Add some outstanding TODOs about the API structure

* Add counter for long tasks

* Add EnableClientMetrics without any System Console UI

* Have PerformanceReporter use EnableClientMetrics

* Have the PerformanceReporter only report results when logged in

* Add test for having PerformanceReporter fall back to fetch

* Stop logging errors for measurements failing

* Remove buffered from observer

* Remove the Mystery Ampersand

* Still record marks with telemetry actions even if telemetry is disabled

* Add timestamps to performance reports

* Reuse the new telemetry code for the old telemetry

* The second half of the last commit

* Use Node performance libraries in all tests

* Set version of PerformanceReport

* Switch to the proper version of EnableClientMetrics

* Remove TODO for unneeded field

* Add user agent and platform detection

* Updated metrics API route
hmhealey committed May 9, 2024
1 parent d6a8ad0 commit de3c7ad
Showing 22 changed files with 1,188 additions and 67 deletions.
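The PerformanceReporter class itself (utils/performance_telemetry/reporter, imported by the controller component below) is not among the hunks excerpted on this page. As a rough, hypothetical sketch of the behavior the commit messages describe — marks collected as counters, a counter for long tasks, web-vitals values as histogram samples, timestamps and a schema version on each report, and a transport that falls back from sendBeacon to fetch — it could be assembled as follows. Every name, payload field, and value here is illustrative, not the shipped implementation:

// Illustrative sketch only; the real reporter lives in
// webapp/channels/src/utils/performance_telemetry/reporter.ts and differs in detail.
import {onCLS, onFCP, onINP, onLCP, onTTFB, type Metric} from 'web-vitals';

type MetricSample = {metric: string; value: number; timestamp: number};

class PerformanceReporterSketch {
    private counters = new Map<string, number>();
    private histogramSamples: MetricSample[] = [];
    private observer: PerformanceObserver;

    constructor(private reportUrl: string) {
        this.observer = new PerformanceObserver((entries) => {
            for (const entry of entries.getEntries()) {
                if (entry.entryType === 'longtask') {
                    // Long tasks are only counted; this entry type is Chrome-only
                    this.incrementCounter('long_tasks');
                } else if (entry.entryType === 'mark') {
                    // Marks are collected as counters rather than as timings
                    this.incrementCounter(entry.name);
                } else if (entry.entryType === 'measure') {
                    this.histogramSamples.push({metric: entry.name, value: entry.duration, timestamp: Date.now()});
                }
            }
        });
    }

    observe() {
        this.observer.observe({entryTypes: ['mark', 'measure', 'longtask']});

        // web-vitals offers no way to unsubscribe, which is why the controller
        // component below assumes it is never unmounted
        const record = (metric: Metric) => {
            this.histogramSamples.push({metric: metric.name, value: metric.value, timestamp: Date.now()});
        };
        onCLS(record);
        onFCP(record);
        onINP(record);
        onLCP(record);
        onTTFB(record);
    }

    private incrementCounter(name: string) {
        this.counters.set(name, (this.counters.get(name) ?? 0) + 1);
    }

    // In the real reporter this would run periodically, and only while logged in
    private report() {
        const body = JSON.stringify({
            version: '0.1.0', // illustrative schema version
            counters: [...this.counters].map(([metric, value]) => ({metric, value})),
            histograms: this.histogramSamples,
        });
        this.counters.clear();
        this.histogramSamples = [];

        // sendBeacon survives page unload; fall back to fetch if it
        // is unavailable or refuses the payload
        if (!navigator.sendBeacon(this.reportUrl, body)) {
            fetch(this.reportUrl, {method: 'POST', body, keepalive: true});
        }
    }
}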
1 change: 1 addition & 0 deletions server/config/client.go
@@ -261,6 +261,7 @@ func GenerateLimitedClientConfig(c *model.Config, telemetryID string, license *m
 	props["IosMinVersion"] = c.ClientRequirements.IosMinVersion
 
 	props["EnableDiagnostics"] = strconv.FormatBool(*c.LogSettings.EnableDiagnostics)
+	props["EnableClientMetrics"] = strconv.FormatBool(*c.MetricsSettings.EnableClientMetrics)
 
 	props["EnableComplianceExport"] = strconv.FormatBool(*c.MessageExportSettings.EnableExport)
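Downstream, the webapp receives this limited-config prop as the string 'true' or 'false', like the other booleans serialized above. A guard on the client presumably reads it the same way; a minimal illustrative selector (the helper name and exact wiring are made up, not taken from this commit):

import {getConfig} from 'mattermost-redux/selectors/entities/general';

import type {GlobalState} from 'types/store';

// Illustrative helper; config booleans arrive as 'true'/'false' strings
export function isClientMetricsEnabled(state: GlobalState): boolean {
    return getConfig(state).EnableClientMetrics === 'true';
}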
5 changes: 3 additions & 2 deletions server/platform/services/telemetry/telemetry.go
@@ -758,8 +758,9 @@ func (ts *TelemetryService) trackConfig() {
 	})
 
 	ts.SendTelemetry(TrackConfigMetrics, map[string]any{
-		"enable":             *cfg.MetricsSettings.Enable,
-		"block_profile_rate": *cfg.MetricsSettings.BlockProfileRate,
+		"enable":                *cfg.MetricsSettings.Enable,
+		"block_profile_rate":    *cfg.MetricsSettings.BlockProfileRate,
+		"enable_client_metrics": *cfg.MetricsSettings.EnableClientMetrics,
 	})
 
 	ts.SendTelemetry(TrackConfigNativeApp, map[string]any{
1 change: 1 addition & 0 deletions webapp/channels/package.json
@@ -98,6 +98,7 @@
     "tinycolor2": "1.4.2",
     "turndown": "7.1.1",
     "typescript": "5.3.3",
+    "web-vitals": "3.5.2",
     "zen-observable": "0.9.0"
 },
 "devDependencies": {
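web-vitals 3.5.2 is the new runtime dependency behind the web-vitals reporting. Its v3 API is subscription-based: each vital gets a callback that fires once the value is final (for CLS, typically when the tab is hidden), or on every change if reportAllChanges is passed. A minimal registration with a stand-in handler:

import {onCLS, onLCP, type Metric} from 'web-vitals';

function record(metric: Metric) {
    // metric.id stays stable across updates of the same metric instance,
    // so consumers can deduplicate when reportAllChanges is on
    console.log(metric.name, metric.id, metric.value);
}

// Fires once, when the metric is finalized
onLCP(record);

// Fires on every change instead of only at the end
onCLS(record, {reportAllChanges: true});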
54 changes: 9 additions & 45 deletions webapp/channels/src/actions/telemetry_actions.jsx
@@ -10,15 +10,6 @@ import {getBool} from 'mattermost-redux/selectors/entities/preferences';
 import {isDevModeEnabled} from 'selectors/general';
 import store from 'stores/redux_store';
 
-const SUPPORTS_CLEAR_MARKS = isSupported([performance.clearMarks]);
-const SUPPORTS_MARK = isSupported([performance.mark]);
-const SUPPORTS_MEASURE_METHODS = isSupported([
-    performance.measure,
-    performance.getEntries,
-    performance.getEntriesByName,
-    performance.clearMeasures,
-]);
-
 const HEADER_X_PAGE_LOAD_CONTEXT = 'X-Page-Load-Context';
 
 export function isTelemetryEnabled(state) {
@@ -58,59 +49,37 @@ export function pageVisited(category, name)
  *
  */
 export function clearMarks(names) {
-    if (!shouldTrackPerformance() || !SUPPORTS_CLEAR_MARKS) {
+    if (!shouldTrackPerformance()) {
         return;
     }
     names.forEach((name) => performance.clearMarks(name));
 }
 
 export function mark(name) {
-    if (!shouldTrackPerformance() || !SUPPORTS_MARK) {
+    performance.mark(name);
+
+    if (!shouldTrackPerformance()) {
         return;
     }
-    performance.mark(name);
 
     initRequestCountingIfNecessary();
     updateRequestCountAtMark(name);
 }
 
 /**
- * Takes the names of two markers and invokes performance.measure on
- * them. The measured duration (ms) and the string name of the measure is
- * are returned.
+ * Takes the names of two markers and returns the number of requests sent between them.
  *
  * @param {string} name1 the first marker
  * @param {string} name2 the second marker
  *
- * @returns {{duration: number; requestCount: number; measurementName: string}}
- * An object containing the measured duration (in ms) between two marks, the
- * number of API requests made during that period, and the name of the measurement.
- * Returns a duration and request count of -1 if performance isn't being tracked
- * or one of the markers can't be found.
+ * @returns {number} Returns a request count of -1 if performance isn't being tracked
  *
  */
-export function measure(name1, name2) {
-    if (!shouldTrackPerformance() || !SUPPORTS_MEASURE_METHODS) {
-        return {duration: -1, requestCount: -1, measurementName: ''};
-    }
-
-    // Check for existence of entry name to avoid DOMException
-    const performanceEntries = performance.getEntries();
-    if (![name1, name2].every((name) => performanceEntries.find((item) => item.name === name))) {
-        return {duration: -1, requestCount: -1, measurementName: ''};
+export function countRequestsBetween(name1, name2) {
+    if (!shouldTrackPerformance()) {
+        return -1;
     }
 
-    const displayPrefix = '🐐 Mattermost: ';
-    const measurementName = `${displayPrefix}${name1} - ${name2}`;
-    performance.measure(measurementName, name1, name2);
-    const duration = mostRecentDurationByEntryName(measurementName);
-
     const requestCount = getRequestCountAtMark(name2) - getRequestCountAtMark(name1);
 
-    // Clean up the measures we created
-    performance.clearMeasures(measurementName);
-
-    return {duration, requestCount, measurementName};
+    return requestCount;
 }
 
 /**
@@ -154,11 +123,6 @@ export function measurePageLoadTelemetry() {
     }, tenSeconds);
 }
 
-function mostRecentDurationByEntryName(entryName) {
-    const entriesWithName = performance.getEntriesByName(entryName);
-    return entriesWithName.map((item) => item.duration)[entriesWithName.length - 1];
-}
-
 function isSupported(checks) {
     for (let i = 0, len = checks.length; i < len; i++) {
         const item = checks[i];
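With measure() gone, a caller now pairs mark() with countRequestsBetween() and leaves duration measurement to measureAndReport() (see post_list.tsx below). An illustrative call sequence — the marker names here are invented:

import {clearMarks, countRequestsBetween, mark} from 'actions/telemetry_actions.jsx';

mark('ExampleWidget#open'); // records the mark and snapshots the current request count
// ...the interaction being measured runs and issues API requests...
mark('ExampleWidget#rendered');

// Client4 requests issued between the two marks, or -1 when tracking is off
const requestCount = countRequestsBetween('ExampleWidget#open', 'ExampleWidget#rendered');

// Clear the marks so the next interaction starts fresh
clearMarks(['ExampleWidget#open', 'ExampleWidget#rendered']);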
47 changes: 29 additions & 18 deletions webapp/channels/src/components/post_view/post_list/post_list.tsx
@@ -6,13 +6,14 @@ import React from 'react';
 import type {ActionResult} from 'mattermost-redux/types/actions';
 
 import type {updateNewMessagesAtInChannel} from 'actions/global_actions';
-import {clearMarks, mark, measure, trackEvent} from 'actions/telemetry_actions.jsx';
+import {clearMarks, countRequestsBetween, mark, shouldTrackPerformance, trackEvent} from 'actions/telemetry_actions.jsx';
 import type {LoadPostsParameters, LoadPostsReturnValue, CanLoadMorePosts} from 'actions/views/channel';
 
 import LoadingScreen from 'components/loading_screen';
 import VirtPostList from 'components/post_view/post_list_virtualized/post_list_virtualized';
 
 import {PostRequestTypes} from 'utils/constants';
+import {measureAndReport} from 'utils/performance_telemetry';
 import {getOldestPostId, getLatestPostId} from 'utils/post_utils';
 
 const MAX_NUMBER_OF_AUTO_RETRIES = 3;
@@ -23,29 +24,39 @@ export const MAX_EXTRA_PAGES_LOADED = 10;
 function markAndMeasureChannelSwitchEnd(fresh = false) {
     mark('PostList#component');
 
-    const {duration: dur1, requestCount: requestCount1} = measure('SidebarChannelLink#click', 'PostList#component');
-    const {duration: dur2, requestCount: requestCount2} = measure('TeamLink#click', 'PostList#component');
+    // Send new performance metrics to server
+    const channelSwitch = measureAndReport('channel_switch', 'SidebarChannelLink#click', 'PostList#component', true);
+    const teamSwitch = measureAndReport('team_switch', 'TeamLink#click', 'PostList#component', true);
 
+    // Send old performance metrics to Rudder
+    if (shouldTrackPerformance()) {
+        if (channelSwitch) {
+            const requestCount1 = countRequestsBetween('SidebarChannelLink#click', 'PostList#component');
+
+            trackEvent('performance', 'channel_switch', {
+                duration: Math.round(channelSwitch.duration),
+                fresh,
+                requestCount: requestCount1,
+            });
+        }
+
+        if (teamSwitch) {
+            const requestCount2 = countRequestsBetween('TeamLink#click', 'PostList#component');
+
+            trackEvent('performance', 'team_switch', {
+                duration: Math.round(teamSwitch.duration),
+                fresh,
+                requestCount: requestCount2,
+            });
+        }
+    }
+
+    // Clear all the metrics so that we can differentiate between a channel and team switch next time this is called
     clearMarks([
         'SidebarChannelLink#click',
         'TeamLink#click',
         'PostList#component',
     ]);
-
-    if (dur1 !== -1) {
-        trackEvent('performance', 'channel_switch', {
-            duration: Math.round(dur1),
-            fresh,
-            requestCount: requestCount1,
-        });
-    }
-    if (dur2 !== -1) {
-        trackEvent('performance', 'team_switch', {
-            duration: Math.round(dur2),
-            fresh,
-            requestCount: requestCount2,
-        });
-    }
 }
 
 export interface Props {
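measureAndReport comes from utils/performance_telemetry, which this page does not show. Judging only from the call sites above — four arguments, a falsy return when a mark is absent, and a .duration on success — a plausible shape is the following hypothetical reconstruction; the real implementation may differ:

// Hypothetical reconstruction from the call sites in post_list.tsx above.
// The resulting measure entry is presumably what the reporter's
// PerformanceObserver picks up and forwards to the server.
export function measureAndReport(
    name: string,
    startMark: string,
    endMark: string,
    canFail = false,
): PerformanceMeasure | undefined {
    try {
        return performance.measure(name, startMark, endMark);
    } catch (e) {
        if (canFail) {
            // e.g. the start mark was never set because the user didn't
            // arrive here by switching channels or teams
            return undefined;
        }
        throw e;
    }
}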
@@ -4,6 +4,7 @@ exports[`components/Root Routes Should mount public product routes 1`] = `
 <RootProvider>
   <Connect(MobileViewWatcher) />
   <LuxonController />
+  <PerformanceReporterController />
   <Switch>
     <Route
       component={[Function]}
@@ -0,0 +1,28 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

import {useEffect, useRef} from 'react';
import {useStore} from 'react-redux';

import {Client4} from 'mattermost-redux/client';

import PerformanceReporter from 'utils/performance_telemetry/reporter';

export default function PerformanceReporterController() {
    const store = useStore();

    const reporter = useRef<PerformanceReporter>();

    useEffect(() => {
        reporter.current = new PerformanceReporter(Client4, store);
        reporter.current.observe();

        // There's no way to clean up web-vitals, so continue to assume that this component won't ever be unmounted
        return () => {
            // eslint-disable-next-line no-console
            console.error('PerformanceReporterController - Component unmounted or store changed');
        };
    }, [store]);

    return null;
}
2 changes: 2 additions & 0 deletions webapp/channels/src/components/root/root.tsx
@@ -55,6 +55,7 @@ import * as Utils from 'utils/utils';
 import type {ProductComponent, PluginComponent} from 'types/store/plugins';
 
 import LuxonController from './luxon_controller';
+import PerformanceReporterController from './performance_reporter_controller';
 import RootProvider from './root_provider';
 import RootRedirect from './root_redirect';
 
@@ -447,6 +448,7 @@ export default class Root extends React.PureComponent<Props, State> {
             <RootProvider>
                 <MobileViewWatcher/>
                 <LuxonController/>
+                <PerformanceReporterController/>
                 <Switch>
                     <Route
                         path={'/error'}
15 changes: 15 additions & 0 deletions webapp/channels/src/tests/helpers/user_agent_mocks.ts
@@ -8,21 +8,36 @@
 
 let currentUA = '';
 let initialUA = '';
+let currentPlatform = '';
+let initialPlatform = '';
 
 window.navigator = window.navigator || {};
 
 initialUA = window.navigator.userAgent;
+initialPlatform = window.navigator.platform;
 
 Object.defineProperty(window.navigator, 'userAgent', {
     get() {
         return currentUA;
     },
 });
+Object.defineProperty(window.navigator, 'platform', {
+    get() {
+        return currentPlatform;
+    },
+});
 
 export function reset() {
     set(initialUA);
+    setPlatform(initialPlatform);
 }
 export function set(ua: string) {
     currentUA = ua;
 }
+export function setPlatform(platform: string) {
+    currentPlatform = platform;
+}
 
 export function mockSafari() {
     set('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1 Safari/605.1.15');
 }
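A test that exercises the new platform detection might combine these helpers like so (the test itself is illustrative, not part of the commit):

import * as UserAgentMocks from 'tests/helpers/user_agent_mocks';

describe('platform detection', () => {
    afterEach(() => {
        UserAgentMocks.reset();
    });

    test('reports macOS Safari', () => {
        UserAgentMocks.mockSafari();
        UserAgentMocks.setPlatform('MacIntel');

        expect(window.navigator.userAgent).toContain('Safari');
        expect(window.navigator.platform).toBe('MacIntel');
    });
});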
79 changes: 79 additions & 0 deletions webapp/channels/src/tests/performance_mock.test.ts
@@ -0,0 +1,79 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

import {waitForObservations} from './performance_mock';

describe('PerformanceObserver', () => {
    test('should be able to observe a mark', async () => {
        const callback = jest.fn();

        const observer = new PerformanceObserver(callback);
        observer.observe({entryTypes: ['mark']});

        const testMark = performance.mark('testMark');

        await waitForObservations();

        expect(callback).toHaveBeenCalledTimes(1);

        const observedEntries = callback.mock.calls[0][0].getEntries();
        expect(observedEntries).toHaveLength(1);
        expect(observedEntries[0]).toBe(testMark);
        expect(observedEntries[0]).toMatchObject({
            entryType: 'mark',
            name: 'testMark',
        });
    });

    test('should be able to observe multiple marks', async () => {
        const callback = jest.fn();

        const observer = new PerformanceObserver(callback);
        observer.observe({entryTypes: ['mark']});

        const testMarkA = performance.mark('testMarkA');
        const testMarkB = performance.mark('testMarkB');

        await waitForObservations();

        expect(callback).toHaveBeenCalledTimes(1);

        // Both marks were batched into a single call
        const observedEntries = callback.mock.calls[0][0].getEntries();
        expect(observedEntries).toHaveLength(2);
        expect(observedEntries[0]).toBe(testMarkA);
        expect(observedEntries[0]).toMatchObject({
            entryType: 'mark',
            name: 'testMarkA',
        });
        expect(observedEntries[1]).toBe(testMarkB);
        expect(observedEntries[1]).toMatchObject({
            entryType: 'mark',
            name: 'testMarkB',
        });
    });

    test('should be able to observe a measure', async () => {
        const callback = jest.fn();

        const observer = new PerformanceObserver(callback);
        observer.observe({entryTypes: ['measure']});

        const testMarkA = performance.mark('testMarkA');
        const testMarkB = performance.mark('testMarkB');
        const testMeasure = performance.measure('testMeasure', 'testMarkA', 'testMarkB');

        await waitForObservations();

        expect(callback).toHaveBeenCalledTimes(1);

        const observedEntries = callback.mock.calls[0][0].getEntries();
        expect(observedEntries).toHaveLength(1);
        expect(observedEntries[0]).toBe(testMeasure);
        expect(observedEntries[0]).toMatchObject({
            entryType: 'measure',
            name: 'testMeasure',
            duration: testMarkB.startTime - testMarkA.startTime,
        });
    });
});
29 changes: 29 additions & 0 deletions webapp/channels/src/tests/performance_mock.ts
@@ -0,0 +1,29 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

import {PerformanceObserver as NodePerformanceObserver, performance as nodePerformance} from 'node:perf_hooks';

// These aren't a perfect match for window.performance and PerformanceObserver, but they're close enough. They don't
// work with `jest.useFakeTimers` because that overwrites window.performance in a way that breaks the Node.js version.
//
// To use PerformanceObserver, you need to use a `setTimeout` or `await observations()` to have a PerformanceObserver's
// callback get called. See the accompanying tests for examples.

Object.defineProperty(window, 'performance', {
    writable: true,
    value: nodePerformance,
});

Object.defineProperty(global, 'PerformanceObserver', {
    value: NodePerformanceObserver,
});

// Only Chrome-based browsers support long task timings currently, so make Node pretend it does too
Object.defineProperty(PerformanceObserver, 'supportedEntryTypes', {
    value: [...PerformanceObserver.supportedEntryTypes, 'longtask'],
});

export function waitForObservations() {
    // Performance observations are processed after any timeout
    return new Promise((resolve) => setTimeout(resolve));
}
