Skip to content

Commit

Permalink
feat: adding built-in mark to compare to get better metrics
Browse files Browse the repository at this point in the history
  • Loading branch information
willmendesneto committed Sep 30, 2020
1 parent caa1de2 commit 54fd7c9
Show file tree
Hide file tree
Showing 5 changed files with 79 additions and 19 deletions.
36 changes: 32 additions & 4 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,36 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

## [Unreleased][]

### Updated

- `end()`: Adds a built-in mark to compare against if the application does not pass one. This gives consumers all the required information.

Without passing a mark to compare

```js
import * as PerfMarks from 'perf-marks';

...
PerfMarks.start('name-of-your-mark');
...
const markResults: PerfMarks.PerfMarksPerformanceEntry = PerfMarks.end('name-of-your-mark');
```

Passing a mark to compare

```js
import * as PerfMarks from 'perf-marks';

...
PerfMarks.start('name-of-your-mark');
PerfMarks.start('name-of-your-mark-to-be-compared-with');
...
const markResults: PerfMarks.PerfMarksPerformanceEntry = PerfMarks.end(
'name-of-your-mark',
'name-of-your-mark-to-be-compared-with'
);
```

## [1.13.4][] - 2020-08-21

### Fixed
Expand Down Expand Up @@ -359,7 +389,5 @@ if (PerfMarks.isPerformanceObservableSupported) {
[1.13.2]: https://github.com/willmendesneto/perf-marks/tree/v1.13.2
[unreleased]: https://github.com/willmendesneto/perf-marks/compare/v1.13.3...HEAD
[1.13.3]: https://github.com/willmendesneto/perf-marks/tree/v1.13.3


[Unreleased]: https://github.com/willmendesneto/perf-marks/compare/v1.13.4...HEAD
[1.13.4]: https://github.com/willmendesneto/perf-marks/tree/v1.13.4
[unreleased]: https://github.com/willmendesneto/perf-marks/compare/v1.13.4...HEAD
[1.13.4]: https://github.com/willmendesneto/perf-marks/tree/v1.13.4
6 changes: 5 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -111,12 +111,16 @@ PerfMarks.start('name-of-your-mark');
...
```

### `PerfMarks.end(markName)`
### `PerfMarks.end(markName, markNameToCompare)`

Returns the results for the specified mark.

> `PerfMarks.end(markName)` calls `PerfMarks.clear(markName)` after return the mark values
If `markNameToCompare` value is not passed, the package will create a mark using `markName + '-end'`. Otherwise, it will compare based on the given mark.

> If you're passing a `markNameToCompare` value, please make sure you've also started a mark with the same name beforehand
```js
import * as PerfMarks from 'perf-marks';

Expand Down
24 changes: 23 additions & 1 deletion src/__tests__/perf-marks.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ describe('PerfMarks: User timing API is available', () => {
]);

const mark = 'mark';
const secondMark = 'mark-end';
const secondMark = 'mark-end-of-measurement';

PerfMarks.start(mark);
PerfMarks.start(secondMark);
Expand All @@ -100,6 +100,28 @@ describe('PerfMarks: User timing API is available', () => {
expect(performance.clearMeasures).toHaveBeenNthCalledWith(2, secondMark);
});

it('should remove mark and compared mark with the one created internally if second parameter was NOT passed to `end()`', () => {
jest.spyOn(performance, 'getEntriesByName').mockImplementation(() => [
{
duration: 1,
startTime: 2,
toJSON: () => null,
entryType: '',
name: '',
},
]);

const mark = 'mark';

PerfMarks.start(mark);
PerfMarks.end(mark);

expect(performance.clearMarks).toHaveBeenNthCalledWith(1, mark);
expect(performance.clearMarks).toHaveBeenNthCalledWith(2, `${mark}-end`);
expect(performance.clearMeasures).toHaveBeenNthCalledWith(1, mark);
expect(performance.clearMeasures).toHaveBeenNthCalledWith(2, `${mark}-end`);
});

it('should return user timing information if user finishes mark', () => {
const mark = 'mark';
PerfMarks.start(mark);
Expand Down
4 changes: 2 additions & 2 deletions src/__tests__/profiler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -61,13 +61,13 @@ describe('PerfMarks: User timing API is NOT available', () => {
expect(performance.getEntriesByName(markProfiler)).toHaveLength(0);
});

it('should run profile if receives callback as promise', async () => {
it('should run profile if receives callback as promise', async function PROFILING_TEST() {
const functionToBeProfiled = () =>
// eslint-disable-next-line compat/compat
new Promise(resolve => {
return setTimeout(() => {
resolve(testFunction());
}, 500);
}, 5);
});

const result = await profiler(functionToBeProfiled, markProfiler);
Expand Down
28 changes: 17 additions & 11 deletions src/marks.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ const clear = (markName: string): void => {
marksMap[markName] = undefined;

// Removes PerformanceObserver references from memory
if (marksObserver[markName]) {
if (!!marksObserver[markName]) {
marksObserver[markName] = undefined;
}

Expand Down Expand Up @@ -93,11 +93,20 @@ const end = (markName: string, markNameToCompare?: string): PerfMarksPerformance
try {
const startTime = marksMap[markName];

// NodeJS is not using performance api directly from them for now
if (!isUserTimingAPISupported || isNodeJSEnv) {
// `performance.measure()` behaves diferently between frontend and
// backend in Javascript applications. Using based on NodeJS docs
performance.measure(markName, markName, markNameToCompare || markName);
if (!isUserTimingAPISupported) {
return startTime
? ({ duration: getTimeNow() - startTime, startTime, entryType: 'measure', name: markName } as PerformanceEntry)
: {};
}
// If there's no User Timing mark to be compared with,
// the package will create one to be used for better comparison
if (!markNameToCompare) {
performance.mark(`${markName}-end`);
}

performance.measure(markName, markName, markNameToCompare || `${markName}-end`);

if (isNodeJSEnv) {
if (!!marksObserver[markName]) {
return marksObserver[markName] as PerfMarksPerformanceEntry;
}
Expand All @@ -106,7 +115,6 @@ const end = (markName: string, markNameToCompare?: string): PerfMarksPerformance
: {};
}

performance.measure(markName, markName, markNameToCompare || undefined);
const entry: PerformanceEntry | undefined = performance.getEntriesByName(markName).pop();

return entry || {};
Expand All @@ -115,15 +123,13 @@ const end = (markName: string, markNameToCompare?: string): PerfMarksPerformance
// This could only happen if something in event loop crashed
// in an unexpected place earlier.
// Don't pile on with more errors.

return {};
} finally {
// Clear marks immediately to avoid growing buffer.
clear(markName);
// Clear marks used for comparison in case of it's value was passed
if (markNameToCompare) {
clear(markNameToCompare);
}
// If the mark to compare is not passed, it should remove the one we create with `-end` suffix
clear(markNameToCompare || `${markName}-end`);
}
};

Expand Down

0 comments on commit 54fd7c9

Please sign in to comment.