perf_hooks: allow omitted parameters in 'performance.measure'
Make `startMark` and `endMark` parameters optional.

PR-URL: https://github.com/nodejs/node/pull/32651
Fixes: https://github.com/nodejs/node/issues/32647
Refs: https://www.w3.org/TR/user-timing-2/#measure-method
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Chengzhong Wu <legendecas@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
doc/api/perf_hooks.md:

@@ -17,9 +17,12 @@ const obs = new PerformanceObserver((items) => {
   performance.clearMarks();
 });
 obs.observe({ entryTypes: ['measure'] });
+performance.measure('Start to Now');

 performance.mark('A');
 doSomeLongRunningProcess(() => {
+  performance.measure('A to Now', 'A');
+
   performance.mark('B');
   performance.measure('A to B', 'A', 'B');
 });
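The example in this hunk leans on a `doSomeLongRunningProcess` helper that the docs never define. A self-contained variant of the same flow, substituting a plain `setTimeout` for that hypothetical helper (and assuming a Node.js build that includes this change), would be:

```js
'use strict';
const { PerformanceObserver, performance } = require('perf_hooks');

const obs = new PerformanceObserver((items) => {
  // Log each finished measure, then clear marks as in the doc example.
  for (const entry of items.getEntries())
    console.log(`${entry.name}: ${entry.duration}ms`);
  performance.clearMarks();
});
obs.observe({ entryTypes: ['measure'] });
performance.measure('Start to Now');

performance.mark('A');
setTimeout(() => {  // stand-in for doSomeLongRunningProcess
  performance.measure('A to Now', 'A');

  performance.mark('B');
  performance.measure('A to B', 'A', 'B');
}, 250);
```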
@@ -53,14 +56,18 @@ Creates a new `PerformanceMark` entry in the Performance Timeline. A
 `performanceEntry.duration` is always `0`. Performance marks are used
 to mark specific significant moments in the Performance Timeline.

-### `performance.measure(name, startMark, endMark)`
+### `performance.measure(name[, startMark[, endMark]])`
 <!-- YAML
 added: v8.5.0
+changes:
+  - version: REPLACEME
+    pr-url: https://github.com/nodejs/node/pull/32651
+    description: Make `startMark` and `endMark` parameters optional.
 -->

 * `name` {string}
-* `startMark` {string}
-* `endMark` {string}
+* `startMark` {string} Optional.
+* `endMark` {string} Optional.

 Creates a new `PerformanceMeasure` entry in the Performance Timeline. A
 `PerformanceMeasure` is a subclass of `PerformanceEntry` whose
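With the revised signature, all three call shapes are valid. A quick sketch of what each form measures, per the semantics described in the next hunk (again assuming a build with this change):

```js
// No marks: from timeOrigin to performance.now().
performance.measure('process so far');

// startMark only: from mark 'A' to performance.now().
performance.mark('A');
performance.measure('since A', 'A');

// Both marks: between two existing marks.
performance.mark('B');
performance.measure('A to B', 'A', 'B');
```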
@@ -73,9 +80,10 @@ Performance Timeline, or *may* identify any of the timestamp properties
 provided by the `PerformanceNodeTiming` class. If the named `startMark` does
 not exist, then `startMark` is set to [`timeOrigin`][] by default.

-The `endMark` argument must identify any *existing* `PerformanceMark` in the
-Performance Timeline or any of the timestamp properties provided by the
-`PerformanceNodeTiming` class. If the named `endMark` does not exist, an
+The optional `endMark` argument must identify any *existing* `PerformanceMark`
+in the Performance Timeline or any of the timestamp properties provided by the
+`PerformanceNodeTiming` class. `endMark` will be `performance.now()`
+if no parameter is passed, otherwise if the named `endMark` does not exist, an
 error will be thrown.

 ### `performance.nodeTiming`
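So only an `endMark` that is explicitly passed gets validated; omitting it can never throw. A short sketch of the resulting behavior:

```js
performance.mark('A');

performance.measure('to now', 'A');        // endMark omitted: ends at performance.now()
performance.measure('fallback', 'nope');   // unknown startMark: start falls back to timeOrigin

try {
  performance.measure('boom', 'A', 'nope');  // unknown endMark: rejected
} catch (err) {
  console.log(err.code);  // ERR_INVALID_PERFORMANCE_MARK
}
```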
lib/perf_hooks.js:

@@ -395,12 +395,14 @@ class Performance {

   measure(name, startMark, endMark) {
     name = `${name}`;
-    endMark = `${endMark}`;
-    startMark = startMark !== undefined ? `${startMark}` : '';
     const marks = this[kIndex][kMarks];
-    if (!marks.has(endMark) && !(endMark in nodeTiming)) {
-      throw new ERR_INVALID_PERFORMANCE_MARK(endMark);
+    if (arguments.length >= 3) {
+      if (!marks.has(endMark) && !(endMark in nodeTiming))
+        throw new ERR_INVALID_PERFORMANCE_MARK(endMark);
+      else
+        endMark = `${endMark}`;
     }
+    startMark = startMark !== undefined ? `${startMark}` : '';
     _measure(name, startMark, endMark);
   }
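Note the guard is `arguments.length >= 3` rather than `endMark !== undefined`: an explicitly passed `undefined` third argument is still validated (and rejected), while a genuinely omitted one falls through so the native layer can default the end to "now". A standalone sketch of that distinction, using a hypothetical `knownMarks` set in place of the real `kMarks` index:

```js
const knownMarks = new Set(['A']);

function measure(name, startMark, endMark) {
  if (arguments.length >= 3) {
    // Validation only runs when a third argument was actually supplied.
    if (!knownMarks.has(`${endMark}`))
      throw new Error(`invalid performance mark: ${endMark}`);
  }
  return endMark;  // undefined here means "measure up to now"
}

measure('x', 'A');             // ok: endMark omitted, returns undefined
measure('x', 'A', 'A');        // ok: endMark exists
// measure('x', 'A', undefined);  // throws: three arguments were passed
```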
src/node_perf.cc:

@@ -172,7 +172,6 @@ void Measure(const FunctionCallbackInfo<Value>& args) {
   HandleScope scope(env->isolate());
   Utf8Value name(env->isolate(), args[0]);
   Utf8Value startMark(env->isolate(), args[1]);
-  Utf8Value endMark(env->isolate(), args[2]);

   AliasedFloat64Array& milestones = env->performance_state()->milestones;
@@ -186,11 +185,17 @@ void Measure(const FunctionCallbackInfo<Value>& args) {
     startTimestamp = milestones[milestone];
   }

-  uint64_t endTimestamp = GetPerformanceMark(env, *endMark);
-  if (endTimestamp == 0) {
-    PerformanceMilestone milestone = ToPerformanceMilestoneEnum(*endMark);
-    if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID)
-      endTimestamp = milestones[milestone];
+  uint64_t endTimestamp = 0;
+  if (args[2]->IsUndefined()) {
+    endTimestamp = PERFORMANCE_NOW();
+  } else {
+    Utf8Value endMark(env->isolate(), args[2]);
+    endTimestamp = GetPerformanceMark(env, *endMark);
+    if (endTimestamp == 0) {
+      PerformanceMilestone milestone = ToPerformanceMilestoneEnum(*endMark);
+      if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID)
+        endTimestamp = milestones[milestone];
+    }
   }

   if (endTimestamp < startTimestamp)
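The `ToPerformanceMilestoneEnum` fallback is what lets mark names double as the timestamp properties of the documented `PerformanceNodeTiming` class. For example (milestone names taken from that class; behavior as documented for this version):

```js
const { performance } = require('perf_hooks');

// Measure startup phases using milestone names instead of explicit marks.
performance.measure('bootstrap', 'nodeStart', 'bootstrapComplete');
performance.measure('since env setup', 'environment');  // end defaults to now
```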
test/parallel/test-performance-measure.js (new file, 25 lines):

@@ -0,0 +1,25 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+
+const { PerformanceObserver, performance } = require('perf_hooks');
+const DELAY = 1000;
+
+const expected = ['Start to Now', 'A to Now', 'A to B'];
+const obs = new PerformanceObserver(common.mustCall((items) => {
+  const entries = items.getEntries();
+  const { name, duration } = entries[0];
+  assert.ok(duration > DELAY);
+  assert.strictEqual(expected.shift(), name);
+}, 3));
+obs.observe({ entryTypes: ['measure'] });
+
+performance.mark('A');
+setTimeout(common.mustCall(() => {
+  performance.measure('Start to Now');
+  performance.measure('A to Now', 'A');
+
+  performance.mark('B');
+  performance.measure('A to B', 'A', 'B');
+}), DELAY);
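The observer callback is wrapped in `common.mustCall(..., 3)`, so the test fails unless it fires exactly three times, once per expected measure name, consumed in order via `expected.shift()`. All three measures are created after the `setTimeout` fires and span at least the timeout, so each `duration` must exceed `DELAY` (one second).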