Bug 1573998 [wpt PR 18438] - [UserTimingL3] Add 'duration' member of PerformanceMeasureOptions., a=testonly

Automatic update from web-platform-tests
[UserTimingL3] Add 'duration' member of PerformanceMeasureOptions.

In the User Timing L3 spec
(https://w3c.github.io/user-timing/#performancemeasureoptions-dictionary),
PerformanceMeasureOptions dictionaries can have a 'duration' member.
This gives users better control over what timespan gets covered when
calling `performance.measure()`.

WPT coverage is also extended to check that user agents support the
'duration' member and raise errors as specified.

Bug: 953848
Change-Id: Ibd2a9536b1688e19bebfd559dcf9f3437b7d89e7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1753741
Commit-Queue: Tom McKee <tommckee@chromium.org>
Reviewed-by: Nicolás Peña Moreno <npm@chromium.org>
Cr-Commit-Position: refs/heads/master@{#687391}

--

wpt-commits: b73b1029ffcf31ba7c0086fd233b8f27f58d0440
wpt-pr: 18438
This commit is contained in:
Tom McKee 2019-08-19 14:23:40 +00:00 committed by moz-wptsync-bot
Parent 953fc80171
Commit ef70575d2c
2 changed files with 14 additions and 7 deletions

View file

@@ -30,10 +30,12 @@ async_test(function (t) {
{ entryType: "measure", name: "measure15", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure16", detail: null, startTime: timeStamp1 },
{ entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 },
{ entryType: "measure", name: "measure18", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure19", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure18", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure19", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure20", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure21", detail: null, startTime: 0 }];
{ entryType: "measure", name: "measure21", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure22", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure23", detail: null, startTime: 0 }];
const observer = new PerformanceObserver(
t.step_func(function (entryList, obs) {
measureEntries =
@@ -80,11 +82,15 @@ async_test(function (t) {
self.performance.measure("measure16", { start: 'mark1', end: undefined, detail: null }));
returnedEntries.push(
self.performance.measure("measure17", { start: timeStamp3, end: 'mark2', detail: { customInfo: 159 }}));
returnedEntries.push(
self.performance.measure("measure18", { start: timeStamp1, duration: timeStamp2 - timeStamp1 }));
returnedEntries.push(
self.performance.measure("measure19", { duration: timeStamp2 - timeStamp1, end: timeStamp2 }));
// {}, null, undefined, invalid-dict passed to startOrOptions are interpreted as start time being 0.
returnedEntries.push(self.performance.measure("measure18", {}, 'mark1'));
returnedEntries.push(self.performance.measure("measure19", null, 'mark1'));
returnedEntries.push(self.performance.measure("measure20", undefined, 'mark1'));
returnedEntries.push(self.performance.measure("measure21", { invalidDict:1 }, 'mark1'));
returnedEntries.push(self.performance.measure("measure20", {}, 'mark1'));
returnedEntries.push(self.performance.measure("measure21", null, 'mark1'));
returnedEntries.push(self.performance.measure("measure22", undefined, 'mark1'));
returnedEntries.push(self.performance.measure("measure23", { invalidDict:1 }, 'mark1'));
checkEntries(returnedEntries, expectedEntries);
}, "measure entries' detail and start/end are customizable");

View file

@@ -28,6 +28,7 @@ test_method_throw_exception('performance.measure("Exception5", "ExistMark", "Non
test_method_throw_exception('performance.measure("Exception6", "NonExistMark1", "NonExistMark2")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception7", "redirectStart")', 'INVALID_ACCESS_ERR');
test_method_throw_exception('performance.measure("Exception8", {"detail": "non-empty"})', TypeError());
test_method_throw_exception('performance.measure("Exception9", {"start": 1, "duration": 2, "end": 3})', TypeError());
</script>
</body>
</html>