Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1453439 [wpt PR 10434] - Remove http/tests/w3c, a=testonly

Automatic update from web-platform-tests:

Remove http/tests/w3c

This CL moves the final user-timing tests to external/wpt and removes the
folder. The performance_entrylist_checker is changed, as in the http
counterpart, to allow de-duplicating test messages. Some remaining tests are
related to their wpt counterparts but not equivalent and in a different
format, so unifying them is left for future work.

Bug: 498037
Change-Id: I6f35d962ba088d92a9768e49a65dea4d5c267491
Reviewed-on: https://chromium-review.googlesource.com/1007888
Reviewed-by: Timothy Dresser <tdresser@chromium.org>
Commit-Queue: Nicolás Peña Moreno <npm@chromium.org>
Cr-Commit-Position: refs/heads/master@{#550310}
--
wpt-commits: 51704839c172ba8a70910bff484837c25adbfc9a
wpt-pr: 10434
This commit is contained in:

Parent: 2de7f65b58
Commit: bb2f79dd59
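The test-message de-duplication mentioned in the commit message is done by threading an optional description prefix through the checker's assertions: webperftestharnessextension.js (the last two hunks below) gains a testDescription parameter, and the migrated tests pass a per-loop prefix so repeated checks of the same entry names produce distinct messages. A minimal sketch of the pattern, assuming the harness's existing test_equals and entry_check helpers (both visible in the hunk); the caller lines in the trailing comment are illustrative, not part of the diff:

// Sketch: an optional per-call prefix keeps otherwise identical assertion
// messages distinct when the same names are checked in several loops of one test.
function entrylist_check(entryList, expectedLength, expectedNames, testDescription = '')
{
    test_equals(entryList.length, expectedLength,
        testDescription + 'There should be ' + expectedLength + ' entries.');
    for (let i = 0; i < entryList.length; ++i)
    {
        entry_check(entryList[i], expectedNames, testDescription + 'Entry_list ' + i + '. ');
    }
}

// Callers disambiguate repeated checks with a prefix, for example:
//   entrylist_checker.entrylist_check(entries, expected_count, names, 'First loop: ');
//   entrylist_checker.entrylist_check(entries, expected_count, names, 'Second loop: ');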
@@ -366829,6 +366829,18 @@
       {}
     ]
   ],
+  "user-timing/clearMarks.html": [
+    [
+      "/user-timing/clearMarks.html",
+      {}
+    ]
+  ],
+  "user-timing/clearMeasures.html": [
+    [
+      "/user-timing/clearMeasures.html",
+      {}
+    ]
+  ],
   "user-timing/clear_all_marks.any.js": [
     [
       "/user-timing/clear_all_marks.any.html",
@@ -366933,6 +366945,12 @@
       {}
     ]
   ],
+  "user-timing/mark.html": [
+    [
+      "/user-timing/mark.html",
+      {}
+    ]
+  ],
   "user-timing/mark_exceptions.html": [
     [
       "/user-timing/mark_exceptions.html",
@@ -366945,6 +366963,18 @@
       {}
     ]
   ],
+  "user-timing/measure_associated_with_navigation_timing.html": [
+    [
+      "/user-timing/measure_associated_with_navigation_timing.html",
+      {}
+    ]
+  ],
+  "user-timing/measure_exception.html": [
+    [
+      "/user-timing/measure_exception.html",
+      {}
+    ]
+  ],
   "user-timing/measure_exceptions_navigation_timing.html": [
     [
       "/user-timing/measure_exceptions_navigation_timing.html",
@@ -366967,6 +366997,12 @@
       {}
     ]
   ],
+  "user-timing/measures.html": [
+    [
+      "/user-timing/measures.html",
+      {}
+    ]
+  ],
   "user-timing/user_timing_exists.any.js": [
     [
       "/user-timing/user_timing_exists.any.html",
@@ -604394,6 +604430,14 @@
     "b82f9756b15ef3ea45fb250e304031d9ceaee9c7",
     "support"
   ],
+  "user-timing/clearMarks.html": [
+    "2d63d9e6d85171289c7e3feab7d5db1083d2d461",
+    "testharness"
+  ],
+  "user-timing/clearMeasures.html": [
+    "7c1424d8907f5c01898598691e4734ea6442ac90",
+    "testharness"
+  ],
   "user-timing/clear_all_marks.any.js": [
     "61961fe36a9d1f9492ad0d54ab46dbd95202bc84",
     "testharness"
@@ -604442,6 +604486,10 @@
     "58a8783b699acb89ab81da9e1417a542fe2e7d99",
     "testharness"
   ],
+  "user-timing/mark.html": [
+    "d8b001369f4b9d57a0f2d13adbfc27d986eaa061",
+    "testharness"
+  ],
   "user-timing/mark_exceptions.html": [
     "fe9279b28727941bab52aacaba104a4481f49191",
     "testharness"
@@ -604450,6 +604498,14 @@
     "b021b6706afbf40f59ba1bbc743f4f4e57ea4f66",
     "testharness"
   ],
+  "user-timing/measure_associated_with_navigation_timing.html": [
+    "448483a5bdb88c75a945315b483006541423032b",
+    "testharness"
+  ],
+  "user-timing/measure_exception.html": [
+    "4ee2ee1ad9b147fce725a4aefc6d1c0bb30d1dc6",
+    "testharness"
+  ],
   "user-timing/measure_exceptions_navigation_timing.html": [
     "ed9d9be01e740d282ec94379bfd78aca07b56325",
     "testharness"
@@ -604462,12 +604518,16 @@
     "e04fed804b4d89be63b8fdcbf12774c9a613f6d3",
     "testharness"
   ],
+  "user-timing/measures.html": [
+    "60621f7cf3ed5ede5246619f890105e33a827c13",
+    "testharness"
+  ],
   "user-timing/resources/webperftestharness.js": [
     "b1f81236de54467168bd09d749c2a6f453c5c3e1",
     "support"
   ],
   "user-timing/resources/webperftestharnessextension.js": [
-    "c19034c4cb9bbe6c04929857561f37b3c709d8f4",
+    "3e7332f352713b52fe898941089c89dc1d1bf522",
     "support"
   ],
   "user-timing/user_timing_exists.any.js": [
@@ -0,0 +1,72 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>functionality test of window.performance.clearMarks</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/performance-timeline-utils.js"></script>
<script src="resources/webperftestharness.js"></script>
<script src="resources/webperftestharnessextension.js"></script>
<script>
setup({ explicit_done: true });

function onload_test()
{
    const entrylist_checker = new performance_entrylist_checker('mark');
    const string_mark_names = mark_names.map(function (x) { return String(x); });
    mark_names.forEach(performance.mark, performance);

    for (let i = 0; i < mark_names.length; ++i)
    {
        performance.clearMarks(mark_names[i]);
        const retained_entries = performance.getEntriesByType('mark');
        const non_retained_entries = performance.getEntriesByName(mark_names[i], 'mark');
        entrylist_checker.entrylist_check(retained_entries, mark_names.length - i - 1, string_mark_names,
            'First loop: checking entries after removing "' + mark_names[i] + '". ');
        test_equals(non_retained_entries.length, 0,
            'First loop: marks that we cleared for "' + mark_names[i] + '" should not exist anymore.');
    }

    mark_names.forEach(performance.mark, performance);
    performance.clearMarks();
    test_equals(performance.getEntriesByType('mark').length, 0, 'No marks should exist after we clear all.');

    // The following cases clear marks one by one when each mark name has been set twice.
    mark_names.forEach(performance.mark, performance);
    mark_names.forEach(performance.mark, performance);

    for (let i = 0; i < mark_names.length; ++i)
    {
        performance.clearMarks(mark_names[i]);
        const retained_entries = performance.getEntriesByType('mark');
        const non_retained_entries = performance.getEntriesByName(mark_names[i], 'mark');
        entrylist_checker.entrylist_check(retained_entries, (mark_names.length - i - 1) * 2, string_mark_names,
            'Second loop: checking entries after removing "' + mark_names[i] + '". ');
        test_equals(non_retained_entries.length, 0,
            'Second loop: marks that we cleared for "' + mark_names[i] + '" should not exist anymore.');
    }

    // The following cases check that clearing a non-existent mark name is a no-op when each mark name has been set twice.
    mark_names.forEach(performance.mark, performance);
    mark_names.forEach(performance.mark, performance);
    const entry_number_before_useless_clear = performance.getEntriesByType('mark').length;
    performance.clearMarks('NonExist');
    const entry_number_after_useless_clear = performance.getEntriesByType('mark').length;
    test_equals(entry_number_before_useless_clear, entry_number_after_useless_clear, 'Nothing should happen if we clear a non-existent mark.');

    performance.clearMarks();
    test_equals(performance.getEntriesByType('mark').length, 0, 'No marks should exist when we clear all.');

    done();
}
</script>
</head>
<body onload="onload_test()">
<h1>Description</h1>
<p>This test validates functionality of the interface window.performance.clearMarks.</p>
<div id="log"></div>
</body>
</html>
@@ -0,0 +1,73 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>functionality test of window.performance.clearMeasures</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/performance-timeline-utils.js"></script>
<script src="resources/webperftestharness.js"></script>
<script src="resources/webperftestharnessextension.js"></script>
<script>
setup({ explicit_done: true });

function onload_test()
{
    const context = new PerformanceContext(window.performance);
    const entrylist_checker = new performance_entrylist_checker('measure');
    const measure_names = measures.map(function(x) { return x[0]; });

    mark_names.forEach(context.mark, context);
    measures.forEach(context.initialMeasures, context);
    for (let i = 0; i < measures.length; ++i)
    {
        context.clearMeasures(measures[i][0]);
        const retained_entries = context.getEntriesByType('measure');
        const non_retained_entries = context.getEntriesByName(measures[i][0], 'measure');
        entrylist_checker.entrylist_check(retained_entries, measures.length - i - 1, measure_names,
            'First loop: checking entries after removing "' + measures[i][0] + '". ');
        test_equals(non_retained_entries.length, 0,
            'First loop: measure "' + measures[i][0] + '" should not exist anymore after we cleared it.');
    }

    measures.forEach(context.initialMeasures, context);
    context.clearMeasures();
    test_equals(context.getEntriesByType('measure').length, 0, 'No measures should exist after we clear all (after first loop).');

    // The following cases clear measures one by one when each measure name has been set twice.
    measures.forEach(context.initialMeasures, context);
    mark_names.forEach(context.mark, context);
    measures.forEach(context.initialMeasures, context);
    for (let i = 0; i < measures.length; ++i)
    {
        context.clearMeasures(measures[i][0]);
        const retained_entries = context.getEntriesByType('measure');
        const non_retained_entries = context.getEntriesByName(measures[i][0], 'measure');
        entrylist_checker.entrylist_check(retained_entries, (measures.length - i - 1) * 2, measure_names,
            'Second loop: checking entries after removing "' + measures[i][0] + '". ');
        test_equals(non_retained_entries.length, 0,
            'Second loop: measure "' + measures[i][0] + '" should not exist anymore after we cleared it.');
    }

    // The following cases check that clearing a non-existent measure name is a no-op when each measure name has been set twice.
    measures.forEach(context.initialMeasures, context);
    measures.forEach(context.initialMeasures, context);
    const entry_number_before_useless_clear = context.getEntriesByType('measure').length;
    context.clearMeasures('NonExist');
    const entry_number_after_useless_clear = context.getEntriesByType('measure').length;
    test_equals(entry_number_before_useless_clear, entry_number_after_useless_clear, 'Nothing should happen if we clear a non-existent measure.');
    context.clearMeasures();
    test_equals(context.getEntriesByType('measure').length, 0, 'No measures should exist when we clear all (after second loop).');

    done();
}
</script>
</head>
<body onload="onload_test()">
<h1>Description</h1>
<p>This test validates functionality of the interface window.performance.clearMeasures.</p>
<div id="log"></div>
</body>
</html>
@@ -0,0 +1,54 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>functionality test of window.performance.mark</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/performance-timeline-utils.js"></script>
<script src="resources/webperftestharness.js"></script>
<script src="resources/webperftestharnessextension.js"></script>
<script>
setup({ explicit_done: true });

function onload_test()
{
    const entrylist_checker = new performance_entrylist_checker('mark');
    const string_mark_names = mark_names.map(function (x) { return String(x); });

    test_equals(performance.getEntriesByType("mark").length, 0, 'There should be 0 marks.');
    mark_names.forEach(performance.mark, performance);
    let mark_entrylist = performance.getEntriesByType('mark');

    entrylist_checker.entrylist_check(mark_entrylist, mark_names.length, string_mark_names, 'Checking all entries.');

    for (let i = 0; i < mark_entrylist.length; ++i)
    {
        const mark_entrylist_by_name = performance.getEntriesByName(mark_entrylist[i].name, 'mark');
        entrylist_checker.entrylist_check(mark_entrylist_by_name, 1, string_mark_names,
            'First loop: checking entry of name "' + mark_entrylist[i].name + '".');
    }

    mark_names.forEach(performance.mark, performance);
    mark_entrylist = performance.getEntriesByType('mark');
    entrylist_checker.entrylist_check(mark_entrylist, mark_names.length * 2, string_mark_names, 'Checking all doubly marked entries.');

    for (let i = 0; i < mark_entrylist.length; ++i)
    {
        const mark_entrylist_by_name = performance.getEntriesByName(mark_entrylist[i].name, 'mark');
        entrylist_checker.entrylist_check(mark_entrylist_by_name, 2, string_mark_names,
            'Second loop step ' + i + ': checking entries of name "' + mark_entrylist[i].name + '".');
    }

    done();
}
</script>
</head>
<body onload="onload_test()">
<h1>Description</h1>
<p>This test validates functionality of the interface window.performance.mark.</p>
<div id="log"></div>
</body>
</html>
@@ -0,0 +1,53 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>functionality test of window.performance.measure</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/performance-timeline-utils.js"></script>
<script src="resources/webperftestharness.js"></script>
<script src="resources/webperftestharnessextension.js"></script>
<script>
setup({ explicit_done: true });

function onload_test()
{
    const measures_for_timing_order = [
        ['nav2now', 'navigationStart'],
        ['loadTime', 'navigationStart', 'loadEventEnd'],
        ['loadEventEnd2a', 'loadEventEnd', 'abc'],
        ['nav2a', 'navigationStart', 'abc'],
        ['domComplete2a', 'domComplete', 'abc'],
        ['negativeValue', 1, 'navigationStart'],
    ];
    const context = new PerformanceContext(window.performance);

    mark_names.forEach(context.mark, context);
    measures_for_timing_order.forEach(context.initialMeasures, context);
    test_greater_than(context.getEntriesByName('nav2now', 'measure')[0].duration, 0, 'Measure of navigationStart to now should be a positive value.');
    test_greater_than(context.getEntriesByName('loadTime', 'measure')[0].duration, 0, 'Measure of navigationStart to loadEventEnd should be a positive value.');
    test_greater_than(0, context.getEntriesByName('negativeValue', 'measure')[0].duration, 'Measure of the current mark to navigationStart should be a negative value.');
    test_equals(context.getEntriesByName('loadTime', 'measure')[0].duration + context.getEntriesByName('loadEventEnd2a', 'measure')[0].duration, context.getEntriesByName('nav2a', 'measure')[0].duration, 'loadTime plus loadEventEnd to mark "abc" should equal navigationStart to "abc".');

    // The following cases test scenarios where measure names are set twice.
    mark_names.forEach(context.mark, context);
    measures_for_timing_order.forEach(context.initialMeasures, context);

    test_greater_than(context.getEntriesByName('nav2now', 'measure')[1].duration, context.getEntriesByName('nav2now', 'measure')[0].duration, 'The second measure of navigationStart to now should have a longer duration than the first.');
    test_equals(context.getEntriesByName('loadTime', 'measure')[0].duration, context.getEntriesByName('loadTime', 'measure')[1].duration, 'Measures of loadTime should have the same duration.');
    test_greater_than(context.getEntriesByName('domComplete2a', 'measure')[1].duration, context.getEntriesByName('domComplete2a', 'measure')[0].duration, 'The measure from the domComplete event to the most recent mark "abc" should have a longer duration.');
    test_greater_than(context.getEntriesByName('negativeValue', 'measure')[0].duration, context.getEntriesByName('negativeValue', 'measure')[1].duration, 'The measure from the most recent mark to navigationStart should have a smaller (more negative) duration.');

    done();
}
</script>
</head>
<body onload="setTimeout(onload_test, 0)">
<h1>Description</h1>
<p>This test validates functionality of the interface window.performance.measure using keywords from the Navigation Timing spec.</p>
<div id="log"></div>
</body>
</html>
@@ -0,0 +1,32 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>exception test of window.performance.measure</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/performance-timeline-utils.js"></script>
<script src="resources/webperftestharness.js"></script>
<script src="resources/webperftestharnessextension.js"></script>
</head>

<body>
<h1>Description</h1>
<p>This test validates all exception scenarios of the method window.performance.measure in the User Timing API.</p>

<div id="log"></div>
<script>
performance.mark('ExistMark');
test_method_throw_exception('performance.measure()', TypeError());
test_method_throw_exception('performance.measure("Exception1", "NonExistMark1")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception2", "NonExistMark1", "navigationStart")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception3", "navigationStart", "NonExistMark1")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception4", "NonExistMark1", "ExistMark")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception5", "ExistMark", "NonExistMark1")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception6", "NonExistMark1", "NonExistMark2")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception7", "redirectStart")', 'INVALID_ACCESS_ERR');
</script>
</body>
</html>
@@ -0,0 +1,62 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>functionality test of window.performance.measure</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/performance-timeline-utils.js"></script>
<script src="resources/webperftestharness.js"></script>
<script src="resources/webperftestharnessextension.js"></script>
<script>
setup({ explicit_done: true });

function onload_test()
{
    const context = new PerformanceContext(window.performance);
    const entrylist_checker = new performance_entrylist_checker('measure');
    const measure_names = measures.map(function(x) { return x[0]; });

    test_equals(context.getEntriesByType('measure').length, 0, 'There should be 0 entries returned.');

    mark_names.forEach(context.mark, context);
    measures.forEach(context.initialMeasures, context);

    let measure_entrylist = context.getEntriesByType('measure');
    entrylist_checker.entrylist_check(measure_entrylist, measures.length, measure_names,
        'Checking all entries.');

    for (let i = 0; i < measure_entrylist.length; ++i)
    {
        const measure_entrylist_by_name = context.getEntriesByName(measure_entrylist[i].name, 'measure');
        entrylist_checker.entrylist_check(measure_entrylist_by_name, 1, measure_names,
            'First loop: checking entry of name "' + measure_entrylist[i].name + '".');
    }

    // The following cases test scenarios where measure names are set twice.
    mark_names.forEach(context.mark, context);
    measures.forEach(context.initialMeasures, context);

    measure_entrylist = context.getEntriesByType('measure');
    entrylist_checker.entrylist_check(measure_entrylist, measures.length * 2, measure_names,
        'Checking all doubly measured entries.');

    for (let i = 0; i < measure_entrylist.length; ++i)
    {
        const measure_entrylist_by_name = context.getEntriesByName(measure_entrylist[i].name, 'measure');
        entrylist_checker.entrylist_check(measure_entrylist_by_name, 2, measure_names,
            'Second loop step ' + i + ': checking entry of name "' + measure_entrylist[i].name + '".');
    }

    done();
}
</script>
</head>
<body onload="onload_test()">
<h1>Description</h1>
<p>This test validates functionality of the interface window.performance.measure.</p>
<div id="log"></div>
</body>
</html>
@@ -88,28 +88,29 @@ function test_resource_entries(entries, expected_entries)
         }
     }
 }
 
 function performance_entrylist_checker(type)
 {
-    var entryType = type;
+    const entryType = type;
 
-    function entry_check(entry, expectedNames)
+    function entry_check(entry, expectedNames, testDescription = '')
     {
-        var msg = 'Entry \"' + entry.name + '\" should be one that we have set.';
+        const msg = testDescription + 'Entry \"' + entry.name + '\" should be one that we have set.';
         wp_test(function() { assert_in_array(entry.name, expectedNames, msg); }, msg);
-        test_equals(entry.entryType, entryType, 'entryType should be \"' + entryType + '\".');
+        test_equals(entry.entryType, entryType, testDescription + 'entryType should be \"' + entryType + '\".');
         if (type === "measure") {
-            test_true(isFinite(entry.startTime), 'startTime should be a number.');
-            test_true(isFinite(entry.duration), 'duration should be a number.');
+            test_true(isFinite(entry.startTime), testDescription + 'startTime should be a number.');
+            test_true(isFinite(entry.duration), testDescription + 'duration should be a number.');
         } else if (type === "mark") {
-            test_greater_than(entry.startTime, 0, 'startTime should greater than 0.');
-            test_equals(entry.duration, 0, 'duration of mark should be 0.');
+            test_greater_than(entry.startTime, 0, testDescription + 'startTime should greater than 0.');
+            test_equals(entry.duration, 0, testDescription + 'duration of mark should be 0.');
         }
     }
 
     function entrylist_order_check(entryList)
     {
-        var inOrder = true;
-        for (var i = 0; i < entryList.length - 1; ++i)
+        let inOrder = true;
+        for (let i = 0; i < entryList.length - 1; ++i)
         {
             if (entryList[i + 1].startTime < entryList[i].startTime) {
                 inOrder = false;
@@ -119,13 +120,13 @@ function performance_entrylist_checker(type)
         return inOrder;
     }
 
-    function entrylist_check(entryList, expectedLength, expectedNames)
+    function entrylist_check(entryList, expectedLength, expectedNames, testDescription = '')
     {
-        test_equals(entryList.length, expectedLength, 'There should be ' + expectedLength + ' entries.');
-        test_true(entrylist_order_check(entryList), 'Entries in entrylist should be in order.');
-        for (var i = 0; i < entryList.length; ++i)
+        test_equals(entryList.length, expectedLength, testDescription + 'There should be ' + expectedLength + ' entries.');
+        test_true(entrylist_order_check(entryList), testDescription + 'Entries in entrylist should be in order.');
+        for (let i = 0; i < entryList.length; ++i)
         {
-            entry_check(entryList[i], expectedNames);
+            entry_check(entryList[i], expectedNames, testDescription + 'Entry_list ' + i + '. ');
         }
     }
 