Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1325207 - [reftest] Stop treating 'skip/skip-if' as a failure type in the manifests r=kats
Currently the RefTest manifest parser has 5 possible statuses:

    0 => EXPECTED_PASS
    1 => EXPECTED_FAIL
    2 => EXPECTED_RANDOM
    3 => EXPECTED_DEATH (aka skip)
    4 => EXPECTED_FUZZY

In the manifests, the last status annotation that appears on the line takes precedence. For example:

    skip-if(true) fails-if(true) == test1.html ref.html
    fails-if(true) skip-if(true) == test2.html ref.html

The first test will have an expected status of EXPECTED_FAIL, whereas the second one will be EXPECTED_DEATH. The same holds true for any combination of 'fail/random/skip/fuzzy' annotations. This means developers need to be very careful about the order in which they specify these annotations, as getting the order wrong can easily lead to unexpected behaviour.

With the introduction of defaults in bug 1616368, the risk of unexpected behaviour is far greater. Since defaults are simply prepended to the test line, a manifest that looks like:

    defaults skip-if(true)
    == test1.html ref.html
    fails-if(true) == test2.html ref.html

will actually skip the first test, but run the second (since the fails-if overwrites EXPECTED_DEATH with EXPECTED_FAIL).

The root of the problem appears to be that 'skip' and 'fuzzy' are not actually test statuses. They are modifiers that affect how we run the test, but don't affect whether the test is expected to pass or fail. Therefore, this patch solves the problem by making 'skip/skip-if' its own thing that does not get overwritten by other failure types. In other words, a 'skip-if' can appear before or after a 'fails-if' and it will have the same meaning.

Differential Revision: https://phabricator.services.mozilla.com/D64457

--HG--
extra : moz-landing-system : lando
This commit is contained in:
Parent: 43107334dd
Commit: a9dd0b2e3e
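As an illustration of the approach described in the commit message, here is a minimal, hypothetical sketch (not the actual manifest.jsm code; the names parseAnnotations and condEval are made up for this example) of annotation parsing where 'skip'/'skip-if' sets a separate boolean instead of overwriting the expected status, so annotation order no longer matters:

    // Hypothetical sketch: parse reftest-style annotations so that 'skip'
    // is a boolean flag rather than one of the expected statuses.
    const EXPECTED_PASS = 0, EXPECTED_FAIL = 1, EXPECTED_RANDOM = 2, EXPECTED_FUZZY = 3;

    function parseAnnotations(tokens, condEval) {
      let expected = EXPECTED_PASS;
      let skip = false;
      for (const tok of tokens) {
        // A trailing (cond) means the annotation only applies if cond evaluates true.
        const m = /^([a-z-]+)(?:\((.*)\))?$/.exec(tok);
        if (!m) continue;
        const [, name, cond] = m;
        const active = cond === undefined || condEval(cond);
        if (!active) continue;
        switch (name) {
          case "fails":
          case "fails-if":
            expected = EXPECTED_FAIL;   // still overwrites pass/random
            break;
          case "random":
          case "random-if":
            expected = EXPECTED_RANDOM;
            break;
          case "skip":
          case "skip-if":
            skip = true;                // independent of the expected status
            break;
        }
      }
      return { expected, skip };
    }

    // Both orderings from the commit message now give the same result:
    const evalCond = c => c === "true";
    console.log(parseAnnotations(["skip-if(true)", "fails-if(true)"], evalCond));
    // -> { expected: 1, skip: true }
    console.log(parseAnnotations(["fails-if(true)", "skip-if(true)"], evalCond));
    // -> { expected: 1, skip: true }

Under this scheme both example lines parse to expected == EXPECTED_FAIL with skip == true, regardless of annotation order.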
@@ -38,8 +38,7 @@ for (let [key, val] of Object.entries({
   EXPECTED_PASS: 0,
   EXPECTED_FAIL: 1,
   EXPECTED_RANDOM: 2,
-  EXPECTED_DEATH: 3, // test must be skipped to avoid e.g. crash/hang
-  EXPECTED_FUZZY: 4,
+  EXPECTED_FUZZY: 3,
 
   // types of preference value we might want to set for a specific test
   PREF_BOOLEAN: 0,
@@ -110,6 +110,7 @@ function ReadManifest(aURL, aFilter, aManifestID)
     var maxAsserts = 0;
     var needs_focus = false;
     var slow = false;
+    var skip = false;
     var testPrefSettings = defaultTestPrefSettings.concat();
     var refPrefSettings = defaultRefPrefSettings.concat();
     var fuzzy_delta = { min: 0, max: 2 };
@@ -227,7 +228,7 @@ function ReadManifest(aURL, aFilter, aManifestID)
             } else if (stat == "random") {
                 expected_status = EXPECTED_RANDOM;
             } else if (stat == "skip") {
-                expected_status = EXPECTED_DEATH;
+                skip = true;
             } else if (stat == "silentfail") {
                 allow_silent_fail = true;
             }
@@ -279,14 +280,14 @@ function ReadManifest(aURL, aFilter, aManifestID)
                 throw "Error in manifest file " + aURL.spec + " line " + lineNo + ": use of include with http";
 
             // If the expected_status is EXPECTED_PASS (the default) then allow
-            // the include. If it is EXPECTED_DEATH, that means there was a skip
+            // the include. If 'skip' is true, that means there was a skip
             // or skip-if annotation (with a true condition) on this include
             // statement, so we should skip the include. Any other expected_status
             // is disallowed since it's nonintuitive as to what the intended
             // effect is.
             if (nonSkipUsed) {
                 throw "Error in manifest file " + aURL.spec + " line " + lineNo + ": include statement with annotation other than 'skip' or 'skip-if'";
-            } else if (expected_status == EXPECTED_DEATH) {
+            } else if (skip) {
                 g.logger.info("Skipping included manifest at " + aURL.spec + " line " + lineNo + " due to matching skip condition");
             } else {
                 // poor man's assertion
@@ -335,7 +336,7 @@ function ReadManifest(aURL, aFilter, aManifestID)
         var type = items[0];
         if (items.length != 2)
             throw "Error in manifest file " + aURL.spec + " line " + lineNo + ": incorrect number of arguments to " + type;
-        if (type == TYPE_LOAD && expected_status != EXPECTED_PASS && expected_status != EXPECTED_DEATH)
+        if (type == TYPE_LOAD && expected_status != EXPECTED_PASS)
            throw "Error in manifest file " + aURL.spec + " line " + lineNo + ": incorrect known failure type for load test";
        AddTestItem({ type: type,
                      expected: expected_status,
@@ -346,6 +347,7 @@ function ReadManifest(aURL, aFilter, aManifestID)
                      maxAsserts: maxAsserts,
                      needsFocus: needs_focus,
                      slow: slow,
+                     skip: skip,
                      prefSettings1: testPrefSettings,
                      prefSettings2: refPrefSettings,
                      fuzzyMinDelta: fuzzy_delta.min,
@@ -383,7 +385,7 @@ function ReadManifest(aURL, aFilter, aManifestID)
            // comparing the two failures, which is not a useful result.
            if (expected_status === EXPECTED_FAIL ||
                expected_status === EXPECTED_RANDOM) {
-                expected_status = EXPECTED_DEATH;
+                skip = true;
            }
        }
 
@@ -396,6 +398,7 @@ function ReadManifest(aURL, aFilter, aManifestID)
                      maxAsserts: maxAsserts,
                      needsFocus: needs_focus,
                      slow: slow,
+                     skip: skip,
                      prefSettings1: testPrefSettings,
                      prefSettings2: refPrefSettings,
                      fuzzyMinDelta: fuzzy_delta.min,
@@ -497,7 +497,7 @@ function StartTests()
    // tURLs is a temporary array containing all active tests
    var tURLs = new Array();
    for (var i = 0; i < g.urls.length; ++i) {
-        if (g.urls[i].expected == EXPECTED_DEATH)
+        if (g.urls[i].skip)
            continue;
 
        if (g.urls[i].needsFocus && !Focus())
@@ -601,7 +601,7 @@ function BuildUseCounts()
    g.uriUseCounts = {};
    for (var i = 0; i < g.urls.length; ++i) {
        var url = g.urls[i];
-        if (url.expected != EXPECTED_DEATH &&
+        if (!url.skip &&
            (url.type == TYPE_REFTEST_EQUAL ||
             url.type == TYPE_REFTEST_NOTEQUAL)) {
            if (url.prefSettings1.length == 0) {
@@ -646,7 +646,7 @@ function StartCurrentTest()
    while (g.urls.length > 0) {
        var test = g.urls[0];
        logger.testStart(test.identifier);
-        if (test.expected == EXPECTED_DEATH) {
+        if (test.skip) {
            ++g.testResults.Skip;
            logger.testEnd(test.identifier, "SKIP");
            g.urls.shift();
@@ -0,0 +1,11 @@
+# interactions between skip and fail
+skip-if(true) fails == skip-if_fails.html ref.html
+skip-if(true) fails-if(true) == skip-if_fails-if.html ref.html
+skip fails == skip_fails.html ref.html
+skip-if(false) fails == fails.html ref.html
+fails skip-if(true) == fails_skip-if.html ref.html
+fails-if(true) skip-if(true) == fails-if_skip-if.html ref.html
+fails skip == fails_skip.html ref.html
+fails-if(false) skip == skip.html ref.html
+skip-if(true) fails skip-if(false) == skip-if-true_fails_skip-if-false ref.html
+skip-if(false) fails skip-if(true) == skip-if-false_fails_skip-if-true ref.html
@@ -38,6 +38,22 @@ def test_parse_test_types(parse):
     assert tests[4]['type'] == 'print'
 
 
+def test_parse_failure_type_interactions(parse):
+    """Tests interactions between skip and fails."""
+    tests = parse('failure-type-interactions.list')
+    for t in tests:
+        if 'skip' in t['name']:
+            assert t['skip']
+        else:
+            assert not t['skip']
+
+        # 0 => EXPECTED_PASS, 1 => EXPECTED_FAIL
+        if 'fails' in t['name']:
+            assert t['expected'] == 1
+        else:
+            assert t['expected'] == 0
+
+
 def test_parse_invalid_manifests(parse):
     # XXX We should assert that the output contains the appropriate error
     # message, but we seem to be hitting an issue in pytest that is preventing