Mirror of https://github.com/mozilla/moz-skia.git
gm: if test has no expectations, record its result as no-expectations regardless of ignoreFailure
After https://code.google.com/p/skia/source/detail?r=11640 ('Ignore any pdf-poppler GM failures'), there are a bunch of pdf-poppler tests showing up as failure-ignored at http://c128.i.corp.google.com/production-gm-diffs/latest/view.html

Make them go away.

R=scroggo@google.com

Review URL: https://codereview.chromium.org/26650005

git-svn-id: http://skia.googlecode.com/svn/trunk@11703 2bbb7eff-a529-9590-31e7-b0007b416f81
Parent
0fbe01fa53
Commit
1ddfbc201a
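For context, the behavioral change is a re-prioritization of how a result is routed into the JSON summary: a test whose expectations are missing is now tallied as "no-comparison" even when ignoreFailure is set, and ignoreFailure only matters when expectations were present but not matched. Below is a minimal, self-contained sketch of that routing order; classify_result and the ErrorFlag enum are hypothetical stand-ins made up for this illustration (the real checks, on kRenderModeMismatch_ErrorType, kExpectationsMismatch_ErrorType and kMissingExpectations_ErrorType, are in the diff that follows).

// Hypothetical sketch of the new routing order; not Skia's actual types.
#include <string>

enum ErrorFlag {
    kRenderModeMismatch   = 1 << 0,   // stand-in for kRenderModeMismatch_ErrorType
    kExpectationsMismatch = 1 << 1,   // stand-in for kExpectationsMismatch_ErrorType
    kMissingExpectations  = 1 << 2,   // stand-in for kMissingExpectations_ErrorType
};

// Returns the name of the JSON bucket a result would be tallied into.
std::string classify_result(unsigned errors, bool ignoreFailure) {
    if (errors == 0) {
        return "succeeded";
    } else if (errors & kRenderModeMismatch) {
        return "failed";
    } else if (errors & kExpectationsMismatch) {
        return ignoreFailure ? "failure-ignored" : "failed";
    } else if (errors & kMissingExpectations) {
        // ignoreFailure is deliberately not consulted here, so a test with no
        // expectations lands in "no-comparison" either way.
        return "no-comparison";
    }
    return "untallied";   // none of the known cases matched
}

With this ordering, classify_result(kMissingExpectations, /*ignoreFailure=*/true) yields "no-comparison" rather than "failure-ignored", which is why the pdf-poppler entries disappear from the failure-ignored list.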
@@ -821,7 +821,7 @@ public:
     }
 
     /**
-     * Add this result to the appropriate JSON collection of actual results,
+     * Add this result to the appropriate JSON collection of actual results (but just ONE),
      * depending on errors encountered.
      */
     void add_actual_results_to_json_summary(const char testName[],
@@ -829,33 +829,35 @@ public:
                                             ErrorCombination errors,
                                             bool ignoreFailure) {
         Json::Value jsonActualResults = actualResultDigest.asJsonTypeValuePair();
+        Json::Value *resultCollection = NULL;
+
         if (errors.isEmpty()) {
-            this->fJsonActualResults_Succeeded[testName] = jsonActualResults;
-        } else {
+            resultCollection = &this->fJsonActualResults_Succeeded;
+        } else if (errors.includes(kRenderModeMismatch_ErrorType)) {
+            resultCollection = &this->fJsonActualResults_Failed;
+        } else if (errors.includes(kExpectationsMismatch_ErrorType)) {
             if (ignoreFailure) {
-                this->fJsonActualResults_FailureIgnored[testName] =
-                    jsonActualResults;
+                resultCollection = &this->fJsonActualResults_FailureIgnored;
             } else {
-                if (errors.includes(kMissingExpectations_ErrorType)) {
-                    // TODO: What about the case where there IS an
-                    // expected image hash digest, but that gm test
-                    // doesn't actually run? For now, those cases
-                    // will always be ignored, because gm only looks
-                    // at expectations that correspond to gm tests
-                    // that were actually run.
-                    //
-                    // Once we have the ability to express
-                    // expectations as a JSON file, we should fix this
-                    // (and add a test case for which an expectation
-                    // is given but the test is never run).
-                    this->fJsonActualResults_NoComparison[testName] =
-                        jsonActualResults;
-                }
-                if (errors.includes(kExpectationsMismatch_ErrorType) ||
-                    errors.includes(kRenderModeMismatch_ErrorType)) {
-                    this->fJsonActualResults_Failed[testName] = jsonActualResults;
-                }
+                resultCollection = &this->fJsonActualResults_Failed;
             }
+        } else if (errors.includes(kMissingExpectations_ErrorType)) {
+            // TODO: What about the case where there IS an expected
+            // image hash digest, but that gm test doesn't actually
+            // run? For now, those cases will always be ignored,
+            // because gm only looks at expectations that correspond
+            // to gm tests that were actually run.
+            //
+            // Once we have the ability to express expectations as a
+            // JSON file, we should fix this (and add a test case for
+            // which an expectation is given but the test is never
+            // run).
+            resultCollection = &this->fJsonActualResults_NoComparison;
         }
+
+        // If none of the above cases match, we don't add it to ANY tally of actual results.
+        if (resultCollection) {
+            (*resultCollection)[testName] = jsonActualResults;
+        }
     }
@@ -1 +1 @@
-out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/empty-dir --writeJsonSummaryPath gm/tests/outputs/compared-against-empty-dir/output-actual/json-summary.txt --writePath gm/tests/outputs/compared-against-empty-dir/output-actual/writePath --mismatchPath gm/tests/outputs/compared-against-empty-dir/output-actual/mismatchPath --missingExpectationsPath gm/tests/outputs/compared-against-empty-dir/output-actual/missingExpectationsPath
+out/Debug/gm --verbose --hierarchy --match selftest1 --ignoreTests 8888 --config 8888 565 -r gm/tests/inputs/images/empty-dir --writeJsonSummaryPath gm/tests/outputs/compared-against-empty-dir/output-actual/json-summary.txt --writePath gm/tests/outputs/compared-against-empty-dir/output-actual/writePath --mismatchPath gm/tests/outputs/compared-against-empty-dir/output-actual/mismatchPath --missingExpectationsPath gm/tests/outputs/compared-against-empty-dir/output-actual/missingExpectationsPath
@@ -15,7 +15,7 @@
       },
       "8888/selftest1.png" : {
          "allowed-digests" : null,
-         "ignore-failure" : false
+         "ignore-failure" : true
       }
    }
 }
@@ -209,7 +209,9 @@ gm_test "--verbose --hierarchy --match selftest1 --ignoreTests 8888/selfte $CONF
 gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/different-pixels-ignore-some-failures.json" "$GM_OUTPUTS/ignoring-some-failures"
 
 # Compare generated image against an empty "expected image" dir.
-gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/empty-dir" "$GM_OUTPUTS/compared-against-empty-dir"
+# Even the tests that have been marked as ignore-failure should show up as
+# no-comparison.
+gm_test "--verbose --hierarchy --match selftest1 --ignoreTests 8888 $CONFIGS -r $GM_INPUTS/images/empty-dir" "$GM_OUTPUTS/compared-against-empty-dir"
 
 # Compare generated image against a nonexistent "expected image" dir.
 gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r ../path/to/nowhere" "$GM_OUTPUTS/compared-against-nonexistent-dir"