BUG=http://code.google.com/p/skia/issues/detail?id=1079
TBR=robertphillips
Review URL: https://codereview.appspot.com/7138065

git-svn-id: http://skia.googlecode.com/svn/trunk@7289 2bbb7eff-a529-9590-31e7-b0007b416f81
epoger@google.com 2013-01-18 21:45:42 +00:00
Parent 8e4adb0e01
Commit 9ef89ce8b7
3 changed files with 211 additions and 418 deletions

View file

@@ -1,174 +0,0 @@
-/*
- * Copyright 2013 Google Inc.
- *
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#ifndef gm_expectations_DEFINED
-#define gm_expectations_DEFINED
-
-#include "gm.h"
-#include "SkBitmapChecksummer.h"
-#include "SkImageDecoder.h"
-#include "SkOSFile.h"
-#include "SkRefCnt.h"
-#include "SkTArray.h"
-
-#ifdef SK_BUILD_FOR_WIN
-    // json includes xlocale which generates warning 4530 because we're compiling without
-    // exceptions; see https://code.google.com/p/skia/issues/detail?id=1067
-    #pragma warning(push)
-    #pragma warning(disable : 4530)
-#endif
-#include "json/value.h"
-#ifdef SK_BUILD_FOR_WIN
-    #pragma warning(pop)
-#endif
-
-namespace skiagm {
-
-    // The actual type we use to represent a checksum is hidden in here.
-    typedef Json::UInt64 Checksum;
-    static inline Json::Value asJsonValue(Checksum checksum) {
-        return checksum;
-    }
-
-    static SkString make_filename(const char path[],
-                                  const char renderModeDescriptor[],
-                                  const char *name,
-                                  const char suffix[]) {
-        SkString filename(path);
-        if (filename.endsWith(SkPATH_SEPARATOR)) {
-            filename.remove(filename.size() - 1, 1);
-        }
-        filename.appendf("%c%s%s.%s", SkPATH_SEPARATOR,
-                         name, renderModeDescriptor, suffix);
-        return filename;
-    }
-
-    /**
-     * Test expectations (allowed image checksums, etc.)
-     */
-    class Expectations {
-    public:
-        /**
-         * No expectations at all.
-         *
-         * We set ignoreFailure to false by default, but it doesn't really
-         * matter... the result will always be "no-comparison" anyway.
-         */
-        Expectations(bool ignoreFailure=false) {
-            fIgnoreFailure = ignoreFailure;
-        }
-
-        /**
-         * Allow exactly one checksum (appropriate for the case when we
-         * are comparing against a single PNG file).
-         *
-         * By default, DO NOT ignore failures.
-         */
-        Expectations(Checksum singleChecksum, bool ignoreFailure=false) {
-            fIgnoreFailure = ignoreFailure;
-            fAllowedChecksums.push_back() = singleChecksum;
-        }
-
-        /**
-         * Returns true iff we want to ignore failed expectations.
-         */
-        bool ignoreFailure() const { return this->fIgnoreFailure; }
-
-        /**
-         * Returns true iff there are no allowed checksums.
-         */
-        bool empty() const { return this->fAllowedChecksums.empty(); }
-
-        /**
-         * Returns true iff actualChecksum matches any allowedChecksum,
-         * regardless of fIgnoreFailure.  (The caller can check
-         * that separately.)
-         */
-        bool match(Checksum actualChecksum) const {
-            for (int i=0; i < this->fAllowedChecksums.count(); i++) {
-                Checksum allowedChecksum = this->fAllowedChecksums[i];
-                if (allowedChecksum == actualChecksum) {
-                    return true;
-                }
-            }
-            return false;
-        }
-
-        /**
-         * Return a JSON representation of the allowed checksums.
-         * This does NOT include any information about whether to
-         * ignore failures.
-         */
-        Json::Value allowedChecksumsAsJson() const {
-            Json::Value allowedChecksumArray;
-            if (!this->fAllowedChecksums.empty()) {
-                for (int i=0; i < this->fAllowedChecksums.count(); i++) {
-                    Checksum allowedChecksum = this->fAllowedChecksums[i];
-                    allowedChecksumArray.append(asJsonValue(allowedChecksum));
-                }
-            }
-            return allowedChecksumArray;
-        }
-
-    private:
-        SkTArray<Checksum> fAllowedChecksums;
-        bool fIgnoreFailure;
-    };
-
-    /**
-     * Abstract source of Expectations objects for individual tests.
-     */
-    class ExpectationsSource : public SkRefCnt {
-    public:
-        virtual Expectations get(const char *testName) = 0;
-    };
-
-    /**
-     * Return Expectations based on individual image files on disk.
-     */
-    class IndividualImageExpectationsSource : public ExpectationsSource {
-    public:
-        /**
-         * Create an ExpectationsSource that will return Expectations based on
-         * image files found within rootDir.
-         *
-         * rootDir: directory under which to look for image files
-         *          (this string will be copied to storage within this object)
-         * notifyOfMissingFiles: whether to log a message to stderr if an image
-         *                       file cannot be found
-         */
-        IndividualImageExpectationsSource(const char *rootDir,
-                                          bool notifyOfMissingFiles) :
-            fRootDir(rootDir), fNotifyOfMissingFiles(notifyOfMissingFiles) {}
-
-        Expectations get(const char *testName) SK_OVERRIDE {
-            SkString path = make_filename(fRootDir.c_str(), "", testName,
-                                          "png");
-            SkBitmap referenceBitmap;
-            bool decodedReferenceBitmap =
-                SkImageDecoder::DecodeFile(path.c_str(), &referenceBitmap,
-                                           SkBitmap::kARGB_8888_Config,
-                                           SkImageDecoder::kDecodePixels_Mode,
-                                           NULL);
-            if (decodedReferenceBitmap) {
-                Checksum checksum = SkBitmapChecksummer::Compute64(
-                    referenceBitmap);
-                return Expectations(checksum);
-            } else {
-                if (fNotifyOfMissingFiles) {
-                    fprintf(stderr, "FAILED to read %s\n", path.c_str());
-                }
-                return Expectations();
-            }
-        }
-
-    private:
-        const SkString fRootDir;
-        const bool fNotifyOfMissingFiles;
-    };
-}
-
-#endif
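
For orientation before the gmmain.cpp diff below: the deleted header implemented checksum-based expectations, where a test passes if the 64-bit checksum of its rendered bitmap matches any allowed checksum recorded under its test name. A minimal standalone model of that mechanism, using plain C++ stand-in types rather than the Skia classes above:

    // Standalone model of the deleted expectations mechanism (stand-in types,
    // not Skia code): a test passes if the checksum of its rendered output
    // matches any allowed checksum recorded under its test name.
    #include <cstdint>
    #include <map>
    #include <string>
    #include <vector>

    using Checksum = uint64_t;

    struct Expectations {
        std::vector<Checksum> allowed;   // empty => "no comparison" result
        bool ignoreFailure = false;

        bool empty() const { return allowed.empty(); }
        bool match(Checksum actual) const {
            for (Checksum c : allowed) {
                if (c == actual) {
                    return true;
                }
            }
            return false;
        }
    };

    // Maps test name -> expectations, playing the role that
    // IndividualImageExpectationsSource played by checksumming one
    // reference PNG per test.
    using ExpectationsSource = std::map<std::string, Expectations>;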

View file

@@ -14,7 +14,6 @@
  */
 #include "gm.h"
-#include "gm_expectations.h"
 #include "system_preferences.h"
 #include "SkBitmapChecksummer.h"
 #include "SkColorPriv.h"
@@ -36,7 +35,7 @@
 #ifdef SK_BUILD_FOR_WIN
 // json includes xlocale which generates warning 4530 because we're compiling without
-// exceptions; see https://code.google.com/p/skia/issues/detail?id=1067
+// exceptions
 #pragma warning(push)
 #pragma warning(disable : 4530)
 #endif
@@ -200,6 +199,7 @@ public:
     GMMain() {
         // Set default values of member variables, which tool_main()
         // may override.
+        fNotifyMissingReadReference = true;
         fUseFileHierarchy = false;
         fMismatchPath = NULL;
     }
@@ -216,6 +216,19 @@ public:
         return name;
     }

+    static SkString make_filename(const char path[],
+                                  const char renderModeDescriptor[],
+                                  const SkString& name,
+                                  const char suffix[]) {
+        SkString filename(path);
+        if (filename.endsWith(SkPATH_SEPARATOR)) {
+            filename.remove(filename.size() - 1, 1);
+        }
+        filename.appendf("%c%s%s.%s", SkPATH_SEPARATOR,
+                         name.c_str(), renderModeDescriptor, suffix);
+        return filename;
+    }
+
     /* since PNG insists on unpremultiplying our alpha, we take no
        precision chances and force all pixels to be 100% opaque,
        otherwise on compare we may not get a perfect match.
@@ -271,6 +284,51 @@ public:
         }
     }

+    // Compares "target" and "base" bitmaps, returning the result
+    // (ERROR_NONE if the two bitmaps are identical).
+    ErrorBitfield compare(const SkBitmap& target, const SkBitmap& base,
+                          const SkString& name,
+                          const char* renderModeDescriptor) {
+        SkBitmap copy;
+        const SkBitmap* bm = &target;
+        if (target.config() != SkBitmap::kARGB_8888_Config) {
+            target.copyTo(&copy, SkBitmap::kARGB_8888_Config);
+            bm = &copy;
+        }
+        SkBitmap baseCopy;
+        const SkBitmap* bp = &base;
+        if (base.config() != SkBitmap::kARGB_8888_Config) {
+            base.copyTo(&baseCopy, SkBitmap::kARGB_8888_Config);
+            bp = &baseCopy;
+        }
+
+        force_all_opaque(*bm);
+        force_all_opaque(*bp);
+
+        const int w = bm->width();
+        const int h = bm->height();
+        if (w != bp->width() || h != bp->height()) {
+            RecordError(ERROR_IMAGE_MISMATCH, name, renderModeDescriptor);
+            return ERROR_IMAGE_MISMATCH;
+        }
+
+        SkAutoLockPixels bmLock(*bm);
+        SkAutoLockPixels baseLock(*bp);
+
+        for (int y = 0; y < h; y++) {
+            for (int x = 0; x < w; x++) {
+                SkPMColor c0 = *bp->getAddr32(x, y);
+                SkPMColor c1 = *bm->getAddr32(x, y);
+                if (c0 != c1) {
+                    RecordError(ERROR_IMAGE_MISMATCH, name,
+                                renderModeDescriptor);
+                    return ERROR_IMAGE_MISMATCH;
+                }
+            }
+        }
+        return ERROR_NONE;
+    }
+
     static bool write_document(const SkString& path,
                                const SkDynamicMemoryWStream& document) {
         SkFILEWStream stream(path.c_str());
@@ -427,18 +485,15 @@ public:
             gRec.fBackend == kGPU_Backend ||
             (gRec.fBackend == kPDF_Backend && CAN_IMAGE_PDF)) {
-            path = make_filename(writePath, renderModeDescriptor, name.c_str(),
-                                 "png");
+            path = make_filename(writePath, renderModeDescriptor, name, "png");
             success = write_bitmap(path, bitmap);
         }
         if (kPDF_Backend == gRec.fBackend) {
-            path = make_filename(writePath, renderModeDescriptor, name.c_str(),
-                                 "pdf");
+            path = make_filename(writePath, renderModeDescriptor, name, "pdf");
             success = write_document(path, *document);
         }
         if (kXPS_Backend == gRec.fBackend) {
-            path = make_filename(writePath, renderModeDescriptor, name.c_str(),
-                                 "xps");
+            path = make_filename(writePath, renderModeDescriptor, name, "xps");
             success = write_document(path, *document);
         }
         if (success) {
@@ -451,195 +506,115 @@ public:
         }
     }

-    /**
-     * Compares actual checksum to expectations.
-     * Returns ERROR_NONE if they match, or some particular error code otherwise
-     *
-     * If fMismatchPath has been set, and there are pixel diffs, then the
-     * actual bitmap will be written out to a file within fMismatchPath.
-     *
-     * @param expectations what expectations to compare actualBitmap against
-     * @param actualBitmap the image we actually generated
-     * @param baseNameString name of test without renderModeDescriptor added
-     * @param renderModeDescriptor e.g., "-rtree", "-deferred"
-     * @param addToJsonSummary whether to add these results (both actual and
-     *        expected) to the JSON summary
-     *
-     * TODO: For now, addToJsonSummary is only set to true within
-     * compare_test_results_to_stored_expectations(), so results of our
-     * in-memory comparisons (Rtree vs regular, etc.) are not written to the
-     * JSON summary. We may wish to change that.
-     */
-    ErrorBitfield compare_to_expectations(Expectations expectations,
-                                          const SkBitmap& actualBitmap,
-                                          const SkString& baseNameString,
-                                          const char renderModeDescriptor[],
-                                          bool addToJsonSummary=false) {
-        ErrorBitfield retval;
-        Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap);
-        SkString completeNameString = baseNameString;
-        completeNameString.append(renderModeDescriptor);
-        const char* completeName = completeNameString.c_str();
-
-        if (expectations.empty()) {
-            retval = ERROR_READING_REFERENCE_IMAGE;
-        } else if (expectations.match(actualChecksum)) {
-            retval = ERROR_NONE;
-        } else {
-            retval = ERROR_IMAGE_MISMATCH;
-            if (fMismatchPath) {
-                SkString path =
-                    make_filename(fMismatchPath, renderModeDescriptor,
-                                  baseNameString.c_str(), "png");
-                write_bitmap(path, actualBitmap);
-            }
-        }
-        RecordError(retval, baseNameString, renderModeDescriptor);
-
-        if (addToJsonSummary) {
-            add_actual_results_to_json_summary(completeName, actualChecksum,
-                                               retval,
-                                               expectations.ignoreFailure());
-            add_expected_results_to_json_summary(completeName, expectations);
-        }
-
-        return retval;
-    }
-
-    /**
-     * Add this result to the appropriate JSON collection of actual results,
-     * depending on status.
-     */
-    void add_actual_results_to_json_summary(const char testName[],
-                                            Checksum actualChecksum,
-                                            ErrorBitfield result,
-                                            bool ignoreFailure) {
-        Json::Value actualResults;
-        actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] =
-            asJsonValue(actualChecksum);
-        if (ERROR_NONE == result) {
-            this->fJsonActualResults_Succeeded[testName] = actualResults;
-        } else {
-            if (ignoreFailure) {
-                // TODO: Once we have added the ability to compare
-                // actual results against expectations in a JSON file
-                // (where we can set ignore-failure to either true or
-                // false), add tests cases that exercise ignored
-                // failures (both for ERROR_READING_REFERENCE_IMAGE
-                // and ERROR_IMAGE_MISMATCH).
-                this->fJsonActualResults_FailureIgnored[testName] =
-                    actualResults;
-            } else {
-                switch(result) {
-                case ERROR_READING_REFERENCE_IMAGE:
-                    // TODO: What about the case where there IS an
-                    // expected image checksum, but that gm test
-                    // doesn't actually run?  For now, those cases
-                    // will always be ignored, because gm only looks
-                    // at expectations that correspond to gm tests
-                    // that were actually run.
-                    //
-                    // Once we have the ability to express
-                    // expectations as a JSON file, we should fix this
-                    // (and add a test case for which an expectation
-                    // is given but the test is never run).
-                    this->fJsonActualResults_NoComparison[testName] =
-                        actualResults;
-                    break;
-                case ERROR_IMAGE_MISMATCH:
-                    this->fJsonActualResults_Failed[testName] = actualResults;
-                    break;
-                default:
-                    fprintf(stderr, "encountered unexpected result %d\n",
-                            result);
-                    SkDEBUGFAIL("encountered unexpected result");
-                    break;
-                }
-            }
-        }
-    }
-
-    /**
-     * Add this test to the JSON collection of expected results.
-     */
-    void add_expected_results_to_json_summary(const char testName[],
-                                              Expectations expectations) {
-        // For now, we assume that this collection starts out empty and we
-        // just fill it in as we go; once gm accepts a JSON file as input,
-        // we'll have to change that.
-        Json::Value expectedResults;
-        expectedResults[kJsonKey_ExpectedResults_Checksums] =
-            expectations.allowedChecksumsAsJson();
-        expectedResults[kJsonKey_ExpectedResults_IgnoreFailure] =
-            expectations.ignoreFailure();
-        this->fJsonExpectedResults[testName] = expectedResults;
-    }
-
-    /**
-     * Compare actualBitmap to expectations stored in this->fExpectationsSource.
-     *
-     * @param gm which test generated the actualBitmap
-     * @param gRec
-     * @param writePath unless this is NULL, write out actual images into this
-     *        directory
-     * @param actualBitmap bitmap generated by this run
-     * @param pdf
-     */
-    ErrorBitfield compare_test_results_to_stored_expectations(
-        GM* gm, const ConfigData& gRec, const char writePath[],
-        SkBitmap& actualBitmap, SkDynamicMemoryWStream* pdf) {
-
-        SkString name = make_name(gm->shortName(), gRec.fName);
-        ErrorBitfield retval = ERROR_NONE;
-
-        ExpectationsSource *expectationsSource =
-            this->fExpectationsSource.get();
-        if (expectationsSource && (gRec.fFlags & kRead_ConfigFlag)) {
-            Expectations expectations = expectationsSource->get(name.c_str());
-            retval |= compare_to_expectations(expectations, actualBitmap,
-                                              name, "", true);
-        } else {
-            // If we are running without expectations, we still want to
-            // record the actual results.
-            Checksum actualChecksum =
-                SkBitmapChecksummer::Compute64(actualBitmap);
-            add_actual_results_to_json_summary(name.c_str(), actualChecksum,
-                                               ERROR_READING_REFERENCE_IMAGE,
-                                               false);
-        }
-
-        // TODO: Consider moving this into compare_to_expectations(),
-        // similar to fMismatchPath... for now, we don't do that, because
-        // we don't want to write out the actual bitmaps for all
-        // renderModes of all tests! That would be a lot of files.
-        if (writePath && (gRec.fFlags & kWrite_ConfigFlag)) {
-            retval |= write_reference_image(gRec, writePath, "",
-                                            name, actualBitmap, pdf);
-        }
-
-        return retval;
-    }
-
-    /**
-     * Compare actualBitmap to referenceBitmap.
-     *
-     * @param gm which test generated the bitmap
-     * @param gRec
-     * @param renderModeDescriptor
-     * @param actualBitmap actual bitmap generated by this run
-     * @param referenceBitmap bitmap we expected to be generated
-     */
-    ErrorBitfield compare_test_results_to_reference_bitmap(
-        GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
-        SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {
-
-        SkASSERT(referenceBitmap);
-        SkString name = make_name(gm->shortName(), gRec.fName);
-        Checksum referenceChecksum =
-            SkBitmapChecksummer::Compute64(*referenceBitmap);
-        Expectations expectations(referenceChecksum);
-        return compare_to_expectations(expectations, actualBitmap,
-                                       name, renderModeDescriptor);
+    // Compares bitmap "bitmap" to a reference bitmap read from disk.
+    //
+    // Returns a description of the difference between "bitmap" and
+    // the reference bitmap, or ERROR_READING_REFERENCE_IMAGE if
+    // unable to read the reference bitmap from disk.
+    ErrorBitfield compare_to_reference_image_on_disk(
+        const char readPath [], const SkString& name, SkBitmap &bitmap,
+        const char renderModeDescriptor []) {
+        ErrorBitfield retval;
+        SkString path = make_filename(readPath, "", name, "png");
+        SkBitmap referenceBitmap;
+        Json::Value expectedChecksumsArray;
+
+        bool decodedReferenceBitmap =
+            SkImageDecoder::DecodeFile(path.c_str(), &referenceBitmap,
+                                       SkBitmap::kARGB_8888_Config,
+                                       SkImageDecoder::kDecodePixels_Mode,
+                                       NULL);
+        if (decodedReferenceBitmap) {
+            expectedChecksumsArray.append(Json::UInt64(
+                SkBitmapChecksummer::Compute64(referenceBitmap)));
+            retval = compare(bitmap, referenceBitmap, name,
+                             renderModeDescriptor);
+            if (fMismatchPath && (retval & ERROR_IMAGE_MISMATCH)) {
+                SkString path = make_filename(fMismatchPath, renderModeDescriptor, name, "png");
+                write_bitmap(path, bitmap);
+            }
+        } else {
+            if (fNotifyMissingReadReference) {
+                fprintf(stderr, "FAILED to read %s\n", path.c_str());
+            }
+            RecordError(ERROR_READING_REFERENCE_IMAGE, name,
+                        renderModeDescriptor);
+            retval = ERROR_READING_REFERENCE_IMAGE;
+        }
+
+        // Add this result to the appropriate JSON collection of actual results,
+        // depending on status.
+        Json::Value actualResults;
+        actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] = Json::UInt64(
+            SkBitmapChecksummer::Compute64(bitmap));
+        if (decodedReferenceBitmap) {
+            if (ERROR_NONE == retval) {
+                fJsonActualResults_Succeeded[name.c_str()] = actualResults;
+            } else {
+                fJsonActualResults_Failed[name.c_str()] = actualResults;
+            }
+        } else {
+            fJsonActualResults_NoComparison[name.c_str()] = actualResults;
+        }
+
+        // Add this test to the JSON collection of expected results.
+        // For now, we assume that this collection starts out empty and we
+        // just fill it in as we go; once gm accepts a JSON file as input,
+        // we'll have to change that.
+        Json::Value expectedResults;
+        expectedResults[kJsonKey_ExpectedResults_Checksums] = expectedChecksumsArray;
+        expectedResults[kJsonKey_ExpectedResults_IgnoreFailure] = !decodedReferenceBitmap;
+        fJsonExpectedResults[name.c_str()] = expectedResults;
+
+        return retval;
+    }
+
+    // NOTE: As far as I can tell, this function is NEVER called with a
+    // non-blank renderModeDescriptor, EXCEPT when readPath and writePath are
+    // both NULL (and thus no images are read from or written to disk).
+    // So I don't trust that the renderModeDescriptor is being used for
+    // anything other than debug output these days.
+    ErrorBitfield handle_test_results(GM* gm,
+                                      const ConfigData& gRec,
+                                      const char writePath [],
+                                      const char readPath [],
+                                      const char renderModeDescriptor [],
+                                      SkBitmap& bitmap,
+                                      SkDynamicMemoryWStream* pdf,
+                                      const SkBitmap* referenceBitmap) {
+        SkString name = make_name(gm->shortName(), gRec.fName);
+        ErrorBitfield retval = ERROR_NONE;
+
+        if (readPath && (gRec.fFlags & kRead_ConfigFlag)) {
+            retval |= compare_to_reference_image_on_disk(readPath, name, bitmap,
+                                                         renderModeDescriptor);
+        } else if (NULL == referenceBitmap) {
+            // If we are running without "--readPath", we still want to
+            // record the actual results.
+            //
+            // For now, though, we don't record results of comparisons against
+            // different in-memory representations (hence the referenceBitmap
+            // NULL check).
+            Json::Value actualResults;
+            actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] =
+                Json::UInt64(SkBitmapChecksummer::Compute64(bitmap));
+            fJsonActualResults_NoComparison[name.c_str()] = actualResults;
+        }
+        if (writePath && (gRec.fFlags & kWrite_ConfigFlag)) {
+            retval |= write_reference_image(gRec, writePath,
+                                            renderModeDescriptor,
+                                            name, bitmap, pdf);
+        }
+        if (referenceBitmap) {
+            ErrorBitfield compareResult = compare(bitmap, *referenceBitmap, name,
+                                                  renderModeDescriptor);
+            if (fMismatchPath && (compareResult & ERROR_IMAGE_MISMATCH)) {
+                SkString path = make_filename(fMismatchPath, renderModeDescriptor, name, "png");
+                write_bitmap(path, bitmap);
+            }
+            retval |= compareResult;
+        }
+        return retval;
     }

     static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
@@ -692,6 +667,7 @@ public:
     ErrorBitfield test_drawing(GM* gm,
                                const ConfigData& gRec,
                                const char writePath [],
+                               const char readPath [],
                                GrContext* context,
                                GrRenderTarget* rt,
                                SkBitmap* bitmap) {
@@ -703,9 +679,6 @@ public:
             ErrorBitfield errors = generate_image(gm, gRec, context, rt, bitmap,
                                                   false);
             if (ERROR_NONE != errors) {
-                // TODO: Add a test to exercise what the stdout and
-                // JSON look like if we get an "early error" while
-                // trying to generate the image.
                 return errors;
             }
         } else if (gRec.fBackend == kPDF_Backend) {
@@ -718,8 +691,8 @@ public:
         } else if (gRec.fBackend == kXPS_Backend) {
             generate_xps(gm, document);
         }
-        return compare_test_results_to_stored_expectations(
-            gm, gRec, writePath, *bitmap, &document);
+        return handle_test_results(gm, gRec, writePath, readPath,
+                                   "", *bitmap, &document, NULL);
     }

     ErrorBitfield test_deferred_drawing(GM* gm,
@@ -737,15 +710,17 @@ public:
             if (!generate_image(gm, gRec, context, rt, &bitmap, true)) {
                 return ERROR_NONE;
             }
-            return compare_test_results_to_reference_bitmap(
-                gm, gRec, "-deferred", bitmap, &referenceBitmap);
+            return handle_test_results(gm, gRec, NULL, NULL,
+                                       "-deferred", bitmap, NULL,
+                                       &referenceBitmap);
         }
         return ERROR_NONE;
     }

     ErrorBitfield test_pipe_playback(GM* gm,
                                      const ConfigData& gRec,
-                                     const SkBitmap& referenceBitmap) {
+                                     const SkBitmap& referenceBitmap,
+                                     const char readPath []) {
         ErrorBitfield errors = ERROR_NONE;
         for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
             SkBitmap bitmap;
@@ -760,8 +735,9 @@ public:
             writer.endRecording();
             SkString string("-pipe");
             string.append(gPipeWritingFlagCombos[i].name);
-            errors |= compare_test_results_to_reference_bitmap(
-                gm, gRec, string.c_str(), bitmap, &referenceBitmap);
+            errors |= handle_test_results(gm, gRec, NULL, NULL,
+                                          string.c_str(), bitmap, NULL,
+                                          &referenceBitmap);
             if (errors != ERROR_NONE) {
                 break;
             }
@@ -770,7 +746,8 @@ public:
     }

     ErrorBitfield test_tiled_pipe_playback(
-        GM* gm, const ConfigData& gRec, const SkBitmap& referenceBitmap) {
+        GM* gm, const ConfigData& gRec, const SkBitmap& referenceBitmap,
+        const char readPath []) {
         ErrorBitfield errors = ERROR_NONE;
         for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
             SkBitmap bitmap;
@@ -785,8 +762,9 @@ public:
             writer.endRecording();
             SkString string("-tiled pipe");
             string.append(gPipeWritingFlagCombos[i].name);
-            errors |= compare_test_results_to_reference_bitmap(
-                gm, gRec, string.c_str(), bitmap, &referenceBitmap);
+            errors |= handle_test_results(gm, gRec, NULL, NULL,
+                                          string.c_str(), bitmap, NULL,
+                                          &referenceBitmap);
             if (errors != ERROR_NONE) {
                 break;
             }
@@ -799,6 +777,9 @@ public:
     // They are public for now, to allow easier setting by tool_main().
    //

+    // if true, emit a message when we can't find a reference image to compare
+    bool fNotifyMissingReadReference;
+
     bool fUseFileHierarchy;

     const char* fMismatchPath;
@@ -806,11 +787,6 @@ public:
     // information about all failed tests we have encountered so far
     SkTArray<FailRec> fFailedTests;

-    // Where to read expectations (expected image checksums, etc.) from.
-    // If unset, we don't do comparisons.
-    SkAutoTUnref<ExpectationsSource> fExpectationsSource;
-
-    // JSON summaries that we generate as we go (just for output).
     Json::Value fJsonExpectedResults;
     Json::Value fJsonActualResults_Failed;
     Json::Value fJsonActualResults_FailureIgnored;
@@ -1002,9 +978,6 @@ int tool_main(int argc, char** argv) {
     const char* readPath = NULL; // if non-null, were we read from to compare
     const char* resourcePath = NULL;// if non-null, where we read from for image resources

-    // if true, emit a message when we can't find a reference image to compare
-    bool notifyMissingReadReference = true;
-
     SkTDArray<const char*> fMatches;

     bool doPDF = true;
@@ -1069,7 +1042,7 @@ int tool_main(int argc, char** argv) {
         } else if (strcmp(*argv, "--nodeferred") == 0) {
             doDeferred = false;
         } else if (strcmp(*argv, "--disable-missing-warning") == 0) {
-            notifyMissingReadReference = false;
+            gmmain.fNotifyMissingReadReference = false;
         } else if (strcmp(*argv, "--mismatchPath") == 0) {
             argv++;
             if (argv < stop && **argv) {
@@ -1098,7 +1071,7 @@ int tool_main(int argc, char** argv) {
                 return -1;
             }
         } else if (strcmp(*argv, "--enable-missing-warning") == 0) {
-            notifyMissingReadReference = true;
+            gmmain.fNotifyMissingReadReference = true;
         } else if (strcmp(*argv, "--forceBWtext") == 0) {
             gForceBWtext = true;
         } else if (strcmp(*argv, "--help") == 0 || strcmp(*argv, "-h") == 0) {
@@ -1213,21 +1186,7 @@ int tool_main(int argc, char** argv) {
     GM::SetResourcePath(resourcePath);

     if (readPath) {
-        if (!sk_exists(readPath)) {
-            fprintf(stderr, "readPath %s does not exist!\n", readPath);
-            return -1;
-        }
-        if (sk_isdir(readPath)) {
-            fprintf(stderr, "reading from %s\n", readPath);
-            gmmain.fExpectationsSource.reset(SkNEW_ARGS(
-                IndividualImageExpectationsSource,
-                (readPath, notifyMissingReadReference)));
-        } else {
-            fprintf(stderr, "reading expectations from JSON summary file %s ",
-                    readPath);
-            fprintf(stderr, "BUT WE DON'T KNOW HOW TO DO THIS YET!\n");
-            return -1;
-        }
+        fprintf(stderr, "reading from %s\n", readPath);
     }
     if (writePath) {
         fprintf(stderr, "writing to %s\n", writePath);
@@ -1359,7 +1318,7 @@ int tool_main(int argc, char** argv) {
             if (ERROR_NONE == renderErrors) {
                 renderErrors |= gmmain.test_drawing(gm, config, writePath,
-                                                    GetGr(),
+                                                    readPath, GetGr(),
                                                     renderTarget,
                                                     &comparisonBitmap);
             }
@@ -1394,8 +1353,11 @@ int tool_main(int argc, char** argv) {
                 SkBitmap bitmap;
                 gmmain.generate_image_from_picture(gm, compareConfig, pict,
                                                    &bitmap);
-                pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
-                    gm, compareConfig, "-replay", bitmap, &comparisonBitmap);
+                pictErrors |= gmmain.handle_test_results(gm, compareConfig,
+                                                         NULL, NULL,
+                                                         "-replay", bitmap,
+                                                         NULL,
+                                                         &comparisonBitmap);
             }

             if ((ERROR_NONE == testErrors) &&
@@ -1407,14 +1369,17 @@ int tool_main(int argc, char** argv) {
                 SkBitmap bitmap;
                 gmmain.generate_image_from_picture(gm, compareConfig, repict,
                                                    &bitmap);
-                pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
-                    gm, compareConfig, "-serialize", bitmap, &comparisonBitmap);
+                pictErrors |= gmmain.handle_test_results(gm, compareConfig,
+                                                         NULL, NULL,
+                                                         "-serialize", bitmap,
+                                                         NULL,
+                                                         &comparisonBitmap);
             }

             if (writePicturePath) {
                 const char* pictureSuffix = "skp";
-                SkString path = make_filename(writePicturePath, "",
-                                              gm->shortName(),
-                                              pictureSuffix);
+                SkString path = gmmain.make_filename(writePicturePath, "",
+                                                     SkString(gm->shortName()),
+                                                     pictureSuffix);
                 SkFILEWStream stream(path.c_str());
                 pict->serialize(&stream);
@@ -1423,19 +1388,18 @@ int tool_main(int argc, char** argv) {
             testErrors |= pictErrors;
         }

-        // TODO: add a test in which the RTree rendering results in a
-        // different bitmap than the standard rendering. It should
-        // show up as failed in the JSON summary, and should be listed
-        // in the stdout also.
         if (!(gmFlags & GM::kSkipPicture_Flag) && doRTree) {
-            SkPicture* pict = gmmain.generate_new_picture(
-                gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
+            SkPicture* pict = gmmain.generate_new_picture(gm, kRTree_BbhType,
+                SkPicture::kUsePathBoundsForClip_RecordingFlag);
             SkAutoUnref aur(pict);
             SkBitmap bitmap;
             gmmain.generate_image_from_picture(gm, compareConfig, pict,
                                                &bitmap);
-            testErrors |= gmmain.compare_test_results_to_reference_bitmap(
-                gm, compareConfig, "-rtree", bitmap, &comparisonBitmap);
+            testErrors |= gmmain.handle_test_results(gm, compareConfig,
+                                                     NULL, NULL,
+                                                     "-rtree", bitmap,
+                                                     NULL,
+                                                     &comparisonBitmap);
         }

         if (!(gmFlags & GM::kSkipPicture_Flag) && doTileGrid) {
@@ -1446,9 +1410,8 @@ int tool_main(int argc, char** argv) {
             // We record with the reciprocal scale to obtain a replay
             // result that can be validated against comparisonBitmap.
             SkScalar recordScale = SkScalarInvert(replayScale);
-            SkPicture* pict = gmmain.generate_new_picture(
-                gm, kTileGrid_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag,
-                recordScale);
+            SkPicture* pict = gmmain.generate_new_picture(gm, kTileGrid_BbhType,
+                SkPicture::kUsePathBoundsForClip_RecordingFlag, recordScale);
             SkAutoUnref aur(pict);
             SkBitmap bitmap;
             gmmain.generate_image_from_picture(gm, compareConfig, pict,
@@ -1458,8 +1421,10 @@ int tool_main(int argc, char** argv) {
                 suffix += "-scale-";
                 suffix.appendScalar(replayScale);
             }
-            testErrors |= gmmain.compare_test_results_to_reference_bitmap(
-                gm, compareConfig, suffix.c_str(), bitmap,
-                &comparisonBitmap);
+            testErrors |= gmmain.handle_test_results(gm, compareConfig,
+                                                     NULL, NULL,
+                                                     suffix.c_str(), bitmap,
+                                                     NULL,
+                                                     &comparisonBitmap);
         }
     }
@@ -1471,14 +1436,16 @@ int tool_main(int argc, char** argv) {
         if ((ERROR_NONE == testErrors) && doPipe) {
             pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig,
-                                                    comparisonBitmap);
+                                                    comparisonBitmap,
+                                                    readPath);
         }

         if ((ERROR_NONE == testErrors) &&
             (ERROR_NONE == pipeErrors) &&
             doTiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) {
             pipeErrors |= gmmain.test_tiled_pipe_playback(gm, compareConfig,
-                                                          comparisonBitmap);
+                                                          comparisonBitmap,
+                                                          readPath);
         }

         testErrors |= pipeErrors;
@@ -1487,10 +1454,10 @@ int tool_main(int argc, char** argv) {
         // Update overall results.
         // We only tabulate the particular error types that we currently
         // care about (e.g., missing reference images). Later on, if we
-        // want to also tabulate other error types, we can do so.
+        // want to also tabulate pixel mismatches vs dimension mistmatches
+        // (or whatever else), we can do so.
         testsRun++;
-        if (!gmmain.fExpectationsSource.get() ||
-            (ERROR_READING_REFERENCE_IMAGE & testErrors)) {
+        if (!readPath || (ERROR_READING_REFERENCE_IMAGE & testErrors)) {
             testsMissingReferenceImages++;
         } else if (ERROR_NONE == testErrors) {
             testsPassed++;
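
The net effect of the gmmain.cpp changes above is to swap checksum matching back out for direct pixel comparison: both bitmaps are converted to 32-bit ARGB, alpha is forced fully opaque (since PNG insists on unpremultiplying alpha, exact alpha round-trips are not trusted), and any dimension or pixel difference counts as a mismatch. A standalone illustration of that policy, using plain C++ stand-in types rather than Skia's SkBitmap:

    // Illustration of the comparison policy this revert restores; stand-in
    // types, not Skia code.
    #include <cstdint>
    #include <vector>

    struct Image32 {
        int width = 0;
        int height = 0;
        std::vector<uint32_t> pixels;   // 0xAARRGGBB, row-major
    };

    // Force every pixel fully opaque so alpha differences are ignored,
    // mirroring force_all_opaque() in gmmain.cpp.
    static void force_all_opaque(Image32& img) {
        for (uint32_t& p : img.pixels) {
            p |= 0xFF000000u;
        }
    }

    // Mirrors GMMain::compare(): a dimension mismatch or any differing pixel
    // is a mismatch; otherwise the images are considered identical.
    static bool images_match(Image32 a, Image32 b) {
        if (a.width != b.width || a.height != b.height) {
            return false;
        }
        force_all_opaque(a);
        force_all_opaque(b);
        return a.pixels == b.pixels;
    }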

View file

@@ -12,7 +12,7 @@
    "expected-results" : {
       "8888/dashing2" : {
          "checksums" : null,
-         "ignore-failure" : false
+         "ignore-failure" : true
       }
    }
 }
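
This expectation flips to "ignore-failure" : true because the reverted gmmain.cpp writes the entry as expectedResults[kJsonKey_ExpectedResults_IgnoreFailure] = !decodedReferenceBitmap: when the reference PNG for a test cannot be decoded, the test records null checksums and its failure is ignored rather than counted. A minimal jsoncpp sketch of that path (the literal key strings are assumed stand-ins for the kJsonKey_* constants):

    // Sketch of how the reverted code produces the entry above when the
    // reference image is missing or unreadable; key strings are assumed
    // stand-ins for the kJsonKey_* constants in gmmain.cpp.
    #include <json/value.h>
    #include <json/writer.h>
    #include <iostream>

    int main() {
        bool decodedReferenceBitmap = false;  // reference PNG failed to decode

        Json::Value expectedResults;
        expectedResults["checksums"] = Json::Value();  // null: no checksums known
        expectedResults["ignore-failure"] = !decodedReferenceBitmap;

        Json::Value summary;
        summary["expected-results"]["8888/dashing2"] = expectedResults;
        std::cout << Json::StyledWriter().write(summary);
        return 0;
    }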