Mirror of https://github.com/mozilla/gecko-dev.git
Backed out changeset 6da58c7bb247 (bug 1365649) for frequently failing devtools/client/netmonitor/test/browser_net_simple-request-data.js. r=backout
Parent: 877985ac99
Commit: 2748dd52d2
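The hunks below revert bug 1365649's conversion of the browser-test harness to an options-object testResult and restore the positional constructor. As a rough illustration only (not part of the patch), here is the same report written in the two conventions, using lines taken from the cleanup hunk further down:

// Options-object form introduced by bug 1365649, removed by this backout:
this.currentTest.addResult(new testResult({
  name: "Cleanup function threw an exception",
  ex,
  allowFailure: this.currentTest.allowFailure,
}));

// Positional form restored by this backout:
// testResult(aCondition, aName, aDiag, aIsTodo, aStack)
this.currentTest.addResult(new testResult(false, "Cleanup function threw an exception", ex, false));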
@@ -132,9 +132,6 @@
get failCount() {
return this.results.filter(t => !t.info && !t.pass).length;
},
get allowedFailureCount() {
return this.results.filter(t => t.allowedFailure).length;
},

addResult: function addResult(result) {
this.lastOutputTime = Date.now();

@@ -165,9 +165,9 @@ function Tester(aTests, structuredLogger, aCallback) {
this._toleratedUncaughtRejections = null;
this._uncaughtErrorObserver = ({message, date, fileName, stack, lineNumber}) => {
let ex = message;
let error = message;
if (fileName || lineNumber) {
ex = {
error = {
fileName: fileName,
lineNumber: lineNumber,
message: message,

@@ -178,18 +178,21 @@ function Tester(aTests, structuredLogger, aCallback) {
}

// We may have a whitelist of rejections we wish to tolerate.
let pass = this._toleratedUncaughtRejections &&
let tolerate = this._toleratedUncaughtRejections &&
this._toleratedUncaughtRejections.indexOf(message) != -1;
let name = "A promise chain failed to handle a rejection: ";
if (pass) {
if (tolerate) {
name = "WARNING: (PLEASE FIX THIS AS PART OF BUG 1077403) " + name;
}

this.currentTest.addResult(new testResult({
pass, name, ex, todo: pass, stack,
allowFailure: this.currentTest.allowFailure,
}));
};
this.currentTest.addResult(
new testResult(
/*success*/tolerate,
/*name*/name,
/*error*/error,
/*known*/tolerate,
/*stack*/stack));
};
}
Tester.prototype = {
EventUtils: {},
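The observer above downgrades a rejection whose message is on the tolerated whitelist to a known failure with a warning prefix. A minimal standalone sketch of that decision (illustration only, not harness code; classifyRejection is a hypothetical name):

// Sketch of the whitelist check performed by the uncaught-rejection observer above.
function classifyRejection(message, toleratedMessages) {
  let tolerate = toleratedMessages && toleratedMessages.indexOf(message) != -1;
  let name = "A promise chain failed to handle a rejection: ";
  if (tolerate) {
    // Tolerated rejections are reported as known failures, with a warning prefix.
    name = "WARNING: (PLEASE FIX THIS AS PART OF BUG 1077403) " + name;
  }
  return { tolerate: tolerate, name: name + message };
}

console.log(classifyRejection("known flake", ["known flake"]).tolerate); // true
console.log(classifyRejection("new bug", ["known flake"]).tolerate);     // false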
@@ -278,11 +281,9 @@ Tester.prototype = {
if (this.currentTest && window.gBrowser && gBrowser.tabs.length > 1) {
while (gBrowser.tabs.length > 1) {
let lastTab = gBrowser.tabContainer.lastChild;
this.currentTest.addResult(new testResult({
name: baseMsg.replace("{elt}", "tab") + ": " +
lastTab.linkedBrowser.currentURI.spec,
allowFailure: this.currentTest.allowFailure,
}));
let msg = baseMsg.replace("{elt}", "tab") +
": " + lastTab.linkedBrowser.currentURI.spec;
this.currentTest.addResult(new testResult(false, msg, "", false));
gBrowser.removeTab(lastTab);
}
}

@@ -314,10 +315,7 @@ Tester.prototype = {
}
let msg = baseMsg.replace("{elt}", type);
if (this.currentTest) {
this.currentTest.addResult(new testResult({
name: msg,
allowFailure: this.currentTest.allowFailure,
}));
this.currentTest.addResult(new testResult(false, msg, "", false));
} else {
if (!createdFakeTestForLogging) {
createdFakeTestForLogging = true;
@@ -436,22 +434,18 @@ Tester.prototype = {
yield func.apply(testScope);
}
catch (ex) {
this.currentTest.addResult(new testResult({
name: "Cleanup function threw an exception",
ex,
allowFailure: this.currentTest.allowFailure,
}));
this.currentTest.addResult(new testResult(false, "Cleanup function threw an exception", ex, false));
}
}

if (this.currentTest.passCount === 0 &&
this.currentTest.failCount === 0 &&
this.currentTest.todoCount === 0) {
this.currentTest.addResult(new testResult({
name: "This test contains no passes, no fails and no todos. Maybe" +
" it threw a silent exception? Make sure you use" +
" waitForExplicitFinish() if you need it.",
}));
this.currentTest.addResult(new testResult(false, "This test contains no passes, no fails and no todos. Maybe it threw a silent exception? Make sure you use waitForExplicitFinish() if you need it.", "", false));
}

if (testScope.__expected == 'fail' && testScope.__num_failed <= 0) {
this.currentTest.addResult(new testResult(false, "We expected at least one assertion to fail because this test file was marked as fail-if in the manifest!", "", true));
}

this.Promise.Debugging.flushUncaughtErrors();

@@ -459,18 +453,12 @@ Tester.prototype = {
let winUtils = window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindowUtils);
if (winUtils.isTestControllingRefreshes) {
this.currentTest.addResult(new testResult({
name: "test left refresh driver under test control",
}));
this.currentTest.addResult(new testResult(false, "test left refresh driver under test control", "", false));
winUtils.restoreNormalRefresh();
}

if (this.SimpleTest.isExpectingUncaughtException()) {
this.currentTest.addResult(new testResult({
name: "expectUncaughtException was called but no uncaught" +
" exception was detected!",
allowFailure: this.currentTest.allowFailure,
}));
this.currentTest.addResult(new testResult(false, "expectUncaughtException was called but no uncaught exception was detected!", "", false));
}

Object.keys(window).forEach(function (prop) {
@@ -483,12 +471,8 @@ Tester.prototype = {
}
if (this._globalProperties.indexOf(prop) == -1) {
this._globalProperties.push(prop);
if (this._globalPropertyWhitelist.indexOf(prop) == -1) {
this.currentTest.addResult(new testResult({
name: "test left unexpected property on window: " + prop,
allowFailure: this.currentTest.allowFailure,
}));
}
if (this._globalPropertyWhitelist.indexOf(prop) == -1)
this.currentTest.addResult(new testResult(false, "test left unexpected property on window: " + prop, "", false));
}
}, this);

@@ -518,11 +502,10 @@ Tester.prototype = {
// Notify a long running test problem if it didn't end up in a timeout.
if (this.currentTest.unexpectedTimeouts && !this.currentTest.timedOut) {
this.currentTest.addResult(new testResult({
name: "This test exceeded the timeout threshold. It should be" +
" rewritten or split up. If that's not possible, use" +
" requestLongerTimeout(N), but only as a last resort.",
}));
let msg = "This test exceeded the timeout threshold. It should be " +
"rewritten or split up. If that's not possible, use " +
"requestLongerTimeout(N), but only as a last resort.";
this.currentTest.addResult(new testResult(false, msg, "", false));
}

// If we're in a debug build, check assertion counts. This code
@@ -537,53 +520,24 @@ Tester.prototype = {
let max = testScope.__expectedMaxAsserts;
let min = testScope.__expectedMinAsserts;
if (numAsserts > max) {
// TEST-UNEXPECTED-FAIL
this.currentTest.addResult(new testResult({
name: "Assertion count " + numAsserts +
" is greater than expected range " +
min + "-" + max + " assertions.",
pass: true, // TEMPORARILY TEST-KNOWN-FAIL
todo: true,
allowFailure: this.currentTest.allowFailure,
}));
let msg = "Assertion count " + numAsserts +
" is greater than expected range " +
min + "-" + max + " assertions.";
// TEST-UNEXPECTED-FAIL (TEMPORARILY TEST-KNOWN-FAIL)
//this.currentTest.addResult(new testResult(false, msg, "", false));
this.currentTest.addResult(new testResult(true, msg, "", true));
} else if (numAsserts < min) {
let msg = "Assertion count " + numAsserts +
" is less than expected range " +
min + "-" + max + " assertions.";
// TEST-UNEXPECTED-PASS
this.currentTest.addResult(new testResult({
name: "Assertion count " + numAsserts +
" is less than expected range " +
min + "-" + max + " assertions.",
todo: true,
allowFailure: this.currentTest.allowFailure,
}));
this.currentTest.addResult(new testResult(false, msg, "", true));
} else if (numAsserts > 0) {
let msg = "Assertion count " + numAsserts +
" is within expected range " +
min + "-" + max + " assertions.";
// TEST-KNOWN-FAIL
this.currentTest.addResult(new testResult({
name: "Assertion count " + numAsserts +
" is within expected range " +
min + "-" + max + " assertions.",
pass: true,
todo: true,
allowFailure: this.currentTest.allowFailure,
}));
}
}

if (this.currentTest.allowFailure) {
if (this.currentTest.expectedAllowedFailureCount) {
this.currentTest.addResult(new testResult({
name: "Expected " +
this.currentTest.expectedAllowedFailureCount +
" failures in this file, got " +
this.currentTest.allowedFailureCount + ".",
pass: this.currentTest.expectedAllowedFailureCount ==
this.currentTest.allowedFailureCount,
}));
} else if (this.currentTest.allowedFailureCount == 0) {
this.currentTest.addResult(new testResult({
name: "We expect at least one assertion to fail because this" +
" test file is marked as fail-if in the manifest.",
todo: true,
}));
this.currentTest.addResult(new testResult(true, msg, "", true));
}
}
@@ -735,17 +689,13 @@ Tester.prototype = {
this.currentTest.scope.ExtensionTestUtils = this.ExtensionTestUtils;
// Pass a custom report function for mochitest style reporting.
this.currentTest.scope.Assert = new this.Assert(function(err, message, stack) {
currentTest.addResult(new testResult(err ? {
name: err.message,
ex: err.stack,
stack: err.stack,
allowFailure: currentTest.allowFailure,
} : {
name: message,
pass: true,
stack,
allowFailure: currentTest.allowFailure,
}));
let res;
if (err) {
res = new testResult(false, err.message, err.stack, false, err.stack);
} else {
res = new testResult(true, message, "", false, stack);
}
currentTest.addResult(res);
});

this.ContentTask.setTestScope(currentScope);

@@ -778,10 +728,7 @@ Tester.prototype = {
// will also ignore an existing head.js attempting to import a missing
// module - see bug 755558 for why this strategy is preferred anyway.
if (!/^Error opening input stream/.test(ex.toString())) {
this.currentTest.addResult(new testResult({
name: "head.js import threw an exception",
ex,
}));
this.currentTest.addResult(new testResult(false, "head.js import threw an exception", ex, false));
}
}
@@ -805,13 +752,11 @@ Tester.prototype = {
try {
yield task();
} catch (ex) {
currentTest.addResult(new testResult({
name: "Uncaught exception",
pass: this.SimpleTest.isExpectingUncaughtException(),
ex,
stack: (typeof ex == "object" && "stack" in ex) ? ex.stack : null,
allowFailure: currentTest.allowFailure,
}));
let isExpected = !!this.SimpleTest.isExpectingUncaughtException();
let stack = (typeof ex == "object" && "stack" in ex)?ex.stack:null;
let name = "Uncaught exception";
let result = new testResult(isExpected, name, ex, false, stack);
currentTest.addResult(result);
}
Promise.Debugging.flushUncaughtErrors();
this.SimpleTest.info("Leaving test " + task.name);

@@ -824,13 +769,9 @@ Tester.prototype = {
throw "This test didn't call add_task, nor did it define a generatorTest() function, nor did it define a test() function, so we don't know how to run it.";
}
} catch (ex) {
let isExpected = !!this.SimpleTest.isExpectingUncaughtException();
if (!this.SimpleTest.isIgnoringAllUncaughtExceptions()) {
this.currentTest.addResult(new testResult({
name: "Exception thrown",
pass: this.SimpleTest.isExpectingUncaughtException(),
ex,
allowFailure: this.currentTest.allowFailure,
}));
this.currentTest.addResult(new testResult(isExpected, "Exception thrown", ex, false));
this.SimpleTest.expectUncaughtException(false);
} else {
this.currentTest.addResult(new testMessage("Exception thrown: " + ex));

@@ -883,7 +824,7 @@ Tester.prototype = {
return;
}

self.currentTest.addResult(new testResult({ name: "Test timed out" }));
self.currentTest.addResult(new testResult(false, "Test timed out", null, false));
self.currentTest.timedOut = true;
self.currentTest.scope.__waitTimer = null;
self.nextTest();
@@ -900,82 +841,63 @@ Tester.prototype = {
}
};

/**
 * Represents the result of one test assertion. This is described with a string
 * in traditional logging, and has a "status" and "expected" property used in
 * structured logging. Normally, results are mapped as follows:
 *
 * pass:  todo:  Added to:  Described as:         Status:  Expected:
 * true   false  passCount  TEST-PASS             PASS     PASS
 * true   true   todoCount  TEST-KNOWN-FAIL       FAIL     FAIL
 * false  false  failCount  TEST-UNEXPECTED-FAIL  FAIL     PASS
 * false  true   failCount  TEST-UNEXPECTED-PASS  PASS     FAIL
 *
 * The "allowFailure" argument indicates that this is one of the assertions that
 * should be allowed to fail, for example because "fail-if" is true for the
 * current test file in the manifest. In this case, results are mapped this way:
 *
 * pass:  todo:  Added to:  Described as:         Status:  Expected:
 * true   false  passCount  TEST-PASS             PASS     PASS
 * true   true   todoCount  TEST-KNOWN-FAIL       FAIL     FAIL
 * false  false  todoCount  TEST-KNOWN-FAIL       FAIL     FAIL
 * false  true   todoCount  TEST-KNOWN-FAIL       FAIL     FAIL
 */
function testResult({ name, pass, todo, ex, stack, allowFailure }) {
this.info = false;
this.name = name;
function testResult(aCondition, aName, aDiag, aIsTodo, aStack) {
this.name = aName;
this.msg = "";

if (allowFailure && !pass) {
this.allowedFailure = true;
this.pass = true;
this.todo = true;
} else {
this.pass = !!pass;
this.todo = todo;
}

this.expected = this.todo ? "FAIL" : "PASS";
this.info = false;
this.pass = !!aCondition;
this.todo = aIsTodo;

if (this.pass) {
this.status = this.expected;
return;
}

this.status = this.todo ? "PASS" : "FAIL";

if (ex) {
if (typeof ex == "object" && "fileName" in ex) {
// we have an exception - print filename and linenumber information
this.msg += "at " + ex.fileName + ":" + ex.lineNumber + " - ";
}
this.msg += String(ex);
}

if (stack) {
this.msg += "\nStack trace:\n";
let normalized;
if (stack instanceof Components.interfaces.nsIStackFrame) {
let frames = [];
for (let frame = stack; frame; frame = frame.caller) {
frames.push(frame.filename + ":" + frame.name + ":" + frame.lineNumber);
}
normalized = frames.join("\n");
if (aIsTodo) {
this.status = "FAIL";
this.expected = "FAIL";
} else {
normalized = "" + stack;
this.status = "PASS";
this.expected = "PASS";
}
this.msg += Task.Debugging.generateReadableStack(normalized, " ");
}

if (gConfig.debugOnFailure) {
// You've hit this line because you requested to break into the
// debugger upon a testcase failure on your test run.
debugger;
} else {
if (aDiag) {
if (typeof aDiag == "object" && "fileName" in aDiag) {
// we have an exception - print filename and linenumber information
this.msg += "at " + aDiag.fileName + ":" + aDiag.lineNumber + " - ";
}
this.msg += String(aDiag);
}
if (aStack) {
this.msg += "\nStack trace:\n";
let normalized;
if (aStack instanceof Components.interfaces.nsIStackFrame) {
let frames = [];
for (let frame = aStack; frame; frame = frame.caller) {
frames.push(frame.filename + ":" + frame.name + ":" + frame.lineNumber);
}
normalized = frames.join("\n");
} else {
normalized = "" + aStack;
}
this.msg += Task.Debugging.generateReadableStack(normalized, " ");
}
if (aIsTodo) {
this.status = "PASS";
this.expected = "FAIL";
} else {
this.status = "FAIL";
this.expected = "PASS";
}

if (gConfig.debugOnFailure) {
// You've hit this line because you requested to break into the
// debugger upon a testcase failure on your test run.
debugger;
}
}
}

function testMessage(msg) {
this.msg = msg || "";
function testMessage(aName) {
this.msg = aName || "";
this.info = true;
}
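The comment block and restored constructor above describe how a positional result maps pass/todo onto status and expected. A minimal, self-contained sketch of that mapping (illustration only; sketchResult is a hypothetical stand-in, not the harness constructor):

function sketchResult(condition, name, diag, isTodo) {
  // Mirrors the mapping of the restored testResult(aCondition, aName, aDiag, aIsTodo, aStack).
  let r = { name: name, msg: "", info: false, pass: !!condition, todo: isTodo };
  if (r.pass) {
    // TEST-PASS, or TEST-KNOWN-FAIL when the assertion was marked todo.
    r.status = isTodo ? "FAIL" : "PASS";
    r.expected = isTodo ? "FAIL" : "PASS";
  } else {
    if (diag) {
      r.msg += String(diag); // diagnostic text, e.g. the caught exception
    }
    // TEST-UNEXPECTED-PASS for todo assertions, TEST-UNEXPECTED-FAIL otherwise.
    r.status = isTodo ? "PASS" : "FAIL";
    r.expected = isTodo ? "FAIL" : "PASS";
  }
  return r;
}

// Example: an ordinary failing assertion reports status FAIL where PASS was expected.
console.log(sketchResult(false, "ok fails", "boom", false)); // status: "FAIL", expected: "PASS"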
@@ -983,16 +905,20 @@ function testMessage(msg) {
// cannot conflict with global variables used in tests.
function testScope(aTester, aTest, expected) {
this.__tester = aTester;

aTest.allowFailure = expected == "fail";
this.__expected = expected;
this.__num_failed = 0;

var self = this;
this.ok = function test_ok(condition, name, ex, stack) {
aTest.addResult(new testResult({
name, pass: condition, ex,
stack: stack || Components.stack.caller,
allowFailure: aTest.allowFailure,
}));
this.ok = function test_ok(condition, name, diag, stack) {
if (self.__expected == 'fail') {
if (!condition) {
self.__num_failed++;
condition = true;
}
}

aTest.addResult(new testResult(condition, name, diag, false,
stack ? stack : Components.stack.caller));
};
this.is = function test_is(a, b, name) {
self.ok(a == b, name, "Got " + a + ", expected " + b, false,
@@ -1002,12 +928,9 @@ function testScope(aTester, aTest, expected) {
self.ok(a != b, name, "Didn't expect " + a + ", but got it", false,
Components.stack.caller);
};
this.todo = function test_todo(condition, name, ex, stack) {
aTest.addResult(new testResult({
name, pass: !condition, todo: true, ex,
stack: stack || Components.stack.caller,
allowFailure: aTest.allowFailure,
}));
this.todo = function test_todo(condition, name, diag, stack) {
aTest.addResult(new testResult(!condition, name, diag, true,
stack ? stack : Components.stack.caller));
};
this.todo_is = function test_todo_is(a, b, name) {
self.todo(a == b, name, "Got " + a + ", expected " + b,
@@ -1082,9 +1005,8 @@ function testScope(aTester, aTest, expected) {
self.__expectedMaxAsserts = max;
};

this.setExpectedFailuresForSelfTest = function test_setExpectedFailuresForSelfTest(expectedAllowedFailureCount) {
aTest.allowFailure = true;
aTest.expectedAllowedFailureCount = expectedAllowedFailureCount;
this.setExpected = function test_setExpected() {
self.__expected = 'fail';
};

this.finish = function test_finish() {
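The hunk above removes the setExpectedFailuresForSelfTest helper added by bug 1365649 and restores setExpected/__expected. For illustration only, drawn from the self-test hunks later in this diff (not new API), here are the two ways a deliberately failing self-test is marked:

// Removed with this backout: the test declares its expected failure count up front.
setExpectedFailuresForSelfTest(1);

function test() {
  // Restored behaviour: with fail-if = true in the manifest, the scope runs with
  // __expected == 'fail', and the restored ok() flips this failure to a pass while
  // counting it in __num_failed.
  ok(false, "fail ok");
}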
@@ -1115,6 +1037,7 @@ testScope.prototype = {
__timeoutFactor: 1,
__expectedMinAsserts: 0,
__expectedMaxAsserts: 0,
__expected: 'pass',

EventUtils: {},
SimpleTest: {},
@@ -2,38 +2,48 @@
support-files =
  head.js

[browser_browserLoaded_content_loaded.js]
[browser_add_task.js]
[browser_async.js]
[browser_browserLoaded_content_loaded.js]
[browser_BrowserTestUtils.js]
support-files =
  dummy.html
[browser_fail.js]
[browser_fail_add_task.js]
[browser_fail_async.js]
[browser_fail_if.js]
fail-if = true
[browser_fail_throw.js]
[browser_fail_timeout.js]
skip-if = true # Disabled because it takes too long (bug 1178959)
[browser_fail_uncaught_rejection.js]
[browser_fail_unexpectedTimeout.js]
skip-if = true # Disabled because it takes too long (bug 1178959)
[browser_getTestFile.js]
support-files =
  test-dir/*
  waitForFocusPage.html
[browser_head.js]
[browser_pass.js]
[browser_parameters.js]
[browser_popupNode.js]
[browser_popupNode_check.js]
[browser_privileges.js]
[browser_requestLongerTimeout.js]
skip-if = true # Disabled because it takes too long (bug 1178959)
[browser_sanityException.js]
[browser_sanityException2.js]
[browser_waitForFocus.js]
skip-if = (os == "win" && e10s && debug)
[browser_getTestFile.js]
support-files =
  test-dir/*
  waitForFocusPage.html

# Disabled because it would take too long, useful to check functionality though.
# browser_requestLongerTimeout.js
[browser_zz_fail_openwindow.js]
skip-if = true # this catches outside of the main loop to find an extra window
[browser_fail.js]
skip-if = true
[browser_fail_add_task.js]
skip-if = true # fail-if doesn't catch an exception outside the test
[browser_fail_async_throw.js]
skip-if = true # fail-if doesn't catch an exception outside the test
[browser_fail_fp.js]
fail-if = true
[browser_fail_pf.js]
fail-if = true
[browser_fail_throw.js]
skip-if = true # fail-if doesn't catch an exception outside the test

# Disabled because it takes too long (bug 1178959)
[browser_fail_timeout.js]
skip-if = true
# Disabled because it takes too long (bug 1178959)
[browser_fail_unexpectedTimeout.js]
skip-if = true
@@ -4,5 +4,5 @@ function test() {
ok(true, "timeout ran");
finish();
}
setTimeout(done, 500);
setTimeout(done, 10000);
}
@@ -1,5 +1,3 @@
setExpectedFailuresForSelfTest(6);

function test() {
ok(false, "fail ok");
is(true, false, "fail is");
@@ -3,25 +3,55 @@

"use strict";

setExpectedFailuresForSelfTest(4);
// This test is designed to fail.
// It ensures that throwing an asynchronous error from add_task will
// fail the test.

function rejectOnNextTick(error) {
return new Promise((resolve, reject) => executeSoon(() => reject(error)));
var passedTests = 0;

function rejectWithTimeout(error = undefined) {
let deferred = Promise.defer();
executeSoon(function() {
ok(true, "we get here after a timeout");
deferred.reject(error);
});
return deferred.promise;
}

add_task(function failWithoutError() {
yield rejectOnNextTick(undefined);
try {
yield rejectWithTimeout();
} finally {
++passedTests;
}
});

add_task(function failWithString() {
yield rejectOnNextTick("This is a string");
try {
yield rejectWithTimeout("Meaningless error");
} finally {
++passedTests;
}
});

add_task(function failWithInt() {
yield rejectOnNextTick(42);
add_task(function failWithoutInt() {
try {
yield rejectWithTimeout(42);
} finally {
++passedTests;
}
});

// This one should display a stack trace
add_task(function failWithError() {
yield rejectOnNextTick(new Error("This is an error"));
try {
yield rejectWithTimeout(new Error("This is an error"));
} finally {
++passedTests;
}
});

add_task(function done() {
is(passedTests, 4, "Passed all tests");
});
@@ -1,9 +0,0 @@
setExpectedFailuresForSelfTest(1);

function test() {
waitForExplicitFinish();
executeSoon(() => {
ok(false, "fail");
finish();
});
}
@@ -0,0 +1,7 @@
function test() {
function end() {
throw "thrown exception";
}
waitForExplicitFinish();
setTimeout(end, 1000);
}
@@ -0,0 +1,4 @@
function test() {
ok(false, "first fail ok");
ok(true, "then pass ok");
}
@@ -1,4 +0,0 @@
// We expect this test to fail because it is marked as fail-if in the manifest.
function test() {
ok(false, "fail ok");
}
@@ -0,0 +1,4 @@
function test() {
ok(true, "first pass ok");
ok(false, "then fail ok");
}
@@ -1,5 +1,3 @@
setExpectedFailuresForSelfTest(1);

function test() {
throw "thrown exception";
}
@@ -1,6 +0,0 @@
setExpectedFailuresForSelfTest(1);

function test() {
Components.utils.import("resource://gre/modules/Promise.jsm", this);
Promise.reject(new Error("Promise rejection."));
}