Back out b17c8b926585, seems to not work any more. :-\

Jeff Walden 2010-07-27 18:23:30 -07:00
Parent dca5e62b28
Commit 030e5a1195
8 changed files with 17 additions and 69 deletions

View file

@@ -149,11 +149,11 @@ script 15.9.5.7.js
 script 15.9.5.8.js
 script 15.9.5.9.js
 script 15.9.5.js
-slow script dst-offset-caching-1-of-8.js
-slow script dst-offset-caching-2-of-8.js
-slow script dst-offset-caching-3-of-8.js
-slow script dst-offset-caching-4-of-8.js
-slow script dst-offset-caching-5-of-8.js
-slow script dst-offset-caching-6-of-8.js
-slow script dst-offset-caching-7-of-8.js
-slow script dst-offset-caching-8-of-8.js
+script dst-offset-caching-1-of-8.js
+script dst-offset-caching-2-of-8.js
+script dst-offset-caching-3-of-8.js
+script dst-offset-caching-4-of-8.js
+script dst-offset-caching-5-of-8.js
+script dst-offset-caching-6-of-8.js
+script dst-offset-caching-7-of-8.js
+script dst-offset-caching-8-of-8.js

View file

@@ -227,8 +227,6 @@ if __name__ == '__main__':
                   help='check for test files not listed in the manifest')
     op.add_option('--failure-file', dest='failure_file',
                   help='write tests that have not passed to the given file')
-    op.add_option('--run-slow-tests', dest='run_slow_tests', action='store_true',
-                  help='run particularly slow tests as well as average-speed tests')
     (OPTIONS, args) = op.parse_args()
     if len(args) < 1:
         if not OPTIONS.check_manifest:
@@ -312,9 +310,6 @@ if __name__ == '__main__':
         OPTIONS.run_skipped = True
         test_list = [ _ for _ in test_list if not _.enable ]
 
-    if not OPTIONS.run_slow_tests:
-        test_list = [ _ for _ in test_list if not _.slow ]
-
     if OPTIONS.debug and test_list:
         if len(test_list) > 1:
             print('Multiple tests match command line arguments, debugger can only run one')

View file

@@ -110,7 +110,6 @@ def parse(filename, xul_tester, reldir = ''):
         enable = True
         expect = True
         random = False
-        slow = False
 
         pos = 0
         while pos < len(parts):
@@ -145,14 +144,11 @@ def parse(filename, xul_tester, reldir = ''):
             elif parts[pos] == 'script':
                 script = parts[pos+1]
                 pos += 2
-            elif parts[pos] == 'slow':
-                slow = True
-                pos += 1
             else:
                 print 'warning: invalid manifest line element "%s"'%parts[pos]
                 pos += 1
 
         assert script is not None
         ans.append(TestCase(os.path.join(reldir, script),
-                            enable, expect, random, slow))
+                            enable, expect, random))
     return ans

View file

@@ -87,12 +87,11 @@ class Test(object):
 class TestCase(Test):
     """A test case consisting of a test and an expected result."""
 
-    def __init__(self, path, enable, expect, random, slow):
+    def __init__(self, path, enable, expect, random):
         Test.__init__(self, path)
         self.enable = enable   # bool: True => run test, False => don't run
         self.expect = expect   # bool: expected result, True => pass
         self.random = random   # bool: True => ignore output as 'random'
-        self.slow = slow       # bool: True => test may run slowly
 
     def __str__(self):
         ans = self.path
@@ -102,8 +101,6 @@ class TestCase(Test):
             ans += ', fails'
         if self.random:
             ans += ', random'
-        if self.slow:
-            ans += ', slow'
         return ans
 
 class TestOutput:

View file

@@ -74,17 +74,6 @@ must be one of the following:
                 particular platform (i.e. it allows us to get test
                 coverage on the other platforms).
 
-      slow      The test may take a long time to run, so run it if slow tests are
-                either enabled or not disabled (test manifest interpreters may
-                choose whether or not to run such tests by default).
-
-      slow-if(condition) If the condition is met, the test is treated as if
-                'slow' had been specified. This is useful for tests
-                which are slow only on particular platforms (e.g. a
-                test which exercised out-of-memory behavior might be
-                fast on a 32-bit system but inordinately slow on a
-                64-bit system).
-
       asserts(count)
           Loading the test and reference is known to assert exactly
          count times.
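
For reference, the two annotations deleted in the hunk above were written as prefixes on ordinary manifest entries. A minimal sketch of how a manifest might have used them, assuming the usual "== test reference" line form; the file names and the condition below are invented purely for illustration:

    slow == very-large-table.html very-large-table-ref.html
    slow-if(condition) == oom-stress.html oom-stress-ref.html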

View file

@@ -79,13 +79,6 @@ RefTestCmdLineHandler.prototype =
     catch (e) {
     }
 
-    try {
-      var skipslowtests = cmdLine.handleFlag("reftestskipslowtests", false);
-      args.skipslowtests = skipslowtests;
-    }
-    catch (e) {
-    }
-
     /* Ignore the platform's online/offline status while running reftests. */
     var ios = Components.classes["@mozilla.org/network/io-service;1"]
               .getService(Components.interfaces.nsIIOService2);

View file

@@ -90,7 +90,6 @@ var gTestResults = {
   AssertionKnown: 0,
   Random : 0,
   Skip: 0,
-  Slow: 0,
 };
 var gTotalTests = 0;
 var gState;
@@ -128,9 +127,6 @@ const gProtocolRE = /^\w+:/;
 var HTTP_SERVER_PORT = 4444;
 const HTTP_SERVER_PORTS_TO_TRY = 50;
 
-// whether to run slow tests or not
-var gRunSlowTests = true;
-
 // whether we should skip caching canvases
 var gNoCanvasCache = false;
 
@@ -263,9 +259,6 @@ function StartTests()
     if ("nocache" in args && args["nocache"])
         gNoCanvasCache = true;
 
-    if ("skipslowtests" in args && args.skipslowtests)
-        gRunSlowTests = false;
-
     ReadTopManifest(args.uri);
     BuildUseCounts();
 
@@ -667,19 +660,10 @@ function ServeFiles(manifestURL, depth, aURL, files)
 function StartCurrentTest()
 {
     // make sure we don't run tests that are expected to kill the browser
-    while (gURLs.length > 0) {
-        var test = gURLs[0];
-        if (test.expected == EXPECTED_DEATH) {
-            ++gTestResults.Skip;
-            gDumpLog("REFTEST TEST-KNOWN-FAIL | " + test.url1.spec + " | (SKIP)\n");
-            gURLs.shift();
-        } else if (test.slow && !gRunSlowTests) {
-            ++gTestResults.Slow;
-            gDumpLog("REFTEST TEST-KNOWN-SLOW | " + test.url1.spec + " | (SLOW)\n");
-            gURLs.shift();
-        } else {
-            break;
-        }
+    while (gURLs.length > 0 && gURLs[0].expected == EXPECTED_DEATH) {
+        ++gTestResults.Skip;
+        gDumpLog("REFTEST TEST-KNOWN-FAIL | " + gURLs[0].url1.spec + " | (SKIP)\n");
+        gURLs.shift();
     }
 
     if (gURLs.length == 0) {
@@ -742,13 +726,12 @@ function DoneTests()
                  gTestResults.FailedLoad + " failed load, " +
                  gTestResults.Exception + " exception)\n");
 
     count = gTestResults.KnownFail + gTestResults.AssertionKnown +
-            gTestResults.Random + gTestResults.Skip + gTestResults.Slow;
-    dump("REFTEST INFO | Known problems: " + count + " (" +
+            gTestResults.Random + gTestResults.Skip;
+    gDumpLog("REFTEST INFO | Known problems: " + count + " (" +
              gTestResults.KnownFail + " known fail, " +
              gTestResults.AssertionKnown + " known asserts, " +
             gTestResults.Random + " random, " +
-             gTestResults.Skip + " skipped, " +
-             gTestResults.Slow + " slow)\n");
+             gTestResults.Skip + " skipped)\n");
 
     gDumpLog("REFTEST INFO | Total canvas count = " + gRecycledCanvases.length + "\n");

View file

@@ -229,11 +229,6 @@ class ReftestOptions(OptionParser):
                         help = "file to log output to in addition to stdout")
         defaults["logFile"] = None
 
-        self.add_option("--skip-slow-tests",
-                        dest = "skipSlowTests", action = "store_true",
-                        help = "skip tests marked as slow when running")
-        defaults["skipSlowTests"] = False
-
         self.set_defaults(**defaults)
 
 def main():