Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1171755 - Update to latest wptrunner, a=testonly
This commit is contained in:
Parent: 91daae4bef
Commit: 1c310d44a1
@@ -0,0 +1,2 @@
+prefs: ["browser.display.foreground_color:#FF0000",
+        "browser.display.background_color:#000000"]

@@ -0,0 +1,2 @@
+[test_pref_reset.html]
+  prefs: [@Reset]

@@ -0,0 +1 @@
+disabled: true

@@ -0,0 +1,2 @@
+[testharness_1.html]
+  disabled: @False
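The four metadata hunks above show the expectation-file syntax this update understands: a prefs list of "name:value" strings (with @Reset to restore defaults), plus disabled keys that can be conditionally overridden with @False. For illustration only, a minimal sketch (not part of the patch) of how a name:value entry is split, mirroring the prefs() helper added to the expectation-manifest code later in this diff:

# Minimal sketch of how a "name:value" pref entry is split. Input values are
# assumed to be plain strings here; the real helper also maps non-string
# values such as the @Reset atom to (value, None).
def parse_pref(entry):
    # "browser.display.foreground_color:#FF0000" -> (name, value)
    if ":" in entry:
        name, value = entry.split(":", 1)
        return name, value
    return entry, None

prefs = dict(parse_pref(item) for item in
             ["browser.display.foreground_color:#FF0000",
              "browser.display.background_color:#000000"])
assert prefs["browser.display.foreground_color"] == "#FF0000"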
@@ -101,6 +101,8 @@ def settings_to_argv(settings):
 def set_from_args(settings, args):
     if args.test:
         settings["include"] = args.test
+    if args.tags:
+        settings["tags"] = args.tags


 def run(config, args):
     logger = structuredlog.StructuredLogger("web-platform-tests")

@@ -139,6 +141,8 @@ def get_parser():
                         help="Specific product to include in test run")
     parser.add_argument("--pdb", action="store_true",
                         help="Invoke pdb on uncaught exception")
+    parser.add_argument("--tag", action="append", dest="tags",
+                        help="tags to select tests")
     parser.add_argument("test", nargs="*",
                         help="Specific tests to include in test run")
     return parser
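The two wptcommandline.py hunks above add a repeatable --tag option and positional test selection, both of which set_from_args() copies into the settings dict. A self-contained sketch (not from the patch) of that flow, using a stripped-down parser:

# Simplified sketch: --tag is an append-action option collected under
# args.tags, which set_from_args() copies into settings["tags"].
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--tag", action="append", dest="tags",
                    help="tags to select tests")
parser.add_argument("test", nargs="*",
                    help="Specific tests to include in test run")

args = parser.parse_args(["--tag", "interop", "--tag", "slow", "dom/"])

settings = {}
if args.test:
    settings["include"] = args.test
if args.tags:
    settings["tags"] = args.tags

assert settings == {"include": ["dom/"], "tags": ["interop", "slow"]}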
10  testing/web-platform/harness/test/testdata/testharness/firefox/subdir/test_pref_inherit.html  (vendored) New normal file
@@ -0,0 +1,10 @@
+<!doctype html>
+<title>Example pref test</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<p>Test requires the pref browser.display.foreground_color to be set to #00FF00</p>
+<script>
+test(function() {
+  assert_equals(getComputedStyle(document.body).color, "rgb(255, 0, 0)");
+}, "Test that pref was set");
+</script>
10  testing/web-platform/harness/test/testdata/testharness/firefox/subdir/test_pref_reset.html  (vendored) New normal file
@@ -0,0 +1,10 @@
+<!doctype html>
+<title>Example pref test</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<p>Test requires the pref browser.display.foreground_color to be set to #00FF00</p>
+<script>
+test(function() {
+  assert_equals(getComputedStyle(document.body).color, "rgb(0, 0, 0)");
+}, "Test that pref was reset");
+</script>
10  testing/web-platform/harness/test/testdata/testharness/firefox/test_pref_dir.html  (vendored) New normal file
@@ -0,0 +1,10 @@
+<!doctype html>
+<title>Example pref test</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<p>Test requires the pref browser.display.foreground_color to be set to #FF0000</p>
+<script>
+test(function() {
+  assert_equals(getComputedStyle(document.body).color, "rgb(255, 0, 0)");
+}, "Test that pref was set");
+</script>
@@ -1,5 +1,5 @@
 <!doctype html>
-<title>Example https test</title>
+<title>Example pref test</title>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 <p>Test requires the pref browser.display.foreground_color to be set to #00FF00</p>
9  testing/web-platform/harness/test/testdata/testharness/subdir/testharness_1.html  (vendored) New normal file
@@ -0,0 +1,9 @@
+<!doctype html>
+<title>Test should be enabled</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+test(function() {
+  assert_true(true);
+}, "Test that should pass");
+</script>
@@ -1,12 +1,8 @@
 <!doctype html>
-<title>Simple testharness.js usage</title>
+<title>Test should be disabled</title>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 <script>
 test(function() {
   assert_true(true);
 }, "Test that should pass");
-
-test(function() {
-  assert_true(false);
-}, "Test that should fail");
@@ -29,4 +29,5 @@ module global scope.
 product_list = ["b2g",
                 "chrome",
                 "firefox",
-                "servo"]
+                "servo",
+                "servodriver"]
@@ -42,7 +42,8 @@ def browser_kwargs(test_environment, **kwargs):
             "no_backup": kwargs.get("b2g_no_backup", False)}


-def executor_kwargs(test_type, server_config, cache_manager, **kwargs):
+def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
+                    **kwargs):
     timeout_multiplier = kwargs["timeout_multiplier"]
     if timeout_multiplier is None:
         timeout_multiplier = 2
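This is the first of several product modules whose executor_kwargs() gains an explicit run_info_data argument, so executors can adapt to the build under test (the Firefox hunk below uses it to raise the timeout multiplier on debug builds). A sketch of the new calling convention follows; the argument values are illustrative, and the stand-in config objects are assumptions rather than the real wptserve config and cache manager:

# Sketch of the new call signature; None stands in for the real server config
# and cache manager objects.
def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
                    **kwargs):
    rv = {"timeout_multiplier": kwargs.get("timeout_multiplier") or 1}
    # run_info_data describes the build under test, e.g. {"debug": True}
    if run_info_data.get("debug") and kwargs.get("timeout_multiplier") is None:
        rv["timeout_multiplier"] = 3
    return rv

kwargs = executor_kwargs("testharness", server_config=None, cache_manager=None,
                         run_info_data={"debug": True}, timeout_multiplier=None)
assert kwargs["timeout_multiplier"] == 3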
@@ -20,7 +20,7 @@ __wptrunner__ = {"product": "chrome",


 def check_args(**kwargs):
-    require_arg(kwargs, "binary")
+    require_arg(kwargs, "webdriver_binary")


 def browser_kwargs(**kwargs):

@@ -28,15 +28,16 @@ def browser_kwargs(**kwargs):
             "webdriver_binary": kwargs["webdriver_binary"]}


-def executor_kwargs(test_type, server_config, cache_manager, **kwargs):
+def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
+                    **kwargs):
     from selenium.webdriver import DesiredCapabilities

     executor_kwargs = base_executor_kwargs(test_type, server_config,
                                            cache_manager, **kwargs)
     executor_kwargs["close_after_done"] = True
-    executor_kwargs["capabilities"] = dict(DesiredCapabilities.CHROME.items() +
-                                           {"chromeOptions":
-                                            {"binary": kwargs["binary"]}}.items())
+    executor_kwargs["capabilities"] = dict(DesiredCapabilities.CHROME.items())
+    if kwargs["binary"] is not None:
+        executor_kwargs["capabilities"]["chromeOptions"] = {"binary": kwargs["binary"]}

     return executor_kwargs
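The Chrome change above makes the browser binary optional: only webdriver_binary is now required, capabilities start from a plain copy of DesiredCapabilities.CHROME, and chromeOptions is only attached when a binary was actually supplied. A small sketch of the resulting shapes, using a stand-in capabilities dict so the snippet does not depend on selenium:

# Illustrative only: CHROME_CAPS approximates DesiredCapabilities.CHROME and
# the binary path is a placeholder.
CHROME_CAPS = {"browserName": "chrome", "javascriptEnabled": True}

def build_capabilities(binary):
    capabilities = dict(CHROME_CAPS.items())
    if binary is not None:
        capabilities["chromeOptions"] = {"binary": binary}
    return capabilities

assert "chromeOptions" not in build_capabilities(None)
assert build_capabilities("/opt/chrome/chrome")["chromeOptions"] == {
    "binary": "/opt/chrome/chrome"}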
@@ -46,10 +46,13 @@ def browser_kwargs(**kwargs):
            "ca_certificate_path": kwargs["ssl_env"].ca_cert_path()}


-def executor_kwargs(test_type, server_config, cache_manager, **kwargs):
+def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
+                    **kwargs):
     executor_kwargs = base_executor_kwargs(test_type, server_config,
                                            cache_manager, **kwargs)
     executor_kwargs["close_after_done"] = True
+    if run_info_data["debug"] and kwargs["timeout_multiplier"] is None:
+        executor_kwargs["timeout_multiplier"] = 3
     return executor_kwargs

@@ -71,7 +74,6 @@ class FirefoxBrowser(Browser):
         self.binary = binary
         self.prefs_root = prefs_root
         self.marionette_port = None
-        self.used_ports.add(self.marionette_port)
         self.runner = None
         self.debug_info = debug_info
         self.profile = None

@@ -82,6 +84,7 @@ class FirefoxBrowser(Browser):

     def start(self):
         self.marionette_port = get_free_port(2828, exclude=self.used_ports)
+        self.used_ports.add(self.marionette_port)

         env = os.environ.copy()
         env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"] = "1"
@@ -29,7 +29,8 @@ def browser_kwargs(**kwargs):
            "debug_info": kwargs["debug_info"]}


-def executor_kwargs(test_type, server_config, cache_manager, **kwargs):
+def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
+                    **kwargs):
     rv = base_executor_kwargs(test_type, server_config,
                               cache_manager, **kwargs)
     rv["pause_after_test"] = kwargs["pause_after_test"]
@ -0,0 +1,141 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from mozprocess import ProcessHandler
|
||||
|
||||
from .base import Browser, require_arg, get_free_port, browser_command, ExecutorBrowser
|
||||
from ..executors import executor_kwargs as base_executor_kwargs
|
||||
from ..executors.executorservodriver import (ServoWebDriverTestharnessExecutor,
|
||||
ServoWebDriverRefTestExecutor)
|
||||
|
||||
here = os.path.join(os.path.split(__file__)[0])
|
||||
|
||||
__wptrunner__ = {"product": "servodriver",
|
||||
"check_args": "check_args",
|
||||
"browser": "ServoWebDriverBrowser",
|
||||
"executor": {"testharness": "ServoWebDriverTestharnessExecutor",
|
||||
"reftest": "ServoWebDriverRefTestExecutor"},
|
||||
"browser_kwargs": "browser_kwargs",
|
||||
"executor_kwargs": "executor_kwargs",
|
||||
"env_options": "env_options"}
|
||||
|
||||
hosts_text = """127.0.0.1 web-platform.test
|
||||
127.0.0.1 www.web-platform.test
|
||||
127.0.0.1 www1.web-platform.test
|
||||
127.0.0.1 www2.web-platform.test
|
||||
127.0.0.1 xn--n8j6ds53lwwkrqhv28a.web-platform.test
|
||||
127.0.0.1 xn--lve-6lad.web-platform.test
|
||||
"""
|
||||
|
||||
|
||||
def check_args(**kwargs):
|
||||
require_arg(kwargs, "binary")
|
||||
|
||||
|
||||
def browser_kwargs(**kwargs):
|
||||
return {"binary": kwargs["binary"],
|
||||
"debug_info": kwargs["debug_info"]}
|
||||
|
||||
|
||||
def executor_kwargs(test_type, server_config, cache_manager, **kwargs):
|
||||
rv = base_executor_kwargs(test_type, server_config,
|
||||
cache_manager, **kwargs)
|
||||
return rv
|
||||
|
||||
|
||||
def env_options():
|
||||
return {"host": "web-platform.test",
|
||||
"bind_hostname": "true",
|
||||
"testharnessreport": "testharnessreport-servodriver.js",
|
||||
"supports_debugger": True}
|
||||
|
||||
|
||||
def make_hosts_file():
|
||||
hosts_fd, hosts_path = tempfile.mkstemp()
|
||||
with os.fdopen(hosts_fd, "w") as f:
|
||||
f.write(hosts_text)
|
||||
return hosts_path
|
||||
|
||||
|
||||
class ServoWebDriverBrowser(Browser):
|
||||
used_ports = set()
|
||||
|
||||
def __init__(self, logger, binary, debug_info=None, webdriver_host="127.0.0.1"):
|
||||
Browser.__init__(self, logger)
|
||||
self.binary = binary
|
||||
self.webdriver_host = webdriver_host
|
||||
self.webdriver_port = None
|
||||
self.proc = None
|
||||
self.debug_info = debug_info
|
||||
self.hosts_path = make_hosts_file()
|
||||
self.command = None
|
||||
|
||||
def start(self):
|
||||
self.webdriver_port = get_free_port(4444, exclude=self.used_ports)
|
||||
self.used_ports.add(self.webdriver_port)
|
||||
|
||||
env = os.environ.copy()
|
||||
env["HOST_FILE"] = self.hosts_path
|
||||
|
||||
debug_args, command = browser_command(self.binary,
|
||||
["--cpu", "--hard-fail",
|
||||
"--webdriver", str(self.webdriver_port),
|
||||
"about:blank"],
|
||||
self.debug_info)
|
||||
|
||||
self.command = command
|
||||
|
||||
self.command = debug_args + self.command
|
||||
|
||||
if not self.debug_info or not self.debug_info.interactive:
|
||||
self.proc = ProcessHandler(self.command,
|
||||
processOutputLine=[self.on_output],
|
||||
env=env,
|
||||
storeOutput=False)
|
||||
self.proc.run()
|
||||
else:
|
||||
self.proc = subprocess.Popen(self.command, env=env)
|
||||
|
||||
self.logger.debug("Servo Started")
|
||||
|
||||
def stop(self):
|
||||
self.logger.debug("Stopping browser")
|
||||
if self.proc is not None:
|
||||
try:
|
||||
self.proc.kill()
|
||||
except OSError:
|
||||
# This can happen on Windows if the process is already dead
|
||||
pass
|
||||
|
||||
def pid(self):
|
||||
if self.proc is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
return self.proc.pid
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
def on_output(self, line):
|
||||
"""Write a line of output from the process to the log"""
|
||||
self.logger.process_output(self.pid(),
|
||||
line.decode("utf8", "replace"),
|
||||
command=" ".join(self.command))
|
||||
|
||||
def is_alive(self):
|
||||
if self.runner:
|
||||
return self.runner.is_running()
|
||||
return False
|
||||
|
||||
def cleanup(self):
|
||||
self.stop()
|
||||
|
||||
def executor_browser(self):
|
||||
assert self.webdriver_port is not None
|
||||
return ExecutorBrowser, {"webdriver_host": self.webdriver_host,
|
||||
"webdriver_port": self.webdriver_port}
|
|
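The new servodriver browser module above launches Servo with --webdriver on a free port and points HOST_FILE at a generated hosts file. A hedged usage sketch follows; the import paths, logger setup and binary location are assumptions rather than code from this patch:

# Illustrative lifecycle only; import paths and the Servo binary path are
# assumptions, and error handling is omitted.
from mozlog.structured import structuredlog
from wptrunner.browsers.servodriver import ServoWebDriverBrowser

logger = structuredlog.StructuredLogger("web-platform-tests")
browser = ServoWebDriverBrowser(logger, binary="/path/to/servo")
browser.start()                      # picks a free port >= 4444 and sets HOST_FILE
try:
    cls, extra = browser.executor_browser()
    executor_browser = cls(**extra)  # carries webdriver_host / webdriver_port
    # an executor would now open a webdriver.Session against that host and port
finally:
    browser.cleanup()                # stops the Servo process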
@ -99,7 +99,7 @@ class TestExecutor(object):
|
|||
self.timeout_multiplier = timeout_multiplier
|
||||
self.debug_info = debug_info
|
||||
self.last_environment = {"protocol": "http",
|
||||
"prefs": []}
|
||||
"prefs": {}}
|
||||
self.protocol = None # This must be set in subclasses
|
||||
|
||||
@property
|
||||
|
|
|
@ -139,55 +139,62 @@ class MarionetteProtocol(Protocol):
|
|||
|
||||
def on_environment_change(self, old_environment, new_environment):
|
||||
#Unset all the old prefs
|
||||
for name, _ in old_environment.get("prefs", []):
|
||||
for name in old_environment.get("prefs", {}).iterkeys():
|
||||
value = self.executor.original_pref_values[name]
|
||||
if value is None:
|
||||
self.clear_user_pref(name)
|
||||
else:
|
||||
self.set_pref(name, value)
|
||||
|
||||
for name, value in new_environment.get("prefs", []):
|
||||
for name, value in new_environment.get("prefs", {}).iteritems():
|
||||
self.executor.original_pref_values[name] = self.get_pref(name)
|
||||
self.set_pref(name, value)
|
||||
|
||||
def set_pref(self, name, value):
|
||||
if value.lower() not in ("true", "false"):
|
||||
try:
|
||||
int(value)
|
||||
except ValueError:
|
||||
value = "'%s'" % value
|
||||
else:
|
||||
value = value.lower()
|
||||
|
||||
self.logger.info("Setting pref %s (%s)" % (name, value))
|
||||
self.marionette.set_context(self.marionette.CONTEXT_CHROME)
|
||||
|
||||
script = """
|
||||
let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
|
||||
.getService(Components.interfaces.nsIPrefBranch);
|
||||
let pref = '%s';
|
||||
let type = prefInterface.getPrefType(pref);
|
||||
let value = %s;
|
||||
switch(type) {
|
||||
case prefInterface.PREF_STRING:
|
||||
prefInterface.setCharPref(pref, '%s');
|
||||
prefInterface.setCharPref(pref, value);
|
||||
break;
|
||||
case prefInterface.PREF_BOOL:
|
||||
prefInterface.setBoolPref(pref, %s);
|
||||
prefInterface.setBoolPref(pref, value);
|
||||
break;
|
||||
case prefInterface.PREF_INT:
|
||||
prefInterface.setIntPref(pref, %s);
|
||||
prefInterface.setIntPref(pref, value);
|
||||
break;
|
||||
}
|
||||
""" % (name, value, value, value)
|
||||
""" % (name, value)
|
||||
with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
|
||||
self.marionette.execute_script(script)
|
||||
self.marionette.set_context(self.marionette.CONTEXT_CONTENT)
|
||||
|
||||
def clear_user_pref(self, name):
|
||||
self.logger.info("Clearing pref %s" % (name))
|
||||
self.marionette.set_context(self.marionette.CONTEXT_CHROME)
|
||||
script = """
|
||||
let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
|
||||
.getService(Components.interfaces.nsIPrefBranch);
|
||||
let pref = '%s';
|
||||
prefInterface.clearUserPref(pref);
|
||||
""" % name
|
||||
with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
|
||||
self.marionette.execute_script(script)
|
||||
self.marionette.set_context(self.marionette.CONTEXT_CONTENT)
|
||||
|
||||
def get_pref(self, name):
|
||||
self.marionette.set_context(self.marionette.CONTEXT_CHROME)
|
||||
self.marionette.execute_script("""
|
||||
script = """
|
||||
let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
|
||||
.getService(Components.interfaces.nsIPrefBranch);
|
||||
let pref = '%s';
|
||||
|
@ -202,8 +209,9 @@ class MarionetteProtocol(Protocol):
|
|||
case prefInterface.PREF_INVALID:
|
||||
return null;
|
||||
}
|
||||
""" % (name))
|
||||
self.marionette.set_context(self.marionette.CONTEXT_CONTENT)
|
||||
""" % name
|
||||
with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
|
||||
self.marionette.execute_script(script)
|
||||
|
||||
class MarionetteRun(object):
|
||||
def __init__(self, logger, func, marionette, url, timeout):
|
||||
|
@ -383,10 +391,7 @@ class MarionetteRefTestExecutor(RefTestExecutor):
|
|||
timeout).run()
|
||||
|
||||
def _screenshot(self, marionette, url, timeout):
|
||||
try:
|
||||
marionette.navigate(url)
|
||||
except errors.MarionetteException:
|
||||
raise ExecutorException("ERROR", "Failed to load url %s" % (url,))
|
||||
|
||||
marionette.execute_async_script(self.wait_script)
|
||||
|
||||
|
|
|
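With prefs now carried as a dict, on_environment_change() above restores every pref touched by the previous test to its recorded original value (clearing it when there was none) before applying the next test's prefs. A minimal sketch of that bookkeeping, using a plain in-memory store in place of Marionette's chrome-script pref access:

# Minimal sketch of the restore-then-apply pref bookkeeping; a dict stands in
# for the browser's pref service.
class FakePrefStore(object):
    def __init__(self):
        self.user_prefs = {}
        self.original_values = {}

    def on_environment_change(self, old_env, new_env):
        # Unset every pref set for the previous test
        for name in old_env.get("prefs", {}).keys():
            value = self.original_values[name]
            if value is None:
                self.user_prefs.pop(name, None)   # clear_user_pref
            else:
                self.user_prefs[name] = value     # set_pref back to the original
        # Apply the prefs the next test needs, remembering the prior values
        for name, value in new_env.get("prefs", {}).items():
            self.original_values[name] = self.user_prefs.get(name)
            self.user_prefs[name] = value

store = FakePrefStore()
store.on_environment_change({}, {"prefs": {"browser.display.foreground_color": "#FF0000"}})
store.on_environment_change({"prefs": {"browser.display.foreground_color": "#FF0000"}},
                            {"prefs": {}})
assert "browser.display.foreground_color" not in store.user_prefs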
@ -198,7 +198,8 @@ class ServoRefTestExecutor(ProcessTestExecutor):
|
|||
|
||||
try:
|
||||
self.proc.run()
|
||||
rv = self.proc.wait(timeout=test.timeout)
|
||||
timeout = test.timeout * self.timeout_multiplier + 5
|
||||
rv = self.proc.wait(timeout=timeout)
|
||||
except KeyboardInterrupt:
|
||||
self.proc.kill()
|
||||
raise
|
||||
|
|
|
@ -0,0 +1,243 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from .base import (Protocol,
|
||||
RefTestExecutor,
|
||||
RefTestImplementation,
|
||||
TestharnessExecutor,
|
||||
strip_server)
|
||||
import webdriver
|
||||
from ..testrunner import Stop
|
||||
|
||||
here = os.path.join(os.path.split(__file__)[0])
|
||||
|
||||
extra_timeout = 5
|
||||
|
||||
|
||||
class ServoWebDriverProtocol(Protocol):
|
||||
def __init__(self, executor, browser, capabilities, **kwargs):
|
||||
Protocol.__init__(self, executor, browser)
|
||||
self.capabilities = capabilities
|
||||
self.host = browser.webdriver_host
|
||||
self.port = browser.webdriver_port
|
||||
self.session = None
|
||||
|
||||
def setup(self, runner):
|
||||
"""Connect to browser via WebDriver."""
|
||||
self.runner = runner
|
||||
|
||||
session_started = False
|
||||
try:
|
||||
self.session = webdriver.Session(self.host, self.port)
|
||||
self.session.start()
|
||||
except:
|
||||
self.logger.warning(
|
||||
"Connecting with WebDriver failed:\n%s" % traceback.format_exc())
|
||||
else:
|
||||
self.logger.debug("session started")
|
||||
session_started = True
|
||||
|
||||
if not session_started:
|
||||
self.logger.warning("Failed to connect via WebDriver")
|
||||
self.executor.runner.send_message("init_failed")
|
||||
else:
|
||||
self.executor.runner.send_message("init_succeeded")
|
||||
|
||||
def teardown(self):
|
||||
self.logger.debug("Hanging up on WebDriver session")
|
||||
try:
|
||||
self.session.end()
|
||||
except:
|
||||
pass
|
||||
|
||||
def is_alive(self):
|
||||
try:
|
||||
# Get a simple property over the connection
|
||||
self.session.handle
|
||||
# TODO what exception?
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
def after_connect(self):
|
||||
pass
|
||||
|
||||
def wait(self):
|
||||
while True:
|
||||
try:
|
||||
self.session.execute_async_script("")
|
||||
except webdriver.TimeoutException:
|
||||
pass
|
||||
except (socket.timeout, IOError):
|
||||
break
|
||||
except Exception as e:
|
||||
self.logger.error(traceback.format_exc(e))
|
||||
break
|
||||
|
||||
|
||||
class ServoWebDriverRun(object):
|
||||
def __init__(self, func, session, url, timeout, current_timeout=None):
|
||||
self.func = func
|
||||
self.result = None
|
||||
self.session = session
|
||||
self.url = url
|
||||
self.timeout = timeout
|
||||
self.result_flag = threading.Event()
|
||||
|
||||
def run(self):
|
||||
executor = threading.Thread(target=self._run)
|
||||
executor.start()
|
||||
|
||||
flag = self.result_flag.wait(self.timeout + extra_timeout)
|
||||
if self.result is None:
|
||||
assert not flag
|
||||
self.result = False, ("EXTERNAL-TIMEOUT", None)
|
||||
|
||||
return self.result
|
||||
|
||||
def _run(self):
|
||||
try:
|
||||
self.result = True, self.func(self.session, self.url, self.timeout)
|
||||
except webdriver.TimeoutException:
|
||||
self.result = False, ("EXTERNAL-TIMEOUT", None)
|
||||
except (socket.timeout, IOError):
|
||||
self.result = False, ("CRASH", None)
|
||||
except Exception as e:
|
||||
message = getattr(e, "message", "")
|
||||
if message:
|
||||
message += "\n"
|
||||
message += traceback.format_exc(e)
|
||||
self.result = False, ("ERROR", e)
|
||||
finally:
|
||||
self.result_flag.set()
|
||||
|
||||
|
||||
def timeout_func(timeout):
|
||||
if timeout:
|
||||
t0 = time.time()
|
||||
return lambda: time.time() - t0 > timeout + extra_timeout
|
||||
else:
|
||||
return lambda: False
|
||||
|
||||
|
||||
class ServoWebDriverTestharnessExecutor(TestharnessExecutor):
|
||||
def __init__(self, browser, server_config, timeout_multiplier=1,
|
||||
close_after_done=True, capabilities=None, debug_info=None):
|
||||
TestharnessExecutor.__init__(self, browser, server_config, timeout_multiplier=1,
|
||||
debug_info=None)
|
||||
self.protocol = ServoWebDriverProtocol(self, browser, capabilities=capabilities)
|
||||
with open(os.path.join(here, "testharness_servodriver.js")) as f:
|
||||
self.script = f.read()
|
||||
self.timeout = None
|
||||
|
||||
def on_protocol_change(self, new_protocol):
|
||||
pass
|
||||
|
||||
def is_alive(self):
|
||||
return self.protocol.is_alive()
|
||||
|
||||
def do_test(self, test):
|
||||
url = self.test_url(test)
|
||||
|
||||
timeout = test.timeout * self.timeout_multiplier + extra_timeout
|
||||
|
||||
if timeout != self.timeout:
|
||||
try:
|
||||
self.protocol.session.timeouts.script = timeout
|
||||
self.timeout = timeout
|
||||
except IOError:
|
||||
self.logger.error("Lost webdriver connection")
|
||||
return Stop
|
||||
|
||||
success, data = ServoWebDriverRun(self.do_testharness,
|
||||
self.protocol.session,
|
||||
url,
|
||||
timeout).run()
|
||||
|
||||
if success:
|
||||
return self.convert_result(test, data)
|
||||
|
||||
return (test.result_cls(*data), [])
|
||||
|
||||
def do_testharness(self, session, url, timeout):
|
||||
session.url = url
|
||||
result = json.loads(
|
||||
session.execute_async_script(
|
||||
self.script % {"abs_url": url,
|
||||
"url": strip_server(url),
|
||||
"timeout_multiplier": self.timeout_multiplier,
|
||||
"timeout": timeout * 1000}))
|
||||
# Prevent leaking every page in history until Servo develops a more sane
|
||||
# page cache
|
||||
session.back()
|
||||
return result
|
||||
|
||||
|
||||
class TimeoutError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class ServoWebDriverRefTestExecutor(RefTestExecutor):
|
||||
def __init__(self, browser, server_config, timeout_multiplier=1,
|
||||
screenshot_cache=None, capabilities=None, debug_info=None):
|
||||
"""Selenium WebDriver-based executor for reftests"""
|
||||
RefTestExecutor.__init__(self,
|
||||
browser,
|
||||
server_config,
|
||||
screenshot_cache=screenshot_cache,
|
||||
timeout_multiplier=timeout_multiplier,
|
||||
debug_info=debug_info)
|
||||
self.protocol = ServoWebDriverProtocol(self, browser,
|
||||
capabilities=capabilities)
|
||||
self.implementation = RefTestImplementation(self)
|
||||
self.timeout = None
|
||||
with open(os.path.join(here, "reftest-wait_servodriver.js")) as f:
|
||||
self.wait_script = f.read()
|
||||
|
||||
def is_alive(self):
|
||||
return self.protocol.is_alive()
|
||||
|
||||
def do_test(self, test):
|
||||
try:
|
||||
result = self.implementation.run_test(test)
|
||||
return self.convert_result(test, result)
|
||||
except IOError:
|
||||
return test.result_cls("CRASH", None), []
|
||||
except TimeoutError:
|
||||
return test.result_cls("TIMEOUT", None), []
|
||||
except Exception as e:
|
||||
message = getattr(e, "message", "")
|
||||
if message:
|
||||
message += "\n"
|
||||
message += traceback.format_exc(e)
|
||||
return test.result_cls("ERROR", message), []
|
||||
|
||||
def screenshot(self, test):
|
||||
timeout = (test.timeout * self.timeout_multiplier + extra_timeout
|
||||
if self.debug_info is None else None)
|
||||
|
||||
if self.timeout != timeout:
|
||||
try:
|
||||
self.protocol.session.timeouts.script = timeout
|
||||
self.timeout = timeout
|
||||
except IOError:
|
||||
self.logger.error("Lost webdriver connection")
|
||||
return Stop
|
||||
|
||||
return ServoWebDriverRun(self._screenshot,
|
||||
self.protocol.session,
|
||||
self.test_url(test),
|
||||
timeout).run()
|
||||
|
||||
def _screenshot(self, session, url, timeout):
|
||||
session.url = url
|
||||
session.execute_async_script(self.wait_script)
|
||||
return session.screenshot()
|
|
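ServoWebDriverRun above executes one WebDriver call on a worker thread and turns a missed deadline into an EXTERNAL-TIMEOUT result instead of hanging the harness; extra_timeout gives the browser a small grace period beyond the test timeout. A stripped-down sketch of the same pattern, without the WebDriver-specific exception mapping:

# Stripped-down version of the run-with-deadline pattern used by
# ServoWebDriverRun; extra_timeout mirrors the module-level grace period.
import threading

extra_timeout = 5

class TimedRun(object):
    def __init__(self, func, timeout):
        self.func = func
        self.timeout = timeout
        self.result = None
        self.result_flag = threading.Event()

    def run(self):
        worker = threading.Thread(target=self._run)
        worker.daemon = True
        worker.start()
        # Wait for the worker, allowing a grace period past the test timeout
        self.result_flag.wait(self.timeout + extra_timeout)
        if self.result is None:
            self.result = False, ("EXTERNAL-TIMEOUT", None)
        return self.result

    def _run(self):
        try:
            self.result = True, self.func()
        except Exception as e:
            self.result = False, ("ERROR", e)
        finally:
            self.result_flag.set()

success, data = TimedRun(lambda: "PASS", timeout=1).run()
assert success and data == "PASS"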
@ -0,0 +1,20 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
*/
|
||||
|
||||
callback = arguments[arguments.length - 1];
|
||||
|
||||
function check_done() {
|
||||
if (!document.body.classList.contains('reftest-wait')) {
|
||||
callback();
|
||||
} else {
|
||||
setTimeout(check_done, 50);
|
||||
}
|
||||
}
|
||||
|
||||
if (document.readyState === 'complete') {
|
||||
check_done();
|
||||
} else {
|
||||
addEventListener("load", check_done);
|
||||
}
|
|
@ -5,17 +5,20 @@
|
|||
window.wrappedJSObject.timeout_multiplier = %(timeout_multiplier)d;
|
||||
window.wrappedJSObject.explicit_timeout = %(explicit_timeout)d;
|
||||
|
||||
window.wrappedJSObject.done = function(tests, status) {
|
||||
window.wrappedJSObject.addEventListener("message", function listener(event) {
|
||||
if (event.data.type != "complete") {
|
||||
return;
|
||||
}
|
||||
window.wrappedJSObject.removeEventListener("message", listener);
|
||||
clearTimeout(timer);
|
||||
var test_results = tests.map(function(x) {
|
||||
return {name:x.name, status:x.status, message:x.message, stack:x.stack}
|
||||
});
|
||||
var tests = event.data.tests;
|
||||
var status = event.data.status;
|
||||
marionetteScriptFinished({test:"%(url)s",
|
||||
tests:test_results,
|
||||
tests: tests,
|
||||
status: status.status,
|
||||
message: status.message,
|
||||
stack: status.stack});
|
||||
}
|
||||
}, false);
|
||||
|
||||
window.wrappedJSObject.win = window.open("%(abs_url)s", "%(window_id)s");
|
||||
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
window.__wd_results_callback__ = arguments[arguments.length - 1];
|
||||
window.__wd_results_timer__ = setTimeout(timeout, %(timeout)s);
|
|
@ -5,17 +5,16 @@
|
|||
var callback = arguments[arguments.length - 1];
|
||||
window.timeout_multiplier = %(timeout_multiplier)d;
|
||||
|
||||
window.done = function(tests, status) {
|
||||
window.addEventListener("message", function(event) {
|
||||
var tests = event.data[0];
|
||||
var status = event.data[1];
|
||||
clearTimeout(timer);
|
||||
var test_results = tests.map(function(x) {
|
||||
return {name:x.name, status:x.status, message:x.message, stack:x.stack}
|
||||
});
|
||||
callback({test:"%(url)s",
|
||||
tests:test_results,
|
||||
tests: tests,
|
||||
status: status.status,
|
||||
message: status.message,
|
||||
stack: status.stack});
|
||||
}
|
||||
}, false);
|
||||
|
||||
window.win = window.open("%(abs_url)s", "%(window_id)s");
|
||||
|
||||
|
|
|
@ -0,0 +1,587 @@
|
|||
import errno
|
||||
import httplib
|
||||
import json
|
||||
import socket
|
||||
import time
|
||||
import urlparse
|
||||
from collections import defaultdict
|
||||
|
||||
element_key = "element-6066-11e4-a52e-4f735466cecf"
|
||||
|
||||
|
||||
class WebDriverException(Exception):
|
||||
http_status = None
|
||||
status_code = None
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
|
||||
|
||||
class ElementNotSelectableException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "element not selectable"
|
||||
|
||||
|
||||
class ElementNotVisibleException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "element not visible"
|
||||
|
||||
|
||||
class InvalidArgumentException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "invalid argument"
|
||||
|
||||
|
||||
class InvalidCookieDomainException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "invalid cookie domain"
|
||||
|
||||
|
||||
class InvalidElementCoordinatesException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "invalid element coordinates"
|
||||
|
||||
|
||||
class InvalidElementStateException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "invalid cookie domain"
|
||||
|
||||
|
||||
class InvalidSelectorException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "invalid selector"
|
||||
|
||||
|
||||
class InvalidSessionIdException(WebDriverException):
|
||||
http_status = 404
|
||||
status_code = "invalid session id"
|
||||
|
||||
|
||||
class JavascriptErrorException(WebDriverException):
|
||||
http_status = 500
|
||||
status_code = "javascript error"
|
||||
|
||||
|
||||
class MoveTargetOutOfBoundsException(WebDriverException):
|
||||
http_status = 500
|
||||
status_code = "move target out of bounds"
|
||||
|
||||
|
||||
class NoSuchAlertException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "no such alert"
|
||||
|
||||
|
||||
class NoSuchElementException(WebDriverException):
|
||||
http_status = 404
|
||||
status_code = "no such element"
|
||||
|
||||
|
||||
class NoSuchFrameException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "no such frame"
|
||||
|
||||
|
||||
class NoSuchWindowException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "no such window"
|
||||
|
||||
|
||||
class ScriptTimeoutException(WebDriverException):
|
||||
http_status = 408
|
||||
status_code = "script timeout"
|
||||
|
||||
|
||||
class SessionNotCreatedException(WebDriverException):
|
||||
http_status = 500
|
||||
status_code = "session not created"
|
||||
|
||||
|
||||
class StaleElementReferenceException(WebDriverException):
|
||||
http_status = 400
|
||||
status_code = "stale element reference"
|
||||
|
||||
|
||||
class TimeoutException(WebDriverException):
|
||||
http_status = 408
|
||||
status_code = "timeout"
|
||||
|
||||
|
||||
class UnableToSetCookieException(WebDriverException):
|
||||
http_status = 500
|
||||
status_code = "unable to set cookie"
|
||||
|
||||
|
||||
class UnexpectedAlertOpenException(WebDriverException):
|
||||
http_status = 500
|
||||
status_code = "unexpected alert open"
|
||||
|
||||
|
||||
class UnknownErrorException(WebDriverException):
|
||||
http_status = 500
|
||||
status_code = "unknown error"
|
||||
|
||||
|
||||
class UnknownCommandException(WebDriverException):
|
||||
http_status = (404, 405)
|
||||
status_code = "unknown command"
|
||||
|
||||
|
||||
class UnsupportedOperationException(WebDriverException):
|
||||
http_status = 500
|
||||
status_code = "unsupported operation"
|
||||
|
||||
|
||||
def group_exceptions():
|
||||
exceptions = defaultdict(dict)
|
||||
for item in _objs:
|
||||
if type(item) == type and issubclass(item, WebDriverException):
|
||||
if not isinstance(item.http_status, tuple):
|
||||
statuses = (item.http_status,)
|
||||
else:
|
||||
statuses = item.http_status
|
||||
|
||||
for status in statuses:
|
||||
exceptions[status][item.status_code] = item
|
||||
return exceptions
|
||||
|
||||
|
||||
_objs = locals().values()
|
||||
_exceptions = group_exceptions()
|
||||
del _objs
|
||||
del group_exceptions
|
||||
|
||||
|
||||
def wait_for_port(host, port, timeout=60):
|
||||
""" Wait for the specified Marionette host/port to be available."""
|
||||
starttime = time.time()
|
||||
poll_interval = 0.1
|
||||
while time.time() - starttime < timeout:
|
||||
sock = None
|
||||
try:
|
||||
sock = socket.socket()
|
||||
sock.connect((host, port))
|
||||
return True
|
||||
except socket.error as e:
|
||||
if e[0] != errno.ECONNREFUSED:
|
||||
raise
|
||||
finally:
|
||||
if sock:
|
||||
sock.close()
|
||||
time.sleep(poll_interval)
|
||||
return False
|
||||
|
||||
|
||||
class Transport(object):
|
||||
def __init__(self, host, port, url_prefix="", port_timeout=60):
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.port_timeout = port_timeout
|
||||
if url_prefix == "":
|
||||
self.path_prefix = "/"
|
||||
else:
|
||||
self.path_prefix = "/%s/" % url_prefix.strip("/")
|
||||
self._connection = None
|
||||
|
||||
def connect(self):
|
||||
wait_for_port(self.host, self.port, self.port_timeout)
|
||||
self._connection = httplib.HTTPConnection(self.host, self.port)
|
||||
|
||||
def close_connection(self):
|
||||
if self._connection:
|
||||
self._connection.close()
|
||||
self._connection = None
|
||||
|
||||
def url(self, suffix):
|
||||
return urlparse.urljoin(self.url_prefix, suffix)
|
||||
|
||||
def send(self, method, url, body=None, headers=None, key=None):
|
||||
if not self._connection:
|
||||
self.connect()
|
||||
|
||||
if body is None and method == "POST":
|
||||
body = {}
|
||||
|
||||
if isinstance(body, dict):
|
||||
body = json.dumps(body)
|
||||
|
||||
if isinstance(body, unicode):
|
||||
body = body.encode("utf-8")
|
||||
|
||||
if headers is None:
|
||||
headers = {}
|
||||
|
||||
url = self.path_prefix + url
|
||||
|
||||
self._connection.request(method, url, body, headers)
|
||||
|
||||
try:
|
||||
resp = self._connection.getresponse()
|
||||
except Exception:
|
||||
# This should probably be more specific
|
||||
raise IOError
|
||||
body = resp.read()
|
||||
|
||||
try:
|
||||
data = json.loads(body)
|
||||
except:
|
||||
raise
|
||||
raise WebDriverException("Could not parse response body as JSON: %s" % body)
|
||||
|
||||
if resp.status != 200:
|
||||
cls = _exceptions.get(resp.status, {}).get(data.get("status", None), WebDriverException)
|
||||
raise cls(data.get("message", ""))
|
||||
|
||||
if key is not None:
|
||||
data = data[key]
|
||||
|
||||
if not data:
|
||||
data = None
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def command(func):
|
||||
def inner(self, *args, **kwargs):
|
||||
if hasattr(self, "session"):
|
||||
session_id = self.session.session_id
|
||||
else:
|
||||
session_id = self.session_id
|
||||
|
||||
if session_id is None:
|
||||
raise SessionNotCreatedException("Session not created")
|
||||
return func(self, *args, **kwargs)
|
||||
|
||||
inner.__name__ = func.__name__
|
||||
inner.__doc__ = func.__doc__
|
||||
|
||||
return inner
|
||||
|
||||
|
||||
class Timeouts(object):
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
self._script = 30
|
||||
self._load = 0
|
||||
self._implicit_wait = 0
|
||||
|
||||
def _set_timeouts(self, name, value):
|
||||
body = {"type": name,
|
||||
"ms": value * 1000}
|
||||
return self.session.send_command("POST", "timeouts", body)
|
||||
|
||||
@property
|
||||
def script(self):
|
||||
return self._script
|
||||
|
||||
@script.setter
|
||||
def script(self, value):
|
||||
self._set_timeouts("script", value)
|
||||
self._script = value
|
||||
|
||||
@property
|
||||
def load(self):
|
||||
return self._load
|
||||
|
||||
@load.setter
|
||||
def set_load(self, value):
|
||||
self._set_timeouts("page load", value)
|
||||
self._script = value
|
||||
|
||||
@property
|
||||
def implicit_wait(self):
|
||||
return self._implicit_wait
|
||||
|
||||
@implicit_wait.setter
|
||||
def implicit_wait(self, value):
|
||||
self._set_timeouts("implicit wait", value)
|
||||
self._implicit_wait = value
|
||||
|
||||
|
||||
class Window(object):
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
@property
|
||||
@command
|
||||
def size(self):
|
||||
return self.session.send_command("GET", "window/size")
|
||||
|
||||
@size.setter
|
||||
@command
|
||||
def size(self, (height, width)):
|
||||
body = {"width": width,
|
||||
"height": height}
|
||||
|
||||
return self.session.send_command("POST", "window/size", body)
|
||||
|
||||
@property
|
||||
@command
|
||||
def maximize(self):
|
||||
return self.session.send_command("POST", "window/maximize")
|
||||
|
||||
|
||||
class Find(object):
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
@command
|
||||
def css(self, selector, all=True):
|
||||
return self._find_element("css selector", selector, all)
|
||||
|
||||
def _find_element(self, strategy, selector, all):
|
||||
route = "elements" if all else "element"
|
||||
|
||||
body = {"using": strategy,
|
||||
"value": selector}
|
||||
|
||||
data = self.session.send_command("POST", route, body, key="value")
|
||||
|
||||
if all:
|
||||
rv = [self.session._element(item) for item in data]
|
||||
else:
|
||||
rv = self.session._element(data)
|
||||
|
||||
return rv
|
||||
|
||||
|
||||
class Session(object):
|
||||
def __init__(self, host, port, url_prefix="", desired_capabilities=None, port_timeout=60):
|
||||
self.transport = Transport(host, port, url_prefix, port_timeout)
|
||||
self.desired_capabilities = desired_capabilities
|
||||
self.session_id = None
|
||||
self.timeouts = None
|
||||
self.window = None
|
||||
self.find = None
|
||||
self._element_cache = {}
|
||||
|
||||
def start(self):
|
||||
desired_capabilities = self.desired_capabilities if self.desired_capabilities else {}
|
||||
body = {"capabilities": {"desiredCapabilites": desired_capabilities}}
|
||||
|
||||
rv = self.transport.send("POST", "session", body=body)
|
||||
self.session_id = rv["sessionId"]
|
||||
|
||||
self.timeouts = Timeouts(self)
|
||||
self.window = Window(self)
|
||||
self.find = Find(self)
|
||||
|
||||
return rv["value"]
|
||||
|
||||
@command
|
||||
def end(self):
|
||||
url = "session/%s" % self.session_id
|
||||
self.transport.send("DELETE", url)
|
||||
self.session_id = None
|
||||
self.timeouts = None
|
||||
self.window = None
|
||||
self.find = None
|
||||
self.transport.close_connection()
|
||||
|
||||
def __enter__(self):
|
||||
resp = self.start()
|
||||
if resp.error:
|
||||
raise Exception(resp)
|
||||
return self
|
||||
|
||||
def __exit__(self, *args, **kwargs):
|
||||
resp = self.end()
|
||||
if resp.error:
|
||||
raise Exception(resp)
|
||||
|
||||
def send_command(self, method, url, body=None, key=None):
|
||||
url = urlparse.urljoin("session/%s/" % self.session_id, url)
|
||||
return self.transport.send(method, url, body, key=key)
|
||||
|
||||
@property
|
||||
@command
|
||||
def url(self):
|
||||
return self.send_command("GET", "url", key="value")
|
||||
|
||||
@url.setter
|
||||
@command
|
||||
def url(self, url):
|
||||
if urlparse.urlsplit(url).netloc is None:
|
||||
return self.url(url)
|
||||
body = {"url": url}
|
||||
return self.send_command("POST", "url", body)
|
||||
|
||||
@command
|
||||
def back(self):
|
||||
return self.send_command("POST", "back")
|
||||
|
||||
@command
|
||||
def forward(self):
|
||||
return self.send_command("POST", "forward")
|
||||
|
||||
@command
|
||||
def refresh(self):
|
||||
return self.send_command("POST", "refresh")
|
||||
|
||||
@property
|
||||
@command
|
||||
def title(self):
|
||||
return self.send_command("GET", "title", key="value")
|
||||
|
||||
@property
|
||||
@command
|
||||
def handle(self):
|
||||
return self.send_command("GET", "window_handle", key="value")
|
||||
|
||||
@handle.setter
|
||||
@command
|
||||
def handle(self, handle):
|
||||
body = {"handle": handle}
|
||||
return self.send_command("POST", "window", body=body)
|
||||
|
||||
def switch_frame(self, frame):
|
||||
if frame == "parent":
|
||||
url = "frame/parent"
|
||||
body = None
|
||||
else:
|
||||
url = "frame"
|
||||
if isinstance(frame, Element):
|
||||
body = {"id": frame.json()}
|
||||
else:
|
||||
body = {"id": frame}
|
||||
|
||||
return self.send_command("POST", url, body)
|
||||
|
||||
@command
|
||||
def close(self):
|
||||
return self.send_command("DELETE", "window_handle")
|
||||
|
||||
@property
|
||||
@command
|
||||
def handles(self):
|
||||
return self.send_command("GET", "window_handles", key="value")
|
||||
|
||||
@property
|
||||
@command
|
||||
def active_element(self):
|
||||
data = self.send_command("GET", "element/active", key="value")
|
||||
if data is not None:
|
||||
return self._element(data)
|
||||
|
||||
def _element(self, data):
|
||||
elem_id = data[element_key]
|
||||
assert elem_id
|
||||
if elem_id in self._element_cache:
|
||||
return self._element_cache[elem_id]
|
||||
return Element(self, elem_id)
|
||||
|
||||
@command
|
||||
def cookies(self, name=None):
|
||||
if name is None:
|
||||
url = "cookie"
|
||||
else:
|
||||
url = "cookie/%s" % name
|
||||
return self.send_command("GET", url, {}, key="value")
|
||||
|
||||
@command
|
||||
def set_cookie(self, name, value, path=None, domain=None, secure=None, expiry=None):
|
||||
body = {"name": name,
|
||||
"value": value}
|
||||
if path is not None:
|
||||
body["path"] = path
|
||||
if domain is not None:
|
||||
body["domain"] = domain
|
||||
if secure is not None:
|
||||
body["secure"] = secure
|
||||
if expiry is not None:
|
||||
body["expiry"] = expiry
|
||||
self.send_command("POST", "cookie", body)
|
||||
|
||||
def delete_cookie(self, name=None):
|
||||
if name is None:
|
||||
url = "cookie"
|
||||
else:
|
||||
url = "cookie/%s" % name
|
||||
self.send_command("DELETE", url, {}, key="value")
|
||||
|
||||
#[...]
|
||||
|
||||
@command
|
||||
def execute_script(self, script, args=None):
|
||||
if args is None:
|
||||
args = []
|
||||
|
||||
body = {
|
||||
"script": script,
|
||||
"args": args
|
||||
}
|
||||
return self.send_command("POST", "execute", body, key="value")
|
||||
|
||||
@command
|
||||
def execute_async_script(self, script, args=None):
|
||||
if args is None:
|
||||
args = []
|
||||
|
||||
body = {
|
||||
"script": script,
|
||||
"args": args
|
||||
}
|
||||
return self.send_command("POST", "execute_async", body, key="value")
|
||||
|
||||
#[...]
|
||||
|
||||
@command
|
||||
def screenshot(self):
|
||||
return self.send_command("GET", "screenshot", key="value")
|
||||
|
||||
|
||||
class Element(object):
|
||||
def __init__(self, session, id):
|
||||
self.session = session
|
||||
self.id = id
|
||||
assert id not in self.session._element_cache
|
||||
self.session._element_cache[self.id] = self
|
||||
|
||||
def json(self):
|
||||
return {element_key: self.id}
|
||||
|
||||
@property
|
||||
def session_id(self):
|
||||
return self.session.session_id
|
||||
|
||||
def url(self, suffix):
|
||||
return "element/%s/%s" % (self.id, suffix)
|
||||
|
||||
@command
|
||||
def find_element(self, strategy, selector):
|
||||
body = {"using": strategy,
|
||||
"value": selector}
|
||||
|
||||
elem = self.session.send_command("POST", self.url("element"), body, key="value")
|
||||
return self.session.element(elem)
|
||||
|
||||
@command
|
||||
def click(self):
|
||||
self.session.send_command("POST", self.url("click"), {})
|
||||
|
||||
@command
|
||||
def tap(self):
|
||||
self.session.send_command("POST", self.url("tap"), {})
|
||||
|
||||
@command
|
||||
def clear(self):
|
||||
self.session.send_command("POST", self.url("clear"), {})
|
||||
|
||||
@command
|
||||
def send_keys(self, keys):
|
||||
if isinstance(keys, (str, unicode)):
|
||||
keys = [char for char in keys]
|
||||
|
||||
body = {"value": keys}
|
||||
|
||||
return self.session.send_command("POST", self.url("value"), body)
|
||||
|
||||
@property
|
||||
@command
|
||||
def text(self):
|
||||
return self.session.send_command("GET", self.url("text"), key="value")
|
||||
|
||||
@property
|
||||
@command
|
||||
def name(self):
|
||||
return self.session.send_command("GET", self.url("name"), key="value")
|
|
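For reference, a hedged usage sketch of the minimal WebDriver client defined above. It assumes the module is importable as webdriver (as executorservodriver.py imports it) and that a server is already listening on the placeholder host and port; none of this is code from the patch:

# Placeholder host/port; assumes a WebDriver endpoint is already running.
import webdriver

session = webdriver.Session("127.0.0.1", 4444)
session.start()                                   # POST /session
try:
    session.url = "http://web-platform.test:8000/"  # navigate
    title = session.title
    links = session.find.css("a", all=True)       # returns Element objects
    png_base64 = session.screenshot()
finally:
    session.end()                                  # DELETE /session/{id}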
@ -29,6 +29,42 @@ def data_cls_getter(output_node, visited_node):
|
|||
raise ValueError
|
||||
|
||||
|
||||
def disabled(node):
|
||||
"""Boolean indicating whether the test is disabled"""
|
||||
try:
|
||||
return node.get("disabled")
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
|
||||
def tags(node):
|
||||
"""Set of tags that have been applied to the test"""
|
||||
try:
|
||||
value = node.get("tags")
|
||||
if isinstance(value, (str, unicode)):
|
||||
return {value}
|
||||
return set(value)
|
||||
except KeyError:
|
||||
return set()
|
||||
|
||||
|
||||
def prefs(node):
|
||||
def value(ini_value):
|
||||
if isinstance(ini_value, (str, unicode)):
|
||||
return tuple(ini_value.split(":", 1))
|
||||
else:
|
||||
return (ini_value, None)
|
||||
|
||||
try:
|
||||
node_prefs = node.get("prefs")
|
||||
if type(node_prefs) in (str, unicode):
|
||||
prefs = {value(node_prefs)}
|
||||
rv = dict(value(item) for item in node_prefs)
|
||||
except KeyError:
|
||||
rv = {}
|
||||
return rv
|
||||
|
||||
|
||||
class ExpectedManifest(ManifestItem):
|
||||
def __init__(self, name, test_path, url_base):
|
||||
"""Object representing all the tests in a particular manifest
|
||||
|
@ -71,6 +107,32 @@ class ExpectedManifest(ManifestItem):
|
|||
return urlparse.urljoin(self.url_base,
|
||||
"/".join(self.test_path.split(os.path.sep)))
|
||||
|
||||
@property
|
||||
def disabled(self):
|
||||
return disabled(self)
|
||||
|
||||
@property
|
||||
def tags(self):
|
||||
return tags(self)
|
||||
|
||||
@property
|
||||
def prefs(self):
|
||||
return prefs(self)
|
||||
|
||||
|
||||
class DirectoryManifest(ManifestItem):
|
||||
@property
|
||||
def disabled(self):
|
||||
return disabled(self)
|
||||
|
||||
@property
|
||||
def tags(self):
|
||||
return tags(self)
|
||||
|
||||
@property
|
||||
def prefs(self):
|
||||
return prefs(self)
|
||||
|
||||
|
||||
class TestNode(ManifestItem):
|
||||
def __init__(self, name):
|
||||
|
@ -100,21 +162,17 @@ class TestNode(ManifestItem):
|
|||
def id(self):
|
||||
return urlparse.urljoin(self.parent.url, self.name)
|
||||
|
||||
@property
|
||||
def disabled(self):
|
||||
"""Boolean indicating whether the test is disabled"""
|
||||
try:
|
||||
return self.get("disabled")
|
||||
except KeyError:
|
||||
return False
|
||||
return disabled(self)
|
||||
|
||||
@property
|
||||
def tags(self):
|
||||
return tags(self)
|
||||
|
||||
@property
|
||||
def prefs(self):
|
||||
try:
|
||||
prefs = self.get("prefs")
|
||||
if type(prefs) in (str, unicode):
|
||||
prefs = [prefs]
|
||||
return [item.split(":", 1) for item in prefs]
|
||||
except KeyError:
|
||||
return []
|
||||
return prefs(self)
|
||||
|
||||
def append(self, node):
|
||||
"""Add a subtest to the current test
|
||||
|
@ -159,9 +217,28 @@ def get_manifest(metadata_root, test_path, url_base, run_info):
|
|||
manifest_path = expected.expected_path(metadata_root, test_path)
|
||||
try:
|
||||
with open(manifest_path) as f:
|
||||
return static.compile(f, run_info,
|
||||
return static.compile(f,
|
||||
run_info,
|
||||
data_cls_getter=data_cls_getter,
|
||||
test_path=test_path,
|
||||
url_base=url_base)
|
||||
except IOError:
|
||||
return None
|
||||
|
||||
def get_dir_manifest(metadata_root, path, run_info):
|
||||
"""Get the ExpectedManifest for a particular test path, or None if there is no
|
||||
metadata stored for that test path.
|
||||
|
||||
:param metadata_root: Absolute path to the root of the metadata directory
|
||||
:param path: Path to the ini file relative to the metadata root
|
||||
:param run_info: Dictionary of properties of the test run for which the expectation
|
||||
values should be computed.
|
||||
"""
|
||||
full_path = os.path.join(metadata_root, path)
|
||||
try:
|
||||
with open(full_path) as f:
|
||||
return static.compile(f,
|
||||
run_info,
|
||||
data_cls_getter=lambda x,y: DirectoryManifest)
|
||||
except IOError:
|
||||
return None
|
||||
|
|
|
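get_dir_manifest() above is the new entry point for directory-level metadata: a __dir__.ini compiled against the current run_info yields a DirectoryManifest whose disabled/tags/prefs properties are merged into each test's effective metadata. A hedged sketch of a call site; the module path, file paths and run_info contents are illustrative assumptions:

# Illustrative only: metadata_root, the ini path and run_info are placeholders.
from wptrunner.manifestexpected import get_dir_manifest

run_info = {"product": "firefox", "debug": False, "os": "linux"}
dir_manifest = get_dir_manifest("/path/to/metadata",
                                "testharness/firefox/subdir/__dir__.ini",
                                run_info)
if dir_manifest is not None:
    # e.g. {"browser.display.foreground_color": "#FF0000", ...}
    inherited_prefs = dir_manifest.prefs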
@ -56,8 +56,8 @@ class Reducer(object):
|
|||
self.test_loader = wptrunner.TestLoader(kwargs["tests_root"],
|
||||
kwargs["metadata_root"],
|
||||
[self.test_type],
|
||||
test_filter,
|
||||
run_info)
|
||||
run_info,
|
||||
manifest_filer=test_filter)
|
||||
if kwargs["repeat"] == 1:
|
||||
logger.critical("Need to specify --repeat with more than one repetition")
|
||||
sys.exit(1)
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
setup({output:%(output)d});
|
||||
|
||||
add_completion_callback(function() {
|
||||
add_completion_callback(function (tests, status) {
|
||||
var test_results = tests.map(function(x) {
|
||||
return {name:x.name, status:x.status, message:x.message, stack:x.stack}
|
||||
});
|
||||
var id = location.pathname + location.search + location.hash;
|
||||
var results = JSON.stringify({test: id,
|
||||
tests:test_results,
|
||||
status: status.status,
|
||||
message: status.message,
|
||||
stack: status.stack});
|
||||
(function done() {
|
||||
if (window.__wd_results_callback__) {
|
||||
clearTimeout(__wd_results_timer__);
|
||||
__wd_results_callback__(results)
|
||||
} else {
|
||||
setTimeout(done, 20);
|
||||
}
|
||||
})()
|
||||
})
|
||||
});
|
|
@ -3,7 +3,8 @@
|
|||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
var props = {output:%(output)d,
|
||||
explicit_timeout: true};
|
||||
explicit_timeout: true,
|
||||
message_events: ["completion"]};
|
||||
|
||||
if (window.opener && "timeout_multiplier" in window.opener) {
|
||||
props["timeout_multiplier"] = window.opener.timeout_multiplier;
|
||||
|
@ -16,6 +17,14 @@ if (window.opener && window.opener.explicit_timeout) {
|
|||
setup(props);
|
||||
add_completion_callback(function() {
|
||||
add_completion_callback(function(tests, status) {
|
||||
window.opener.done(tests, status)
|
||||
var harness_status = {
|
||||
"status": status.status,
|
||||
"message": status.message,
|
||||
"stack": status.stack
|
||||
};
|
||||
var test_results = tests.map(function(x) {
|
||||
return {name:x.name, status:x.status, message:x.message, stack:x.stack}
|
||||
});
|
||||
window.opener.postMessage([test_results, harness_status], "*");
|
||||
})
|
||||
});
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
import json
|
||||
import os
|
||||
import sys
|
||||
import urlparse
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from Queue import Empty
|
||||
from collections import defaultdict, OrderedDict
|
||||
from collections import defaultdict, OrderedDict, deque
|
||||
from multiprocessing import Queue
|
||||
|
||||
import manifestinclude
|
||||
|
@ -25,6 +26,7 @@ class TestChunker(object):
|
|||
self.total_chunks = total_chunks
|
||||
self.chunk_number = chunk_number
|
||||
assert self.chunk_number <= self.total_chunks
|
||||
self.logger = structured.get_default_logger()
|
||||
|
||||
def __call__(self, manifest):
|
||||
raise NotImplementedError
|
||||
|
@ -47,18 +49,15 @@ class HashChunker(TestChunker):
|
|||
if hash(test_path) % self.total_chunks == chunk_index:
|
||||
yield test_path, tests
|
||||
|
||||
|
||||
class EqualTimeChunker(TestChunker):
|
||||
"""Chunker that uses the test timeout as a proxy for the running time of the test"""
|
||||
def _group_by_directory(self, manifest_items):
|
||||
"""Split the list of manifest items into a ordered dict that groups tests in
|
||||
so that anything in the same subdirectory beyond a depth of 3 is in the same
|
||||
group. So all tests in a/b/c, a/b/c/d and a/b/c/e will be grouped together
|
||||
and separate to tests in a/b/f
|
||||
|
||||
def _get_chunk(self, manifest_items):
|
||||
# For each directory containing tests, calculate the maximum execution time after running all
|
||||
# the tests in that directory. Then work out the index into the manifest corresponding to the
|
||||
# directories at fractions of m/N of the running time where m=1..N-1 and N is the total number
|
||||
# of chunks. Return an array of these indicies
|
||||
|
||||
total_time = 0
|
||||
by_dir = OrderedDict()
|
||||
Returns: tuple (ordered dict of {test_dir: PathData}, total estimated runtime)
|
||||
"""
|
||||
|
||||
class PathData(object):
|
||||
def __init__(self, path):
|
||||
|
@ -66,73 +65,8 @@ class EqualTimeChunker(TestChunker):
|
|||
self.time = 0
|
||||
self.tests = []
|
||||
|
||||
class Chunk(object):
|
||||
def __init__(self):
|
||||
self.paths = []
|
||||
self.tests = []
|
||||
self.time = 0
|
||||
|
||||
def append(self, path_data):
|
||||
self.paths.append(path_data.path)
|
||||
self.tests.extend(path_data.tests)
|
||||
self.time += path_data.time
|
||||
|
||||
class ChunkList(object):
|
||||
def __init__(self, total_time, n_chunks):
|
||||
self.total_time = total_time
|
||||
self.n_chunks = n_chunks
|
||||
|
||||
self.remaining_chunks = n_chunks
|
||||
|
||||
self.chunks = []
|
||||
|
||||
self.update_time_per_chunk()
|
||||
|
||||
def __iter__(self):
|
||||
for item in self.chunks:
|
||||
yield item
|
||||
|
||||
def __getitem__(self, i):
|
||||
return self.chunks[i]
|
||||
|
||||
def sort_chunks(self):
|
||||
self.chunks = sorted(self.chunks, key=lambda x:x.paths[0])
|
||||
|
||||
def get_tests(self, chunk_number):
return self[chunk_number - 1].tests

def append(self, chunk):
if len(self.chunks) == self.n_chunks:
raise ValueError("Tried to create more than %n chunks" % self.n_chunks)
self.chunks.append(chunk)
self.remaining_chunks -= 1

@property
def current_chunk(self):
if self.chunks:
return self.chunks[-1]

def update_time_per_chunk(self):
self.time_per_chunk = (self.total_time - sum(item.time for item in self)) / self.remaining_chunks

def create(self):
rv = Chunk()
self.append(rv)
return rv

def add_path(self, path_data):
sum_time = self.current_chunk.time + path_data.time
if sum_time > self.time_per_chunk and self.remaining_chunks > 0:
overshoot = sum_time - self.time_per_chunk
undershoot = self.time_per_chunk - self.current_chunk.time
if overshoot < undershoot:
self.create()
self.current_chunk.append(path_data)
else:
self.current_chunk.append(path_data)
self.create()
else:
self.current_chunk.append(path_data)
by_dir = OrderedDict()
total_time = 0

for i, (test_path, tests) in enumerate(manifest_items):
test_dir = tuple(os.path.split(test_path)[0].split(os.path.sep)[:3])

@@ -144,42 +78,238 @@ class EqualTimeChunker(TestChunker):
time = sum(wpttest.DEFAULT_TIMEOUT if test.timeout !=
"long" else wpttest.LONG_TIMEOUT for test in tests)
data.time += time
total_time += time
data.tests.append((test_path, tests))

total_time += time
return by_dir, total_time

chunk_list = ChunkList(total_time, self.total_chunks)
def _maybe_remove(self, chunks, i, direction):
"""Trial removing a chunk from one chunk to an adjacent one.

:param chunks: - the list of all chunks
:param i: - the chunk index in the list of chunks to try removing from
:param direction: either "next" if we are going to move from the end to
the subsequent chunk, or "prev" if we are going to move
from the start into the previous chunk.

:returns bool: Did a chunk get moved?"""
source_chunk = chunks[i]
if direction == "next":
target_chunk = chunks[i+1]
path_index = -1
move_func = lambda: target_chunk.appendleft(source_chunk.pop())
elif direction == "prev":
target_chunk = chunks[i-1]
path_index = 0
move_func = lambda: target_chunk.append(source_chunk.popleft())
else:
raise ValueError("Unexpected move direction %s" % direction)

return self._maybe_move(source_chunk, target_chunk, path_index, move_func)

def _maybe_add(self, chunks, i, direction):
"""Trial adding a chunk from one chunk to an adjacent one.

:param chunks: - the list of all chunks
:param i: - the chunk index in the list of chunks to try adding to
:param direction: either "next" if we are going to remove from the
the subsequent chunk, or "prev" if we are going to remove
from the the previous chunk.

:returns bool: Did a chunk get moved?"""
target_chunk = chunks[i]
if direction == "next":
source_chunk = chunks[i+1]
path_index = 0
move_func = lambda: target_chunk.append(source_chunk.popleft())
elif direction == "prev":
source_chunk = chunks[i-1]
path_index = -1
move_func = lambda: target_chunk.appendleft(source_chunk.pop())
else:
raise ValueError("Unexpected move direction %s" % direction)

return self._maybe_move(source_chunk, target_chunk, path_index, move_func)

def _maybe_move(self, source_chunk, target_chunk, path_index, move_func):
"""Move from one chunk to another, assess the change in badness,
and keep the move iff it decreases the badness score.

:param source_chunk: chunk to move from
:param target_chunk: chunk to move to
:param path_index: 0 if we are moving from the start or -1 if we are moving from the
end
:param move_func: Function that actually moves between chunks"""
if len(source_chunk.paths) <= 1:
return False

move_time = source_chunk.paths[path_index].time

new_source_badness = self._badness(source_chunk.time - move_time)
new_target_badness = self._badness(target_chunk.time + move_time)

delta_badness = ((new_source_badness + new_target_badness) -
(source_chunk.badness + target_chunk.badness))
if delta_badness < 0:
move_func()
return True

return False

def _badness(self, time):
"""Metric of badness for a specific chunk

:param time: the time for a specific chunk"""
return (time - self.expected_time)**2

def _get_chunk(self, manifest_items):
by_dir, total_time = self._group_by_directory(manifest_items)

if len(by_dir) < self.total_chunks:
raise ValueError("Tried to split into %i chunks, but only %i subdirectories included" % (
self.total_chunks, len(by_dir)))

# Put any individual dirs with a time greater than the time per chunk into their own
# chunk
self.expected_time = float(total_time) / self.total_chunks

chunks = self._create_initial_chunks(by_dir)

while True:
to_remove = []
for path_data in by_dir.itervalues():
if path_data.time > chunk_list.time_per_chunk:
to_remove.append(path_data)
if to_remove:
for path_data in to_remove:
chunk = chunk_list.create()
chunk.append(path_data)
del by_dir[path_data.path]
chunk_list.update_time_per_chunk()
else:
# Move a test from one chunk to the next until doing so no longer
# reduces the badness
got_improvement = self._update_chunks(chunks)
if not got_improvement:
break

chunk = chunk_list.create()
for path_data in by_dir.itervalues():
chunk_list.add_path(path_data)
self.logger.debug(self.expected_time)
for i, chunk in chunks.iteritems():
self.logger.debug("%i: %i, %i" % (i + 1, chunk.time, chunk.badness))

assert len(chunk_list.chunks) == self.total_chunks, len(chunk_list.chunks)
assert sum(item.time for item in chunk_list) == chunk_list.total_time
assert self._all_tests(by_dir) == self._chunked_tests(chunks)

chunk_list.sort_chunks()
return self._get_tests(chunks)

return chunk_list.get_tests(self.chunk_number)
@staticmethod
def _all_tests(by_dir):
"""Return a set of all tests in the manifest from a grouping by directory"""
return set(x[0] for item in by_dir.itervalues()
for x in item.tests)

@staticmethod
def _chunked_tests(chunks):
"""Return a set of all tests in the manifest from the chunk list"""
return set(x[0] for chunk in chunks.itervalues()
for path in chunk.paths
for x in path.tests)


def _create_initial_chunks(self, by_dir):
"""Create an initial unbalanced list of chunks.

:param by_dir: All tests in the manifest grouped by subdirectory
:returns list: A list of Chunk objects"""

class Chunk(object):
def __init__(self, paths, index):
"""List of PathData objects that together form a single chunk of
tests"""
self.paths = deque(paths)
self.time = sum(item.time for item in paths)
self.index = index

def appendleft(self, path):
"""Add a PathData object to the start of the chunk"""
self.paths.appendleft(path)
self.time += path.time

def append(self, path):
"""Add a PathData object to the end of the chunk"""
self.paths.append(path)
self.time += path.time

def pop(self):
"""Remove PathData object from the end of the chunk"""
assert len(self.paths) > 1
self.time -= self.paths[-1].time
return self.paths.pop()

def popleft(self):
"""Remove PathData object from the start of the chunk"""
assert len(self.paths) > 1
self.time -= self.paths[0].time
return self.paths.popleft()

@property
def badness(self_):
"""Badness metric for this chunk"""
return self._badness(self_.time)

initial_size = len(by_dir) / self.total_chunks
chunk_boundaries = [initial_size * i
for i in xrange(self.total_chunks)] + [len(by_dir)]

chunks = OrderedDict()
for i, lower in enumerate(chunk_boundaries[:-1]):
upper = chunk_boundaries[i + 1]
paths = by_dir.values()[lower:upper]
chunks[i] = Chunk(paths, i)

assert self._all_tests(by_dir) == self._chunked_tests(chunks)

return chunks

def _update_chunks(self, chunks):
"""Run a single iteration of the chunk update algorithm.

:param chunks: - List of chunks
"""
#TODO: consider replacing this with a heap
sorted_chunks = sorted(chunks.values(), key=lambda x:-x.badness)
got_improvement = False
for chunk in sorted_chunks:
if chunk.time < self.expected_time:
f = self._maybe_add
else:
f = self._maybe_remove

if chunk.index == 0:
order = ["next"]
elif chunk.index == self.total_chunks - 1:
order = ["prev"]
else:
if chunk.time < self.expected_time:
# First try to add a test from the neighboring chunk with the
# greatest total time
if chunks[chunk.index + 1].time > chunks[chunk.index - 1].time:
order = ["next", "prev"]
else:
order = ["prev", "next"]
else:
# First try to remove a test and add to the neighboring chunk with the
# lowest total time
if chunks[chunk.index + 1].time > chunks[chunk.index - 1].time:
order = ["prev", "next"]
else:
order = ["next", "prev"]

for direction in order:
if f(chunks, chunk.index, direction):
got_improvement = True
break

if got_improvement:
break

return got_improvement

def _get_tests(self, chunks):
"""Return the list of tests corresponding to the chunk number we are running.

:param chunks: List of chunks"""
tests = []
for path in chunks[self.chunk_number - 1].paths:
tests.extend(path.tests)

return tests

def __call__(self, manifest_iter):
manifest = list(manifest_iter)

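The EqualTimeChunker above balances per-directory runtimes by scoring each chunk with a squared deviation from the ideal time ("badness") and only keeping a move between adjacent chunks when the combined score drops. Below is a standalone sketch of that accept-only-improving-moves rule, using an invented ToyChunk class and made-up timings rather than the patch's PathData objects.

from collections import deque


class ToyChunk(object):
    """Stripped-down stand-in for a chunk: just a deque of per-directory times."""
    def __init__(self, times):
        self.times = deque(times)

    @property
    def time(self):
        return sum(self.times)


def badness(time, expected_time):
    # Squared deviation from the ideal per-chunk runtime.
    return (time - expected_time) ** 2


def maybe_move_last(source, target, expected_time):
    """Move source's last entry onto the front of target iff total badness drops."""
    if len(source.times) <= 1:
        return False
    move_time = source.times[-1]
    old_score = badness(source.time, expected_time) + badness(target.time, expected_time)
    new_score = (badness(source.time - move_time, expected_time) +
                 badness(target.time + move_time, expected_time))
    if new_score < old_score:
        target.times.appendleft(source.times.pop())
        return True
    return False


# Ideal time 60s per chunk: moving the trailing 30s directory improves balance.
a, b = ToyChunk([50, 30]), ToyChunk([40])
assert maybe_move_last(a, b, 60)
assert (a.time, b.time) == (50, 70)
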
@@ -214,6 +344,14 @@ class TestFilter(object):
if include_tests:
yield test_path, include_tests

class TagFilter(object):
def __init__(self, tags):
self.tags = set(tags)

def __call__(self, test_iter):
for test in test_iter:
if test.tags & self.tags:
yield test

class ManifestLoader(object):
def __init__(self, test_paths, force_manifest_update=False):

@@ -276,20 +414,30 @@ class ManifestLoader(object):

return manifest_file

def iterfilter(filters, iter):
for f in filters:
iter = f(iter)
for item in iter:
yield item

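TagFilter is applied to the fully constructed wpttest.Test objects, since it needs the computed tags, while TestFilter still operates on raw manifest entries; iterfilter is the helper that chains either kind of filter lazily over an iterator. Here is a toy illustration of that chaining, with a FakeTest class and tag values invented for the example.

class FakeTest(object):
    def __init__(self, test_id, tags):
        self.id = test_id
        self.tags = set(tags)


class TagFilter(object):
    # Mirrors the new TagFilter: keep a test if its tags intersect the selection.
    def __init__(self, tags):
        self.tags = set(tags)

    def __call__(self, test_iter):
        for test in test_iter:
            if test.tags & self.tags:
                yield test


def iterfilter(filters, iterable):
    # Same shape as the iterfilter helper above: each filter wraps the iterator.
    for f in filters:
        iterable = f(iterable)
    for item in iterable:
        yield item


tests = [FakeTest("/dom/a.html", {"dir:dom"}),
         FakeTest("/css/b.html", {"dir:css"})]
selected = list(iterfilter([TagFilter({"dir:dom"})], tests))
assert [t.id for t in selected] == ["/dom/a.html"]
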
class TestLoader(object):
def __init__(self,
test_manifests,
test_types,
test_filter,
run_info,
manifest_filters=None,
meta_filters=None,
chunk_type="none",
total_chunks=1,
chunk_number=1,
include_https=True):

self.test_types = test_types
self.test_filter = test_filter
self.run_info = run_info

self.manifest_filters = manifest_filters if manifest_filters is not None else []
self.meta_filters = meta_filters if meta_filters is not None else []

self.manifests = test_manifests
self.tests = None
self.disabled_tests = None

@@ -305,6 +453,9 @@ class TestLoader(object):
chunk_number)

self._test_ids = None

self.directory_manifests = {}

self._load_tests()

@property

@@ -316,22 +467,39 @@ class TestLoader(object):
self._test_ids += [item.id for item in test_dict[test_type]]
return self._test_ids

def get_test(self, manifest_test, expected_file):
if expected_file is not None:
expected = expected_file.get_test(manifest_test.id)
else:
expected = None
def get_test(self, manifest_test, inherit_metadata, test_metadata):
if test_metadata is not None:
inherit_metadata.append(test_metadata)
test_metadata = test_metadata.get_test(manifest_test.id)

return wpttest.from_manifest(manifest_test, expected)
return wpttest.from_manifest(manifest_test, inherit_metadata, test_metadata)

def load_expected_manifest(self, test_manifest, metadata_path, test_path):
return manifestexpected.get_manifest(metadata_path, test_path, test_manifest.url_base, self.run_info)
def load_dir_metadata(self, test_manifest, metadata_path, test_path):
rv = []
path_parts = os.path.dirname(test_path).split(os.path.sep)
for i in xrange(1,len(path_parts) + 1):
path = os.path.join(os.path.sep.join(path_parts[:i]), "__dir__.ini")
if path not in self.directory_manifests:
self.directory_manifests[path] = manifestexpected.get_dir_manifest(
metadata_path, path, self.run_info)
manifest = self.directory_manifests[path]
if manifest is not None:
rv.append(manifest)
return rv

def load_metadata(self, test_manifest, metadata_path, test_path):
inherit_metadata = self.load_dir_metadata(test_manifest, metadata_path, test_path)
test_metadata = manifestexpected.get_manifest(
metadata_path, test_path, test_manifest.url_base, self.run_info)
return inherit_metadata, test_metadata

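load_dir_metadata builds the inheritance chain for a test by looking for a __dir__.ini next to each ancestor directory in the metadata tree, outermost first, so that settings nearer the test override settings further up. A minimal sketch of just that path walk follows; the example path is made up and assumes a POSIX-style separator.

import os


def dir_manifest_paths(test_path):
    # For a/b/c/test.html this yields a/__dir__.ini, a/b/__dir__.ini,
    # a/b/c/__dir__.ini, in that order.
    parts = os.path.dirname(test_path).split(os.path.sep)
    for i in xrange(1, len(parts) + 1):
        yield os.path.join(os.path.sep.join(parts[:i]), "__dir__.ini")


assert list(dir_manifest_paths("a/b/c/test.html")) == [
    "a/__dir__.ini", "a/b/__dir__.ini", "a/b/c/__dir__.ini"]
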
def iter_tests(self):
manifest_items = []

for manifest in self.manifests.keys():
manifest_items.extend(self.test_filter(manifest.itertypes(*self.test_types)))
manifest_iter = iterfilter(self.manifest_filters,
manifest.itertypes(*self.test_types))
manifest_items.extend(manifest_iter)

@@ -339,12 +507,15 @@ class TestLoader(object):
for test_path, tests in manifest_items:
manifest_file = iter(tests).next().manifest
metadata_path = self.manifests[manifest_file]["metadata_path"]
expected_file = self.load_expected_manifest(manifest_file, metadata_path, test_path)
inherit_metadata, test_metadata = self.load_metadata(manifest_file, metadata_path, test_path)

for test in iterfilter(self.meta_filters,
self.iter_wpttest(inherit_metadata, test_metadata, tests)):
yield test_path, test.test_type, test

def iter_wpttest(self, inherit_metadata, test_metadata, tests):
for manifest_test in tests:
test = self.get_test(manifest_test, expected_file)
test_type = manifest_test.item_type
yield test_path, test_type, test
yield self.get_test(manifest_test, inherit_metadata, test_metadata)

def _load_tests(self):
"""Read in the tests from the manifest file and add them to a queue"""

@@ -293,8 +293,8 @@ class TestRunnerManager(threading.Thread):
# reason
# Need to consider the unlikely case where one test causes the
# runner process to repeatedly die
self.logger.info("Last test did not complete, requeueing")
self.requeue_test()
self.logger.critical("Last test did not complete")
break
self.logger.warning(
"More tests found, but runner process died, restarting")
self.restart_count += 1

@@ -466,10 +466,6 @@ class TestRunnerManager(threading.Thread):
def start_next_test(self):
self.send_message("run_test")

def requeue_test(self):
self.test_source.requeue(self.test)
self.test = None

def test_start(self, test):
self.test = test
self.logger.test_start(test.id)

@@ -86,6 +86,14 @@ def create_parser(product_choices=None):
default=False,
help="List the tests that are disabled on the current platform")

build_type = parser.add_mutually_exclusive_group()
build_type.add_argument("--debug-build", dest="debug", action="store_true",
default=None,
help="Build is a debug build (overrides any mozinfo file)")
build_type.add_argument("--release-build", dest="debug", action="store_false",
default=None,
help="Build is a release (overrides any mozinfo file)")

test_selection_group = parser.add_argument_group("Test Selection")
test_selection_group.add_argument("--test-types", action="store",
nargs="*", default=["testharness", "reftest"],

@@ -97,6 +105,8 @@ def create_parser(product_choices=None):
help="URL prefix to exclude")
test_selection_group.add_argument("--include-manifest", type=abs_path,
help="Path to manifest listing tests to include")
test_selection_group.add_argument("--tag", action="append", dest="tags",
help="Labels applied to tests to include in the run. Labels starting dir: are equivalent to top-level directories.")

debugging_group = parser.add_argument_group("Debugging")
debugging_group.add_argument('--debugger', const="__default__", nargs="?",

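The new options give the runner an explicit build-type switch, where --debug-build and --release-build share the debug destination and leave it as None when neither is passed, and a repeatable --tag selector. The following self-contained argparse sketch shows the same pattern in isolation; it is not the full wptrunner parser.

import argparse

parser = argparse.ArgumentParser()

# Mutually exclusive build-type flags sharing one destination; the None
# default lets a mozinfo file decide when neither flag is passed.
build_type = parser.add_mutually_exclusive_group()
build_type.add_argument("--debug-build", dest="debug", action="store_true", default=None)
build_type.add_argument("--release-build", dest="debug", action="store_false", default=None)

# Repeatable tag selection; each --tag appends to args.tags.
parser.add_argument("--tag", action="append", dest="tags")

args = parser.parse_args(["--release-build", "--tag", "dir:dom", "--tag", "slow"])
assert args.debug is False
assert args.tags == ["dir:dom", "slow"]
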
@@ -115,6 +115,9 @@ class Compiler(NodeVisitor):
def visit_ValueNode(self, node):
return (lambda x: True, node.data)

def visit_AtomNode(self, node):
return (lambda x: True, node.data)

def visit_ConditionalNode(self, node):
return self.visit(node.children[0]), self.visit(node.children[1])

@@ -68,6 +68,9 @@ class Compiler(NodeVisitor):
def visit_ValueNode(self, node):
return node.data

def visit_AtomNode(self, node):
return node.data

def visit_ListNode(self, node):
return [self.visit(child) for child in node.children]

@@ -93,6 +93,10 @@ class ValueNode(Node):
raise TypeError


class AtomNode(ValueNode):
pass


class ConditionalNode(Node):
pass

@@ -44,6 +44,9 @@ binary_operators = ["==", "!=", "and", "or"]

operators = ["==", "!=", "not", "and", "or"]

atoms = {"True": True,
"False": False,
"Reset": object()}

def decode(byte_str):
return byte_str.decode("utf8")

@@ -55,7 +58,7 @@ def precedence(operator_node):

class TokenTypes(object):
def __init__(self):
for type in ["group_start", "group_end", "paren", "list_start", "list_end", "separator", "ident", "string", "number", "eof"]:
for type in ["group_start", "group_end", "paren", "list_start", "list_end", "separator", "ident", "string", "number", "atom", "eof"]:
setattr(self, type, type)

token_types = TokenTypes()

@@ -232,6 +235,8 @@ class Tokenizer(object):
self.state = self.eol_state
elif self.char() == ",":
raise ParseError(self.filename, self.line_number, "List item started with separator")
elif self.char() == "@":
self.state = self.list_value_atom_state
else:
self.state = self.list_value_state

@@ -267,6 +272,11 @@ class Tokenizer(object):
if rv:
yield (token_types.string, decode(rv))

def list_value_atom_state(self):
self.consume()
for _, value in self.list_value_state():
yield token_types.atom, value

def list_end_state(self):
self.consume()
yield (token_types.list_end, "]")

@@ -282,7 +292,14 @@ class Tokenizer(object):
self.state = self.comment_state
else:
self.state = self.line_end_state
elif self.char() == "@":
self.consume()
for _, value in self.value_inner_state():
yield token_types.atom, value
else:
self.state = self.value_inner_state

def value_inner_state(self):
rv = ""
spaces = 0
while True:

@@ -544,12 +561,17 @@ class Parser(object):
if self.token[0] == token_types.string:
self.value()
self.eof_or_end_group()
elif self.token[0] == token_types.atom:
self.atom()
else:
raise ParseError

def list_value(self):
self.tree.append(ListNode())
while self.token[0] == token_types.string:
while self.token[0] in (token_types.atom, token_types.string):
if self.token[0] == token_types.atom:
self.atom()
else:
self.value()
self.expect(token_types.list_end)
self.tree.pop()

@@ -571,6 +593,13 @@ class Parser(object):
self.consume()
self.tree.pop()

def atom(self):
if self.token[1] not in atoms:
raise ParseError(self.tokenizer.filename, self.tokenizer.line_number, "Unrecognised symbol @%s" % self.token[1])
self.tree.append(AtomNode(atoms[self.token[1]]))
self.consume()
self.tree.pop()

def expr_start(self):
self.expr_builder = ExpressionBuilder(self.tokenizer)
self.expr_builders.append(self.expr_builder)

@@ -605,21 +634,21 @@ class Parser(object):
elif self.token[0] == token_types.number:
self.expr_number()
else:
raise ParseError
raise ParseError(self.tokenizer.filename, self.tokenizer.line_number, "Unrecognised operand")

def expr_unary_op(self):
if self.token[1] in unary_operators:
self.expr_builder.push_operator(UnaryOperatorNode(self.token[1]))
self.consume()
else:
raise ParseError(self.filename, self.tokenizer.line_number, "Expected unary operator")
raise ParseError(self.tokenizer.filename, self.tokenizer.line_number, "Expected unary operator")

def expr_bin_op(self):
if self.token[1] in binary_operators:
self.expr_builder.push_operator(BinaryOperatorNode(self.token[1]))
self.consume()
else:
raise ParseError(self.filename, self.tokenizer.line_number, "Expected binary operator")
raise ParseError(self.tokenizer.filename, self.tokenizer.line_number, "Expected binary operator")

def expr_value(self):
node_type = {token_types.string: StringNode,

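The tokenizer and parser changes introduce an atom token type so expectation manifests can use the special values @True, @False and @Reset; Parser.atom() rejects any other name after the @ sign. A small standalone sketch of that case-sensitive lookup follows, with an invented error class and helper name standing in for ParseError and the real parser machinery.

# Illustrative stand-in for the atoms table and the Parser.atom() lookup.
RESET = object()
ATOMS = {"True": True, "False": False, "Reset": RESET}


class AtomError(ValueError):
    pass


def resolve_atom(token):
    """Map an '@Name' token to its atom value, e.g. '@Reset' -> RESET."""
    name = token.lstrip("@")
    if name not in ATOMS:
        # Mirrors the "Unrecognised symbol @..." error; the lookup is
        # case-sensitive, so '@true' is rejected just like '@Unknown'.
        raise AtomError("Unrecognised symbol @%s" % name)
    return ATOMS[name]


assert resolve_atom("@False") is False
assert resolve_atom("@Reset") is RESET
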
@@ -3,7 +3,9 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.

from node import NodeVisitor, ValueNode, ListNode, BinaryExpressionNode
from parser import precedence
from parser import atoms, precedence

atom_names = {v:"@%s" % k for (k,v) in atoms.iteritems()}

named_escapes = set(["\a", "\b", "\f", "\n", "\r", "\t", "\v"])

@@ -80,6 +82,9 @@ class ManifestSerializer(NodeVisitor):
quote = ""
return [quote + escape(node.data, extras=quote) + quote]

def visit_AtomNode(self, node):
return [atom_names[node.data]]

def visit_ConditionalNode(self, node):
return ["if %s: %s" % tuple(self.visit(item)[0] for item in node.children)]

@@ -67,5 +67,13 @@ key:
]]]]]]
)

def test_atom_0(self):
with self.assertRaises(parser.ParseError):
self.parse("key: @Unknown")

def test_atom_1(self):
with self.assertRaises(parser.ParseError):
self.parse("key: @true")

if __name__ == "__main__":
unittest.main()

@@ -209,3 +209,19 @@ class TokenizerTest(unittest.TestCase):
def test_escape_11(self):
self.compare(r"""key: \\ab
""")

def test_atom_1(self):
self.compare(r"""key: @True
""")

def test_atom_2(self):
self.compare(r"""key: @False
""")

def test_atom_3(self):
self.compare(r"""key: @Reset
""")

def test_atom_4(self):
self.compare(r"""key: [a, @Reset, b]
""")

@@ -40,20 +40,27 @@ def setup_logging(*args, **kwargs):
global logger
logger = wptlogging.setup(*args, **kwargs)

def get_loader(test_paths, product, ssl_env, debug=False, **kwargs):
def get_loader(test_paths, product, ssl_env, debug=None, **kwargs):
run_info = wpttest.get_run_info(kwargs["run_info"], product, debug=debug)

test_manifests = testloader.ManifestLoader(test_paths, force_manifest_update=kwargs["manifest_update"]).load()

test_filter = testloader.TestFilter(include=kwargs["include"],
manifest_filters = []
meta_filters = []

if kwargs["include"] or kwargs["exclude"] or kwargs["include_manifest"]:
manifest_filters.append(testloader.TestFilter(include=kwargs["include"],
exclude=kwargs["exclude"],
manifest_path=kwargs["include_manifest"],
test_manifests=test_manifests)
test_manifests=test_manifests))
if kwargs["tags"]:
meta_filters.append(testloader.TagFilter(tags=kwargs["tags"]))

test_loader = testloader.TestLoader(test_manifests,
kwargs["test_types"],
test_filter,
run_info,
manifest_filters=manifest_filters,
meta_filters=meta_filters,
chunk_type=kwargs["chunk_type"],
total_chunks=kwargs["total_chunks"],
chunk_number=kwargs["this_chunk"],

@@ -111,7 +118,7 @@ def run_tests(config, test_paths, product, **kwargs):
check_args(**kwargs)

if "test_loader" in kwargs:
run_info = wpttest.get_run_info(kwargs["run_info"], product, debug=False)
run_info = wpttest.get_run_info(kwargs["run_info"], product, debug=None)
test_loader = kwargs["test_loader"]
else:
run_info, test_loader = get_loader(test_paths, product, ssl_env,

@@ -163,6 +170,7 @@ def run_tests(config, test_paths, product, **kwargs):
executor_kwargs = get_executor_kwargs(test_type,
test_environment.external_config,
test_environment.cache_manager,
run_info,
**kwargs)

if executor_cls is None:

@@ -212,7 +220,7 @@ def main():
elif kwargs["list_disabled"]:
list_disabled(**kwargs)
else:
return run_tests(**kwargs)
return not run_tests(**kwargs)
except Exception:
import pdb, traceback
print traceback.format_exc()

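In get_loader, path-based selection now only adds a TestFilter when --include, --exclude or --include-manifest was given, and --tag adds a TagFilter to the metadata filters; main() then turns the result of run_tests into a process exit status. Assuming run_tests reports overall success as a truthy value, which is what the added negation suggests, the exit-code convention looks like this sketch (the stub function is invented for illustration):

import sys


def run_tests_stub():
    # Hypothetical stand-in for run_tests: truthy means the run succeeded.
    return True


def main():
    # A successful run (True) must map to exit status 0, hence the `not`.
    return not run_tests_stub()


if __name__ == "__main__":
    sys.exit(main())
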
@@ -9,6 +9,9 @@ import os

import mozinfo

from wptmanifest.parser import atoms

atom_reset = atoms["Reset"]

class Result(object):
def __init__(self, status, message, expected=None, extra=None):

@@ -58,8 +61,11 @@ class RunInfo(dict):
self._update_mozinfo(metadata_root)
self.update(mozinfo.info)
self["product"] = product
if not "debug" in self:
if debug is not None:
self["debug"] = debug
elif "debug" not in self:
# Default to release
self["debug"] = False

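RunInfo now lets an explicit --debug-build or --release-build choice override whatever mozinfo provides, and only falls back to the release default when neither the command line nor mozinfo says otherwise. A compact sketch of that precedence, with a made-up helper name and mozinfo contents:

def resolve_debug(cli_debug, mozinfo_info):
    """cli_debug is True/False when a build-type flag was passed, else None."""
    info = dict(mozinfo_info)
    if cli_debug is not None:
        info["debug"] = cli_debug   # the command line wins
    elif "debug" not in info:
        info["debug"] = False       # default to release
    return info["debug"]


assert resolve_debug(True, {"debug": False}) is True
assert resolve_debug(None, {"debug": True}) is True
assert resolve_debug(None, {}) is False
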
def _update_mozinfo(self, metadata_root):
"""Add extra build information from a mozinfo.json file in a parent

@@ -83,27 +89,26 @@ class B2GRunInfo(RunInfo):
class Test(object):
result_cls = None
subtest_result_cls = None
test_type = None

def __init__(self, url, expected_metadata, timeout=DEFAULT_TIMEOUT, path=None,
def __init__(self, url, inherit_metadata, test_metadata, timeout=DEFAULT_TIMEOUT, path=None,
protocol="http"):
self.url = url
self._expected_metadata = expected_metadata
self._inherit_metadata = inherit_metadata
self._test_metadata = test_metadata
self.timeout = timeout
self.path = path
if expected_metadata:
prefs = expected_metadata.prefs()
else:
prefs = []
self.environment = {"protocol": protocol, "prefs": prefs}
self.environment = {"protocol": protocol, "prefs": self.prefs}

def __eq__(self, other):
return self.id == other.id

@classmethod
def from_manifest(cls, manifest_item, expected_metadata):
def from_manifest(cls, manifest_item, inherit_metadata, test_metadata):
timeout = LONG_TIMEOUT if manifest_item.timeout == "long" else DEFAULT_TIMEOUT
return cls(manifest_item.url,
expected_metadata,
inherit_metadata,
test_metadata,
timeout=timeout,
path=manifest_item.path,
protocol="https" if hasattr(manifest_item, "https") and manifest_item.https else "http")

@@ -117,22 +122,57 @@ class Test(object):
def keys(self):
return tuple()

def _get_metadata(self, subtest):
if self._expected_metadata is None:
return None

if subtest is not None:
metadata = self._expected_metadata.get_subtest(subtest)
def _get_metadata(self, subtest=None):
if self._test_metadata is not None and subtest is not None:
return self._test_metadata.get_subtest(subtest)
else:
metadata = self._expected_metadata
return metadata
return self._test_metadata

def itermeta(self, subtest=None):
for metadata in self._inherit_metadata:
yield metadata

if self._test_metadata is not None:
yield self._get_metadata()
if subtest is not None:
subtest_meta = self._get_metadata(subtest)
if subtest_meta is not None:
yield subtest_meta


def disabled(self, subtest=None):
metadata = self._get_metadata(subtest)
if metadata is None:
return False
for meta in self.itermeta(subtest):
disabled = meta.disabled
if disabled is not None:
return disabled
return None

return metadata.disabled()
@property
def tags(self):
tags = set()
for meta in self.itermeta():
meta_tags = meta.tags
if atom_reset in meta_tags:
tags = meta_tags.copy()
tags.remove(atom_reset)
else:
tags |= meta_tags

tags.add("dir:%s" % self.id.lstrip("/").split("/")[0])

return tags

@property
def prefs(self):
prefs = {}
for meta in self.itermeta():
meta_prefs = meta.prefs
if atom_reset in prefs:
prefs = meta_prefs.copy()
del prefs[atom_reset]
else:
prefs.update(meta_prefs)
return prefs

def expected(self, subtest=None):
if subtest is None:

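A test's effective prefs and tags are now folded together from the ancestor __dir__.ini metadata and the test's own metadata, with the @Reset atom discarding whatever was inherited up to that point. The sketch below shows that fold for prefs with invented pref names; it keys the reset off the prefs of the level currently being merged, which is the effect the @Reset atom is meant to have.

# RESET stands in for the @Reset atom; each dict below stands in for the
# prefs of one __dir__.ini (outermost first) or of the test's own metadata.
RESET = object()


def merge_prefs(metadata_chain):
    prefs = {}
    for meta_prefs in metadata_chain:
        if RESET in meta_prefs:
            prefs = dict(meta_prefs)   # start again from this level...
            del prefs[RESET]           # ...dropping the marker itself
        else:
            prefs.update(meta_prefs)
    return prefs


chain = [{"layout.some.pref": 1},      # top-level __dir__.ini
         {"dom.another.pref": True},   # nested __dir__.ini
         {RESET: None}]                # the test's own ini asked for a reset
assert merge_prefs(chain) == {}
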
@@ -153,6 +193,7 @@ class Test(object):
class TestharnessTest(Test):
result_cls = TestharnessResult
subtest_result_cls = TestharnessSubtestResult
test_type = "testharness"

@property
def id(self):

@@ -160,6 +201,8 @@ class TestharnessTest(Test):


class ManualTest(Test):
test_type = "manual"

@property
def id(self):
return self.url

@@ -167,9 +210,10 @@ class ManualTest(Test):

class ReftestTest(Test):
result_cls = ReftestResult
test_type = "reftest"

def __init__(self, url, expected, references, timeout=DEFAULT_TIMEOUT, path=None, protocol="http"):
Test.__init__(self, url, expected, timeout, path, protocol)
def __init__(self, url, inherit_metadata, test_metadata, references, timeout=DEFAULT_TIMEOUT, path=None, protocol="http"):
Test.__init__(self, url, inherit_metadata, test_metadata, timeout, path, protocol)

for _, ref_type in references:
if ref_type not in ("==", "!="):

@@ -180,7 +224,8 @@ class ReftestTest(Test):
@classmethod
def from_manifest(cls,
manifest_test,
expected_metadata,
inherit_metadata,
test_metadata,
nodes=None,
references_seen=None):

@@ -194,7 +239,8 @@ class ReftestTest(Test):
url = manifest_test.url

node = cls(manifest_test.url,
expected_metadata,
inherit_metadata,
test_metadata,
[],
timeout=timeout,
path=manifest_test.path,

@@ -219,11 +265,12 @@ class ReftestTest(Test):
manifest_node = manifest_test.manifest.get_reference(ref_url)
if manifest_node:
reference = ReftestTest.from_manifest(manifest_node,
[],
None,
nodes,
references_seen)
else:
reference = ReftestTest(ref_url, None, [])
reference = ReftestTest(ref_url, [], None, [])
node.references.append((reference, ref_type))

@@ -243,7 +290,7 @@ manifest_test_cls = {"reftest": ReftestTest,
"manual": ManualTest}


def from_manifest(manifest_test, expected_metadata):
def from_manifest(manifest_test, inherit_metadata, test_metadata):
test_cls = manifest_test_cls[manifest_test.item_type]

return test_cls.from_manifest(manifest_test, expected_metadata)
return test_cls.from_manifest(manifest_test, inherit_metadata, test_metadata)