Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1830917 Remove webextension python code for desktop r=perftest-reviewers,AlexandruIonescu
Differential Revision: https://phabricator.services.mozilla.com/D177501
Parent: 1d71b17a64
Commit: 5a593c1491
@@ -91,7 +91,6 @@ class Runner:
        if not os.path.exists(self.archive):
            os.makedirs(self.archive, exist_ok=True)

        logger.info("Verifying Geckodriver binary presence")
        if shutil.which(self.geckodriver) is None and not os.path.exists(
            self.geckodriver
        ):
@@ -32,8 +32,6 @@ from signal_handler import SignalHandler
from utils import view_gecko_profile_from_raptor
from webextension import (
    WebExtensionAndroid,
    WebExtensionDesktopChrome,
    WebExtensionFirefox,
)

LOG = RaptorLogger(component="raptor-main")
@@ -77,11 +75,7 @@ def main(args=sys.argv[1:]):
    LOG.info(next_test["name"])

    if not args.browsertime:
-        if args.app == "firefox":
-            raptor_class = WebExtensionFirefox
-        elif args.app in CHROMIUM_DISTROS:
-            raptor_class = WebExtensionDesktopChrome
-        else:
+        if args.app != "firefox" and args.app not in CHROMIUM_DISTROS:
            raptor_class = WebExtensionAndroid
    else:
@@ -4,4 +4,3 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.

from .android import WebExtensionAndroid
from .desktop import WebExtensionDesktopChrome, WebExtensionFirefox
@@ -1,263 +0,0 @@
#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os
import shutil

from logger.logger import RaptorLogger
from mozpower import MozPower
from mozrunner import runners
from outputhandler import OutputHandler
from perftest import PerftestDesktop

from .base import WebExtension

LOG = RaptorLogger(component="raptor-webext-desktop")


class WebExtensionDesktop(PerftestDesktop, WebExtension):
    def __init__(self, *args, **kwargs):
        super(WebExtensionDesktop, self).__init__(*args, **kwargs)

        # create the desktop browser runner
        LOG.info("creating browser runner using mozrunner")
        self.output_handler = OutputHandler(verbose=self.config["verbose"])
        process_args = {"processOutputLine": [self.output_handler]}
        firefox_args = ["--allow-downgrade"]
        runner_cls = runners[self.config["app"]]
        self.runner = runner_cls(
            self.config["binary"],
            profile=self.profile,
            cmdargs=firefox_args,
            process_args=process_args,
            symbols_path=self.config["symbols_path"],
        )

        # Force Firefox to immediately exit for content crashes
        self.runner.env["MOZ_CRASHREPORTER_SHUTDOWN"] = "1"

        self.runner.env.update(self.config.get("environment", {}))

    def launch_desktop_browser(self, test):
        raise NotImplementedError

    def start_runner_proc(self):
        # launch the browser via our previously-created runner
        self.runner.start()

        proc = self.runner.process_handler
        self.output_handler.proc = proc

        # give our control server the browser process so it can shut it down later
        self.control_server.browser_proc = proc

    def process_exists(self):
        return self.runner.is_running()

    def run_test(self, test, timeout):
        # tests will be run warm (i.e. NO browser restart between page-cycles)
        # unless otheriwse specified in the test INI by using 'cold = true'
        mozpower_measurer = None
        if self.config.get("power_test", False):
            powertest_name = test["name"].replace("/", "-").replace("\\", "-")
            output_dir = os.path.join(
                self.artifact_dir, "power-measurements-%s" % powertest_name
            )
            test_dir = os.path.join(output_dir, powertest_name)

            try:
                if not os.path.exists(output_dir):
                    os.mkdir(output_dir)
                if not os.path.exists(test_dir):
                    os.mkdir(test_dir)
            except Exception:
                LOG.critical(
                    "Could not create directories to store power testing data."
                )
                raise

            # Start power measurements with IPG creating a power usage log
            # every 30 seconds with 1 data point per second (or a 1000 milli-
            # second sampling rate).
            mozpower_measurer = MozPower(
                ipg_measure_duration=30,
                sampling_rate=1000,
                output_file_path=os.path.join(test_dir, "power-usage"),
            )
            mozpower_measurer.initialize_power_measurements()

        if self.config.get("cold") or test.get("cold"):
            self.__run_test_cold(test, timeout)
        else:
            self.__run_test_warm(test, timeout)

        if mozpower_measurer:
            mozpower_measurer.finalize_power_measurements(test_name=test["name"])
            perfherder_data = mozpower_measurer.get_perfherder_data()

            if not self.config.get("run_local", False):
                # when not running locally, zip the data and delete the folder which
                # was placed in the zip
                powertest_name = test["name"].replace("/", "-").replace("\\", "-")
                power_data_path = os.path.join(
                    self.artifact_dir, "power-measurements-%s" % powertest_name
                )
                shutil.make_archive(power_data_path, "zip", power_data_path)
                shutil.rmtree(power_data_path)

            for data_type in perfherder_data:
                self.control_server.submit_supporting_data(perfherder_data[data_type])

    def __run_test_cold(self, test, timeout):
        """
        Run the Raptor test but restart the entire browser app between page-cycles.

        Note: For page-load tests, playback will only be started once - at the beginning of all
        browser cycles, and then stopped after all cycles are finished. That includes the import
        of the mozproxy ssl cert and turning on the browser proxy.

        Since we're running in cold-mode, before this point (in manifest.py) the
        'expected-browser-cycles' value was already set to the initial 'page-cycles' value;
        and the 'page-cycles' value was set to 1 as we want to perform one page-cycle per
        browser restart.

        The 'browser-cycle' value is the current overall browser start iteration. The control
        server will receive the current 'browser-cycle' and the 'expected-browser-cycles' in
        each results set received; and will pass that on as part of the results so that the
        results processing will know results for multiple browser cycles are being received.

        The default will be to run in warm mode; unless 'cold = true' is set in the test INI.
        """
        LOG.info(
            "test %s is running in cold mode; browser WILL be restarted between "
            "page cycles" % test["name"]
        )

        for test["browser_cycle"] in range(1, test["expected_browser_cycles"] + 1):

            LOG.info(
                "begin browser cycle %d of %d for test %s"
                % (test["browser_cycle"], test["expected_browser_cycles"], test["name"])
            )

            self.run_test_setup(test)

            if test["browser_cycle"] == 1:

                if not self.is_localhost:
                    self.delete_proxy_settings_from_profile()

            else:
                # initial browser profile was already created before run_test was called;
                # now additional browser cycles we want to create a new one each time
                self.build_browser_profile()

                # Update runner profile
                self.runner.profile = self.profile

                self.run_test_setup(test)

            # now start the browser/app under test
            self.launch_desktop_browser(test)

            # set our control server flag to indicate we are running the browser/app
            self.control_server._finished = False

            self.wait_for_test_finish(test, timeout, self.process_exists)

    def __run_test_warm(self, test, timeout):
        self.run_test_setup(test)

        if not self.is_localhost:
            self.delete_proxy_settings_from_profile()

        # start the browser/app under test
        self.launch_desktop_browser(test)

        # set our control server flag to indicate we are running the browser/app
        self.control_server._finished = False

        self.wait_for_test_finish(test, timeout, self.process_exists)

    def run_test_teardown(self, test):
        # browser should be closed by now but this is a backup-shutdown (if not in debug-mode)
        if not self.debug_mode:
            # If the runner was not started in the first place, stop() will silently
            # catch RunnerNotStartedError
            self.runner.stop()
        else:
            # in debug mode, and running locally, leave the browser running
            if self.config["run_local"]:
                LOG.info(
                    "* debug-mode enabled - please shutdown the browser manually..."
                )
                self.runner.wait(timeout=None)

        super(WebExtensionDesktop, self).run_test_teardown(test)

    def check_for_crashes(self):
        super(WebExtensionDesktop, self).check_for_crashes()

        try:
            self.runner.check_for_crashes()
        except NotImplementedError:  # not implemented for Chrome
            pass

        self.crashes += self.runner.crashed

    def clean_up(self):
        self.runner.stop()

        super(WebExtensionDesktop, self).clean_up()


class WebExtensionFirefox(WebExtensionDesktop):
    def launch_desktop_browser(self, test):
        LOG.info("starting %s" % self.config["app"])
        if self.config["is_release_build"]:
            self.disable_non_local_connections()

        # if running debug-mode, tell Firefox to open the browser console on startup
        if self.debug_mode:
            self.runner.cmdargs.extend(["-jsconsole"])

        self.start_runner_proc()

        if self.config["is_release_build"] and test.get("playback") is not None:
            self.enable_non_local_connections()

        # if geckoProfile is enabled, initialize it
        if self.config["gecko_profile"] is True:
            self._init_gecko_profiling(test)
            # tell the control server the gecko_profile dir; the control server
            # will receive the filename of the stored gecko profile from the web
            # extension, and will move it out of the browser user profile to
            # this directory; where it is picked-up by gecko_profile.symbolicate
            self.control_server.gecko_profile_dir = (
                self.gecko_profiler.gecko_profile_dir
            )


class WebExtensionDesktopChrome(WebExtensionDesktop):
    def setup_chrome_args(self, test):
        # Setup chrome args and add them to the runner's args
        chrome_args = self.desktop_chrome_args(test)
        if " ".join(chrome_args) not in " ".join(self.runner.cmdargs):
            self.runner.cmdargs.extend(chrome_args)

    def launch_desktop_browser(self, test):
        LOG.info("starting %s" % self.config["app"])

        # Setup chrome/chromium specific arguments then start the runner
        self.setup_chrome_args(test)
        self.start_runner_proc()

    def set_browser_test_prefs(self, raw_prefs):
        # add test-specific preferences
        LOG.info(
            "preferences were configured for the test, however \
            we currently do not install them on non-Firefox browsers."
        )
@@ -12,7 +12,6 @@ sys.path.insert(0, raptor_dir)

from browsertime import Browsertime
from perftest import Perftest
from webextension import WebExtensionFirefox


@pytest.fixture
@@ -42,11 +41,6 @@ def browsertime_options(options):
    return options


@pytest.fixture
def raptor(options):
    return WebExtensionFirefox(**options)


@pytest.fixture
def mock_test():
    return {
@@ -50,35 +50,6 @@ def test_start_and_stop():
    assert not control._server_thread.is_alive()


def test_server_get_timeout(raptor):
    test_name = "test-name"
    url = "test-url"
    metrics = {"metric1": False, "metric2": True, "metric3": True}
    page_cycle = 1

    def post_state():
        requests.post(
            "http://127.0.0.1:%s/" % raptor.control_server.port,
            json={
                "type": "webext_raptor-page-timeout",
                "data": [test_name, url, page_cycle, metrics],
            },
        )

    assert len(raptor.results_handler.page_timeout_list) == 0

    post_state()

    assert len(raptor.results_handler.page_timeout_list) == 1

    timeout_details = raptor.results_handler.page_timeout_list[0]
    assert timeout_details["test_name"] == test_name
    assert timeout_details["url"] == url

    pending_metrics = [k for k, v in metrics.items() if v]
    assert len(timeout_details["pending_metrics"].split(", ")) == len(pending_metrics)


def test_server_android_app_backgrounding():
    # Mock the background and foreground functions
    with mock.patch.object(
@@ -122,68 +93,5 @@ def test_server_android_app_backgrounding():
    assert not control._server_thread.is_alive()


def test_server_wait_states(raptor):
    import datetime

    def post_state():
        requests.post(
            "http://127.0.0.1:%s/" % raptor.control_server.port,
            json={"type": "webext_status", "data": "test status"},
        )

    wait_time = 5
    message_state = "webext_status/test status"
    rhc = raptor.control_server.server.RequestHandlerClass

    # Test initial state
    assert rhc.wait_after_messages == {}
    assert rhc.waiting_in_state is None
    assert rhc.wait_timeout == 60
    assert raptor.control_server_wait_get() == "None"

    # Test setting a state
    assert raptor.control_server_wait_set(message_state) == ""
    assert message_state in rhc.wait_after_messages
    assert rhc.wait_after_messages[message_state]

    # Test clearing a non-existent state
    assert raptor.control_server_wait_clear("nothing") == ""
    assert message_state in rhc.wait_after_messages

    # Test clearing a state
    assert raptor.control_server_wait_clear(message_state) == ""
    assert message_state not in rhc.wait_after_messages

    # Test clearing all states
    assert raptor.control_server_wait_set(message_state) == ""
    assert message_state in rhc.wait_after_messages
    assert raptor.control_server_wait_clear("all") == ""
    assert rhc.wait_after_messages == {}

    # Test wait timeout
    # Block on post request
    assert raptor.control_server_wait_set(message_state) == ""
    assert rhc.wait_after_messages[message_state]
    assert raptor.control_server_wait_timeout(wait_time) == ""
    assert rhc.wait_timeout == wait_time
    start = datetime.datetime.now()
    post_state()
    assert datetime.datetime.now() - start < datetime.timedelta(seconds=wait_time + 2)
    assert raptor.control_server_wait_get() == "None"
    assert message_state not in rhc.wait_after_messages

    raptor.clean_up()
    assert not raptor.control_server._server_thread.is_alive()


def test_clean_up_stop_server(raptor):
    assert raptor.control_server._server_thread.is_alive()
    assert raptor.control_server.port is not None
    assert raptor.control_server.server is not None

    raptor.clean_up()
    assert not raptor.control_server._server_thread.is_alive()


if __name__ == "__main__":
    mozunit.main()
@@ -1,7 +1,6 @@
import os
import sys
import threading
import time
import traceback
from unittest import mock
from unittest.mock import Mock
@@ -9,8 +8,6 @@ from unittest.mock import Mock
import mozunit
import pytest
from mozprofile import BaseProfile
from mozrunner.errors import RunnerNotStartedError
from six import reraise

# need this so the raptor unit tests can find output & filter classes
here = os.path.abspath(os.path.dirname(__file__))
@@ -21,8 +18,6 @@ sys.path.insert(0, raptor_dir)
from browsertime import BrowsertimeAndroid, BrowsertimeDesktop
from webextension import (
    WebExtensionAndroid,
    WebExtensionDesktopChrome,
    WebExtensionFirefox,
)

DEFAULT_TIMEOUT = 125
@@ -53,9 +48,6 @@ class TestBrowserThread(threading.Thread):
@pytest.mark.parametrize(
    "perftest_class, app_name",
    [
        [WebExtensionFirefox, "firefox"],
        [WebExtensionDesktopChrome, "chrome"],
        [WebExtensionDesktopChrome, "chromium"],
        [WebExtensionAndroid, "geckoview"],
        [BrowsertimeDesktop, "firefox"],
        [BrowsertimeDesktop, "chrome"],
@@ -192,47 +184,6 @@ def test_perftest_run_test_setup(
    assert perftest.config["subtest_alert_on"] == expected_alert


# WebExtension tests
@pytest.mark.parametrize(
    "app", ["firefox", pytest.mark.xfail("chrome"), pytest.mark.xfail("chromium")]
)
def test_start_browser(get_binary, app):
    binary = get_binary(app)
    assert binary

    raptor = WebExtensionFirefox(app, binary, post_startup_delay=0)

    tests = [{"name": "raptor-{}-tp6".format(app), "page_timeout": 1000}]
    test_names = [test["name"] for test in tests]

    thread = TestBrowserThread(raptor, tests, test_names)
    thread.start()

    timeout = time.time() + 5  # seconds
    while time.time() < timeout:
        try:
            is_running = raptor.runner.is_running()
            assert is_running
            break
        except RunnerNotStartedError:
            time.sleep(0.1)
    else:
        # browser didn't start
        # if the thread had an error, display it here
        thread.print_error()
        assert False

    raptor.clean_up()
    thread.join(5)

    if thread.exc is not None:
        exc, value, tb = thread.exc
        reraise(exc, value, tb)

    assert not raptor.runner.is_running()
    assert raptor.runner.returncode is not None


# Browsertime tests
def test_cmd_arguments(ConcreteBrowsertime, browsertime_options, mock_test):
    expected_cmd = {