Bug 1561939 - Extract `Perftest` abstract base class. r=rwood,perftest-reviewers

This mechanical transform lays the foundation for generalizing the
underlying test runner.

Differential Revision: https://phabricator.services.mozilla.com/D37132

--HG--
extra : moz-landing-system : lando
Nick Alexander 2019-07-19 23:01:11 +00:00
Parent 3316f7167c
Commit cbf4f4f55e
1 changed file with 113 additions and 65 deletions
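The refactor in this patch is the standard abstract-base-class split: the shared lifecycle and results plumbing move up into Perftest, while the webextension and control-server specifics stay in Raptor. Below is a minimal, self-contained sketch of that pattern, not the actual raptor.py code; the class and method names mirror the diff that follows, but the bodies are placeholders.

from abc import ABCMeta, abstractmethod


class Perftest(object):
    """Abstract base class for perftests that execute via a subharness."""

    __metaclass__ = ABCMeta  # Python 2 idiom, matching the tree at the time

    def run_tests(self, tests, test_names):
        # Template method: the base class drives the loop; subclasses
        # supply run_test(), run_test_teardown() and clean_up().
        try:
            for test in tests:
                try:
                    self.run_test(test, timeout=int(test.get('page_timeout')))
                finally:
                    self.run_test_teardown(test)
            return self.process_results(test_names)
        finally:
            self.clean_up()

    @abstractmethod
    def run_test(self, test, timeout):
        raise NotImplementedError()

    @abstractmethod
    def run_test_teardown(self, test):
        pass

    def process_results(self, test_names):
        # Shared results handling lives here in the real harness.
        return test_names

    @abstractmethod
    def clean_up(self):
        pass


class Raptor(Perftest):
    """Concrete subharness; adds the webext/control-server specifics."""

    def run_test(self, test, timeout):
        pass  # placeholder for the Raptor-specific test run

    def run_test_teardown(self, test):
        # Extend, rather than replace, the shared teardown.
        super(Raptor, self).run_test_teardown(test)

    def clean_up(self):
        pass  # placeholder for webext/control-server cleanup

Per the commit message and the new docstring, the intended payoff is that a future browsertime-backed subclass only has to implement the abstract hooks, while the control flow in run_tests stays in one place.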


@@ -6,6 +6,7 @@
from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
import json
import os
import posixpath
@@ -78,8 +79,11 @@ class SignalHandlerException(Exception):
pass
class Raptor(object):
"""Container class for Raptor"""
class Perftest(object):
"""Abstract base class for perftests that execute via a subharness,
either Raptor or browsertime."""
__metaclass__ = ABCMeta
def __init__(self, app, binary, run_local=False, obj_path=None, profile_class=None,
gecko_profile=False, gecko_profile_interval=None, gecko_profile_entries=None,
@@ -116,8 +120,6 @@ class Raptor(object):
self.config['e10s'] = False
self.raptor_venv = os.path.join(os.getcwd(), 'raptor-venv')
self.raptor_webext = None
self.control_server = None
self.playback = None
self.benchmark = None
self.benchmark_port = 0
@@ -141,10 +143,24 @@ class Raptor(object):
# setup the control server
self.results_handler = RaptorResultsHandler(self.config)
self.start_control_server()
self.build_browser_profile()
def build_browser_profile(self):
self.profile = create_profile(self.profile_class)
# Merge extra profile data from testing/profiles
with open(os.path.join(self.profile_data_dir, 'profiles.json'), 'r') as fh:
base_profiles = json.load(fh)['raptor']
for profile in base_profiles:
path = os.path.join(self.profile_data_dir, profile)
LOG.info("Merging profile: {}".format(path))
self.profile.merge(path)
# share the profile dir with the config and the control server
self.config['local_profile_dir'] = self.profile.profile
@property
def profile_data_dir(self):
if 'MOZ_DEVELOPER_REPO_DIR' in os.environ:
@@ -153,10 +169,78 @@ class Raptor(object):
return os.path.join(build.topsrcdir, 'testing', 'profiles')
return os.path.join(here, 'profile_data')
@abstractmethod
def check_for_crashes(self):
raise NotImplementedError
pass
@abstractmethod
def run_test_setup(self, test):
LOG.info("starting test: %s" % test['name'])
def run_tests(self, tests, test_names):
try:
for test in tests:
try:
self.run_test(test, timeout=int(test.get('page_timeout')))
finally:
self.run_test_teardown(test)
return self.process_results(test_names)
finally:
self.clean_up()
@abstractmethod
def run_test(self, test, timeout):
raise NotImplementedError()
@abstractmethod
def run_test_teardown(self, test):
self.check_for_crashes()
# gecko profiling symbolication
if self.config['gecko_profile'] is True:
self.gecko_profiler.symbolicate()
# clean up the temp gecko profiling folders
LOG.info("cleaning up after gecko profiling")
self.gecko_profiler.clean()
def process_results(self, test_names):
# when running locally output results in build/raptor.json; when running
# in production output to a local.json to be turned into tc job artifact
if self.config.get('run_local', False):
if 'MOZ_DEVELOPER_REPO_DIR' in os.environ:
raptor_json_path = os.path.join(os.environ['MOZ_DEVELOPER_REPO_DIR'],
'testing', 'mozharness', 'build', 'raptor.json')
else:
raptor_json_path = os.path.join(here, 'raptor.json')
else:
raptor_json_path = os.path.join(os.getcwd(), 'local.json')
self.config['raptor_json_path'] = raptor_json_path
return self.results_handler.summarize_and_output(self.config, test_names)
@abstractmethod
def clean_up(self):
pass
def get_page_timeout_list(self):
return self.results_handler.page_timeout_list
class Raptor(Perftest):
"""Container class for Raptor"""
def __init__(self, *args, **kwargs):
super(Raptor, self).__init__(*args, **kwargs)
self.raptor_webext = None
self.control_server = None
self.start_control_server()
def run_test_setup(self, test):
super(Raptor, self).run_test_setup(test)
LOG.info("starting raptor test: %s" % test['name'])
LOG.info("test settings: %s" % str(test))
LOG.info("raptor config: %s" % str(self.config))
@@ -241,40 +325,22 @@ class Raptor(object):
raise RuntimeError("Test failed to finish. "
"Application timed out after {} seconds".format(timeout))
def run_test_teardown(self):
self.check_for_crashes()
def run_test_teardown(self, test):
super(Raptor, self).run_test_teardown(test)
if self.playback is not None:
self.playback.stop()
self.remove_raptor_webext()
# gecko profiling symbolication
if self.config['gecko_profile'] is True:
self.gecko_profiler.symbolicate()
# clean up the temp gecko profiling folders
LOG.info("cleaning up after gecko profiling")
self.gecko_profiler.clean()
def set_browser_test_prefs(self, raw_prefs):
# add test specific preferences
LOG.info("setting test-specific Firefox preferences")
self.profile.set_preferences(json.loads(raw_prefs))
def build_browser_profile(self):
self.profile = create_profile(self.profile_class)
super(Raptor, self).build_browser_profile()
# Merge extra profile data from testing/profiles
with open(os.path.join(self.profile_data_dir, 'profiles.json'), 'r') as fh:
base_profiles = json.load(fh)['raptor']
for profile in base_profiles:
path = os.path.join(self.profile_data_dir, profile)
LOG.info("Merging profile: {}".format(path))
self.profile.merge(path)
# share the profile dir with the config and the control server
self.config['local_profile_dir'] = self.profile.profile
self.control_server.user_profile = self.profile
def start_control_server(self):
@@ -447,25 +513,9 @@ class Raptor(object):
self.config,
test)
def process_results(self, test_names):
# when running locally output results in build/raptor.json; when running
# in production output to a local.json to be turned into tc job artifact
if self.config.get('run_local', False):
if 'MOZ_DEVELOPER_REPO_DIR' in os.environ:
raptor_json_path = os.path.join(os.environ['MOZ_DEVELOPER_REPO_DIR'],
'testing', 'mozharness', 'build', 'raptor.json')
else:
raptor_json_path = os.path.join(here, 'raptor.json')
else:
raptor_json_path = os.path.join(os.getcwd(), 'local.json')
self.config['raptor_json_path'] = raptor_json_path
return self.results_handler.summarize_and_output(self.config, test_names)
def get_page_timeout_list(self):
return self.results_handler.page_timeout_list
def clean_up(self):
super(Raptor, self).clean_up()
if self.config['enable_control_server_wait']:
self.control_server_wait_clear('all')
@@ -594,12 +644,9 @@ class RaptorDesktop(Raptor):
self.wait_for_test_finish(test, timeout)
self.run_test_teardown()
def __run_test_warm(self, test, timeout):
self.run_test_setup(test)
try:
if test.get('playback') is not None:
self.start_playback(test)
@@ -614,10 +661,7 @@ class RaptorDesktop(Raptor):
self.wait_for_test_finish(test, timeout)
finally:
self.run_test_teardown()
def run_test_teardown(self):
def run_test_teardown(self, test):
# browser should be closed by now but this is a backup-shutdown (if not in debug-mode)
if not self.debug_mode:
if self.runner.is_running():
@@ -628,9 +672,11 @@ class RaptorDesktop(Raptor):
LOG.info("* debug-mode enabled - please shutdown the browser manually...")
self.runner.wait(timeout=None)
super(RaptorDesktop, self).run_test_teardown()
super(RaptorDesktop, self).run_test_teardown(test)
def check_for_crashes(self):
super(RaptorDesktop, self).check_for_crashes()
try:
self.runner.check_for_crashes()
except NotImplementedError: # not implemented for Chrome
@@ -928,11 +974,11 @@ class RaptorAndroid(Raptor):
is_benchmark = test.get('type') == "benchmark"
self.set_reverse_ports(is_benchmark=is_benchmark)
def run_test_teardown(self):
def run_test_teardown(self, test):
LOG.info('removing reverse socket connections')
self.device.remove_socket_connections('reverse')
super(RaptorAndroid, self).run_test_teardown()
super(RaptorAndroid, self).run_test_teardown(test)
def run_test(self, test, timeout):
# tests will be run warm (i.e. NO browser restart between page-cycles)
@@ -960,7 +1006,6 @@ class RaptorAndroid(Raptor):
finally:
if self.config['power_test']:
finish_android_power_test(self, test['name'])
self.run_test_teardown()
def __run_test_cold(self, test, timeout):
'''
@@ -1099,10 +1144,13 @@ class RaptorAndroid(Raptor):
self.runner.wait(timeout=None)
def check_for_crashes(self):
super(RaptorAndroid, self).check_for_crashes()
if not self.app_launched:
LOG.info("skipping check_for_crashes: application has not been launched")
return
self.app_launched = False
# Turn off verbose to prevent logcat from being inserted into the main log.
verbose = self.device._verbose
self.device._verbose = False