Bug 1504227 - Add raptor test option to specify what measurement(s) to alert on; r=davehunt

Depends on D17288

Differential Revision: https://phabricator.services.mozilla.com/D17289

--HG--
extra : moz-landing-system : lando
Rob Wood 2019-01-25 18:01:45 +00:00
Parent d7bb330c5f
Commit 2863bd9494
4 changed files with 39 additions and 23 deletions
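
For context, the new `alert_on` setting is a comma-separated list in the test INI whose entries must also appear in the test's `measure` setting. A minimal sketch of the intended usage, with a hypothetical test name and values (this INI content is illustrative, not a file from this commit):

    # Illustrative only: a hypothetical pageload test INI using the new
    # 'alert_on' option, with the values split and stripped as the patch does.
    from configparser import ConfigParser

    TEST_INI = """
    [raptor-tp6-example-firefox]
    type = pageload
    measure = fnbpaint, fcp, hero, loadtime
    alert_on = fcp, loadtime
    """

    parser = ConfigParser()
    parser.read_string(TEST_INI)
    section = parser["raptor-tp6-example-firefox"]

    # with or without spaces: split on ',' and strip each entry
    alert_on = [item.strip() for item in section["alert_on"].split(",")]
    print(alert_on)  # ['fcp', 'loadtime']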

View File

@@ -49,7 +49,7 @@ def validate_test_ini(test_details):
         if setting not in test_details:
             valid_settings = False
             LOG.error("ERROR: setting '%s' is required but not found in %s"
-                      % (setting, test_details['manifest']))
+                      % (setting, test_details['manifest']))
 
     # if playback is specified, we need more playback settings
     if 'playback' in test_details:
@@ -57,18 +57,22 @@ def validate_test_ini(test_details):
             if setting not in test_details:
                 valid_settings = False
                 LOG.error("ERROR: setting '%s' is required but not found in %s"
-                          % (setting, test_details['manifest']))
+                          % (setting, test_details['manifest']))
 
     # if 'alert-on' is specified, we need to make sure that the value given is valid
     # i.e. any 'alert_on' values must be values that exist in the 'measure' ini setting
-    # i.e. 'alert_on = fcp, loadtime' must get rid of comma and space
     if 'alert_on' in test_details:
-        for alert_on_value in test_details['alert_on'].split(', '):
-            alert_on_value = alert_on_value.strip()
+        # support with or without spaces, i.e. 'measure = fcp, loadtime' or '= fcp,loadtime'
+        # convert to a list; and remove any spaces
+        test_details['alert_on'] = [_item.strip() for _item in test_details['alert_on'].split(',')]
+        # now make sure each alert_on value provided is valid
+        for alert_on_value in test_details['alert_on']:
             if alert_on_value not in test_details['measure']:
-                LOG.error("ERROR: The 'alert_on' value of '%s' is not valid because " \
+                LOG.error("ERROR: The 'alert_on' value of '%s' is not valid because "
                           "it doesn't exist in the 'measure' test setting!"
-                          % alert_on_value)
+                          % alert_on_value)
                 valid_settings = False
 
     return valid_settings
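
Pulled out of context, the validation added here can be exercised on its own; the sketch below assumes `test_details` is a plain dict holding the raw INI strings (hypothetical values, not code from this commit):

    # Standalone sketch of the 'alert_on' validation above; 'measure' stays a
    # raw comma-separated string, matching the patch's substring membership check.
    def validate_alert_on(test_details):
        valid_settings = True
        if 'alert_on' in test_details:
            # convert to a list and remove any spaces, with or without spaces in the INI
            test_details['alert_on'] = [_item.strip()
                                        for _item in test_details['alert_on'].split(',')]
            for alert_on_value in test_details['alert_on']:
                if alert_on_value not in test_details['measure']:
                    print("'%s' doesn't exist in the 'measure' test setting!"
                          % alert_on_value)
                    valid_settings = False
        return valid_settings

    print(validate_alert_on({'measure': 'fnbpaint, fcp, loadtime',
                             'alert_on': 'fcp,loadtime'}))   # True
    print(validate_alert_on({'measure': 'fnbpaint, fcp',
                             'alert_on': 'loadtime'}))       # False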
@@ -89,24 +93,22 @@ def write_test_settings_json(args, test_details, oskey):
     if test_details['type'] == "pageload":
         test_settings['raptor-options']['measure'] = {}
-        if "dcf" in test_details['measure']:
-            test_settings['raptor-options']['measure']['dcf'] = True
-        if "fnbpaint" in test_details['measure']:
-            test_settings['raptor-options']['measure']['fnbpaint'] = True
-        if "fcp" in test_details['measure']:
-            test_settings['raptor-options']['measure']['fcp'] = True
-        if "hero" in test_details['measure']:
-            test_settings['raptor-options']['measure']['hero'] = test_details['hero'].split(', ')
-        if "ttfi" in test_details['measure']:
-            test_settings['raptor-options']['measure']['ttfi'] = True
-        if "loadtime" in test_details['measure']:
-            test_settings['raptor-options']['measure']['loadtime'] = True
+        for m in [m.strip() for m in test_details['measure'].split(',')]:
+            test_settings['raptor-options']['measure'][m] = True
+            if m == 'hero':
+                test_settings['raptor-options']['measure'][m] = [h.strip() for h in
+                                                                 test_details['hero'].split(',')]
 
     if test_details.get("alert_on", None) is not None:
-        # i.e. 'alert_on = fcp, loadtime' must get rid of comma and space
-        test_settings['raptor-options']['alert_on'] = test_details['alert_on'].split(', ')
+        # alert_on was already converted to list above
+        test_settings['raptor-options']['alert_on'] = test_details['alert_on']
 
     if test_details.get("page_timeout", None) is not None:
         test_settings['raptor-options']['page_timeout'] = int(test_details['page_timeout'])
     test_settings['raptor-options']['unit'] = test_details.get("unit", "ms")
     if test_details.get("lower_is_better", "true") == "false":
         test_settings['raptor-options']['lower_is_better'] = False
     else:

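For a concrete picture of what the rewritten loop emits, here is a hand-written sketch of the resulting `raptor-options` fragment for the hypothetical settings `measure = fnbpaint, fcp, hero, loadtime`, `hero = hero1`, `alert_on = fcp, loadtime` (not actual output from this commit):

    # Hypothetical result of the loop above; every measurement maps to True,
    # except 'hero', which carries the list of hero element names instead.
    raptor_options = {
        'measure': {
            'fnbpaint': True,
            'fcp': True,
            'hero': ['hero1'],
            'loadtime': True,
        },
        # 'alert_on' is already a stripped list by the time it is written out
        'alert_on': ['fcp', 'loadtime'],
    }
    print(raptor_options['alert_on'])
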
View File

@@ -21,7 +21,7 @@ LOG = get_proxy_logger(component="raptor-output")
 class Output(object):
     """class for raptor output"""
 
-    def __init__(self, results, supporting_data):
+    def __init__(self, results, supporting_data, subtest_alert_on):
         """
        - results : list of RaptorTestResult instances
         """
@@ -30,6 +30,7 @@ class Output(object):
         self.supporting_data = supporting_data
         self.summarized_supporting_data = []
         self.summarized_screenshots = []
+        self.subtest_alert_on = subtest_alert_on
 
     def summarize(self):
         suites = []
@@ -100,6 +101,14 @@ class Output(object):
                 if len(filtered_values) < 1:
                     continue
 
+                # if 'alert_on' is set for this particular measurement, then we want to set the
+                # flag in the perfherder output to turn on alerting for this subtest
+                if self.subtest_alert_on is not None:
+                    if measurement_name in self.subtest_alert_on:
+                        LOG.info("turning on subtest alerting for measurement type: %s"
+                                 % measurement_name)
+                        new_subtest['shouldAlert'] = True
+
                 new_subtest['value'] = filter.median(filtered_values)
                 vals.append([new_subtest['value'], new_subtest['name']])

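The effect on the summarized output is that matching subtests gain a `shouldAlert` flag. A sketch with hypothetical field values follows; the real summarized subtest dict carries more keys than shown here:

    # Hypothetical summarized subtest as it might appear in PERFHERDER_DATA
    # after the check above matched 'fcp' against subtest_alert_on.
    new_subtest = {
        'name': 'fcp',
        'unit': 'ms',
        'value': 290.5,        # filter.median(filtered_values)
        'shouldAlert': True,   # set because 'fcp' was listed in alert_on
    }
    print(new_subtest)
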
View File

@@ -214,6 +214,11 @@ class Raptor(object):
             self.log.info("preferences were configured for the test, \
                           but we do not install them on non Firefox browsers.")
 
+        # if 'alert_on' was provided in the test INI, we must add that to our config
+        # for use in our results.py and output.py
+        # test['alert_on'] has already been converted to a list and stripped of spaces
+        self.config['subtest_alert_on'] = test.get('alert_on', None)
+
         # on firefox we can get an addon id; chrome addon actually is just cmd line arg
         if self.config['app'] in ['firefox', 'geckoview', 'fennec']:
             webext_id = self.profile.addons.addon_details(raptor_webext)['id']

View File

@@ -69,7 +69,7 @@ class RaptorResultsHandler():
     def summarize_and_output(self, test_config):
         # summarize the result data, write to file and output PERFHERDER_DATA
         LOG.info("summarizing raptor test results")
-        output = Output(self.results, self.supporting_data)
+        output = Output(self.results, self.supporting_data, test_config['subtest_alert_on'])
         output.summarize()
         output.summarize_screenshots(self.images)
         # only dump out supporting data (i.e. power) if actual Raptor test completed
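
Putting the four changes together, the hand-off from INI to output looks like this; the class below is a stand-in stub, not the real raptor implementation:

    # Stub sketch of the wiring: INI value -> validated list -> config -> Output.
    class Output(object):
        def __init__(self, results, supporting_data, subtest_alert_on):
            self.subtest_alert_on = subtest_alert_on  # consumed during summarize()

    test = {'alert_on': 'fcp, loadtime'}                                  # raw INI value
    test['alert_on'] = [i.strip() for i in test['alert_on'].split(',')]  # manifest.py
    config = {'subtest_alert_on': test.get('alert_on', None)}            # raptor.py
    output = Output([], [], config['subtest_alert_on'])                  # results.py
    print(output.subtest_alert_on)                                       # ['fcp', 'loadtime']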