#!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
"""desktop_unittest.py

The goal of this is to extract desktop unittesting from buildbot's factory.py

author: Jordan Lund
"""

import os
import re
import sys
import copy
import shutil
import glob
import imp

from datetime import datetime, timedelta

# load modules from parent dir
sys.path.insert(1, os.path.dirname(sys.path[0]))

from mozharness.base.errors import BaseErrorList
from mozharness.base.log import INFO
from mozharness.base.script import PreScriptAction
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
from mozharness.mozilla.buildbot import TBPL_EXCEPTION
from mozharness.mozilla.mozbase import MozbaseMixin
from mozharness.mozilla.structuredlog import StructuredOutputParser
from mozharness.mozilla.testing.errors import HarnessErrorList
from mozharness.mozilla.testing.unittest import DesktopUnittestOutputParser
from mozharness.mozilla.testing.codecoverage import (
    CodeCoverageMixin,
    code_coverage_config_options
)
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options

SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell',
                    'mozbase', 'mozmill']
SUITE_DEFAULT_E10S = ['mochitest', 'reftest']
SUITE_NO_E10S = ['xpcshell']
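# These lists drive the per-suite e10s flags added in _query_abs_base_cmd():
# categories in SUITE_NO_E10S never get an e10s flag, SUITE_DEFAULT_E10S
# categories get '--disable-e10s' unless --e10s was passed, and all other
# categories get '--e10s' only when it was.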


# DesktopUnittest {{{1
class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin,
                      CodeCoverageMixin):
    config_options = [
        [['--mochitest-suite', ], {
            "action": "extend",
            "dest": "specified_mochitest_suites",
            "type": "string",
            "help": "Specify which mochi suite to run. "
                    "Suites are defined in the config file.\n"
                    "Examples: 'all', 'plain1', 'plain5', 'chrome', or 'a11y'"}
         ],
        [['--reftest-suite', ], {
            "action": "extend",
            "dest": "specified_reftest_suites",
            "type": "string",
            "help": "Specify which reftest suite to run. "
                    "Suites are defined in the config file.\n"
                    "Examples: 'all', 'crashtest', or 'jsreftest'"}
         ],
        [['--xpcshell-suite', ], {
            "action": "extend",
            "dest": "specified_xpcshell_suites",
            "type": "string",
            "help": "Specify which xpcshell suite to run. "
                    "Suites are defined in the config file.\n"
                    "Examples: 'xpcshell'"}
         ],
        [['--cppunittest-suite', ], {
            "action": "extend",
            "dest": "specified_cppunittest_suites",
            "type": "string",
            "help": "Specify which cpp unittest suite to run. "
                    "Suites are defined in the config file.\n"
                    "Examples: 'cppunittest'"}
         ],
        [['--gtest-suite', ], {
            "action": "extend",
            "dest": "specified_gtest_suites",
            "type": "string",
            "help": "Specify which gtest suite to run. "
                    "Suites are defined in the config file.\n"
                    "Examples: 'gtest'"}
         ],
        [['--jittest-suite', ], {
            "action": "extend",
            "dest": "specified_jittest_suites",
            "type": "string",
            "help": "Specify which jit-test suite to run. "
                    "Suites are defined in the config file.\n"
                    "Examples: 'jittest'"}
         ],
        [['--mozbase-suite', ], {
            "action": "extend",
            "dest": "specified_mozbase_suites",
            "type": "string",
            "help": "Specify which mozbase suite to run. "
                    "Suites are defined in the config file.\n"
                    "Examples: 'mozbase'"}
         ],
        [['--mozmill-suite', ], {
            "action": "extend",
            "dest": "specified_mozmill_suites",
            "type": "string",
            "help": "Specify which mozmill suite to run. "
                    "Suites are defined in the config file.\n"
                    "Examples: 'mozmill'"}
         ],
        [['--run-all-suites', ], {
            "action": "store_true",
            "dest": "run_all_suites",
            "default": False,
            "help": "This will run all suites that are specified "
                    "in the config file. You do not need to specify "
                    "any other suites.\nBeware, this may take a while ;)"}
         ],
        [['--e10s', ], {
            "action": "store_true",
            "dest": "e10s",
            "default": False,
            "help": "Run tests with multiple processes."}
         ],
        [['--headless', ], {
            "action": "store_true",
            "dest": "headless",
            "default": False,
            "help": "Run tests in headless mode."}
         ],
        [['--no-random', ], {
            "action": "store_true",
            "dest": "no_random",
            "default": False,
            "help": "Run tests with no random intermittents and bisect in case of real failure."}
         ],
        [["--total-chunks"], {
            "action": "store",
            "dest": "total_chunks",
            "help": "Number of total chunks"}
         ],
        [["--this-chunk"], {
            "action": "store",
            "dest": "this_chunk",
            "help": "Number of this chunk"}
         ],
        [["--allow-software-gl-layers"], {
            "action": "store_true",
            "dest": "allow_software_gl_layers",
            "default": False,
            "help": "Permits a software GL implementation (such as LLVMPipe) to use "
                    "the GL compositor."}
         ],
        [["--single-stylo-traversal"], {
            "action": "store_true",
            "dest": "single_stylo_traversal",
            "default": False,
            "help": "Forcibly enable single-thread traversal in Stylo with STYLO_THREADS=1"}
         ],
        [["--enable-stylo"], {
            "action": "store_true",
            "dest": "enable_stylo",
            "default": False,
            "help": "Run tests with Stylo enabled"}
         ],
        [["--disable-stylo"], {
            "action": "store_true",
            "dest": "disable_stylo",
            "default": False,
            "help": "Run tests with Stylo disabled"}
         ],
        [["--enable-webrender"], {
            "action": "store_true",
            "dest": "enable_webrender",
            "default": False,
            "help": "Tries to enable the WebRender compositor."}
         ],
    ] + copy.deepcopy(testing_config_options) + \
        copy.deepcopy(blobupload_config_options) + \
        copy.deepcopy(code_coverage_config_options)

    def __init__(self, require_config_file=True):
        # abs_dirs defined already in BaseScript but is here to make pylint happy
        self.abs_dirs = None
        super(DesktopUnittest, self).__init__(
            config_options=self.config_options,
            all_actions=[
                'clobber',
                'read-buildbot-config',
                'download-and-extract',
                'create-virtualenv',
                'install',
                'stage-files',
                'run-tests',
            ],
            require_config_file=require_config_file,
            config={'require_test_zip': True})

        c = self.config
        self.global_test_options = []
        self.installer_url = c.get('installer_url')
        self.test_url = c.get('test_url')
        self.test_packages_url = c.get('test_packages_url')
        self.symbols_url = c.get('symbols_url')
        # this is so mozinstall in install() doesn't bug out if we don't run
        # the download_and_extract action
        self.installer_path = c.get('installer_path')
        self.binary_path = c.get('binary_path')
        self.abs_app_dir = None
        self.abs_res_dir = None

        # Construct an identifier to be used to identify Perfherder data
        # for resource monitoring recording. This attempts to uniquely
        # identify this test invocation configuration.
        perfherder_parts = []
        perfherder_options = []
        suites = (
            ('specified_mochitest_suites', 'mochitest'),
            ('specified_reftest_suites', 'reftest'),
            ('specified_xpcshell_suites', 'xpcshell'),
            ('specified_cppunittest_suites', 'cppunit'),
            ('specified_gtest_suites', 'gtest'),
            ('specified_jittest_suites', 'jittest'),
            ('specified_mozbase_suites', 'mozbase'),
            ('specified_mozmill_suites', 'mozmill'),
        )
        for s, prefix in suites:
            if s in c:
                perfherder_parts.append(prefix)
                perfherder_parts.extend(c[s])

        if 'this_chunk' in c:
            perfherder_parts.append(c['this_chunk'])

        if c['e10s']:
            perfherder_options.append('e10s')
        if c['enable_stylo']:
            perfherder_options.append('stylo')
        if c['disable_stylo']:
            perfherder_options.append('stylo_disabled')

        self.resource_monitor_perfherder_id = ('.'.join(perfherder_parts),
                                               perfherder_options)
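        # For example (hypothetical values), a chunked e10s mochitest run
        # would produce something like ('mochitest.plain.1', ['e10s']).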

    # helper methods {{{2
    def _pre_config_lock(self, rw_config):
        super(DesktopUnittest, self)._pre_config_lock(rw_config)
        c = self.config
        if not c.get('run_all_suites'):
            return  # configs are valid
        for category in SUITE_CATEGORIES:
            specific_suites = c.get('specified_%s_suites' % (category))
            if specific_suites:
                if specific_suites != 'all':
                    self.fatal("Config options are not valid. When the "
                               "'--run-all-suites' flag is enabled, do not "
                               "also specify individual suites like:\n"
                               " '--mochitest-suite browser-chrome'")

    def query_abs_dirs(self):
        if self.abs_dirs:
            return self.abs_dirs
        abs_dirs = super(DesktopUnittest, self).query_abs_dirs()

        c = self.config
        dirs = {}
        dirs['abs_work_dir'] = abs_dirs['abs_work_dir']
        dirs['abs_app_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'application')
        dirs['abs_test_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tests')
        dirs['abs_test_extensions_dir'] = os.path.join(dirs['abs_test_install_dir'], 'extensions')
        dirs['abs_test_bin_dir'] = os.path.join(dirs['abs_test_install_dir'], 'bin')
        dirs['abs_test_bin_plugins_dir'] = os.path.join(dirs['abs_test_bin_dir'],
                                                        'plugins')
        dirs['abs_test_bin_components_dir'] = os.path.join(dirs['abs_test_bin_dir'],
                                                           'components')
        dirs['abs_mochitest_dir'] = os.path.join(dirs['abs_test_install_dir'], "mochitest")
        dirs['abs_reftest_dir'] = os.path.join(dirs['abs_test_install_dir'], "reftest")
        dirs['abs_xpcshell_dir'] = os.path.join(dirs['abs_test_install_dir'], "xpcshell")
        dirs['abs_cppunittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "cppunittest")
        dirs['abs_gtest_dir'] = os.path.join(dirs['abs_test_install_dir'], "gtest")
        dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'],
                                                   'blobber_upload_dir')
        dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'],
                                               "jit-test", "jit-test")
        dirs['abs_mozbase_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozbase")
        dirs['abs_mozmill_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozmill")

        if os.path.isabs(c['virtualenv_path']):
            dirs['abs_virtualenv_dir'] = c['virtualenv_path']
        else:
            dirs['abs_virtualenv_dir'] = os.path.join(abs_dirs['abs_work_dir'],
                                                      c['virtualenv_path'])
        abs_dirs.update(dirs)
        self.abs_dirs = abs_dirs

        return self.abs_dirs

    def query_abs_app_dir(self):
        """We can't set this in advance, because OSX install directories
        change depending on branding and opt/debug.
        """
        if self.abs_app_dir:
            return self.abs_app_dir
        if not self.binary_path:
            self.fatal("Can't determine abs_app_dir (binary_path not set!)")
        self.abs_app_dir = os.path.dirname(self.binary_path)
        return self.abs_app_dir

    def query_abs_res_dir(self):
        """The directory containing resources like plugins and extensions. On
        OSX this is Contents/Resources; on all other platforms it's the same as
        the app dir.

        As with the app dir, we can't set this in advance, because OSX install
        directories change depending on branding and opt/debug.
        """
        if self.abs_res_dir:
            return self.abs_res_dir

        abs_app_dir = self.query_abs_app_dir()
        if self._is_darwin():
            res_subdir = self.config.get("mac_res_subdir", "Resources")
            self.abs_res_dir = os.path.join(os.path.dirname(abs_app_dir), res_subdir)
        else:
            self.abs_res_dir = abs_app_dir
        return self.abs_res_dir

    @PreScriptAction('create-virtualenv')
    def _pre_create_virtualenv(self, action):
        dirs = self.query_abs_dirs()

        self.register_virtualenv_module(name='pip>=1.5')
        self.register_virtualenv_module('psutil==5.4.3', method='pip')
        self.register_virtualenv_module(name='mock')
        self.register_virtualenv_module(name='simplejson')

        requirements_files = [os.path.join(dirs['abs_test_install_dir'],
                                           'config', 'marionette_requirements.txt')]

        if self._query_specified_suites('mochitest') is not None:
            # mochitest is the only thing that needs this
            requirements_files.append(
                os.path.join(dirs['abs_mochitest_dir'],
                             'websocketprocessbridge',
                             'websocketprocessbridge_requirements.txt'))

        for requirements_file in requirements_files:
            self.register_virtualenv_module(requirements=[requirements_file],
                                            two_pass=True)

    def _query_symbols_url(self):
        """query the full symbols URL based upon binary URL"""
        # may break with name convention changes but is one less 'input' for script
        if self.symbols_url:
            return self.symbols_url

        symbols_url = None
        self.info("finding symbols_url based upon self.installer_url")
        if self.installer_url:
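            # e.g. (hypothetical name) firefox-59.0.en-US.linux-x86_64.tar.bz2
            # maps to firefox-59.0.en-US.linux-x86_64.crashreporter-symbols.zip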
            for ext in ['.zip', '.dmg', '.tar.bz2']:
                if ext in self.installer_url:
                    symbols_url = self.installer_url.replace(
                        ext, '.crashreporter-symbols.zip')
            if not symbols_url:
                self.fatal("self.installer_url was found but symbols_url could "
                           "not be determined")
        else:
            self.fatal("self.installer_url was not found in self.config")
        self.info("setting symbols_url as %s" % (symbols_url))
        self.symbols_url = symbols_url
        return self.symbols_url

    def _query_abs_base_cmd(self, suite_category, suite):
        if self.binary_path:
            c = self.config
            dirs = self.query_abs_dirs()
            run_file = c['run_file_names'][suite_category]
            base_cmd = [self.query_python_path('python'), '-u']
            base_cmd.append(os.path.join(dirs["abs_%s_dir" % suite_category], run_file))
            abs_app_dir = self.query_abs_app_dir()
            abs_res_dir = self.query_abs_res_dir()

            raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
                                        '%s_raw.log' % suite)

            error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
                                              '%s_errorsummary.log' % suite)
            str_format_values = {
                'binary_path': self.binary_path,
                'symbols_path': self._query_symbols_url(),
                'abs_work_dir': dirs['abs_work_dir'],
                'abs_app_dir': abs_app_dir,
                'abs_res_dir': abs_res_dir,
                'raw_log_file': raw_log_file,
                'error_summary_file': error_summary_file,
                'gtest_dir': os.path.join(dirs['abs_test_install_dir'],
                                          'gtest'),
            }

            # TestingMixin._download_and_extract_symbols() will set
            # self.symbols_path when downloading/extracting.
            if self.symbols_path:
                str_format_values['symbols_path'] = self.symbols_path

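            # Per the SUITE_* constants above: xpcshell never gets an e10s
            # flag, mochitest/reftest run e10s by default and get
            # --disable-e10s when --e10s is absent, and all other categories
            # opt in with --e10s.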
            if suite_category not in SUITE_NO_E10S:
                if suite_category in SUITE_DEFAULT_E10S and not c['e10s']:
                    base_cmd.append('--disable-e10s')
                elif suite_category not in SUITE_DEFAULT_E10S and c['e10s']:
                    base_cmd.append('--e10s')

            # Ignore chunking if we have user-specified test paths
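            # e.g. MOZHARNESS_TEST_PATHS="dom/base:layout/style" (hypothetical
            # value) appends those two paths in place of any chunking options.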
            if os.environ.get('MOZHARNESS_TEST_PATHS'):
                base_cmd.extend(os.environ['MOZHARNESS_TEST_PATHS'].split(':'))
            elif c.get('total_chunks') and c.get('this_chunk'):
                base_cmd.extend(['--total-chunks', c['total_chunks'],
                                 '--this-chunk', c['this_chunk']])

            if c['no_random']:
                if suite_category == "mochitest":
                    base_cmd.append('--bisect-chunk=default')
                else:
                    self.warning("--no-random does not currently work with suites other than "
                                 "mochitest.")

            if c['headless']:
                base_cmd.append('--headless')

            # set pluginsPath
            abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
            str_format_values['test_plugin_path'] = abs_res_plugins_dir

            if suite_category not in c["suite_definitions"]:
                self.fatal("'%s' not defined in the config!" % suite_category)

            if suite in ('browser-chrome-coverage', 'xpcshell-coverage',
                         'mochitest-devtools-chrome-coverage', 'plain-chunked-coverage'):
                base_cmd.append('--jscov-dir-prefix=%s' %
                                dirs['abs_blob_upload_dir'])

            options = c["suite_definitions"][suite_category]["options"]
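            # Each option string is %-formatted against str_format_values; any
            # option whose placeholder resolved to None is dropped below via
            # the endswith('None') check.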
            if options:
                for option in options:
                    option = option % str_format_values
                    if not option.endswith('None'):
                        base_cmd.append(option)
                if self.structured_output(
                    suite_category,
                    self._query_try_flavor(suite_category, suite)
                ):
                    base_cmd.append("--log-raw=-")
                return base_cmd
            else:
                self.warning("Suite options for %s could not be determined."
                             "\nIf you meant to have options for this suite, "
                             "please make sure they are specified in your "
                             "config under %s_options" %
                             (suite_category, suite_category))

            return base_cmd
        else:
            self.fatal("'binary_path' could not be determined.\n This should "
                       "be like '/path/build/application/firefox/firefox'"
                       "\nIf you are running this script without the 'install' "
                       "action (where binary_path is set), please ensure you are"
                       " either:\n(1) specifying it in the config file under "
                       "binary_path\n(2) specifying it on command line with the"
                       " '--binary-path' flag")

    def _query_specified_suites(self, category):
        # logic goes: if at least one '--{category}-suite' was given,
        # then run only that (those) given suite(s). Elif no suites were
        # specified and the --run-all-suites flag was given,
        # run all {category} suites. Anything else, run no suites.
        c = self.config
        all_suites = c.get('all_%s_suites' % (category))
        specified_suites = c.get('specified_%s_suites' % (category))  # list
        suites = None

        if specified_suites:
            if 'all' in specified_suites:
                # useful if you want a quick way of saying run all suites
                # of a specific category.
                suites = all_suites
            else:
                # suites gets a dict of everything from all_suites where a key
                # is also in specified_suites
                suites = dict((key, all_suites.get(key)) for key in
                              specified_suites if key in all_suites.keys())
        else:
            if c.get('run_all_suites'):  # needed if you don't specify any suites
                suites = all_suites
            else:
                suites = self.query_verify_category_suites(category, all_suites)

        return suites

    def _query_try_flavor(self, category, suite):
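        # Map a concrete suite name to its try-syntax flavor, e.g. a suite
        # named 'plain-chunked' matches "plain.*" and maps to 'mochitest'.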
        flavors = {
            "mochitest": [("plain.*", "mochitest"),
                          ("browser-chrome.*", "browser-chrome"),
                          ("mochitest-devtools-chrome.*", "devtools-chrome"),
                          ("chrome", "chrome")],
            "xpcshell": [("xpcshell", "xpcshell")],
            "reftest": [("reftest", "reftest"),
                        ("crashtest", "crashtest")]
        }
        for suite_pattern, flavor in flavors.get(category, []):
            if re.compile(suite_pattern).match(suite):
                return flavor

    def structured_output(self, suite_category, flavor=None):
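        # 'unstructured_flavors' in the config maps suite categories to the
        # flavors that still use unstructured logs; anything not listed there
        # is treated as emitting structured (raw) log output.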
        unstructured_flavors = self.config.get('unstructured_flavors')
        if not unstructured_flavors:
            return False
        if suite_category not in unstructured_flavors:
            return True
        if not unstructured_flavors.get(suite_category) or \
                flavor in unstructured_flavors.get(suite_category):
            return False
        return True

    def get_test_output_parser(self, suite_category, flavor=None, strict=False,
                               **kwargs):
        if not self.structured_output(suite_category, flavor):
            return DesktopUnittestOutputParser(suite_category=suite_category, **kwargs)
        self.info("Structured output parser in use for %s." % suite_category)
        return StructuredOutputParser(suite_category=suite_category, strict=strict, **kwargs)

    # Actions {{{2

    # clobber is defined in BaseScript; it deletes mozharness/build if it exists.
    # read_buildbot_config is in BuildbotMixin.
    # postflight_read_buildbot_config is in TestingMixin.
    # preflight_download_and_extract is in TestingMixin.
    # create_virtualenv is in VirtualenvMixin.
    # preflight_install is in TestingMixin.
    # install is in TestingMixin.
    # upload_blobber_files is in BlobUploadMixin.

    @PreScriptAction('download-and-extract')
    def _pre_download_and_extract(self, action):
        """Abort if --artifact try syntax is used with compiled-code tests"""
        if not self.try_message_has_flag('artifact'):
            return
        self.info('Artifact build requested in try syntax.')
        rejected = []
        compiled_code_suites = [
            "cppunit",
            "gtest",
            "jittest",
        ]
        for category in SUITE_CATEGORIES:
            suites = self._query_specified_suites(category) or []
            for suite in suites:
                if any([suite.startswith(c) for c in compiled_code_suites]):
                    rejected.append(suite)
                    break
        if rejected:
            self.buildbot_status(TBPL_EXCEPTION)
            self.fatal("There are specified suites that are incompatible with "
                       "--artifact try syntax flag: {}".format(', '.join(rejected)),
                       exit_code=self.return_code)

    def download_and_extract(self):
        """
        download and extract test zip / download installer
        optimizes which subfolders to extract from tests zip
        """
        c = self.config

        extract_dirs = None

        if c.get('run_all_suites'):
            target_categories = SUITE_CATEGORIES
        else:
            target_categories = [cat for cat in SUITE_CATEGORIES
                                 if self._query_specified_suites(cat) is not None]
        super(DesktopUnittest, self).download_and_extract(extract_dirs=extract_dirs,
                                                          suite_categories=target_categories)

    def stage_files(self):
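        # Dispatch dynamically: a category only stages files if a matching
        # _stage_<category> helper is defined (e.g. _stage_xpcshell below).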
        for category in SUITE_CATEGORIES:
            suites = self._query_specified_suites(category)
            stage = getattr(self, '_stage_{}'.format(category), None)
            if suites and stage:
                stage(suites)

    def _stage_files(self, bin_name=None):
        dirs = self.query_abs_dirs()
        abs_app_dir = self.query_abs_app_dir()

        # For mac these directories are in Contents/Resources, on other
        # platforms abs_res_dir will point to abs_app_dir.
        abs_res_dir = self.query_abs_res_dir()
        abs_res_components_dir = os.path.join(abs_res_dir, 'components')
        abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
        abs_res_extensions_dir = os.path.join(abs_res_dir, 'extensions')

        if bin_name:
            self.info('copying %s to %s' % (os.path.join(dirs['abs_test_bin_dir'],
                      bin_name), os.path.join(abs_app_dir, bin_name)))
            shutil.copy2(os.path.join(dirs['abs_test_bin_dir'], bin_name),
                         os.path.join(abs_app_dir, bin_name))

        self.copytree(dirs['abs_test_bin_components_dir'],
                      abs_res_components_dir,
                      overwrite='overwrite_if_exists')
        self.mkdir_p(abs_res_plugins_dir)
        self.copytree(dirs['abs_test_bin_plugins_dir'],
                      abs_res_plugins_dir,
                      overwrite='overwrite_if_exists')
        if os.path.isdir(dirs['abs_test_extensions_dir']):
            self.mkdir_p(abs_res_extensions_dir)
            self.copytree(dirs['abs_test_extensions_dir'],
                          abs_res_extensions_dir,
                          overwrite='overwrite_if_exists')

    def _stage_xpcshell(self, suites):
        self._stage_files(self.config['xpcshell_name'])

    def _stage_cppunittest(self, suites):
        abs_res_dir = self.query_abs_res_dir()
        dirs = self.query_abs_dirs()
        abs_cppunittest_dir = dirs['abs_cppunittest_dir']

        # move manifest and js files to resources dir, where tests expect them
        files = glob.glob(os.path.join(abs_cppunittest_dir, '*.js'))
        files.extend(glob.glob(os.path.join(abs_cppunittest_dir, '*.manifest')))
        for f in files:
            self.move(f, abs_res_dir)

    def _stage_gtest(self, suites):
        abs_res_dir = self.query_abs_res_dir()
        abs_app_dir = self.query_abs_app_dir()
        dirs = self.query_abs_dirs()
        abs_gtest_dir = dirs['abs_gtest_dir']
        dirs['abs_test_bin_dir'] = os.path.join(dirs['abs_test_install_dir'], 'bin')

        files = glob.glob(os.path.join(dirs['abs_test_bin_plugins_dir'], 'gmp-*'))
        files.append(os.path.join(abs_gtest_dir, 'dependentlibs.list.gtest'))
        for f in files:
            self.move(f, abs_res_dir)

        self.copytree(os.path.join(abs_gtest_dir, 'gtest_bin'),
                      os.path.join(abs_app_dir))

    def _stage_mozmill(self, suites):
        self._stage_files()
        dirs = self.query_abs_dirs()
        modules = ['jsbridge', 'mozmill']
        for module in modules:
            self.install_module(module=os.path.join(dirs['abs_mozmill_dir'],
                                                    'resources',
                                                    module))

    def _kill_proc_tree(self, pid):
        # Kill a process tree (including grandchildren) with signal.SIGTERM
        try:
            import signal
            import psutil
            if pid == os.getpid():
                return (None, None)

            parent = psutil.Process(pid)
            children = parent.children(recursive=True)
            children.append(parent)

            for p in children:
                p.send_signal(signal.SIGTERM)

            # allow 60 seconds for the processes to die
            timeout = 60
            gone, alive = psutil.wait_procs(children, timeout=timeout)
            for p in gone:
                self.info('psutil found pid %s dead' % p.pid)
            for p in alive:
                self.error('failed to kill pid %d after %d seconds' % (p.pid, timeout))

            return (gone, alive)
        except Exception as e:
            self.error('Exception while trying to kill process tree: %s' % str(e))

    def _kill_named_proc(self, pname):
        try:
            import psutil
        except Exception as e:
            self.info("Error importing psutil, not killing process %s: %s" % (pname, str(e)))
            return

        for proc in psutil.process_iter():
            try:
                if proc.name() == pname:
                    procd = proc.as_dict(attrs=['pid', 'ppid', 'name', 'username'])
                    self.info("in _kill_named_proc, killing %s" % procd)
                    self._kill_proc_tree(proc.pid)
            except Exception as e:
                self.info("Warning: Unable to kill process %s: %s" % (pname, str(e)))
                # may not be able to access process info for all processes
                continue

    def _remove_xen_clipboard(self):
        """
        When running on a Windows 7 VM, we have XenDPriv.exe running, which
        interferes with the clipboard. Terminate this process and remove
        the binary so it doesn't restart.
        """
        if not self._is_windows():
            return

        self._kill_named_proc('XenDPriv.exe')
        xenpath = os.path.join(os.environ['ProgramFiles'],
                               'Citrix',
                               'XenTools',
                               'XenDPriv.exe')
        try:
            if os.path.isfile(xenpath):
                os.remove(xenpath)
        except Exception as e:
            self.error("Error: Failure to remove file %s: %s" % (xenpath, str(e)))

    def _report_system_info(self):
        """
        Create the system-info.log artifact file, containing a variety of
        system information that might be useful in diagnosing test failures.
        """
        try:
            import psutil
            upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
            self.mkdir_p(upload_dir)
            path = os.path.join(upload_dir, "system-info.log")
            with open(path, "w") as f:
                f.write("System info collected at %s\n\n" % datetime.now())
                f.write("\nBoot time %s\n" % datetime.fromtimestamp(psutil.boot_time()))
                f.write("\nVirtual memory: %s\n" % str(psutil.virtual_memory()))
                f.write("\nDisk partitions: %s\n" % str(psutil.disk_partitions()))
                f.write("\nDisk usage (/): %s\n" % str(psutil.disk_usage(os.path.sep)))
                if not self._is_windows():
                    # bug 1417189: frequent errors querying users on Windows
                    f.write("\nUsers: %s\n" % str(psutil.users()))
                f.write("\nNetwork connections:\n")
                try:
                    for nc in psutil.net_connections():
                        f.write("  %s\n" % str(nc))
                except Exception:
                    f.write("Exception getting network info: %s\n" % sys.exc_info()[0])
                f.write("\nProcesses:\n")
                try:
                    for p in psutil.process_iter():
                        ctime = str(datetime.fromtimestamp(p.create_time()))
                        f.write("  PID %d %s %s created at %s\n" %
                                (p.pid, p.name(), str(p.cmdline()), ctime))
                except Exception:
                    f.write("Exception getting process info: %s\n" % sys.exc_info()[0])
        except Exception:
            # psutil throws a variety of intermittent exceptions
            self.info("Unable to complete system-info.log: %s" % sys.exc_info()[0])

    # pull defined in VCSScript.
    # preflight_run_tests defined in TestingMixin.

    def run_tests(self):
        self._remove_xen_clipboard()
        self._report_system_info()
        self.start_time = datetime.now()
        for category in SUITE_CATEGORIES:
            if not self._run_category_suites(category):
                break

    def get_timeout_for_category(self, suite_category):
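        # Timeouts are in seconds; the value is applied as the output_timeout
        # of run_command() in _run_category_suites().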
        if suite_category == 'cppunittest':
            return 2500
        return self.config["suite_definitions"][suite_category].get('run_timeout', 1000)

    def _run_category_suites(self, suite_category):
        """run suite(s) for a specific category"""
        dirs = self.query_abs_dirs()
        suites = self._query_specified_suites(suite_category)
        abs_app_dir = self.query_abs_app_dir()
        abs_res_dir = self.query_abs_res_dir()

        max_verify_time = timedelta(minutes=60)
        max_verify_tests = 10
        verified_tests = 0

        if suites:
            self.info('#### Running %s suites' % suite_category)
            for suite in suites:
                abs_base_cmd = self._query_abs_base_cmd(suite_category, suite)
                cmd = abs_base_cmd[:]
                replace_dict = {
                    'abs_app_dir': abs_app_dir,

                    # Mac specific, but points to abs_app_dir on other
                    # platforms.
                    'abs_res_dir': abs_res_dir,
                }
                options_list = []
                env = {
                    'TEST_SUITE': suite
                }
                if isinstance(suites[suite], dict):
                    options_list = suites[suite].get('options', [])
                    if self.config.get('verify') is True:
                        tests_list = []
                    else:
                        tests_list = suites[suite].get('tests', [])
                    env = copy.deepcopy(suites[suite].get('env', {}))
                else:
                    options_list = suites[suite]
                    tests_list = []

                flavor = self._query_try_flavor(suite_category, suite)
                try_options, try_tests = self.try_args(flavor)

                cmd.extend(self.query_options(options_list,
                                              try_options,
                                              str_format_values=replace_dict))
                cmd.extend(self.query_tests_args(tests_list,
                                                 try_tests,
                                                 str_format_values=replace_dict))

                suite_name = suite_category + '-' + suite
                tbpl_status, log_level = None, None
                error_list = BaseErrorList + HarnessErrorList
                parser = self.get_test_output_parser(suite_category,
                                                     flavor=flavor,
                                                     config=self.config,
                                                     error_list=error_list,
                                                     log_obj=self.log_obj)

                if suite_category == "reftest":
                    ref_formatter = imp.load_source(
                        "ReftestFormatter",
                        os.path.abspath(
                            os.path.join(dirs["abs_reftest_dir"], "output.py")))
                    parser.formatter = ref_formatter.ReftestFormatter()

                if self.query_minidump_stackwalk():
                    env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
                if self.query_nodejs():
                    env['MOZ_NODE_PATH'] = self.nodejs_path
                env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
                env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
                env['RUST_BACKTRACE'] = 'full'
                if not os.path.isdir(env['MOZ_UPLOAD_DIR']):
                    self.mkdir_p(env['MOZ_UPLOAD_DIR'])

                if self.config['allow_software_gl_layers']:
                    env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
                if self.config['enable_webrender']:
                    env['MOZ_WEBRENDER'] = '1'
                    env['MOZ_ACCELERATED'] = '1'

                if self.config['disable_stylo']:
                    if self.config['single_stylo_traversal']:
                        self.fatal("--disable-stylo conflicts with --single-stylo-traversal")
                    if self.config['enable_stylo']:
                        self.fatal("--disable-stylo conflicts with --enable-stylo")

                if self.config['single_stylo_traversal']:
                    env['STYLO_THREADS'] = '1'
                else:
                    env['STYLO_THREADS'] = '4'

                if self.config['enable_stylo']:
                    env['STYLO_FORCE_ENABLED'] = '1'
                if self.config['disable_stylo']:
                    env['STYLO_FORCE_DISABLED'] = '1'

                env = self.query_env(partial_env=env, log_level=INFO)
                cmd_timeout = self.get_timeout_for_category(suite_category)

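                # query_verify_args yields one set of extra arguments per
                # modified test when verify mode is enabled; otherwise it is
                # expected to yield a single empty set, so the suite runs once.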
                for verify_args in self.query_verify_args(suite):
                    if (datetime.now() - self.start_time) > max_verify_time:
                        # Verification has run out of time. That is okay! Stop running
                        # tests so that a task timeout is not triggered, and so that
                        # (partial) results are made available in a timely manner.
                        self.info("TinderboxPrint: Verification too long: Not all tests "
                                  "were verified.<br/>")
                        # Signal verify time exceeded, to break out of suites and
                        # suite categories loops also.
                        return False
                    if verified_tests >= max_verify_tests:
                        # When changesets are merged between trees or many tests are
                        # otherwise updated at once, there probably is not enough time
                        # to verify all tests, and attempting to do so may cause other
                        # problems, such as generating too much log output.
                        self.info("TinderboxPrint: Too many modified tests: Not all tests "
                                  "were verified.<br/>")
                        return False
                    verified_tests = verified_tests + 1

                    final_cmd = copy.copy(cmd)
                    final_cmd.extend(verify_args)
                    return_code = self.run_command(final_cmd, cwd=dirs['abs_work_dir'],
                                                   output_timeout=cmd_timeout,
                                                   output_parser=parser,
                                                   env=env)

                    # mochitest, reftest, and xpcshell suites do not return
                    # appropriate return codes. Therefore, we must parse the output
                    # to determine what the tbpl_status and worst_log_level must
                    # be. We do this by:
                    # 1) checking to see if our mozharness script ran into any
                    #    errors itself with 'num_errors' <- OutputParser
                    # 2) if num_errors is 0 then we look in the subclassed 'parser'
                    #    findings for harness/suite errors <- DesktopUnittestOutputParser
                    # 3) checking to see if the return code is in success_codes
                    success_codes = None
                    if self._is_windows() and suite_category != 'gtest':
                        # bug 1120644
                        success_codes = [0, 1]

                    tbpl_status, log_level = parser.evaluate_parser(return_code,
                                                                    success_codes=success_codes)
                    parser.append_tinderboxprint_line(suite_name)

                    self.buildbot_status(tbpl_status, level=log_level)
                    if len(verify_args) > 0:
                        self.log_verify_status(verify_args[-1], tbpl_status, log_level)
                    else:
                        self.log("The %s suite: %s ran with return status: %s" %
                                 (suite_category, suite, tbpl_status), level=log_level)
        else:
            self.debug('There were no suites to run for %s' % suite_category)
        return True


# main {{{1
if __name__ == '__main__':
    desktop_unittest = DesktopUnittest()
    desktop_unittest.run_and_exit()