2012-05-21 15:12:37 +04:00
|
|
|
# This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
2008-01-29 06:48:34 +03:00
|
|
|
|
|
|
|
"""
|
|
|
|
Runs the Mochitest test harness.
|
|
|
|
"""
|
|
|
|
|
2010-10-01 04:10:19 +04:00
|
|
|
from __future__ import with_statement
|
2008-01-29 06:48:34 +03:00
|
|
|
import os
|
|
|
|
import sys
|
2013-07-26 22:40:04 +04:00
|
|
|
SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(__file__)))
# Make harness-local modules importable no matter what directory the caller
# launched us from.  (Dropped the stray trailing semicolon.)
sys.path.insert(0, SCRIPT_DIR)
|
|
|
|
|
2014-05-01 15:18:00 +04:00
|
|
|
import ctypes
|
2014-01-22 21:54:49 +04:00
|
|
|
import glob
|
2013-09-23 18:47:48 +04:00
|
|
|
import json
|
|
|
|
import mozcrash
|
2014-07-02 15:52:00 +04:00
|
|
|
import mozdebug
|
2013-09-23 18:47:48 +04:00
|
|
|
import mozinfo
|
|
|
|
import mozprocess
|
|
|
|
import mozrunner
|
|
|
|
import optparse
|
|
|
|
import re
|
2009-02-09 21:57:27 +03:00
|
|
|
import shutil
|
2013-09-23 18:47:48 +04:00
|
|
|
import signal
|
2013-10-02 02:09:46 +04:00
|
|
|
import subprocess
|
2014-07-17 11:02:00 +04:00
|
|
|
import sys
|
2013-09-23 18:47:48 +04:00
|
|
|
import tempfile
|
|
|
|
import time
|
|
|
|
import traceback
|
2008-01-29 06:48:34 +03:00
|
|
|
import urllib2
|
2014-01-22 21:54:49 +04:00
|
|
|
import zipfile
|
2014-07-04 15:55:00 +04:00
|
|
|
import bisection
|
2013-09-23 18:47:48 +04:00
|
|
|
|
2014-08-29 18:37:17 +04:00
|
|
|
from automationutils import (
|
|
|
|
environment,
|
|
|
|
isURL,
|
|
|
|
KeyValueParseError,
|
|
|
|
parseKeyValue,
|
|
|
|
processLeakLog,
|
|
|
|
dumpScreen,
|
|
|
|
ShutdownLeaks,
|
|
|
|
printstatus,
|
|
|
|
LSANLeaks,
|
|
|
|
setAutomationLog,
|
|
|
|
)
|
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
from datetime import datetime
|
2013-08-02 16:48:06 +04:00
|
|
|
from manifestparser import TestManifest
|
2013-09-23 18:47:48 +04:00
|
|
|
from mochitest_options import MochitestOptions
|
|
|
|
from mozprofile import Profile, Preferences
|
|
|
|
from mozprofile.permissions import ServerLocations
|
|
|
|
from urllib import quote_plus as encodeURIComponent
|
2014-07-17 11:02:00 +04:00
|
|
|
from mozlog.structured.formatters import TbplFormatter
|
2014-08-29 18:37:17 +04:00
|
|
|
from mozlog.structured import commandline
|
2013-08-02 16:48:06 +04:00
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
# This should use the `which` module already in tree, but it is
|
|
|
|
# not yet present in the mozharness environment
|
|
|
|
from mozrunner.utils import findInPath as which
|
|
|
|
|
2014-01-22 21:54:49 +04:00
|
|
|
###########################
# Option for NSPR logging #
###########################

# Set the desired log modules you want an NSPR log be produced by a try run for, or leave blank to disable the feature.
# This will be passed to NSPR_LOG_MODULES environment variable. Try run will then put a download link for the log file
# on tbpl.mozilla.org.

# NOTE(review): value is presumably NSPR's "module:level" list syntax
# (e.g. "nsHttp:5") -- confirm against the NSPR logging documentation.
NSPR_LOG_MODULES = ""
|
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
####################
|
|
|
|
# LOG HANDLING #
|
|
|
|
####################
|
|
|
|
|
|
|
|
### output processing
|
|
|
|
class MochitestFormatter(TbplFormatter):
    """
    The purpose of this class is to maintain compatibility with legacy users.
    Mozharness' summary parser expects the count prefix, and others expect python
    logging to contain a line prefix picked up by TBPL (bug 1043420).
    Those directly logging "TEST-UNEXPECTED" require no prefix to log output
    in order to turn a build orange (bug 1044206).

    Once updates are propagated to Mozharness, this class may be removed.
    """

    # Monotonically increasing line counter, shared by every instance.
    log_num = 0

    def __init__(self):
        super(MochitestFormatter, self).__init__()

    def __call__(self, data):
        # Let the stock TBPL formatter render the message first.
        formatted = super(MochitestFormatter, self).__call__(data)
        level = data.get('level', 'info').upper()

        # Only error lines (and messages carrying a js_source payload) get
        # the legacy "<count> <LEVEL> " prefix that Mozharness keys on.
        if 'js_source' in data or level == 'ERROR':
            data.pop('js_source', None)
            formatted = '%d %s %s' % (MochitestFormatter.log_num, level, formatted)
            MochitestFormatter.log_num += 1

        return formatted
|
|
|
|
|
|
|
|
### output processing
|
|
|
|
class MessageLogger(object):
    """File-like object for logging messages (structured logs)"""

    # How many buffered messages to replay when an error shows up.
    BUFFERING_THRESHOLD = 100
    # This is a delimiter used by the JS side to avoid logs interleaving
    DELIMITER = u'\ue175\uee31\u2c32\uacbf'
    # Actions that are held back while buffering is active.
    BUFFERED_ACTIONS = set(['test_status', 'log'])
    VALID_ACTIONS = set(['suite_start', 'suite_end', 'test_start', 'test_end',
                         'test_status', 'log',
                         'buffering_on', 'buffering_off'])

    def __init__(self, logger, buffering=True):
        self.logger = logger
        self.buffering = buffering
        self.tests_started = False

        # Structured messages held back while buffering is active.
        self.buffered_messages = []

        # Failures collected so they can be reported after the test run.
        self.errors = []

    def valid_message(self, obj):
        """True if the given object is a valid structured message (only does a superficial validation)"""
        if not isinstance(obj, dict):
            return False
        return obj.get('action') in MessageLogger.VALID_ACTIONS

    def parse_line(self, line):
        """Takes a given line of input (structured or not) and returns a list of structured messages"""
        decoded = line.rstrip().decode("UTF-8", "replace")

        parsed = []
        for fragment in decoded.split(MessageLogger.DELIMITER):
            if not fragment:
                continue
            try:
                candidate = json.loads(fragment)
            except ValueError:
                candidate = None
            if self.valid_message(candidate):
                parsed.append(candidate)
            else:
                # Anything that is not valid JSON-structured output is
                # wrapped into a plain unstructured log message.
                parsed.append(dict(action='log', level='info',
                                   message=fragment, unstructured=True))
        return parsed

    def process_message(self, message):
        """Processes a structured message. Takes into account buffering, errors, ..."""
        action = message['action']

        if action == 'test_start' and not self.tests_started:
            self.tests_started = True

        # The JS side can toggle message buffering at runtime.
        if action == 'buffering_on':
            self.buffering = True
            return
        if action == 'buffering_off':
            self.buffering = False
            return

        unstructured = 'unstructured' in message
        message.pop('unstructured', None)

        # Remember errors/failures so they can be shown at the end of the run.
        is_error = ('expected' in message or
                    (action == 'log' and message['message'].startswith('TEST-UNEXPECTED')))
        if is_error:
            self.errors.append(message)

        # Without buffering, before any test has started, or for unstructured
        # content, the message goes straight to the logger.
        if unstructured or not self.buffering or not self.tests_started:
            self.logger.log_raw(message)
            return

        # A finished test invalidates whatever context was buffered for it.
        if action == 'test_end':
            self.buffered_messages = []

        # Non-bufferable actions pass straight through, unless they are "raw"
        # errors (some tests manually dump 'TEST-UNEXPECTED-FAIL').
        if not is_error and action not in self.BUFFERED_ACTIONS:
            self.logger.log_raw(message)
            return

        if not is_error:
            # Hold the message back until an error needs it for context.
            self.buffered_messages.append(message)
            return

        # On error, replay the most recent buffered context, then the error.
        if self.buffered_messages:
            number_messages = min(self.BUFFERING_THRESHOLD, len(self.buffered_messages))
            self.logger.info("dumping last {0} message(s)".format(number_messages))
            self.logger.info("if you need more context, please use SimpleTest.requestCompleteLog() in your test")
            self.dump_buffered(limit=True)
        self.logger.log_raw(message)

    def write(self, line):
        """Parse one raw output line and process every message found in it."""
        parsed = self.parse_line(line)
        for msg in parsed:
            self.process_message(msg)
        return parsed

    def flush(self):
        sys.stdout.flush()

    def dump_buffered(self, limit=False):
        """Flush buffered messages to the logger (at most BUFFERING_THRESHOLD
        of the most recent ones when `limit` is set)."""
        if limit:
            to_dump = self.buffered_messages[-self.BUFFERING_THRESHOLD:]
        else:
            to_dump = self.buffered_messages
        for buffered in to_dump:
            self.logger.log_raw(buffered)
        # Everything was dumped; start over with a clean buffer.
        self.buffered_messages = []

    def finish(self):
        """Flush any remaining buffered output and close the suite."""
        self.dump_buffered()
        self.buffering = False
        self.logger.suite_end()
|
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
####################
|
|
|
|
# PROCESS HANDLING #
|
|
|
|
####################
|
|
|
|
|
|
|
|
def call(*args, **kwargs):
    """front-end function to mozprocess.ProcessHandler"""
    # TODO: upstream -> mozprocess
    # https://bugzilla.mozilla.org/show_bug.cgi?id=791383
    handler = mozprocess.ProcessHandler(*args, **kwargs)
    handler.run()
    # Block until the child exits and hand back its exit status.
    return handler.wait()
|
|
|
|
|
2014-08-13 20:03:00 +04:00
|
|
|
def killPid(pid, log):
    """Forcefully kill process `pid`, logging (never raising) any failure.

    Uses SIGKILL where the platform provides it, falling back to SIGTERM
    (e.g. on Windows, which has no SIGKILL).
    """
    # see also https://bugzilla.mozilla.org/show_bug.cgi?id=911249#c58
    try:
        os.kill(pid, getattr(signal, "SIGKILL", signal.SIGTERM))
    except Exception as e:
        # `except ... as` is the Python 2.6+/3-compatible spelling
        # (was the Python-2-only `except Exception, e`).
        log.info("Failed to kill process %d: %s" % (pid, str(e)))
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
if mozinfo.isWin:
    import ctypes.wintypes

    def isPidAlive(pid):
        """Return True if the Windows process `pid` is still running."""
        STILL_ACTIVE = 259
        PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
        pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
        if not pHandle:
            # OpenProcess failed: no such (accessible) process.
            return False
        pExitCode = ctypes.wintypes.DWORD()
        ctypes.windll.kernel32.GetExitCodeProcess(pHandle, ctypes.byref(pExitCode))
        ctypes.windll.kernel32.CloseHandle(pHandle)
        return pExitCode.value == STILL_ACTIVE

else:
    import errno

    def isPidAlive(pid):
        """Return True if the POSIX process `pid` exists and is not a zombie."""
        try:
            # kill(pid, 0) checks for a valid PID without actually sending a signal
            # The method throws OSError if the PID is invalid, which we catch below.
            os.kill(pid, 0)

            # Wait on it to see if it's a zombie. This can throw OSError.ECHILD if
            # the process terminates before we get to this point.
            wpid, wstatus = os.waitpid(pid, os.WNOHANG)
            return wpid == 0
        except OSError as err:
            # `except ... as` is Python-3 compatible (was `except OSError, err`).
            # Catch the errors we might expect from os.kill/os.waitpid,
            # and re-raise any others
            if err.errno == errno.ESRCH or err.errno == errno.ECHILD:
                return False
            raise
    # TODO: ^ upstream isPidAlive to mozprocess
|
2010-03-13 20:56:24 +03:00
|
|
|
|
2008-01-29 06:48:34 +03:00
|
|
|
#######################
|
|
|
|
# HTTP SERVER SUPPORT #
|
|
|
|
#######################
|
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
class MochitestServer(object):
    "Web server used to serve Mochitests, for closer fidelity to the real web."

    def __init__(self, options, logger):
        """Store the server configuration.

        options -- dict (or optparse.Values, converted via vars()) holding
                   closeWhenDone, utilityPath, xrePath, profilePath,
                   webServer, httpPort and optionally webapprtContent /
                   httpdPath.
        logger -- structured logger used for status output.
        """
        if isinstance(options, optparse.Values):
            options = vars(options)
        self._log = logger
        self._closeWhenDone = options['closeWhenDone']
        self._utilityPath = options['utilityPath']
        self._xrePath = options['xrePath']
        self._profileDir = options['profilePath']
        self.webServer = options['webServer']
        self.httpPort = options['httpPort']
        self.shutdownURL = "http://%(server)s:%(port)s/server/shutdown" % { "server" : self.webServer, "port" : self.httpPort }
        self.testPrefix = "'webapprt_'" if options.get('webapprtContent') else "undefined"

        # httpd.js lives either at an explicitly provided path or next to
        # this script.
        if options.get('httpdPath'):
            self._httpdPath = options['httpdPath']
        else:
            self._httpdPath = SCRIPT_DIR
        self._httpdPath = os.path.abspath(self._httpdPath)

    def start(self):
        "Run the Mochitest server, returning the process ID of the server."

        # get testing environment
        env = environment(xrePath=self._xrePath)
        env["XPCOM_DEBUG_BREAK"] = "warn"
        env["LD_LIBRARY_PATH"] = self._xrePath

        # When running with an ASan build, our xpcshell server will also be ASan-enabled,
        # thus consuming too much resources when running together with the browser on
        # the test slaves. Try to limit the amount of resources by disabling certain
        # features.
        env["ASAN_OPTIONS"] = "quarantine_size=1:redzone=32:malloc_context_size=5"

        if mozinfo.isWin:
            env["PATH"] = env["PATH"] + ";" + str(self._xrePath)

        args = ["-g", self._xrePath,
                "-v", "170",
                "-f", os.path.join(self._httpdPath, "httpd.js"),
                "-e", """const _PROFILE_PATH = '%(profile)s'; const _SERVER_PORT = '%(port)s'; const _SERVER_ADDR = '%(server)s'; const _TEST_PREFIX = %(testPrefix)s; const _DISPLAY_RESULTS = %(displayResults)s;""" %
                {"profile" : self._profileDir.replace('\\', '\\\\'), "port" : self.httpPort, "server" : self.webServer,
                 "testPrefix" : self.testPrefix, "displayResults" : str(not self._closeWhenDone).lower() },
                "-f", os.path.join(SCRIPT_DIR, "server.js")]

        xpcshell = os.path.join(self._utilityPath,
                                "xpcshell" + mozinfo.info['bin_suffix'])
        command = [xpcshell] + args
        self._process = mozprocess.ProcessHandler(command, cwd=SCRIPT_DIR, env=env)
        self._process.run()
        self._log.info("%s : launching %s" % (self.__class__.__name__, command))
        pid = self._process.pid
        self._log.info("runtests.py | Server pid: %d" % pid)

    def ensureReady(self, timeout):
        """Block until the server has written its 'alive' marker file, or
        abort the whole run (sys.exit(1)) after `timeout` seconds."""
        assert timeout >= 0

        aliveFile = os.path.join(self._profileDir, "server_alive.txt")
        i = 0
        while i < timeout:
            if os.path.exists(aliveFile):
                break
            time.sleep(1)
            i += 1
        else:
            self._log.error("TEST-UNEXPECTED-FAIL | runtests.py | Timed out while waiting for server startup.")
            self.stop()
            sys.exit(1)

    def stop(self):
        """Ask the server to shut down gracefully; kill it on any failure."""
        try:
            # urllib2 responses are NOT context managers on Python 2
            # (`with urllib2.urlopen(...)` raises AttributeError there and
            # silently defeated the graceful-shutdown path), so read and
            # close the handle explicitly.
            c = urllib2.urlopen(self.shutdownURL)
            c.read()
            c.close()

            # TODO: need ProcessHandler.poll()
            # https://bugzilla.mozilla.org/show_bug.cgi?id=912285
            # rtncode = self._process.poll()
            rtncode = self._process.proc.poll()
            if rtncode is None:
                # TODO: need ProcessHandler.terminate() and/or .send_signal()
                # https://bugzilla.mozilla.org/show_bug.cgi?id=912285
                # self._process.terminate()
                self._process.proc.terminate()
        except Exception:
            # Best effort: graceful shutdown failed, make sure it is gone.
            self._process.kill()
|
2008-01-29 06:48:34 +03:00
|
|
|
|
2010-06-17 09:38:55 +04:00
|
|
|
class WebSocketServer(object):
    "Class which encapsulates the mod_pywebsocket server"

    def __init__(self, options, scriptdir, logger, debuggerInfo=None):
        self.port = options.webSocketPort
        self.debuggerInfo = debuggerInfo
        self._log = logger
        self._scriptdir = scriptdir

    def start(self):
        """Spawn the pywebsocket wrapper as a child process."""
        # Invoke pywebsocket through a wrapper which adds special SIGINT handling.
        #
        # If we're in an interactive debugger, the wrapper causes the server to
        # ignore SIGINT so the server doesn't capture a ctrl+c meant for the
        # debugger.
        #
        # If we're not in an interactive debugger, the wrapper causes the server to
        # die silently upon receiving a SIGINT.
        script = os.path.join(self._scriptdir, 'pywebsocket_wrapper.py')

        cmd = [sys.executable, script]
        if self.debuggerInfo and self.debuggerInfo.interactive:
            cmd.append('--interactive')
        cmd.extend(['-p', str(self.port), '-w', self._scriptdir, '-l',
                    os.path.join(self._scriptdir, "websock.log"),
                    '--log-level=debug', '--allow-handlers-outside-root-dir'])

        # start the process
        self._process = mozprocess.ProcessHandler(cmd, cwd=SCRIPT_DIR)
        self._process.run()
        self._log.info("runtests.py | Websocket server pid: %d" % self._process.pid)

    def stop(self):
        self._process.kill()
|
2009-09-22 17:12:58 +04:00
|
|
|
|
2013-07-26 22:40:04 +04:00
|
|
|
class MochitestUtilsMixin(object):
|
|
|
|
"""
|
|
|
|
Class containing some utility functions common to both local and remote
|
|
|
|
mochitest runners
|
|
|
|
"""
|
|
|
|
|
|
|
|
    # TODO Utility classes are a code smell. This class is temporary
    # and should be removed when desktop mochitests are refactored
    # on top of mozbase. Each of the functions in here should
    # probably live somewhere in mozbase

    # Directory the harness was launched from; used to resolve relative paths.
    oldcwd = os.getcwd()
    # Name of the harness jar shipped alongside this script.
    jarDir = 'mochijar'

    # Path to the test script on the server
    TEST_PATH = "tests"
    # Chrome/a11y flavors load through this redirect page (see buildTestURL).
    CHROME_PATH = "redirect.html"
    # Accumulated URL query parameters (filled in by buildURLOptions).
    urlOpts = []
    # Class-level structured logger: the first instance creates it and every
    # later instance reuses it (see __init__).
    log = None
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2014-08-13 20:03:00 +04:00
|
|
|
    def __init__(self, logger_options):
        """Initialize shared harness state: mozinfo, server handles and the
        structured logger.

        logger_options -- parsed command-line logging options forwarded to
                          mozlog's commandline.setup_logging.
        """
        self.update_mozinfo()
        self.server = None
        self.wsserver = None
        self.sslTunnel = None
        self._locations = None

        # The structured logger is a class-level singleton: only the first
        # instance sets it up; later instances reuse it.
        if self.log is None:
            commandline.log_formatters["tbpl"] = (MochitestFormatter,
                                                  "Mochitest specific tbpl formatter")
            self.log = commandline.setup_logging("mochitest",
                                                 logger_options,
                                                 {
                                                     "tbpl": sys.stdout
                                                 })
            MochitestUtilsMixin.log = self.log
            setAutomationLog(self.log)

        self.message_logger = MessageLogger(logger=self.log)
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
def update_mozinfo(self):
|
|
|
|
"""walk up directories to find mozinfo.json update the info"""
|
|
|
|
# TODO: This should go in a more generic place, e.g. mozinfo
|
|
|
|
|
2013-08-14 21:57:43 +04:00
|
|
|
path = SCRIPT_DIR
|
2013-09-23 18:47:48 +04:00
|
|
|
dirs = set()
|
2013-08-14 21:57:43 +04:00
|
|
|
while path != os.path.expanduser('~'):
|
|
|
|
if path in dirs:
|
|
|
|
break
|
2013-09-23 18:47:48 +04:00
|
|
|
dirs.add(path)
|
2013-08-14 21:57:43 +04:00
|
|
|
path = os.path.split(path)[0]
|
|
|
|
|
|
|
|
mozinfo.find_and_update_from_json(*dirs)
|
2010-01-15 20:22:54 +03:00
|
|
|
|
|
|
|
def getFullPath(self, path):
|
2010-03-13 20:56:24 +03:00
|
|
|
" Get an absolute path relative to self.oldcwd."
|
2010-01-15 20:22:54 +03:00
|
|
|
return os.path.normpath(os.path.join(self.oldcwd, os.path.expanduser(path)))
|
|
|
|
|
2013-07-26 22:40:04 +04:00
|
|
|
    def getLogFilePath(self, logFile):
        """ return the log file path relative to the device we are testing on, in most cases
            it will be the full path on the local system
        """
        # Desktop case: the "device" is the local machine, so this is simply
        # an absolute local path. NOTE(review): presumably overridden by
        # remote runners -- confirm.
        return self.getFullPath(logFile)
|
|
|
|
|
2014-03-14 22:25:41 +04:00
|
|
|
@property
|
|
|
|
def locations(self):
|
|
|
|
if self._locations is not None:
|
|
|
|
return self._locations
|
|
|
|
locations_file = os.path.join(SCRIPT_DIR, 'server-locations.txt')
|
|
|
|
self._locations = ServerLocations(locations_file)
|
|
|
|
return self._locations
|
|
|
|
|
2013-07-26 22:40:04 +04:00
|
|
|
    def buildURLOptions(self, options, env):
        """ Add test control options from the command line to the url

        URL parameters to test URL:

        autorun -- kick off tests automatically
        closeWhenDone -- closes the browser after the tests
        hideResultsTable -- hides the table of individual test results
        logFile -- logs test run to an absolute path
        totalChunks -- how many chunks to split tests into
        thisChunk -- which chunk to run
        startAt -- name of test to start at
        endAt -- name of test to end at
        timeout -- per-test timeout in seconds
        repeat -- How many times to repeat the test, ie: repeat=1 will run the test twice.
        """

        # NOTE(review): some callers' option objects apparently lack these
        # attributes, hence the defensive defaults -- confirm which callers.
        if not hasattr(options, 'logFile'):
            options.logFile = ""
        if not hasattr(options, 'fileLevel'):
            options.fileLevel = 'INFO'

        # allow relative paths for logFile
        if options.logFile:
            options.logFile = self.getLogFilePath(options.logFile)

        # Chrome-style flavors get a generated test config file instead of
        # URL query parameters.
        # Note that all tests under options.subsuite need to be browser chrome tests.
        if options.browserChrome or options.chrome or options.subsuite or \
           options.a11y or options.webapprtChrome or options.jetpackPackage or \
           options.jetpackAddon:
            self.makeTestConfig(options)
        else:
            if options.autorun:
                self.urlOpts.append("autorun=1")
            if options.timeout:
                self.urlOpts.append("timeout=%d" % options.timeout)
            if options.closeWhenDone:
                self.urlOpts.append("closeWhenDone=1")
            if options.logFile:
                self.urlOpts.append("logFile=" + encodeURIComponent(options.logFile))
                self.urlOpts.append("fileLevel=" + encodeURIComponent(options.fileLevel))
            if options.consoleLevel:
                self.urlOpts.append("consoleLevel=" + encodeURIComponent(options.consoleLevel))
            if options.totalChunks:
                self.urlOpts.append("totalChunks=%d" % options.totalChunks)
                self.urlOpts.append("thisChunk=%d" % options.thisChunk)
            if options.chunkByDir:
                self.urlOpts.append("chunkByDir=%d" % options.chunkByDir)
            if options.startAt:
                self.urlOpts.append("startAt=%s" % options.startAt)
            if options.endAt:
                self.urlOpts.append("endAt=%s" % options.endAt)
            if options.shuffle:
                self.urlOpts.append("shuffle=1")
            if "MOZ_HIDE_RESULTS_TABLE" in env and env["MOZ_HIDE_RESULTS_TABLE"] == "1":
                self.urlOpts.append("hideResultsTable=1")
            if options.runUntilFailure:
                self.urlOpts.append("runUntilFailure=1")
            if options.repeat:
                self.urlOpts.append("repeat=%d" % options.repeat)
            # Repeating a single test file: tell the harness which test to rerun.
            if os.path.isfile(os.path.join(self.oldcwd, os.path.dirname(__file__), self.TEST_PATH, options.testPath)) and options.repeat > 0:
                self.urlOpts.append("testname=%s" % ("/").join([self.TEST_PATH, options.testPath]))
            if options.testManifest:
                self.urlOpts.append("testManifest=%s" % options.testManifest)
            if hasattr(options, 'runOnly') and options.runOnly:
                self.urlOpts.append("runOnly=true")
            else:
                self.urlOpts.append("runOnly=false")
            if options.manifestFile:
                self.urlOpts.append("manifestFile=%s" % options.manifestFile)
            if options.failureFile:
                self.urlOpts.append("failureFile=%s" % self.getFullPath(options.failureFile))
            if options.runSlower:
                self.urlOpts.append("runSlower=true")
            if options.debugOnFailure:
                self.urlOpts.append("debugOnFailure=true")
            if options.dumpOutputDirectory:
                self.urlOpts.append("dumpOutputDirectory=%s" % encodeURIComponent(options.dumpOutputDirectory))
            if options.dumpAboutMemoryAfterTest:
                self.urlOpts.append("dumpAboutMemoryAfterTest=true")
            if options.dumpDMDAfterTest:
                self.urlOpts.append("dumpDMDAfterTest=true")
            if options.debugger:
                self.urlOpts.append("interactiveDebugger=true")
|
2013-07-26 22:40:04 +04:00
|
|
|
|
2014-03-18 19:03:51 +04:00
|
|
|
def getTestFlavor(self, options):
|
|
|
|
if options.browserChrome:
|
|
|
|
return "browser-chrome"
|
2014-09-22 22:08:06 +04:00
|
|
|
elif options.jetpackPackage:
|
|
|
|
return "jetpack-package"
|
|
|
|
elif options.jetpackAddon:
|
|
|
|
return "jetpack-addon"
|
2014-03-18 19:03:51 +04:00
|
|
|
elif options.chrome:
|
|
|
|
return "chrome"
|
|
|
|
elif options.a11y:
|
|
|
|
return "a11y"
|
|
|
|
elif options.webapprtChrome:
|
|
|
|
return "webapprt-chrome"
|
|
|
|
else:
|
|
|
|
return "mochitest"
|
|
|
|
|
|
|
|
# This check can be removed when bug 983867 is fixed.
|
|
|
|
def isTest(self, options, filename):
|
|
|
|
allow_js_css = False
|
|
|
|
if options.browserChrome:
|
|
|
|
allow_js_css = True
|
|
|
|
testPattern = re.compile(r"browser_.+\.js")
|
2014-09-22 22:08:06 +04:00
|
|
|
elif options.jetpackPackage:
|
|
|
|
allow_js_css = True
|
|
|
|
testPattern = re.compile(r"test-.+\.js")
|
|
|
|
elif options.jetpackAddon:
|
|
|
|
testPattern = re.compile(r".+\.xpi")
|
2014-03-18 19:03:51 +04:00
|
|
|
elif options.chrome or options.a11y:
|
|
|
|
testPattern = re.compile(r"(browser|test)_.+\.(xul|html|js|xhtml)")
|
2014-03-29 02:50:28 +04:00
|
|
|
elif options.webapprtContent:
|
2014-03-18 19:03:51 +04:00
|
|
|
testPattern = re.compile(r"webapprt_")
|
2014-03-29 02:50:28 +04:00
|
|
|
elif options.webapprtChrome:
|
|
|
|
allow_js_css = True
|
|
|
|
testPattern = re.compile(r"browser_")
|
2014-03-18 19:03:51 +04:00
|
|
|
else:
|
|
|
|
testPattern = re.compile(r"test_")
|
|
|
|
|
|
|
|
if not allow_js_css and (".js" in filename or ".css" in filename):
|
|
|
|
return False
|
|
|
|
|
|
|
|
pathPieces = filename.split("/")
|
|
|
|
|
|
|
|
return (testPattern.match(pathPieces[-1]) and
|
|
|
|
not re.search(r'\^headers\^$', filename))
|
|
|
|
|
|
|
|
def getTestPath(self, options):
|
|
|
|
if options.ipcplugins:
|
2014-06-03 19:19:28 +04:00
|
|
|
return "dom/plugins/test/mochitest"
|
2014-03-18 19:03:51 +04:00
|
|
|
else:
|
|
|
|
return options.testPath
|
|
|
|
|
2014-06-17 17:50:16 +04:00
|
|
|
def setTestRoot(self, options):
|
|
|
|
if hasattr(self, "testRoot"):
|
|
|
|
return self.testRoot, self.testRootAbs
|
|
|
|
else:
|
|
|
|
if options.browserChrome:
|
|
|
|
if options.immersiveMode:
|
|
|
|
self.testRoot = 'metro'
|
|
|
|
else:
|
|
|
|
self.testRoot = 'browser'
|
2014-09-22 22:08:06 +04:00
|
|
|
elif options.jetpackPackage:
|
|
|
|
self.testRoot = 'jetpack-package'
|
|
|
|
elif options.jetpackAddon:
|
|
|
|
self.testRoot = 'jetpack-addon'
|
2014-06-17 17:50:16 +04:00
|
|
|
elif options.a11y:
|
|
|
|
self.testRoot = 'a11y'
|
|
|
|
elif options.webapprtChrome:
|
|
|
|
self.testRoot = 'webapprtChrome'
|
|
|
|
elif options.chrome:
|
|
|
|
self.testRoot = 'chrome'
|
|
|
|
else:
|
|
|
|
self.testRoot = self.TEST_PATH
|
|
|
|
self.testRootAbs = os.path.join(SCRIPT_DIR, self.testRoot)
|
2014-03-18 19:03:51 +04:00
|
|
|
|
|
|
|
def buildTestURL(self, options):
|
|
|
|
testHost = "http://mochi.test:8888"
|
|
|
|
testPath = self.getTestPath(options)
|
|
|
|
testURL = "/".join([testHost, self.TEST_PATH, testPath])
|
|
|
|
if os.path.isfile(os.path.join(self.oldcwd, os.path.dirname(__file__), self.TEST_PATH, testPath)) and options.repeat > 0:
|
|
|
|
testURL = "/".join([testHost, self.TEST_PATH, os.path.dirname(testPath)])
|
|
|
|
if options.chrome or options.a11y:
|
|
|
|
testURL = "/".join([testHost, self.CHROME_PATH])
|
2014-09-22 22:08:06 +04:00
|
|
|
elif options.browserChrome or options.jetpackPackage or options.jetpackAddon:
|
2014-03-18 19:03:51 +04:00
|
|
|
testURL = "about:blank"
|
|
|
|
return testURL
|
|
|
|
|
2014-07-04 15:55:00 +04:00
|
|
|
    def buildTestPath(self, options, testsToFilter=None, disabled=True):
        """ Build the url path to the specific test harness and test file or directory
            Build a manifest of tests to run and write out a json file for the harness to read
            testsToFilter option is used to filter/keep the tests provided in the list

            disabled -- This allows to add all disabled tests on the build side
                        and then on the run side to only run the enabled ones
        """

        tests = self.getActiveTests(options, disabled)
        paths = []

        for test in tests:
            # Drop any test that is not in the caller-supplied filter list.
            if testsToFilter and (test['path'] not in testsToFilter):
                continue
            paths.append(test)

        # suite_start message
        flat_paths = [p['path'] for p in paths]
        self.message_logger.logger.suite_start(flat_paths)

        # Bug 883865 - add this functionality into manifestparser
        with open(os.path.join(SCRIPT_DIR, 'tests.json'), 'w') as manifestFile:
            manifestFile.write(json.dumps({'tests': paths}))
        options.manifestFile = 'tests.json'

        return self.buildTestURL(options)
|
2010-01-19 22:45:04 +03:00
|
|
|
|
2010-11-21 06:29:58 +03:00
|
|
|
def startWebSocketServer(self, options, debuggerInfo):
|
2010-06-17 09:38:55 +04:00
|
|
|
""" Launch the websocket server """
|
2014-08-13 20:03:00 +04:00
|
|
|
self.wsserver = WebSocketServer(options, SCRIPT_DIR, self.log, debuggerInfo)
|
2010-06-17 09:38:55 +04:00
|
|
|
self.wsserver.start()
|
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
def startWebServer(self, options):
|
2013-09-23 18:47:48 +04:00
|
|
|
"""Create the webserver and start it up"""
|
|
|
|
|
2014-08-13 20:03:00 +04:00
|
|
|
self.server = MochitestServer(options, self.log)
|
2010-01-19 22:45:04 +03:00
|
|
|
self.server.start()
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2014-03-14 22:25:41 +04:00
|
|
|
if options.pidFile != "":
|
|
|
|
with open(options.pidFile + ".xpcshell.pid", 'w') as f:
|
|
|
|
f.write("%s" % self.server._process.pid)
|
|
|
|
|
|
|
|
def startServers(self, options, debuggerInfo):
|
|
|
|
# start servers and set ports
|
|
|
|
# TODO: pass these values, don't set on `self`
|
|
|
|
self.webServer = options.webServer
|
|
|
|
self.httpPort = options.httpPort
|
|
|
|
self.sslPort = options.sslPort
|
|
|
|
self.webSocketPort = options.webSocketPort
|
|
|
|
|
|
|
|
# httpd-path is specified by standard makefile targets and may be specified
|
|
|
|
# on the command line to select a particular version of httpd.js. If not
|
|
|
|
# specified, try to select the one from hostutils.zip, as required in bug 882932.
|
|
|
|
if not options.httpdPath:
|
|
|
|
options.httpdPath = os.path.join(options.utilityPath, "components")
|
|
|
|
|
|
|
|
self.startWebServer(options)
|
|
|
|
self.startWebSocketServer(options, debuggerInfo)
|
|
|
|
|
|
|
|
# start SSL pipe
|
2014-08-13 20:03:00 +04:00
|
|
|
self.sslTunnel = SSLTunnel(options, logger=self.log)
|
2014-03-14 22:25:41 +04:00
|
|
|
self.sslTunnel.buildConfig(self.locations)
|
|
|
|
self.sslTunnel.start()
|
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
# If we're lucky, the server has fully started by now, and all paths are
|
|
|
|
# ready, etc. However, xpcshell cold start times suck, at least for debug
|
|
|
|
# builds. We'll try to connect to the server for awhile, and if we fail,
|
|
|
|
# we'll try to kill the server and exit with an error.
|
2014-03-14 22:25:41 +04:00
|
|
|
if self.server is not None:
|
|
|
|
self.server.ensureReady(self.SERVER_STARTUP_TIMEOUT)
|
2010-01-19 22:45:04 +03:00
|
|
|
|
2014-03-14 22:25:41 +04:00
|
|
|
def stopServers(self):
|
|
|
|
"""Servers are no longer needed, and perhaps more importantly, anything they
|
|
|
|
might spew to console might confuse things."""
|
|
|
|
if self.server is not None:
|
|
|
|
try:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info('Stopping web server')
|
2014-03-14 22:25:41 +04:00
|
|
|
self.server.stop()
|
|
|
|
except Exception:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.critical('Exception when stopping web server')
|
2010-03-13 20:56:24 +03:00
|
|
|
|
2014-03-14 22:25:41 +04:00
|
|
|
if self.wsserver is not None:
|
|
|
|
try:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info('Stopping web socket server')
|
2014-03-14 22:25:41 +04:00
|
|
|
self.wsserver.stop()
|
|
|
|
except Exception:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.critical('Exception when stopping web socket server');
|
2014-03-14 22:25:41 +04:00
|
|
|
|
|
|
|
if self.sslTunnel is not None:
|
|
|
|
try:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info('Stopping ssltunnel')
|
2014-03-14 22:25:41 +04:00
|
|
|
self.sslTunnel.stop()
|
|
|
|
except Exception:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.critical('Exception stopping ssltunnel');
|
2010-01-19 22:45:04 +03:00
|
|
|
|
2013-07-26 22:40:04 +04:00
|
|
|
def copyExtraFilesToProfile(self, options):
|
|
|
|
"Copy extra files or dirs specified on the command line to the testing profile."
|
|
|
|
for f in options.extraProfileFiles:
|
|
|
|
abspath = self.getFullPath(f)
|
|
|
|
if os.path.isfile(abspath):
|
|
|
|
shutil.copy2(abspath, options.profilePath)
|
|
|
|
elif os.path.isdir(abspath):
|
|
|
|
dest = os.path.join(options.profilePath, os.path.basename(abspath))
|
|
|
|
shutil.copytree(abspath, dest)
|
|
|
|
else:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.warning("runtests.py | Failed to copy %s to profile" % abspath)
|
2013-07-26 22:40:04 +04:00
|
|
|
|
|
|
|
def installChromeJar(self, chrome, options):
|
2010-01-19 22:45:04 +03:00
|
|
|
"""
|
2013-07-26 22:40:04 +04:00
|
|
|
copy mochijar directory to profile as an extension so we have chrome://mochikit for all harness code
|
|
|
|
"""
|
|
|
|
# Write chrome.manifest.
|
|
|
|
with open(os.path.join(options.profilePath, "extensions", "staged", "mochikit@mozilla.org", "chrome.manifest"), "a") as mfile:
|
|
|
|
mfile.write(chrome)
|
|
|
|
|
|
|
|
    def addChromeToProfile(self, options):
        """Adds MochiKit chrome tests to the profile.

        Writes userChrome.css, creates tests.manifest registering the
        mochitest content directory (and optionally testing-common modules),
        then appends suite-specific overlay registrations via
        installChromeJar().  Returns the path to tests.manifest, or None if
        the mochikit extension directory is missing.
        """
        # Create (empty) chrome directory.
        chromedir = os.path.join(options.profilePath, "chrome")
        os.mkdir(chromedir)

        # Write userChrome.css giving toolbars a flat background
        # (presumably to keep rendering deterministic for tests — confirm).
        chrome = """
@namespace url("http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul"); /* set default namespace to XUL */
toolbar,
toolbarpalette {
  background-color: rgb(235, 235, 235) !important;
}
toolbar#nav-bar {
  background-image: none !important;
}
"""
        # Append in case a userChrome.css already exists in the profile.
        with open(os.path.join(options.profilePath, "userChrome.css"), "a") as chromeFile:
            chromeFile.write(chrome)

        manifest = os.path.join(options.profilePath, "tests.manifest")
        with open(manifest, "w") as manifestFile:
            # Register chrome directory.
            chrometestDir = os.path.join(os.path.abspath("."), SCRIPT_DIR) + "/"
            if mozinfo.isWin:
                # chrome manifests require file:/// URLs with forward slashes.
                chrometestDir = "file:///" + chrometestDir.replace("\\", "/")
            manifestFile.write("content mochitests %s contentaccessible=yes\n" % chrometestDir)

            # Expose testing-only JS modules when a modules dir was provided.
            if options.testingModulesDir is not None:
                manifestFile.write("resource testing-common file:///%s\n" %
                    options.testingModulesDir)

        # Call installChromeJar().
        if not os.path.isdir(os.path.join(SCRIPT_DIR, self.jarDir)):
            self.log.error("TEST-UNEXPECTED-FAIL | invalid setup: missing mochikit extension")
            return None

        # Support Firefox (browser), B2G (shell), SeaMonkey (navigator), and Webapp
        # Runtime (webapp).
        chrome = ""
        if options.browserChrome or options.chrome or options.a11y or options.webapprtChrome:
            chrome += """
overlay chrome://browser/content/browser.xul chrome://mochikit/content/browser-test-overlay.xul
overlay chrome://browser/content/shell.xhtml chrome://mochikit/content/browser-test-overlay.xul
overlay chrome://navigator/content/navigator.xul chrome://mochikit/content/browser-test-overlay.xul
overlay chrome://webapprt/content/webapp.xul chrome://mochikit/content/browser-test-overlay.xul
"""

        if options.jetpackPackage:
            chrome += """
overlay chrome://browser/content/browser.xul chrome://mochikit/content/jetpack-package-overlay.xul
"""

        if options.jetpackAddon:
            chrome += """
overlay chrome://browser/content/browser.xul chrome://mochikit/content/jetpack-addon-overlay.xul
"""

        self.installChromeJar(chrome, options)
        return manifest
|
|
|
|
|
|
|
|
def getExtensionsToInstall(self, options):
|
|
|
|
"Return a list of extensions to install in the profile"
|
|
|
|
extensions = options.extensionsToInstall or []
|
|
|
|
appDir = options.app[:options.app.rfind(os.sep)] if options.app else options.utilityPath
|
|
|
|
|
|
|
|
extensionDirs = [
|
|
|
|
# Extensions distributed with the test harness.
|
|
|
|
os.path.normpath(os.path.join(SCRIPT_DIR, "extensions")),
|
|
|
|
]
|
|
|
|
if appDir:
|
|
|
|
# Extensions distributed with the application.
|
|
|
|
extensionDirs.append(os.path.join(appDir, "distribution", "extensions"))
|
|
|
|
|
|
|
|
for extensionDir in extensionDirs:
|
|
|
|
if os.path.isdir(extensionDir):
|
|
|
|
for dirEntry in os.listdir(extensionDir):
|
|
|
|
if dirEntry not in options.extensionsToExclude:
|
|
|
|
path = os.path.join(extensionDir, dirEntry)
|
|
|
|
if os.path.isdir(path) or (os.path.isfile(path) and path.endswith(".xpi")):
|
|
|
|
extensions.append(path)
|
|
|
|
|
|
|
|
# append mochikit
|
|
|
|
extensions.append(os.path.join(SCRIPT_DIR, self.jarDir))
|
|
|
|
return extensions
|
|
|
|
|
2014-03-14 22:25:41 +04:00
|
|
|
class SSLTunnel:
    """Drives the external ssltunnel helper binary that provides
    https:// (and wss://) support in front of the test web server."""

    def __init__(self, options, logger):
        self.log = logger
        self.process = None
        self.utilityPath = options.utilityPath
        self.xrePath = options.xrePath
        self.certPath = options.certPath
        self.sslPort = options.sslPort
        self.httpPort = options.httpPort
        self.webServer = options.webServer
        self.webSocketPort = options.webSocketPort

        # Patterns recognizing per-location options from server-locations.
        self.customCertRE = re.compile("^cert=(?P<nickname>[0-9a-zA-Z_ ]+)")
        self.clientAuthRE = re.compile("^clientauth=(?P<clientauth>[a-z]+)")
        self.redirRE = re.compile("^redir=(?P<redirhost>[0-9a-zA-Z_ .]+)")

    def writeLocation(self, config, loc):
        """Write the ssltunnel config lines for a single server location."""
        for option in loc.options:
            match = self.customCertRE.match(option)
            if match:
                customcert = match.group("nickname")
                config.write("listen:%s:%s:%s:%s\n" %
                             (loc.host, loc.port, self.sslPort, customcert))

            match = self.clientAuthRE.match(option)
            if match:
                clientauth = match.group("clientauth")
                config.write("clientauth:%s:%s:%s:%s\n" %
                             (loc.host, loc.port, self.sslPort, clientauth))

            match = self.redirRE.match(option)
            if match:
                redirhost = match.group("redirhost")
                config.write("redirhost:%s:%s:%s:%s\n" %
                             (loc.host, loc.port, self.sslPort, redirhost))

    def buildConfig(self, locations):
        """Create the ssltunnel configuration file"""
        configFd, self.configFile = tempfile.mkstemp(prefix="ssltunnel", suffix=".cfg")
        with os.fdopen(configFd, "w") as config:
            config.write("httpproxy:1\n")
            config.write("certdbdir:%s\n" % self.certPath)
            config.write("forward:127.0.0.1:%s\n" % self.httpPort)
            config.write("websocketserver:%s:%s\n" % (self.webServer, self.webSocketPort))
            config.write("listen:*:%s:pgo server certificate\n" % self.sslPort)

            # Only https locations that actually want a certificate.
            for loc in locations:
                if loc.scheme == "https" and "nocert" not in loc.options:
                    self.writeLocation(config, loc)

    def start(self):
        """ Starts the SSL Tunnel """
        # start ssltunnel to provide https:// URLs capability
        bin_suffix = mozinfo.info.get('bin_suffix', '')
        ssltunnel = os.path.join(self.utilityPath, "ssltunnel" + bin_suffix)
        if not os.path.exists(ssltunnel):
            self.log.error("INFO | runtests.py | expected to find ssltunnel at %s" % ssltunnel)
            # Use sys.exit: the plain `exit` builtin is injected by the
            # `site` module and is not guaranteed to exist at runtime.
            sys.exit(1)

        env = environment(xrePath=self.xrePath)
        env["LD_LIBRARY_PATH"] = self.xrePath
        self.process = mozprocess.ProcessHandler([ssltunnel, self.configFile],
                                                 env=env)
        self.process.run()
        self.log.info("runtests.py | SSL tunnel pid: %d" % self.process.pid)

    def stop(self):
        """ Stops the SSL Tunnel and cleans up """
        if self.process is not None:
            self.process.kill()
        if os.path.exists(self.configFile):
            os.remove(self.configFile)
|
2013-07-26 22:40:04 +04:00
|
|
|
|
2014-05-01 15:18:00 +04:00
|
|
|
def checkAndConfigureV4l2loopback(device):
    '''
    Determine if a given device path is a v4l2loopback device, and if so
    toggle a few settings on it via fcntl. Very linux-specific.

    Returns (status, device name) where status is a boolean.
    '''
    if not mozinfo.isLinux:
        return False, ''

    libc = ctypes.cdll.LoadLibrary('libc.so.6')
    O_RDWR = 2
    # These are from linux/videodev2.h
    class v4l2_capability(ctypes.Structure):
        _fields_ = [
            ('driver', ctypes.c_char * 16),
            ('card', ctypes.c_char * 32),
            ('bus_info', ctypes.c_char * 32),
            ('version', ctypes.c_uint32),
            ('capabilities', ctypes.c_uint32),
            ('device_caps', ctypes.c_uint32),
            ('reserved', ctypes.c_uint32 * 3)
        ]
    VIDIOC_QUERYCAP = 0x80685600

    fd = libc.open(device, O_RDWR)
    if fd < 0:
        return False, ''

    vcap = v4l2_capability()
    if libc.ioctl(fd, VIDIOC_QUERYCAP, ctypes.byref(vcap)) != 0:
        # Fix: close the descriptor on this early return (it used to leak).
        libc.close(fd)
        return False, ''

    if vcap.driver != 'v4l2 loopback':
        # Fix: close the descriptor on this early return (it used to leak).
        libc.close(fd)
        return False, ''

    class v4l2_control(ctypes.Structure):
        _fields_ = [
            ('id', ctypes.c_uint32),
            ('value', ctypes.c_int32)
        ]

    # These are private v4l2 control IDs, see:
    # https://github.com/umlaeute/v4l2loopback/blob/fd822cf0faaccdf5f548cddd9a5a3dcebb6d584d/v4l2loopback.c#L131
    KEEP_FORMAT = 0x8000000
    SUSTAIN_FRAMERATE = 0x8000001
    VIDIOC_S_CTRL = 0xc008561c

    # Ask the loopback device to keep its format and sustain the framerate.
    control = v4l2_control()
    control.id = KEEP_FORMAT
    control.value = 1
    libc.ioctl(fd, VIDIOC_S_CTRL, ctypes.byref(control))

    control.id = SUSTAIN_FRAMERATE
    control.value = 1
    libc.ioctl(fd, VIDIOC_S_CTRL, ctypes.byref(control))
    libc.close(fd)

    return True, vcap.card
|
|
|
|
|
2014-08-13 20:03:00 +04:00
|
|
|
def findTestMediaDevices(log):
    '''
    Find the test media devices configured on this system, and return a dict
    containing information about them. The dict will have keys for 'audio'
    and 'video', each containing the name of the media device to use.

    If audio and video devices could not be found, return None.

    This method is only currently implemented for Linux.
    '''
    if not mozinfo.isLinux:
        return None

    info = {}

    # Pick the first v4l2loopback device found under /dev/video*.
    device = None
    name = None
    for candidate in sorted(glob.glob('/dev/video*')):
        ok, card = checkAndConfigureV4l2loopback(candidate)
        if ok:
            device = candidate
            name = card
            break

    if not (name and device):
        log.error('Couldn\'t find a v4l2loopback video device')
        return None

    # Feed it a frame of output so it has something to display
    subprocess.check_call(['/usr/bin/gst-launch-0.10', 'videotestsrc',
                           'pattern=green', 'num-buffers=1', '!',
                           'v4l2sink', 'device=%s' % device])
    info['video'] = name

    # Use pactl to see if the PulseAudio module-sine-source module is loaded.
    def sine_source_loaded():
        listing = subprocess.check_output(['/usr/bin/pactl', 'list',
                                           'short', 'modules'])
        return filter(lambda line: 'module-sine-source' in line,
                      listing.splitlines())

    if not sine_source_loaded():
        # Load module-sine-source
        subprocess.check_call(['/usr/bin/pactl', 'load-module',
                               'module-sine-source'])
    if not sine_source_loaded():
        log.error('Couldn\'t load module-sine-source')
        return None

    # Hardcode the name since it's always the same.
    info['audio'] = 'Sine source at 440 Hz'
    return info
|
|
|
|
|
2013-07-26 22:40:04 +04:00
|
|
|
class Mochitest(MochitestUtilsMixin):
    # whether certutil should use the new (sqlite) cert DB format
    certdbNew = False
    # SSLTunnel instance providing https:// support while tests run
    sslTunnel = None
    # handle to the VMware recording helper DLL (Windows-only feature)
    vmwareHelper = None
    # default timeout, in seconds
    DEFAULT_TIMEOUT = 60.0
    # fake media device names ('audio'/'video'), see findTestMediaDevices
    mediaDevices = None

    # XXX use automation.py for test name to avoid breaking legacy
    # TODO: replace this with 'runtests.py' or 'mochitest' or the like
    test_name = 'automation.py'
|
2013-07-26 22:40:04 +04:00
|
|
|
|
2014-08-13 20:03:00 +04:00
|
|
|
def __init__(self, logger_options):
|
|
|
|
super(Mochitest, self).__init__(logger_options)
|
2014-07-17 11:02:00 +04:00
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
# environment function for browserEnv
|
|
|
|
self.environment = environment
|
2013-07-26 22:40:04 +04:00
|
|
|
|
|
|
|
# Max time in seconds to wait for server startup before tests will fail -- if
|
|
|
|
# this seems big, it's mostly for debug machines where cold startup
|
|
|
|
# (particularly after a build) takes forever.
|
2013-09-23 18:47:48 +04:00
|
|
|
self.SERVER_STARTUP_TIMEOUT = 180 if mozinfo.info.get('debug') else 90
|
|
|
|
|
|
|
|
# metro browser sub process id
|
|
|
|
self.browserProcessId = None
|
|
|
|
|
|
|
|
|
|
|
|
self.haveDumpedScreen = False
|
2014-07-04 15:55:00 +04:00
|
|
|
# Create variables to count the number of passes, fails, todos.
|
|
|
|
self.countpass = 0
|
|
|
|
self.countfail = 0
|
|
|
|
self.counttodo = 0
|
|
|
|
|
|
|
|
self.expectedError = {}
|
|
|
|
self.result = {}
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
    def extraPrefs(self, extraPrefs):
        """Interpolate extra preferences from --setpref option strings.

        extraPrefs -- list of "name=value" strings from the command line.
        Returns a dict of the parsed key/value pairs; exits the harness
        on a malformed entry.
        """
        try:
            return dict(parseKeyValue(extraPrefs, context='--setpref='))
        except KeyValueParseError, e:
            # A malformed --setpref is a user error: report it and bail out.
            print str(e)
            sys.exit(1)
|
2010-01-19 22:45:04 +03:00
|
|
|
|
2014-03-14 22:25:41 +04:00
|
|
|
    def fillCertificateDB(self, options):
        """Create the profile's certificate DB and import test CAs/client certs.

        Returns the exit status of `certutil -N` (non-zero aborts profile
        setup); returns 0 on success.
        """
        # TODO: move -> mozprofile:
        # https://bugzilla.mozilla.org/show_bug.cgi?id=746243#c35

        # certutil/pk12util read the DB password from a file; write an
        # empty (newline-only) password.
        pwfilePath = os.path.join(options.profilePath, ".crtdbpw")
        with open(pwfilePath, "w") as pwfile:
            pwfile.write("\n")

        # Pre-create the certification database for the profile
        env = self.environment(xrePath=options.xrePath)
        env["LD_LIBRARY_PATH"] = options.xrePath
        bin_suffix = mozinfo.info.get('bin_suffix', '')
        certutil = os.path.join(options.utilityPath, "certutil" + bin_suffix)
        pk12util = os.path.join(options.utilityPath, "pk12util" + bin_suffix)

        if self.certdbNew:
            # android and b2g use the new DB formats exclusively
            certdbPath = "sql:" + options.profilePath
        else:
            # desktop seems to use the old
            certdbPath = options.profilePath

        status = call([certutil, "-N", "-d", certdbPath, "-f", pwfilePath], env=env)
        if status:
            return status

        # Walk the cert directory and add custom CAs and client certs
        files = os.listdir(options.certPath)
        for item in files:
            root, ext = os.path.splitext(item)
            if ext == ".ca":
                trustBits = "CT,,"
                # "-object" CAs additionally get object-signing trust.
                if root.endswith("-object"):
                    trustBits = "CT,,CT"
                call([certutil, "-A", "-i", os.path.join(options.certPath, item),
                      "-d", certdbPath, "-f", pwfilePath, "-n", root, "-t", trustBits],
                     env=env)
            elif ext == ".client":
                # Client certificates are imported from PKCS#12 files.
                call([pk12util, "-i", os.path.join(options.certPath, item),
                      "-w", pwfilePath, "-d", certdbPath],
                     env=env)

        os.unlink(pwfilePath)
        return 0
|
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
    def buildProfile(self, options):
        """ create the profile and add optional chrome bits and files if requested """
        # Harness-level prefs derived from the command line options.
        if options.browserChrome and options.timeout:
            options.extraPrefs.append("testing.browserTestHarness.timeout=%d" % options.timeout)
        options.extraPrefs.append("browser.tabs.remote.autostart=%s" % ('true' if options.e10s else 'false'))
        options.extraPrefs.append("browser.tabs.remote.sandbox=%s" % options.contentSandbox)

        # get extensions to install
        extensions = self.getExtensionsToInstall(options)

        # web apps
        appsPath = os.path.join(SCRIPT_DIR, 'profile_data', 'webapps_mochitest.json')
        if os.path.exists(appsPath):
            with open(appsPath) as apps_file:
                apps = json.load(apps_file)
        else:
            apps = None

        # preferences
        prefsPath = os.path.join(SCRIPT_DIR, 'profile_data', 'prefs_general.js')
        prefs = dict(Preferences.read_prefs(prefsPath))
        prefs.update(self.extraPrefs(options.extraPrefs))

        # interpolate preferences: substitutes the test server address into
        # pref values via %-formatting of the JSON-serialized pref dict
        interpolation = {"server": "%s:%s" % (options.webServer, options.httpPort)}
        prefs = json.loads(json.dumps(prefs) % interpolation)
        for pref in prefs:
            prefs[pref] = Preferences.cast(prefs[pref])
        # TODO: make this less hacky
        # https://bugzilla.mozilla.org/show_bug.cgi?id=913152

        # proxy
        proxy = {'remote': options.webServer,
                 'http': options.httpPort,
                 'https': options.sslPort,
                 # use SSL port for legacy compatibility; see
                 # - https://bugzilla.mozilla.org/show_bug.cgi?id=688667#c66
                 # - https://bugzilla.mozilla.org/show_bug.cgi?id=899221
                 # - https://github.com/mozilla/mozbase/commit/43f9510e3d58bfed32790c82a57edac5f928474d
                 # 'ws': str(self.webSocketPort)
                 'ws': options.sslPort
                 }

        # See if we should use fake media devices.
        if options.useTestMediaDevices:
            prefs['media.audio_loopback_dev'] = self.mediaDevices['audio']
            prefs['media.video_loopback_dev'] = self.mediaDevices['video']

        # create a profile
        self.profile = Profile(profile=options.profilePath,
                               addons=extensions,
                               locations=self.locations,
                               preferences=prefs,
                               apps=apps,
                               proxy=proxy
                               )

        # Fix options.profilePath for legacy consumers.
        options.profilePath = self.profile.profile

        manifest = self.addChromeToProfile(options)
        self.copyExtraFilesToProfile(options)

        # create certificate database for the profile
        # TODO: this should really be upstreamed somewhere, maybe mozprofile
        certificateStatus = self.fillCertificateDB(options)
        if certificateStatus:
            self.log.error("TEST-UNEXPECTED-FAIL | runtests.py | Certificate integration failed")
            return None

        return manifest
|
2010-01-19 22:45:04 +03:00
|
|
|
|
2014-07-25 15:01:13 +04:00
|
|
|
def getGMPPluginPath(self, options):
|
2014-08-22 18:28:04 +04:00
|
|
|
if options.gmp_path:
|
|
|
|
return options.gmp_path
|
|
|
|
|
2014-09-24 02:04:49 +04:00
|
|
|
gmp_parentdirs = [
|
|
|
|
# For local builds, GMP plugins will be under dist/bin.
|
|
|
|
options.xrePath,
|
|
|
|
# For packaged builds, GMP plugins will get copied under $profile/plugins.
|
|
|
|
os.path.join(self.profile.profile, 'plugins'),
|
|
|
|
]
|
|
|
|
|
|
|
|
gmp_subdirs = [
|
|
|
|
os.path.join('gmp-fake', '1.0'),
|
|
|
|
os.path.join('gmp-clearkey', '0.1'),
|
|
|
|
]
|
|
|
|
|
|
|
|
gmp_paths = [os.path.join(parent, sub)
|
|
|
|
for parent in gmp_parentdirs
|
|
|
|
for sub in gmp_subdirs
|
|
|
|
if os.path.isdir(os.path.join(parent, sub))]
|
|
|
|
|
|
|
|
if not gmp_paths:
|
|
|
|
# This is fatal for desktop environments.
|
|
|
|
raise EnvironmentError('Could not find test gmp plugins')
|
|
|
|
|
|
|
|
return os.pathsep.join(gmp_paths)
|
2014-07-25 15:01:13 +04:00
|
|
|
|
2014-08-06 01:11:53 +04:00
|
|
|
    def buildBrowserEnv(self, options, debugger=False, env=None):
        """Build the environment variables for the specific test and operating system.

        Returns the environment dict, or None when it could not be built
        (bad --setenv value, or missing GMP plugins).
        """
        # Leak-sanitizer support is only wired up for ASan builds.
        if mozinfo.info["asan"]:
            lsanPath = SCRIPT_DIR
        else:
            lsanPath = None

        browserEnv = self.environment(xrePath=options.xrePath, env=env,
                                      debugger=debugger, dmdPath=options.dmdPath,
                                      lsanPath=lsanPath)

        # These variables are necessary for correct application startup; change
        # via the commandline at your own risk.
        browserEnv["XPCOM_DEBUG_BREAK"] = "stack"

        # interpolate environment passed with options
        try:
            browserEnv.update(dict(parseKeyValue(options.environment, context='--setenv')))
        except KeyValueParseError, e:
            self.log.error(str(e))
            return None

        browserEnv["XPCOM_MEM_BLOAT_LOG"] = self.leak_report_file

        # Point the browser at the test GMP (media) plugins, if available.
        try:
            gmp_path = self.getGMPPluginPath(options)
            if gmp_path is not None:
                browserEnv["MOZ_GMP_PATH"] = gmp_path
        except EnvironmentError:
            self.log.error('Could not find path to gmp-fake plugin!')
            return None

        if options.fatalAssertions:
            browserEnv["XPCOM_DEBUG_BREAK"] = "stack-and-abort"

        # Produce an NSPR log if one is set up (see the NSPR_LOG_MODULES
        # global at the top of this script).
        self.nsprLogs = NSPR_LOG_MODULES and "MOZ_UPLOAD_DIR" in os.environ
        if self.nsprLogs:
            browserEnv["NSPR_LOG_MODULES"] = NSPR_LOG_MODULES

            browserEnv["NSPR_LOG_FILE"] = "%s/nspr.log" % tempfile.gettempdir()
            browserEnv["GECKO_SEPARATE_NSPR_LOGS"] = "1"

        # Slow-script signals interfere with interactive debugging.
        if debugger and not options.slowscript:
            browserEnv["JS_DISABLE_SLOW_SCRIPT_SIGNALS"] = "1"

        return browserEnv
|
|
|
|
|
2014-06-03 19:19:28 +04:00
|
|
|
def cleanup(self, options):
|
2010-01-19 22:45:04 +03:00
|
|
|
""" remove temporary files and profile """
|
2014-06-03 19:19:28 +04:00
|
|
|
if self.manifest is not None:
|
|
|
|
os.remove(self.manifest)
|
2013-09-23 18:47:48 +04:00
|
|
|
del self.profile
|
2014-03-14 22:25:41 +04:00
|
|
|
if options.pidFile != "":
|
|
|
|
try:
|
|
|
|
os.remove(options.pidFile)
|
|
|
|
if os.path.exists(options.pidFile + ".xpcshell.pid"):
|
|
|
|
os.remove(options.pidFile + ".xpcshell.pid")
|
|
|
|
except:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.warning("cleaning up pidfile '%s' was unsuccessful from the test harness" % options.pidFile)
|
2014-07-04 15:55:00 +04:00
|
|
|
options.manifestFile = None
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
def dumpScreen(self, utilityPath):
|
|
|
|
if self.haveDumpedScreen:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info("Not taking screenshot here: see the one that was previously logged")
|
2013-09-23 18:47:48 +04:00
|
|
|
return
|
|
|
|
self.haveDumpedScreen = True
|
2013-10-06 02:28:34 +04:00
|
|
|
dumpScreen(utilityPath)
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
def killAndGetStack(self, processPID, utilityPath, debuggerInfo, dump_screen=False):
|
|
|
|
"""
|
|
|
|
Kill the process, preferrably in a way that gets us a stack trace.
|
|
|
|
Also attempts to obtain a screenshot before killing the process
|
|
|
|
if specified.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if dump_screen:
|
|
|
|
self.dumpScreen(utilityPath)
|
|
|
|
|
|
|
|
if mozinfo.info.get('crashreporter', True) and not debuggerInfo:
|
|
|
|
if mozinfo.isWin:
|
|
|
|
# We should have a "crashinject" program in our utility path
|
|
|
|
crashinject = os.path.normpath(os.path.join(utilityPath, "crashinject.exe"))
|
2014-03-26 14:14:51 +04:00
|
|
|
if os.path.exists(crashinject):
|
|
|
|
status = subprocess.Popen([crashinject, str(processPID)]).wait()
|
|
|
|
printstatus(status, "crashinject")
|
|
|
|
if status == 0:
|
|
|
|
return
|
2013-09-23 18:47:48 +04:00
|
|
|
else:
|
2013-10-01 21:47:05 +04:00
|
|
|
try:
|
|
|
|
os.kill(processPID, signal.SIGABRT)
|
|
|
|
except OSError:
|
|
|
|
# https://bugzilla.mozilla.org/show_bug.cgi?id=921509
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info("Can't trigger Breakpad, process no longer exists")
|
2013-09-23 18:47:48 +04:00
|
|
|
return
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info("Can't trigger Breakpad, just killing process")
|
|
|
|
killPid(processPID, self.log)
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
def checkForZombies(self, processLog, utilityPath, debuggerInfo):
|
|
|
|
"""Look for hung processes"""
|
|
|
|
|
|
|
|
if not os.path.exists(processLog):
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info('Automation Error: PID log not found: %s' % processLog)
|
2013-09-23 18:47:48 +04:00
|
|
|
# Whilst no hung process was found, the run should still display as a failure
|
|
|
|
return True
|
|
|
|
|
|
|
|
# scan processLog for zombies
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info('zombiecheck | Reading PID log: %s' % processLog)
|
2013-09-23 18:47:48 +04:00
|
|
|
processList = []
|
|
|
|
pidRE = re.compile(r'launched child process (\d+)$')
|
|
|
|
with open(processLog) as processLogFD:
|
|
|
|
for line in processLogFD:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info(line.rstrip())
|
2013-09-23 18:47:48 +04:00
|
|
|
m = pidRE.search(line)
|
|
|
|
if m:
|
|
|
|
processList.append(int(m.group(1)))
|
|
|
|
|
|
|
|
# kill zombies
|
|
|
|
foundZombie = False
|
|
|
|
for processPID in processList:
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.info("zombiecheck | Checking for orphan process with PID: %d" % processPID)
|
2013-09-23 18:47:48 +04:00
|
|
|
if isPidAlive(processPID):
|
|
|
|
foundZombie = True
|
2014-08-13 20:03:00 +04:00
|
|
|
self.log.error("TEST-UNEXPECTED-FAIL | zombiecheck | child process %d still alive after shutdown" % processPID)
|
2013-09-23 18:47:48 +04:00
|
|
|
self.killAndGetStack(processPID, utilityPath, debuggerInfo, dump_screen=not debuggerInfo)
|
|
|
|
|
|
|
|
return foundZombie
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2010-05-17 22:00:13 +04:00
|
|
|
    def startVMwareRecording(self, options):
        """ starts recording inside VMware VM using the recording helper dll """
        # Only supported on Windows, where the helper DLL exists.
        assert mozinfo.isWin
        from ctypes import cdll
        self.vmwareHelper = cdll.LoadLibrary(self.vmwareHelperPath)
        if self.vmwareHelper is None:
            self.log.warning("runtests.py | Failed to load "
                             "VMware recording helper")
            return
        self.log.info("runtests.py | Starting VMware recording.")
        try:
            self.vmwareHelper.StartRecording()
        except Exception, e:
            # Recording is best-effort: log the failure and drop the helper
            # so stopVMwareRecording() becomes a no-op.
            self.log.warning("runtests.py | Failed to start "
                             "VMware recording: (%s)" % str(e))
            self.vmwareHelper = None
|
2010-05-17 22:00:13 +04:00
|
|
|
def stopVMwareRecording(self):
    """Stop recording inside a VMware VM using the recording helper dll.

    Safe to call even if startVMwareRecording() failed: when
    self.vmwareHelper is None nothing is invoked. Always clears
    self.vmwareHelper afterwards.
    """
    try:
        assert mozinfo.isWin
        if self.vmwareHelper is not None:
            self.log.info("runtests.py | Stopping VMware recording.")
            self.vmwareHelper.StopRecording()
    # Legacy 'except E, e' syntax modernized to 'except E as e' (py2.6+/py3).
    except Exception as e:
        self.log.warning("runtests.py | Failed to stop "
                         "VMware recording: (%s)" % str(e))
        self.log.exception('Error stopping VMWare recording')

    self.vmwareHelper = None
|
2010-03-16 00:44:29 +03:00
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
def runApp(self,
           testUrl,
           env,
           app,
           profile,
           extraArgs,
           utilityPath,
           debuggerInfo=None,
           symbolsPath=None,
           timeout=-1,
           onLaunch=None,
           detectShutdownLeaks=False,
           screenshotOnFail=False,
           testPath=None,
           bisectChunk=None,
           quiet=False):
    """
    Run the app, log the duration it took to execute, return the status code.
    Kills the app if it runs for longer than |maxTime| seconds, or outputs nothing for |timeout| seconds.

    NOTE(review): the `profile` parameter is unused below — the runner is
    constructed with self.profile instead; confirm before removing it.
    """

    # configure the message logger buffering
    self.message_logger.buffering = quiet

    # debugger information
    interactive = False
    debug_args = None
    if debuggerInfo:
        interactive = debuggerInfo.interactive
        debug_args = [debuggerInfo.path] + debuggerInfo.args

    # fix default timeout
    if timeout == -1:
        timeout = self.DEFAULT_TIMEOUT

    # copy env so we don't munge the caller's environment
    env = env.copy()

    # make sure we clean up after ourselves.
    try:
        # set process log environment variable: the browser writes the pids of
        # child processes it launches there, consumed by checkForZombies below.
        tmpfd, processLog = tempfile.mkstemp(suffix='pidlog')
        os.close(tmpfd)
        env["MOZ_PROCESS_LOG"] = processLog

        if interactive:
            # If an interactive debugger is attached,
            # don't use timeouts, and don't capture ctrl-c.
            timeout = None
            signal.signal(signal.SIGINT, lambda sigid, frame: None)

        # build command line
        cmd = os.path.abspath(app)
        args = list(extraArgs)
        # TODO: mozrunner should use -foreground at least for mac
        # https://bugzilla.mozilla.org/show_bug.cgi?id=916512
        args.append('-foreground')
        if testUrl:
            if debuggerInfo and debuggerInfo.requiresEscapedArgs:
                testUrl = testUrl.replace("&", "\\&")
            args.append(testUrl)

        # shutdown-leak tracking is fed by the output handler below
        if detectShutdownLeaks:
            shutdownLeaks = ShutdownLeaks(self.log)
        else:
            shutdownLeaks = None

        # LeakSanitizer output parsing only applies to ASan Linux/Mac builds
        if mozinfo.info["asan"] and (mozinfo.isLinux or mozinfo.isMac):
            lsanLeaks = LSANLeaks(self.log)
        else:
            lsanLeaks = None

        # create an instance to process the output
        outputHandler = self.OutputHandler(harness=self,
                                           utilityPath=utilityPath,
                                           symbolsPath=symbolsPath,
                                           dump_screen_on_timeout=not debuggerInfo,
                                           dump_screen_on_fail=screenshotOnFail,
                                           shutdownLeaks=shutdownLeaks,
                                           lsanLeaks=lsanLeaks,
                                           bisectChunk=bisectChunk
                                           )

        # closure over `proc` (assigned after runner.start) and outputHandler;
        # invoked by mozprocess when no output is seen for `timeout` seconds
        def timeoutHandler():
            browserProcessId = outputHandler.browserProcessId
            self.handleTimeout(timeout, proc, utilityPath, debuggerInfo, browserProcessId, testPath)
        kp_kwargs = {'kill_on_timeout': False,
                     'cwd': SCRIPT_DIR,
                     'onTimeout': [timeoutHandler]}
        kp_kwargs['processOutputLine'] = [outputHandler]

        # create mozrunner instance and start the system under test process
        self.lastTestSeen = self.test_name
        startTime = datetime.now()

        # b2g desktop requires Runner even though appname is b2g
        if mozinfo.info.get('appname') == 'b2g' and mozinfo.info.get('toolkit') != 'gonk':
            runner_cls = mozrunner.Runner
        else:
            runner_cls = mozrunner.runners.get(mozinfo.info.get('appname', 'firefox'),
                                               mozrunner.Runner)
        runner = runner_cls(profile=self.profile,
                            binary=cmd,
                            cmdargs=args,
                            env=env,
                            process_class=mozprocess.ProcessHandlerMixin,
                            process_args=kp_kwargs)

        # start the runner
        runner.start(debug_args=debug_args,
                     interactive=interactive,
                     outputTimeout=timeout)
        proc = runner.process_handler
        self.log.info("runtests.py | Application pid: %d" % proc.pid)

        if onLaunch is not None:
            # Allow callers to specify an onLaunch callback to be fired after the
            # app is launched.
            # We call onLaunch for b2g desktop mochitests so that we can
            # run a Marionette script after gecko has completed startup.
            onLaunch()

        # wait until app is finished
        # XXX copy functionality from
        # https://github.com/mozilla/mozbase/blob/master/mozrunner/mozrunner/runner.py#L61
        # until bug 913970 is fixed regarding mozrunner `wait` not returning status
        # see https://bugzilla.mozilla.org/show_bug.cgi?id=913970
        status = proc.wait()
        printstatus(status, "Main app process")
        runner.process_handler = None

        # finalize output handler (flushes shutdown/LSAN leak reports)
        outputHandler.finish()

        # record post-test information
        if status:
            self.message_logger.dump_buffered()
            self.log.error("TEST-UNEXPECTED-FAIL | %s | application terminated with exit code %s" % (self.lastTestSeen, status))
        else:
            self.lastTestSeen = 'Main app process exited normally'

        self.log.info("runtests.py | Application ran for: %s" % str(datetime.now() - startTime))

        # Do a final check for zombie child processes.
        zombieProcesses = self.checkForZombies(processLog, utilityPath, debuggerInfo)

        # check for crashes
        minidump_path = os.path.join(self.profile.profile, "minidumps")
        crash_count = mozcrash.log_crashes(self.log, minidump_path, symbolsPath,
                                           test=self.lastTestSeen)

        # crashes and orphaned children both count as a failed run
        if crash_count or zombieProcesses:
            status = 1

    finally:
        # cleanup
        if os.path.exists(processLog):
            os.remove(processLog)

    return status
|
|
|
|
|
2014-07-04 15:55:00 +04:00
|
|
|
def initializeLooping(self, options):
    """
    Reset per-iteration state before each pass of the test loop.

    Used by --run-by-dir and --bisect-chunk, which run the harness
    repeatedly and must not carry results over between iterations.
    """
    # Wipe harness-side bookkeeping from the previous iteration.
    for tracker in (self.expectedError, self.result):
        tracker.clear()
    # Force a fresh manifest and profile directory on the next run.
    options.manifestFile = None
    options.profilePath = None
    # Start the next run with no extra URL query parameters.
    self.urlOpts = []
|
|
|
|
|
|
|
|
def getActiveTests(self, options, disabled=True):
    """
    Parse the manifest and return the filtered, sorted list of active tests.

    Each entry is a dict with a 'path' key (relative to the test root,
    forward-slashed) and, when present in the manifest, a 'disabled' key
    carrying the skip reason. Raises if any test is marked fail-if, which
    mochitest does not support.
    """
    self.setTestRoot(options)
    manifest = self.getTestManifest(options)

    if manifest:
        # Python 2.6 doesn't allow unicode keys to be used for keyword
        # arguments. This gross hack works around the problem until we
        # rid ourselves of 2.6.
        info = {}
        for k, v in mozinfo.info.items():
            if isinstance(k, unicode):
                k = k.encode('ascii')
            info[k] = v

        # Bug 883858 - return all tests including disabled tests
        testPath = self.getTestPath(options)
        testPath = testPath.replace('\\', '/')
        if testPath.endswith('.html') or \
           testPath.endswith('.xhtml') or \
           testPath.endswith('.xul') or \
           testPath.endswith('.js'):
            # In the case where we have a single file, we don't want to filter based on options such as subsuite.
            tests = manifest.active_tests(disabled=disabled, options=None, **info)
            for test in tests:
                # 'disabled' is dropped so the single requested file always runs
                if 'disabled' in test:
                    del test['disabled']
        else:
            tests = manifest.active_tests(disabled=disabled, options=options, **info)
    paths = []

    for test in tests:
        if test.get('expected') == 'fail':
            raise Exception('fail-if encountered for test: %s. There is no support for fail-if in Mochitests.' % test['name'])
        pathAbs = os.path.abspath(test['path'])
        assert pathAbs.startswith(self.testRootAbs)
        tp = pathAbs[len(self.testRootAbs):].replace('\\', '/').strip('/')

        # Filter out tests if we are using --test-path
        if testPath and not tp.startswith(testPath):
            continue

        if not self.isTest(options, tp):
            self.log.warning('Warning: %s from manifest %s is not a valid test' % (test['name'], test['manifest']))
            continue

        testob = {'path': tp}
        # 'disabled' in test replaces deprecated dict.has_key()
        if 'disabled' in test:
            testob['disabled'] = test['disabled']
        paths.append(testob)

    # Sort by path components; a key function replaces the old cmp-based
    # comparator (identical ordering: lexicographic on the split segments).
    paths.sort(key=lambda ob: ob['path'].split('/'))

    return paths
|
|
|
|
|
|
|
|
def getTestsToRun(self, options):
    """
    Return the flat list of test paths that will actually run.

    Tests carrying a 'disabled' key (the skip reason from the manifest)
    are logged as TEST-SKIPPED and excluded. Required mainly for
    --bisect-chunk.
    """
    tests = self.getActiveTests(options)
    testsToRun = []
    for test in tests:
        # 'disabled' in test replaces deprecated dict.has_key()
        if 'disabled' in test:
            self.log.info('TEST-SKIPPED | %s | %s' % (test['path'], test['disabled']))
            continue
        testsToRun.append(test['path'])

    return testsToRun
|
|
|
|
|
|
|
|
def runMochitests(self, options, onLaunch=None):
    """Base driver that runs (and, with --bisect-chunk, re-runs) the tests.

    Without --bisect-chunk this calls doTests() exactly once and returns
    its result. With it, the Bisect helper repeatedly narrows the test
    list until post_test() reports completion (-1).
    """
    testsToRun = self.getTestsToRun(options)

    # Making an instance of bisect class for --bisect-chunk option.
    bisect = bisection.Bisect(self)
    finished = False
    status = 0
    bisection_log = 0
    while not finished:
        if options.bisectChunk:
            testsToRun = bisect.pre_test(options, testsToRun, status)
            # To inform that we are in the process of bisection, and to look for bleedthrough
            if options.bisectChunk != "default" and not bisection_log:
                # Fix: 'log' was an undefined name here; the harness logger
                # is self.log everywhere else in this class.
                self.log.info("TEST-UNEXPECTED-FAIL | Bisection | Please ignore repeats and look for 'Bleedthrough' (if any) at the end of the failure list")
                bisection_log = 1

        result = self.doTests(options, onLaunch, testsToRun)
        if options.bisectChunk:
            status = bisect.post_test(options, self.expectedError, self.result)
        else:
            status = -1

        # -1 from post_test (or the non-bisect path) means we are done looping
        if status == -1:
            finished = True

    # We need to print the summary only if options.bisectChunk has a value.
    # Also we need to make sure that we do not print the summary in between running tests via --run-by-dir.
    if options.bisectChunk and options.bisectChunk in self.result:
        bisect.print_summary()

    return result
|
2014-07-04 15:55:00 +04:00
|
|
|
|
2013-01-04 22:41:34 +04:00
|
|
|
def runTests(self, options, onLaunch=None):
    """ Prepare, configure, run tests and cleanup """

    self.setTestRoot(options)

    # Simple case: one harness invocation covers the whole run.
    if not options.runByDir:
        return self.runMochitests(options, onLaunch)

    # code for --run-by-dir
    dirs = self.getDirectories(options)

    # Apply machine-level chunking to the directory list up front ...
    if options.totalChunks > 1:
        chunkSize = int(len(dirs) / options.totalChunks) + 1
        start = chunkSize * (options.thisChunk-1)
        end = chunkSize * (options.thisChunk)
        dirs = dirs[start:end]

    # ... then neutralize the chunking options so each per-directory run
    # below sees an un-chunked configuration.
    options.totalChunks = None
    options.thisChunk = None
    options.chunkByDir = 0
    inputTestPath = self.getTestPath(options)
    for dir in dirs:
        # Honor an explicit test path by skipping unrelated directories.
        if inputTestPath and not inputTestPath.startswith(dir):
            continue

        options.testPath = dir
        print "testpath: %s" % options.testPath

        # If we are using --run-by-dir, we should not use the profile path (if) provided
        # by the user, since we need to create a new directory for each run. We would face problems
        # if we use the directory provided by the user.
        result = self.runMochitests(options, onLaunch)

        # Dump the logging buffer
        self.message_logger.dump_buffered()

    # printing total number of tests
    # NOTE(review): countpass/countfail/counttodo are accumulated across all
    # per-directory runs by OutputHandler.countline.
    if options.browserChrome:
        print "TEST-INFO | checking window state"
        print "Browser Chrome Test Summary"
        print "\tPassed: %s" % self.countpass
        print "\tFailed: %s" % self.countfail
        print "\tTodo: %s" % self.counttodo
        print "*** End BrowserChrome Test Results ***"
    else:
        print "0 INFO TEST-START | Shutdown"
        print "1 INFO Passed: %s" % self.countpass
        print "2 INFO Failed: %s" % self.countfail
        print "3 INFO Todo: %s" % self.counttodo
        print "4 INFO SimpleTest FINISHED"

    # result of the last per-directory run
    return result
|
|
|
|
|
2014-07-23 12:23:00 +04:00
|
|
|
def doTests(self, options, onLaunch=None, testsToFilter=None):
    """Set up servers/profile, run the app once, and tear everything down.

    testsToFilter restricts the test list handed to buildTestPath (used
    by --bisect-chunk / --run-by-dir). Returns the run's status code,
    1 on setup failure, -1 on keyboard interrupt.
    """
    # A call to initializeLooping method is required in case of --run-by-dir or --bisect-chunk
    # since we need to initialize variables for each loop.
    if options.bisectChunk or options.runByDir:
        self.initializeLooping(options)

    # get debugger info, a dict of:
    # {'path': path to the debugger (string),
    # 'interactive': whether the debugger is interactive or not (bool)
    # 'args': arguments to the debugger (list)
    # TODO: use mozrunner.local.debugger_arguments:
    # https://github.com/mozilla/mozbase/blob/master/mozrunner/mozrunner/local.py#L42

    debuggerInfo = mozdebug.get_debugger_info(options.debugger,
                                              options.debuggerArgs,
                                              options.debuggerInteractive)

    if options.useTestMediaDevices:
        devices = findTestMediaDevices(self.log)
        if not devices:
            self.log.error("Could not find test media devices to use")
            return 1
        self.mediaDevices = devices

    # buildProfile sets self.profile .
    # This relies on sideeffects and isn't very stateful:
    # https://bugzilla.mozilla.org/show_bug.cgi?id=919300
    self.manifest = self.buildProfile(options)
    if self.manifest is None:
        return 1

    self.leak_report_file = os.path.join(options.profilePath, "runtests_leaks.log")

    self.browserEnv = self.buildBrowserEnv(options, debuggerInfo is not None)

    # If there are any Mulet-specific tests doing remote network access,
    # we will not be aware since we are explicitely allowing this, as for B2G
    if mozinfo.info.get('buildapp') == 'mulet' and 'MOZ_DISABLE_NONLOCAL_CONNECTIONS' in self.browserEnv:
        del self.browserEnv['MOZ_DISABLE_NONLOCAL_CONNECTIONS']

    if self.browserEnv is None:
        return 1

    try:
        self.startServers(options, debuggerInfo)

        # testsToFilter parameter is used to filter out the test list that is sent to buildTestPath
        testURL = self.buildTestPath(options, testsToFilter)

        # read the number of tests here, if we are not going to run any, terminate early
        if os.path.exists(os.path.join(SCRIPT_DIR, 'tests.json')):
            with open(os.path.join(SCRIPT_DIR, 'tests.json')) as fHandle:
                tests = json.load(fHandle)
            count = 0
            for test in tests['tests']:
                count += 1
            if count == 0:
                return 1

        self.buildURLOptions(options, self.browserEnv)
        if self.urlOpts:
            testURL += "?" + "&".join(self.urlOpts)

        if options.webapprtContent:
            options.browserArgs.extend(('-test-mode', testURL))
            testURL = None

        if options.immersiveMode:
            options.browserArgs.extend(('-firefoxpath', options.app))
            options.app = self.immersiveHelperPath

        if options.jsdebugger:
            options.browserArgs.extend(['-jsdebugger'])

        # Remove the leak detection file so it can't "leak" to the tests run.
        # The file is not there if leak logging was not enabled in the application build.
        if os.path.exists(self.leak_report_file):
            os.remove(self.leak_report_file)

        # then again to actually run mochitest
        if options.timeout:
            timeout = options.timeout + 30
        elif options.debugger or not options.autorun:
            timeout = None
        else:
            timeout = 330.0 # default JS harness timeout is 300 seconds

        if options.vmwareRecording:
            self.startVMwareRecording(options)

        # detect shutdown leaks for m-bc runs
        detectShutdownLeaks = mozinfo.info["debug"] and options.browserChrome and not options.webapprtChrome

        self.log.info("runtests.py | Running tests: start.\n")
        try:
            status = self.runApp(testURL,
                                 self.browserEnv,
                                 options.app,
                                 profile=self.profile,
                                 extraArgs=options.browserArgs,
                                 utilityPath=options.utilityPath,
                                 debuggerInfo=debuggerInfo,
                                 symbolsPath=options.symbolsPath,
                                 timeout=timeout,
                                 onLaunch=onLaunch,
                                 detectShutdownLeaks=detectShutdownLeaks,
                                 screenshotOnFail=options.screenshotOnFail,
                                 testPath=options.testPath,
                                 bisectChunk=options.bisectChunk,
                                 quiet=options.quiet
                                 )
        except KeyboardInterrupt:
            self.log.info("runtests.py | Received keyboard interrupt.\n")
            status = -1
        except:
            # deliberately broad: any other failure is reported as an
            # automation error rather than crashing the harness
            traceback.print_exc()
            self.log.error("Automation Error: Received unexpected exception while running application\n")
            status = 1

    finally:
        # cleanup
        if options.vmwareRecording:
            self.stopVMwareRecording()
        self.stopServers()

        processLeakLog(self.leak_report_file, options.leakThresholds, options.ignoreMissingLeaks)

        if self.nsprLogs:
            # Fix: 'browserEnv' was an undefined local here; the environment
            # built above is stored on self.browserEnv.
            with zipfile.ZipFile("%s/nsprlog.zip" % self.browserEnv["MOZ_UPLOAD_DIR"], "w", zipfile.ZIP_DEFLATED) as logzip:
                for logfile in glob.glob("%s/nspr*.log*" % tempfile.gettempdir()):
                    logzip.write(logfile)
                    os.remove(logfile)

        self.log.info("runtests.py | Running tests: end.")

    if self.manifest is not None:
        self.cleanup(options)

    return status
|
|
|
|
|
2014-06-03 19:19:28 +04:00
|
|
|
def handleTimeout(self, timeout, proc, utilityPath, debuggerInfo, browserProcessId, testPath=None):
    """handle process output timeout"""
    # TODO: bug 913975 : _processOutput should call self.processOutputLine one more time one timeout (I think)
    base = "TEST-UNEXPECTED-TIMEOUT | %s | application timed out after %d seconds with no output" % (self.lastTestSeen, int(timeout))
    if testPath:
        error_message = base + " on %s" % testPath
    else:
        error_message = base

    # Flush anything held back by quiet-mode buffering, then stop buffering
    # so the remaining shutdown output is reported immediately.
    self.message_logger.dump_buffered()
    self.message_logger.buffering = False
    self.log.error(error_message)

    # Fall back to the main app pid when the metro wrapper never reported
    # the real browser pid.
    pid_to_kill = browserProcessId or proc.pid
    self.killAndGetStack(pid_to_kill, utilityPath, debuggerInfo, dump_screen=not debuggerInfo)
|
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
class OutputHandler(object):
|
|
|
|
"""line output handler for mozrunner"""
|
2014-07-04 15:55:00 +04:00
|
|
|
def __init__(self, harness, utilityPath, symbolsPath=None, dump_screen_on_timeout=True, dump_screen_on_fail=False, shutdownLeaks=None, lsanLeaks=None, bisectChunk=None):
|
2013-09-23 18:47:48 +04:00
|
|
|
"""
|
|
|
|
harness -- harness instance
|
|
|
|
dump_screen_on_timeout -- whether to dump the screen on timeout
|
|
|
|
"""
|
|
|
|
self.harness = harness
|
|
|
|
self.utilityPath = utilityPath
|
|
|
|
self.symbolsPath = symbolsPath
|
|
|
|
self.dump_screen_on_timeout = dump_screen_on_timeout
|
2014-04-12 06:23:00 +04:00
|
|
|
self.dump_screen_on_fail = dump_screen_on_fail
|
2013-11-21 00:53:08 +04:00
|
|
|
self.shutdownLeaks = shutdownLeaks
|
2014-06-20 20:08:30 +04:00
|
|
|
self.lsanLeaks = lsanLeaks
|
2014-07-04 15:55:00 +04:00
|
|
|
self.bisectChunk = bisectChunk
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
# With metro browser runs this script launches the metro test harness which launches the browser.
|
|
|
|
# The metro test harness hands back the real browser process id via log output which we need to
|
|
|
|
# pick up on and parse out. This variable tracks the real browser process id if we find it.
|
|
|
|
self.browserProcessId = None
|
|
|
|
|
2014-09-03 20:24:38 +04:00
|
|
|
self.stackFixerFunction = self.stackFixer()
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
def processOutputLine(self, line):
|
|
|
|
"""per line handler of output for mozprocess"""
|
2014-07-17 11:02:00 +04:00
|
|
|
# Parsing the line (by the structured messages logger).
|
|
|
|
messages = self.harness.message_logger.parse_line(line)
|
|
|
|
|
|
|
|
for message in messages:
|
|
|
|
# Passing the message to the handlers
|
|
|
|
for handler in self.outputHandlers():
|
|
|
|
message = handler(message)
|
|
|
|
|
|
|
|
# Processing the message by the logger
|
|
|
|
self.harness.message_logger.process_message(message)
|
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
__call__ = processOutputLine
|
|
|
|
|
|
|
|
def outputHandlers(self):
|
|
|
|
"""returns ordered list of output handlers"""
|
2014-07-17 11:02:00 +04:00
|
|
|
handlers = [self.fix_stack,
|
|
|
|
self.record_last_test,
|
|
|
|
self.dumpScreenOnTimeout,
|
|
|
|
self.dumpScreenOnFail,
|
|
|
|
self.trackShutdownLeaks,
|
|
|
|
self.trackLSANLeaks,
|
|
|
|
self.countline,
|
|
|
|
]
|
|
|
|
if self.bisectChunk:
|
|
|
|
handlers.append(self.record_result)
|
|
|
|
handlers.append(self.first_error)
|
|
|
|
|
|
|
|
return handlers
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
def stackFixer(self):
|
|
|
|
"""
|
2014-09-03 20:24:38 +04:00
|
|
|
return stackFixerFunction, if any, to use on the output lines
|
2013-09-23 18:47:48 +04:00
|
|
|
"""
|
|
|
|
|
|
|
|
if not mozinfo.info.get('debug'):
|
2014-09-03 20:24:38 +04:00
|
|
|
return None
|
2013-09-23 18:47:48 +04:00
|
|
|
|
2014-09-03 20:24:38 +04:00
|
|
|
stackFixerFunction = None
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
def import_stackFixerModule(module_name):
|
|
|
|
sys.path.insert(0, self.utilityPath)
|
|
|
|
module = __import__(module_name, globals(), locals(), [])
|
|
|
|
sys.path.pop(0)
|
|
|
|
return module
|
|
|
|
|
|
|
|
if self.symbolsPath and os.path.exists(self.symbolsPath):
|
2014-09-03 20:24:38 +04:00
|
|
|
# Run each line through a function in fix_stack_using_bpsyms.py (uses breakpad symbol files).
|
2013-09-23 18:47:48 +04:00
|
|
|
# This method is preferred for Tinderbox builds, since native symbols may have been stripped.
|
|
|
|
stackFixerModule = import_stackFixerModule('fix_stack_using_bpsyms')
|
|
|
|
stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line, self.symbolsPath)
|
|
|
|
|
2014-09-05 09:19:42 +04:00
|
|
|
elif mozinfo.isMac:
|
|
|
|
# Run each line through fix_macosx_stack.py (uses atos).
|
|
|
|
# This method is preferred for developer machines, so we don't have to run "make buildsymbols".
|
|
|
|
stackFixerModule = import_stackFixerModule('fix_macosx_stack')
|
|
|
|
stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line)
|
|
|
|
|
2014-09-03 20:24:38 +04:00
|
|
|
elif mozinfo.isLinux:
|
|
|
|
# Run each line through fix_linux_stack.py (uses addr2line).
|
2013-09-23 18:47:48 +04:00
|
|
|
# This method is preferred for developer machines, so we don't have to run "make buildsymbols".
|
2014-09-03 20:24:38 +04:00
|
|
|
stackFixerModule = import_stackFixerModule('fix_linux_stack')
|
|
|
|
stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line)
|
2013-09-23 18:47:48 +04:00
|
|
|
|
2014-09-03 20:24:38 +04:00
|
|
|
return stackFixerFunction
|
|
|
|
|
|
|
|
def finish(self):
|
2013-11-21 00:53:08 +04:00
|
|
|
if self.shutdownLeaks:
|
|
|
|
self.shutdownLeaks.process()
|
|
|
|
|
2014-06-20 20:08:30 +04:00
|
|
|
if self.lsanLeaks:
|
|
|
|
self.lsanLeaks.process()
|
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
# output message handlers:
|
|
|
|
# these take a message and return a message
|
2013-09-23 18:47:48 +04:00
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
def record_result(self, message):
|
|
|
|
if message['action'] == 'test_start': #by default make the result key equal to pass.
|
|
|
|
key = message['test'].split('/')[-1].strip()
|
2014-07-04 15:55:00 +04:00
|
|
|
self.harness.result[key] = "PASS"
|
2014-07-28 13:44:00 +04:00
|
|
|
elif message['action'] == 'test_status':
|
2014-07-17 11:02:00 +04:00
|
|
|
if 'expected' in message:
|
|
|
|
key = message['test'].split('/')[-1].strip()
|
|
|
|
self.harness.result[key] = "FAIL"
|
|
|
|
elif message['status'] == 'FAIL':
|
|
|
|
key = message['test'].split('/')[-1].strip()
|
|
|
|
self.harness.result[key] = "TODO"
|
|
|
|
return message
|
|
|
|
|
|
|
|
def first_error(self, message):
|
2014-07-28 13:44:00 +04:00
|
|
|
if message['action'] == 'test_status' and 'expected' in message and message['status'] == 'FAIL':
|
2014-07-17 11:02:00 +04:00
|
|
|
key = message['test'].split('/')[-1].strip()
|
2014-07-04 15:55:00 +04:00
|
|
|
if key not in self.harness.expectedError:
|
2014-07-28 13:44:00 +04:00
|
|
|
self.harness.expectedError[key] = message.get('message', message['subtest']).strip()
|
2014-07-17 11:02:00 +04:00
|
|
|
return message
|
2014-07-04 15:55:00 +04:00
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
def countline(self, message):
|
|
|
|
if message['action'] != 'log':
|
|
|
|
return message
|
|
|
|
line = message['message']
|
2014-06-03 19:19:28 +04:00
|
|
|
val = 0
|
|
|
|
try:
|
|
|
|
val = int(line.split(':')[-1].strip())
|
2014-07-17 11:02:00 +04:00
|
|
|
except ValueError:
|
|
|
|
return message
|
2014-06-03 19:19:28 +04:00
|
|
|
|
|
|
|
if "Passed:" in line:
|
|
|
|
self.harness.countpass += val
|
|
|
|
elif "Failed:" in line:
|
|
|
|
self.harness.countfail += val
|
|
|
|
elif "Todo:" in line:
|
|
|
|
self.harness.counttodo += val
|
2014-07-17 11:02:00 +04:00
|
|
|
return message
|
2013-09-23 18:47:48 +04:00
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
def fix_stack(self, message):
|
|
|
|
if message['action'] == 'log' and self.stackFixerFunction:
|
2014-08-04 13:29:58 +04:00
|
|
|
message['message'] = self.stackFixerFunction(message['message'].encode('utf-8', 'replace'))
|
2014-07-17 11:02:00 +04:00
|
|
|
return message
|
2014-07-11 16:15:29 +04:00
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
def record_last_test(self, message):
|
2014-05-09 22:59:18 +04:00
|
|
|
"""record last test on harness"""
|
2014-07-17 11:02:00 +04:00
|
|
|
if message['action'] == 'test_start':
|
|
|
|
self.harness.lastTestSeen = message['test']
|
|
|
|
return message
|
|
|
|
|
|
|
|
def dumpScreenOnTimeout(self, message):
|
|
|
|
if (not self.dump_screen_on_fail
|
|
|
|
and self.dump_screen_on_timeout
|
2014-08-15 22:42:00 +04:00
|
|
|
and message['action'] == 'test_status' and 'expected' in message
|
|
|
|
and "Test timed out" in message['subtest']):
|
2014-04-12 06:23:00 +04:00
|
|
|
self.harness.dumpScreen(self.utilityPath)
|
2014-07-17 11:02:00 +04:00
|
|
|
return message
|
2014-04-12 06:23:00 +04:00
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
def dumpScreenOnFail(self, message):
    """Capture a screenshot for each unexpected FAIL status, when enabled."""
    failed = (self.dump_screen_on_fail
              and 'expected' in message
              and message['status'] == 'FAIL')
    if failed:
        self.harness.dumpScreen(self.utilityPath)
    return message
|
2014-07-11 16:15:29 +04:00
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
def trackLSANLeaks(self, message):
    """Feed log lines to the LSAN leak tracker, when LSAN is active."""
    if not self.lsanLeaks:
        return message
    if message['action'] == 'log':
        self.lsanLeaks.log(message['message'])
    return message
|
2013-09-23 18:47:48 +04:00
|
|
|
|
2014-07-17 11:02:00 +04:00
|
|
|
def trackShutdownLeaks(self, message):
    """Forward every message to the shutdown-leak tracker, when enabled."""
    tracker = self.shutdownLeaks
    if tracker:
        tracker.log(message)
    return message
|
2013-09-23 18:47:48 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
def makeTestConfig(self, options):
    """Creates a test configuration file for customizing test execution.

    Writes testConfig.js into the profile so the in-browser harness can
    read this run's options: path backslashes are escaped for embedding
    in JS, the MOZ_HIDE_RESULTS_TABLE environment override is applied,
    and every option except the log* ones is serialized as JSON.
    """
    # Escape Windows path separators so the values survive JS/JSON parsing.
    options.logFile = options.logFile.replace("\\", "\\\\")
    options.testPath = options.testPath.replace("\\", "\\\\")

    if os.environ.get("MOZ_HIDE_RESULTS_TABLE") == "1":
        options.hideResultsTable = True

    # Logging options are consumed outside the browser; drop them here.
    config = {k: v for k, v in options.__dict__.iteritems()
              if not k.startswith('log')}
    config['testRoot'] = self.testRoot

    with open(os.path.join(options.profilePath, "testConfig.js"), "w") as fh:
        fh.write(json.dumps(config))
|
2008-01-29 06:48:34 +03:00
|
|
|
|
2014-06-03 19:19:28 +04:00
|
|
|
def getTestManifest(self, options):
    """Resolve the test manifest for this run.

    Accepts, in order of preference: an already-constructed TestManifest
    instance, a manifest path as given, a manifest path relative to this
    script's directory, or the flavor's master manifest (e.g.
    "mochitest.ini") under the test root.

    Returns the TestManifest, or None when no manifest could be found.
    (Previously a missing master manifest caused an UnboundLocalError on
    the return statement instead of a usable None.)
    """
    manifest = None  # fix: was unbound when the master manifest is missing
    if isinstance(options.manifestFile, TestManifest):
        manifest = options.manifestFile
    elif options.manifestFile and os.path.isfile(options.manifestFile):
        manifestFileAbs = os.path.abspath(options.manifestFile)
        # Manifests must live under the harness tree.
        assert manifestFileAbs.startswith(SCRIPT_DIR)
        manifest = TestManifest([options.manifestFile], strict=False)
    elif options.manifestFile and os.path.isfile(os.path.join(SCRIPT_DIR, options.manifestFile)):
        manifestFileAbs = os.path.abspath(os.path.join(SCRIPT_DIR, options.manifestFile))
        assert manifestFileAbs.startswith(SCRIPT_DIR)
        manifest = TestManifest([manifestFileAbs], strict=False)
    else:
        # Fall back to the master manifest for this flavor, e.g. mochitest.ini.
        masterName = self.getTestFlavor(options) + '.ini'
        masterPath = os.path.join(SCRIPT_DIR, self.testRoot, masterName)
        if os.path.exists(masterPath):
            manifest = TestManifest([masterPath], strict=False)
    return manifest
|
|
|
|
|
|
|
|
def getDirectories(self, options):
    """
    Make the list of directories by parsing manifests.

    Returns the unique parent directories of all active tests, in
    first-seen order.  Manifest paths always use '/' as the separator.
    """
    seen = set()
    directories = []
    for test in self.getActiveTests(options, False):
        # Strip the leaf filename; a bare filename maps to ''.
        parent = '/'.join(test['path'].split('/')[:-1])
        if parent not in seen:
            seen.add(parent)
            directories.append(parent)
    return directories
|
2013-09-23 18:47:48 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
def main():
    """Command-line entry point: parse options, run the suite, exit with
    the harness's return code."""
    # Parse command line options.
    parser = MochitestOptions()
    commandline.add_logging_group(parser)
    options, args = parser.parse_args()
    if options is None:
        # Parsing error; the parser has already reported it.
        sys.exit(1)

    # Split off the structured-logging options for the logger itself.
    logger_options = {k: v for k, v in vars(options).iteritems()
                      if k.startswith('log')}
    mochitest = Mochitest(logger_options)
    options = parser.verifyOptions(options, mochitest)

    # Normalize paths that may have been given relative to the cwd.
    options.utilityPath = mochitest.getFullPath(options.utilityPath)
    options.certPath = mochitest.getFullPath(options.certPath)
    if options.symbolsPath and not isURL(options.symbolsPath):
        options.symbolsPath = mochitest.getFullPath(options.symbolsPath)

    return_code = mochitest.runTests(options)
    mochitest.message_logger.finish()

    sys.exit(return_code)
|
2008-01-29 06:48:34 +03:00
|
|
|
|
|
|
|
# Allow the harness to be run directly as a script.
if __name__ == "__main__":
    main()
|