2008-02-22 00:08:39 +03:00
|
|
|
#
|
|
|
|
# ***** BEGIN LICENSE BLOCK *****
|
|
|
|
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
|
|
|
#
|
|
|
|
# The contents of this file are subject to the Mozilla Public License Version
|
|
|
|
# 1.1 (the "License"); you may not use this file except in compliance with
|
|
|
|
# the License. You may obtain a copy of the License at
|
|
|
|
# http://www.mozilla.org/MPL/
|
|
|
|
#
|
|
|
|
# Software distributed under the License is distributed on an "AS IS" basis,
|
|
|
|
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
|
|
|
# for the specific language governing rights and limitations under the
|
|
|
|
# License.
|
|
|
|
#
|
|
|
|
# The Original Code is mozilla.org code.
|
|
|
|
#
|
|
|
|
# The Initial Developer of the Original Code is
|
|
|
|
# Mozilla Foundation.
|
|
|
|
# Portions created by the Initial Developer are Copyright (C) 2008
|
|
|
|
# the Initial Developer. All Rights Reserved.
|
|
|
|
#
|
|
|
|
# Contributor(s):
|
|
|
|
# Robert Sayre <sayrer@gmail.com>
|
|
|
|
# Jeff Walden <jwalden+bmo@mit.edu>
|
|
|
|
#
|
|
|
|
# Alternatively, the contents of this file may be used under the terms of
|
|
|
|
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
|
|
|
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
|
|
|
# in which case the provisions of the GPL or the LGPL are applicable instead
|
|
|
|
# of those above. If you wish to allow use of your version of this file only
|
|
|
|
# under the terms of either the GPL or the LGPL, and not to allow others to
|
|
|
|
# use your version of this file under the terms of the MPL, indicate your
|
|
|
|
# decision by deleting the provisions above and replace them with the notice
|
|
|
|
# and other provisions required by the GPL or the LGPL. If you do not delete
|
|
|
|
# the provisions above, a recipient may use your version of this file under
|
|
|
|
# the terms of any one of the MPL, the GPL or the LGPL.
|
|
|
|
#
|
|
|
|
# ***** END LICENSE BLOCK *****
|
|
|
|
|
2011-10-14 19:45:58 +04:00
|
|
|
from __future__ import with_statement
|
2008-06-07 10:43:15 +04:00
|
|
|
import codecs
|
2010-01-07 03:45:46 +03:00
|
|
|
from datetime import datetime, timedelta
|
2008-02-22 00:08:39 +03:00
|
|
|
import itertools
|
2008-04-08 09:18:45 +04:00
|
|
|
import logging
|
2008-02-22 00:08:39 +03:00
|
|
|
import os
|
2008-06-07 10:43:15 +04:00
|
|
|
import re
|
2009-10-14 00:56:24 +04:00
|
|
|
import select
|
2009-01-12 22:23:28 +03:00
|
|
|
import shutil
|
2008-04-08 09:18:45 +04:00
|
|
|
import signal
|
2009-01-12 22:23:28 +03:00
|
|
|
import subprocess
|
2008-02-22 00:08:39 +03:00
|
|
|
import sys
|
2008-04-08 09:18:45 +04:00
|
|
|
import threading
|
2009-11-20 22:48:56 +03:00
|
|
|
import tempfile
|
2010-08-20 03:12:46 +04:00
|
|
|
import sqlite3
|
2009-05-11 23:54:39 +04:00
|
|
|
|
2010-03-18 21:14:14 +03:00
|
|
|
# Absolute directory containing this script; prepended to sys.path so the
# sibling automationutils module can be imported no matter what the current
# working directory is.
SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(sys.argv[0])))
sys.path.insert(0, SCRIPT_DIR)
import automationutils
|
|
|
|
|
2010-03-13 20:56:24 +03:00
|
|
|
# Default host/ports for the Mochitest web server; callers can override these
# per-run via Automation.setServerInfo().
_DEFAULT_WEB_SERVER = "127.0.0.1"
_DEFAULT_HTTP_PORT = 8888
_DEFAULT_SSL_PORT = 4443
_DEFAULT_WEBSOCKET_PORT = 9988
|
2010-03-10 06:33:11 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
# The #expand/#ifdef/#else/#endif lines below are directives for the Mozilla
# build preprocessor: when this .py.in template is turned into automation.py,
# each __DOUBLE_UNDERSCORE__ token is replaced with a build-configuration
# value, producing ordinary Python assignments.
#expand _DIST_BIN = __XPC_BIN_PATH__
#expand _IS_WIN32 = len("__WIN32__") != 0
#expand _IS_MAC = __IS_MAC__ != 0
#expand _IS_LINUX = __IS_LINUX__ != 0
#ifdef IS_CYGWIN
#expand _IS_CYGWIN = __IS_CYGWIN__ == 1
#else
_IS_CYGWIN = False
#endif
#expand _IS_CAMINO = __IS_CAMINO__ != 0
#expand _BIN_SUFFIX = __BIN_SUFFIX__
#expand _PERL = __PERL__

#expand _DEFAULT_APP = "./" + __BROWSER_PATH__
#expand _CERTS_SRC_DIR = __CERTS_SRC_DIR__
#expand _IS_TEST_BUILD = __IS_TEST_BUILD__
#expand _IS_DEBUG_BUILD = __IS_DEBUG_BUILD__
#expand _CRASHREPORTER = __CRASHREPORTER__ == 1
|
2008-04-08 09:18:45 +04:00
|
|
|
|
2010-03-18 21:14:14 +03:00
|
|
|
|
|
|
|
if _IS_WIN32:
  # Win32 needs ctypes/ctypes.wintypes for process control, msvcrt for pipe
  # handles, and time for polling loops (see readWithTimeout below).
  import ctypes, ctypes.wintypes, time, msvcrt
else:
  # POSIX platforms use errno to interpret os.kill/os.waitpid failures.
  import errno
|
|
|
|
|
|
|
|
|
2010-03-18 23:13:33 +03:00
|
|
|
# We use the logging system here primarily because it'll handle multiple
|
|
|
|
# threads, which is needed to process the output of the server and application
|
|
|
|
# processes simultaneously.
|
|
|
|
# We use the logging system here primarily because it'll handle multiple
# threads, which is needed to process the output of the server and application
# processes simultaneously.
# Note: this configures the *root* logger, so all module loggers inherit it.
_log = logging.getLogger()
handler = logging.StreamHandler(sys.stdout)
_log.setLevel(logging.INFO)
_log.addHandler(handler)
|
|
|
|
|
|
|
|
|
2008-04-08 09:18:45 +04:00
|
|
|
#################
|
|
|
|
# PROFILE SETUP #
|
|
|
|
#################
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2008-06-07 10:43:15 +04:00
|
|
|
class SyntaxError(Exception):
  """Signifies a syntax error on a particular line in server-locations.txt.

  Note: deliberately shadows the builtin SyntaxError within this module.
  """

  def __init__(self, lineno, msg = None):
    # Line number the error occurred on, plus an optional detail message.
    self.lineno = lineno
    self.msg = msg

  def __str__(self):
    # Render "Syntax error on line N." or "Syntax error on line N: <msg>."
    suffix = ": %s." % self.msg if self.msg else "."
    return "Syntax error on line %s%s" % (self.lineno, suffix)
|
|
|
|
|
|
|
|
|
|
|
|
class Location:
  """Represents a location line in server-locations.txt.

  Holds the parsed scheme, host, port and option list of one entry.
  """

  def __init__(self, scheme, host, port, options):
    # Record each component verbatim; parsing/validation happens in
    # readLocations, not here.
    for attr, value in (("scheme", scheme),
                        ("host", host),
                        ("port", port),
                        ("options", options)):
      setattr(self, attr, value)
|
2010-01-15 20:22:54 +03:00
|
|
|
class Automation(object):
|
2008-06-07 10:43:15 +04:00
|
|
|
"""
|
2010-01-15 20:22:54 +03:00
|
|
|
Runs the browser from a script, and provides useful utilities
|
|
|
|
for setting up the browser environment.
|
2008-06-07 10:43:15 +04:00
|
|
|
"""
|
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
DIST_BIN = _DIST_BIN
|
|
|
|
IS_WIN32 = _IS_WIN32
|
|
|
|
IS_MAC = _IS_MAC
|
|
|
|
IS_LINUX = _IS_LINUX
|
|
|
|
IS_CYGWIN = _IS_CYGWIN
|
|
|
|
IS_CAMINO = _IS_CAMINO
|
|
|
|
BIN_SUFFIX = _BIN_SUFFIX
|
|
|
|
PERL = _PERL
|
|
|
|
|
|
|
|
UNIXISH = not IS_WIN32 and not IS_MAC
|
|
|
|
|
|
|
|
DEFAULT_APP = _DEFAULT_APP
|
|
|
|
CERTS_SRC_DIR = _CERTS_SRC_DIR
|
|
|
|
IS_TEST_BUILD = _IS_TEST_BUILD
|
|
|
|
IS_DEBUG_BUILD = _IS_DEBUG_BUILD
|
|
|
|
CRASHREPORTER = _CRASHREPORTER
|
|
|
|
|
|
|
|
# timeout, in seconds
|
|
|
|
DEFAULT_TIMEOUT = 60.0
|
2010-03-13 20:56:24 +03:00
|
|
|
DEFAULT_WEB_SERVER = _DEFAULT_WEB_SERVER
|
|
|
|
DEFAULT_HTTP_PORT = _DEFAULT_HTTP_PORT
|
|
|
|
DEFAULT_SSL_PORT = _DEFAULT_SSL_PORT
|
2010-06-17 09:38:55 +04:00
|
|
|
DEFAULT_WEBSOCKET_PORT = _DEFAULT_WEBSOCKET_PORT
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
  def __init__(self):
    # Shared module-level logger (root logger configured above).
    self.log = _log
    # Name of the most recent test seen in the output stream; used in
    # failure messages before any real test name has been observed.
    self.lastTestSeen = "automation.py"
    # True once a screenshot has been captured for a failure, so the screen
    # is only dumped once per run (see dumpScreen).
    self.haveDumpedScreen = False
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2010-06-17 09:38:55 +04:00
|
|
|
def setServerInfo(self,
|
|
|
|
webServer = _DEFAULT_WEB_SERVER,
|
|
|
|
httpPort = _DEFAULT_HTTP_PORT,
|
|
|
|
sslPort = _DEFAULT_SSL_PORT,
|
2010-07-28 21:55:36 +04:00
|
|
|
webSocketPort = _DEFAULT_WEBSOCKET_PORT):
|
2010-03-13 20:56:24 +03:00
|
|
|
self.webServer = webServer
|
2010-03-13 00:53:37 +03:00
|
|
|
self.httpPort = httpPort
|
|
|
|
self.sslPort = sslPort
|
2010-06-17 09:38:55 +04:00
|
|
|
self.webSocketPort = webSocketPort
|
2010-03-13 00:53:37 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
  @property
  def __all__(self):
    # Names historically exported when this module's contents lived at
    # module level; kept as a property for callers that introspect the
    # automation object.  NOTE(review): "runApp" is listed but not visible
    # in this chunk — presumably defined later in the file.
    return [
           "UNIXISH",
           "IS_WIN32",
           "IS_MAC",
           "log",
           "runApp",
           "Process",
           "addCommonOptions",
           "initializeProfile",
           "DIST_BIN",
           "DEFAULT_APP",
           "CERTS_SRC_DIR",
           "environment",
           "IS_TEST_BUILD",
           "IS_DEBUG_BUILD",
           "DEFAULT_TIMEOUT",
          ]
|
|
|
|
|
|
|
|
  class Process(subprocess.Popen):
    """
    Represents our view of a subprocess.
    It adds a kill() method which allows it to be stopped explicitly.
    """

    def __init__(self,
                 args,
                 bufsize=0,
                 executable=None,
                 stdin=None,
                 stdout=None,
                 stderr=None,
                 preexec_fn=None,
                 close_fds=False,
                 shell=False,
                 cwd=None,
                 env=None,
                 universal_newlines=False,
                 startupinfo=None,
                 creationflags=0):
      # Let automationutils apply any platform-specific command wrapper
      # before handing the argument list to Popen.
      args = automationutils.wrapCommand(args)
      # Echo the final command line so test logs show exactly what ran.
      print "args: %s" % args
      subprocess.Popen.__init__(self, args, bufsize, executable,
                                stdin, stdout, stderr,
                                preexec_fn, close_fds,
                                shell, cwd, env,
                                universal_newlines, startupinfo, creationflags)
      self.log = _log

    def kill(self):
      # Forcefully terminate this process, using the platform's native
      # mechanism.  NOTE(review): instantiating Automation() just to read the
      # IS_WIN32 class attribute looks accidental; Automation.IS_WIN32 would
      # behave the same.
      if Automation().IS_WIN32:
        import platform
        pid = "%i" % self.pid
        if platform.release() == "2000":
          # Windows 2000 needs 'kill.exe' from the
          #'Windows 2000 Resource Kit tools'. (See bug 475455.)
          try:
            subprocess.Popen(["kill", "-f", pid]).wait()
          except:
            self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Missing 'kill' utility to kill process with pid=%s. Kill it manually!", pid)
        else:
          # Windows XP and later.
          subprocess.Popen(["taskkill", "/F", "/PID", pid]).wait()
      else:
        os.kill(self.pid, signal.SIGKILL)
|
|
|
|
|
|
|
|
def readLocations(self, locationsPath = "server-locations.txt"):
|
|
|
|
"""
|
|
|
|
Reads the locations at which the Mochitest HTTP server is available from
|
|
|
|
server-locations.txt.
|
|
|
|
"""
|
|
|
|
|
|
|
|
locationFile = codecs.open(locationsPath, "r", "UTF-8")
|
|
|
|
|
|
|
|
# Perhaps more detail than necessary, but it's the easiest way to make sure
|
|
|
|
# we get exactly the format we want. See server-locations.txt for the exact
|
|
|
|
# format guaranteed here.
|
|
|
|
lineRe = re.compile(r"^(?P<scheme>[a-z][-a-z0-9+.]*)"
|
2008-06-07 10:43:15 +04:00
|
|
|
r"://"
|
|
|
|
r"(?P<host>"
|
|
|
|
r"\d+\.\d+\.\d+\.\d+"
|
|
|
|
r"|"
|
|
|
|
r"(?:[a-z0-9](?:[-a-z0-9]*[a-z0-9])?\.)*"
|
|
|
|
r"[a-z](?:[-a-z0-9]*[a-z0-9])?"
|
|
|
|
r")"
|
|
|
|
r":"
|
|
|
|
r"(?P<port>\d+)"
|
|
|
|
r"(?:"
|
|
|
|
r"\s+"
|
2008-09-05 17:35:58 +04:00
|
|
|
r"(?P<options>\S+(?:,\S+)*)"
|
2008-06-07 10:43:15 +04:00
|
|
|
r")?$")
|
2010-01-15 20:22:54 +03:00
|
|
|
locations = []
|
|
|
|
lineno = 0
|
|
|
|
seenPrimary = False
|
|
|
|
for line in locationFile:
|
|
|
|
lineno += 1
|
|
|
|
if line.startswith("#") or line == "\n":
|
|
|
|
continue
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
match = lineRe.match(line)
|
|
|
|
if not match:
|
|
|
|
raise SyntaxError(lineno)
|
|
|
|
|
|
|
|
options = match.group("options")
|
|
|
|
if options:
|
|
|
|
options = options.split(",")
|
|
|
|
if "primary" in options:
|
|
|
|
if seenPrimary:
|
|
|
|
raise SyntaxError(lineno, "multiple primary locations")
|
|
|
|
seenPrimary = True
|
|
|
|
else:
|
|
|
|
options = []
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
locations.append(Location(match.group("scheme"), match.group("host"),
|
|
|
|
match.group("port"), options))
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
if not seenPrimary:
|
|
|
|
raise SyntaxError(lineno + 1, "missing primary location")
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
return locations
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-08-20 03:12:46 +04:00
|
|
|
def setupPermissionsDatabase(self, profileDir, permissions):
|
|
|
|
# Open database and create table
|
|
|
|
permDB = sqlite3.connect(os.path.join(profileDir, "permissions.sqlite"))
|
|
|
|
cursor = permDB.cursor();
|
2010-08-21 20:48:01 +04:00
|
|
|
# SQL copied from nsPermissionManager.cpp
|
2010-08-20 03:12:46 +04:00
|
|
|
cursor.execute("""CREATE TABLE moz_hosts (
|
|
|
|
id INTEGER PRIMARY KEY,
|
|
|
|
host TEXT,
|
|
|
|
type TEXT,
|
|
|
|
permission INTEGER,
|
|
|
|
expireType INTEGER,
|
|
|
|
expireTime INTEGER)""")
|
|
|
|
|
|
|
|
# Insert desired permissions
|
|
|
|
c = 0
|
|
|
|
for perm in permissions.keys():
|
2010-09-04 02:53:28 +04:00
|
|
|
for host,allow in permissions[perm]:
|
2010-08-20 03:12:46 +04:00
|
|
|
c += 1
|
2010-09-04 02:53:28 +04:00
|
|
|
cursor.execute("INSERT INTO moz_hosts values(?, ?, ?, ?, 0, 0)",
|
|
|
|
(c, host, perm, 1 if allow else 2))
|
2010-08-20 03:12:46 +04:00
|
|
|
|
|
|
|
# Commit and close
|
|
|
|
permDB.commit()
|
|
|
|
cursor.close()
|
|
|
|
|
2010-03-13 00:53:37 +03:00
|
|
|
  def initializeProfile(self, profileDir, extraPrefs = [], useServerLocations = False):
    " Sets up the standard testing profile."
    # Wipes profileDir and regenerates it: a permissions database granting
    # XUL/XBL to every server location, plus a user.js full of testing prefs.
    # When useServerLocations is True, codebase principals and a proxy PAC
    # are generated from server-locations.txt; otherwise only the single
    # primary origin gets codebase-principal powers.
    # NOTE(review): extraPrefs has a mutable default ([]); harmless here
    # since it is never mutated, but worth confirming.

    prefs = []
    # Start with a clean slate.
    shutil.rmtree(profileDir, True)
    os.mkdir(profileDir)

    # Set up permissions database
    locations = self.readLocations()
    self.setupPermissionsDatabase(profileDir,
      {'allowXULXBL':[(l.host, 'noxul' not in l.options) for l in locations]});

    part = """\
user_pref("browser.console.showInPanel", true);
user_pref("browser.dom.window.dump.enabled", true);
user_pref("browser.firstrun.show.localepicker", false);
user_pref("browser.firstrun.show.uidiscovery", false);
user_pref("browser.ui.layout.tablet", 0); // force tablet UI off
user_pref("dom.allow_scripts_to_close_windows", true);
user_pref("dom.disable_open_during_load", false);
user_pref("dom.max_script_run_time", 0); // no slow script dialogs
user_pref("dom.max_chrome_script_run_time", 0);
user_pref("dom.popup_maximum", -1);
user_pref("dom.send_after_paint_to_content", true);
user_pref("dom.successive_dialog_time_limit", 0);
user_pref("signed.applets.codebase_principal_support", true);
user_pref("security.warn_submit_insecure", false);
user_pref("browser.shell.checkDefaultBrowser", false);
user_pref("shell.checkDefaultClient", false);
user_pref("browser.warnOnQuit", false);
user_pref("accessibility.typeaheadfind.autostart", false);
user_pref("javascript.options.showInConsole", true);
user_pref("devtools.errorconsole.enabled", true);
user_pref("layout.debug.enable_data_xbl", true);
user_pref("browser.EULA.override", true);
user_pref("gfx.color_management.force_srgb", true);
user_pref("network.manage-offline-status", false);
user_pref("test.mousescroll", true);
user_pref("security.default_personal_cert", "Select Automatically"); // Need to client auth test be w/o any dialogs
user_pref("network.http.prompt-temp-redirect", false);
user_pref("media.cache_size", 100);
user_pref("security.warn_viewing_mixed", false);
user_pref("app.update.enabled", false);
user_pref("browser.panorama.experienced_first_run", true); // Assume experienced
user_pref("dom.w3c_touch_events.enabled", true);
user_pref("toolkit.telemetry.prompted", 2);

// Only load extensions from the application and user profile
// AddonManager.SCOPE_PROFILE + AddonManager.SCOPE_APPLICATION
user_pref("extensions.enabledScopes", 5);
// Disable metadata caching for installed add-ons by default
user_pref("extensions.getAddons.cache.enabled", false);
// Disable intalling any distribution add-ons
user_pref("extensions.installDistroAddons", false);

user_pref("extensions.testpilot.runStudies", false);

user_pref("geo.wifi.uri", "http://%(server)s/tests/dom/tests/mochitest/geolocation/network_geolocation.sjs");
user_pref("geo.wifi.testing", true);
user_pref("geo.ignore.location_filter", true);

user_pref("camino.warn_when_closing", false); // Camino-only, harmless to others

// Make url-classifier updates so rare that they won't affect tests
user_pref("urlclassifier.updateinterval", 172800);
// Point the url-classifier to the local testing server for fast failures
user_pref("browser.safebrowsing.provider.0.gethashURL", "http://%(server)s/safebrowsing-dummy/gethash");
user_pref("browser.safebrowsing.provider.0.keyURL", "http://%(server)s/safebrowsing-dummy/newkey");
user_pref("browser.safebrowsing.provider.0.updateURL", "http://%(server)s/safebrowsing-dummy/update");
// Point update checks to the local testing server for fast failures
user_pref("extensions.update.url", "http://%(server)s/extensions-dummy/updateURL");
user_pref("extensions.blocklist.url", "http://%(server)s/extensions-dummy/blocklistURL");
// Make sure opening about:addons won't hit the network
user_pref("extensions.webservice.discoverURL", "http://%(server)s/extensions-dummy/discoveryURL");
""" % { "server" : self.webServer + ":" + str(self.httpPort) }
    prefs.append(part)

    if useServerLocations == False:
      # Simple case: grant codebase-principal powers to the one test origin.
      part = """
user_pref("capability.principal.codebase.p1.granted",
          "UniversalXPConnect UniversalBrowserRead UniversalBrowserWrite \
           UniversalPreferencesRead UniversalPreferencesWrite \
           UniversalFileRead");
user_pref("capability.principal.codebase.p1.id", "%(origin)s");
user_pref("capability.principal.codebase.p1.subjectName", "");
""" % { "origin": "http://" + self.webServer + ":" + str(self.httpPort) }
      prefs.append(part)
    else:
      # Grant God-power to all the privileged servers on which tests run.
      privileged = filter(lambda loc: "privileged" in loc.options, locations)
      for (i, l) in itertools.izip(itertools.count(1), privileged):
        part = """
user_pref("capability.principal.codebase.p%(i)d.granted",
          "UniversalXPConnect UniversalBrowserRead UniversalBrowserWrite \
           UniversalPreferencesRead UniversalPreferencesWrite \
           UniversalFileRead");
user_pref("capability.principal.codebase.p%(i)d.id", "%(origin)s");
user_pref("capability.principal.codebase.p%(i)d.subjectName", "");
""" % { "i": i,
        "origin": (l.scheme + "://" + l.host + ":" + str(l.port)) }
        prefs.append(part)

      # We need to proxy every server but the primary one.
      origins = ["'%s://%s:%s'" % (l.scheme, l.host, l.port)
                 for l in filter(lambda l: "primary" not in l.options, locations)]
      origins = ", ".join(origins)

      # PAC script served as a data: URL; newlines are stripped below, so the
      # internal layout is cosmetic only.
      pacURL = """data:text/plain,
function FindProxyForURL(url, host)
{
  var origins = [%(origins)s];
  var regex = new RegExp('^([a-z][-a-z0-9+.]*)' +
                         '://' +
                         '(?:[^/@]*@)?' +
                         '(.*?)' +
                         '(?::(\\\\\\\\d+))?/');
  var matches = regex.exec(url);
  if (!matches)
    return 'DIRECT';
  var isHttp = matches[1] == 'http';
  var isHttps = matches[1] == 'https';
  var isWebSocket = matches[1] == 'ws';
  var isWebSocketSSL = matches[1] == 'wss';
  if (!matches[3])
  {
    if (isHttp | isWebSocket) matches[3] = '80';
    if (isHttps | isWebSocketSSL) matches[3] = '443';
  }
  if (isWebSocket)
    matches[1] = 'http';
  if (isWebSocketSSL)
    matches[1] = 'https';

  var origin = matches[1] + '://' + matches[2] + ':' + matches[3];
  if (origins.indexOf(origin) < 0)
    return 'DIRECT';
  if (isHttp)
    return 'PROXY %(remote)s:%(httpport)s';
  if (isHttps || isWebSocket || isWebSocketSSL)
    return 'PROXY %(remote)s:%(sslport)s';
  return 'DIRECT';
}""" % { "origins": origins,
         "remote": self.webServer,
         "httpport":self.httpPort,
         "sslport": self.sslPort }
      pacURL = "".join(pacURL.splitlines())

      # NOTE(review): 'part' still holds the last chunk appended above (or
      # the main pref block if there were no privileged locations), so that
      # chunk ends up in prefs twice via this +=.  Later duplicate user_pref
      # lines win, so this appears harmless, but it looks unintentional —
      # confirm before relying on it.
      part += """
user_pref("network.proxy.type", 2);
user_pref("network.proxy.autoconfig_url", "%(pacURL)s");

user_pref("camino.use_system_proxy_settings", false); // Camino-only, harmless to others
""" % {"pacURL": pacURL}
      prefs.append(part)

    # Append any caller-supplied "PREF=VALUE" overrides.
    for v in extraPrefs:
      thispref = v.split("=")
      if len(thispref) < 2:
        print "Error: syntax error in --setpref=" + v
        sys.exit(1)
      part = 'user_pref("%s", %s);\n' % (thispref[0], thispref[1])
      prefs.append(part)

    # write the preferences
    prefsFile = open(profileDir + "/" + "user.js", "a")
    prefsFile.write("".join(prefs))
    prefsFile.close()
|
|
|
|
|
|
|
|
def addCommonOptions(self, parser):
|
|
|
|
"Adds command-line options which are common to mochitest and reftest."
|
|
|
|
|
|
|
|
parser.add_option("--setpref",
|
|
|
|
action = "append", type = "string",
|
|
|
|
default = [],
|
|
|
|
dest = "extraPrefs", metavar = "PREF=VALUE",
|
|
|
|
help = "defines an extra user preference")
|
|
|
|
|
|
|
|
  def fillCertificateDB(self, profileDir, certPath, utilityPath, xrePath):
    # Builds the profile's NSS certificate database and the ssltunnel
    # configuration file from the locations in server-locations.txt.
    # Returns 0 on success, or certutil's nonzero exit status on failure.
    # Write an empty-password file for the cert DB tools.
    pwfilePath = os.path.join(profileDir, ".crtdbpw")

    pwfile = open(pwfilePath, "w")
    pwfile.write("\n")
    pwfile.close()

    # Create head of the ssltunnel configuration file
    sslTunnelConfigPath = os.path.join(profileDir, "ssltunnel.cfg")
    sslTunnelConfig = open(sslTunnelConfigPath, "w")

    sslTunnelConfig.write("httpproxy:1\n")
    sslTunnelConfig.write("certdbdir:%s\n" % certPath)
    sslTunnelConfig.write("forward:127.0.0.1:%s\n" % self.httpPort)
    sslTunnelConfig.write("websocketserver:%s:%s\n" % (self.webServer, self.webSocketPort))
    sslTunnelConfig.write("listen:*:%s:pgo server certificate\n" % self.sslPort)

    # Configure automatic certificate and bind custom certificates, client authentication
    # The first (primary) location is skipped — presumably covered by the
    # wildcard listen line above; confirm against ssltunnel docs.
    locations = self.readLocations()
    locations.pop(0)
    for loc in locations:
      if loc.scheme == "https" and "nocert" not in loc.options:
        customCertRE = re.compile("^cert=(?P<nickname>[0-9a-zA-Z_ ]+)")
        clientAuthRE = re.compile("^clientauth=(?P<clientauth>[a-z]+)")
        redirRE      = re.compile("^redir=(?P<redirhost>[0-9a-zA-Z_ .]+)")
        for option in loc.options:
          match = customCertRE.match(option)
          if match:
            customcert = match.group("nickname");
            sslTunnelConfig.write("listen:%s:%s:%s:%s\n" %
                      (loc.host, loc.port, self.sslPort, customcert))

          match = clientAuthRE.match(option)
          if match:
            clientauth = match.group("clientauth");
            sslTunnelConfig.write("clientauth:%s:%s:%s:%s\n" %
                      (loc.host, loc.port, self.sslPort, clientauth))

          match = redirRE.match(option)
          if match:
            redirhost = match.group("redirhost")
            sslTunnelConfig.write("redirhost:%s:%s:%s:%s\n" %
                      (loc.host, loc.port, self.sslPort, redirhost))

    sslTunnelConfig.close()

    # Pre-create the certification database for the profile
    env = self.environment(xrePath = xrePath)
    certutil = os.path.join(utilityPath, "certutil" + self.BIN_SUFFIX)
    pk12util = os.path.join(utilityPath, "pk12util" + self.BIN_SUFFIX)

    status = self.Process([certutil, "-N", "-d", profileDir, "-f", pwfilePath], env = env).wait()
    if status != 0:
      return status

    # Walk the cert directory and add custom CAs and client certs
    files = os.listdir(certPath)
    for item in files:
      root, ext = os.path.splitext(item)
      if ext == ".ca":
        # CA certs get object-signing trust too when named "*-object".
        trustBits = "CT,,"
        if root.endswith("-object"):
          trustBits = "CT,,CT"
        self.Process([certutil, "-A", "-i", os.path.join(certPath, item),
                     "-d", profileDir, "-f", pwfilePath, "-n", root, "-t", trustBits],
                     env = env).wait()
      if ext == ".client":
        self.Process([pk12util, "-i", os.path.join(certPath, item), "-w",
                     pwfilePath, "-d", profileDir],
                     env = env).wait()

    os.unlink(pwfilePath)
    return 0
|
|
|
|
|
|
|
|
def environment(self, env = None, xrePath = None, crashreporter = True):
|
|
|
|
if xrePath == None:
|
|
|
|
xrePath = self.DIST_BIN
|
|
|
|
if env == None:
|
|
|
|
env = dict(os.environ)
|
|
|
|
|
2010-03-18 21:14:14 +03:00
|
|
|
ldLibraryPath = os.path.abspath(os.path.join(SCRIPT_DIR, xrePath))
|
2010-01-15 20:22:54 +03:00
|
|
|
if self.UNIXISH or self.IS_MAC:
|
|
|
|
envVar = "LD_LIBRARY_PATH"
|
|
|
|
if self.IS_MAC:
|
|
|
|
envVar = "DYLD_LIBRARY_PATH"
|
|
|
|
else: # unixish
|
|
|
|
env['MOZILLA_FIVE_HOME'] = xrePath
|
|
|
|
if envVar in env:
|
|
|
|
ldLibraryPath = ldLibraryPath + ":" + env[envVar]
|
|
|
|
env[envVar] = ldLibraryPath
|
|
|
|
elif self.IS_WIN32:
|
|
|
|
env["PATH"] = env["PATH"] + ";" + ldLibraryPath
|
|
|
|
|
|
|
|
if crashreporter:
|
|
|
|
env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
|
|
|
|
env['MOZ_CRASHREPORTER'] = '1'
|
2010-01-14 01:44:49 +03:00
|
|
|
else:
|
2010-01-15 20:22:54 +03:00
|
|
|
env['MOZ_CRASHREPORTER_DISABLE'] = '1'
|
|
|
|
|
|
|
|
env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
|
|
|
|
env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'
|
2011-10-29 02:43:49 +04:00
|
|
|
env['NS_TRACE_MALLOC_DISABLE_STACKS'] = '1'
|
2010-01-15 20:22:54 +03:00
|
|
|
return env
|
|
|
|
|
|
|
|
if IS_WIN32:
|
|
|
|
PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
|
|
|
|
GetLastError = ctypes.windll.kernel32.GetLastError
|
|
|
|
|
|
|
|
def readWithTimeout(self, f, timeout):
|
|
|
|
"""Try to read a line of output from the file object |f|.
|
|
|
|
|f| must be a pipe, like the |stdout| member of a subprocess.Popen
|
|
|
|
object created with stdout=PIPE. If no output
|
|
|
|
is received within |timeout| seconds, return a blank line.
|
|
|
|
Returns a tuple (line, did_timeout), where |did_timeout| is True
|
|
|
|
if the read timed out, and False otherwise."""
|
|
|
|
if timeout is None:
|
|
|
|
# shortcut to allow callers to pass in "None" for no timeout.
|
|
|
|
return (f.readline(), False)
|
2010-03-18 21:14:14 +03:00
|
|
|
x = msvcrt.get_osfhandle(f.fileno())
|
|
|
|
l = ctypes.c_long()
|
|
|
|
done = time.time() + timeout
|
|
|
|
while time.time() < done:
|
|
|
|
if self.PeekNamedPipe(x, None, 0, None, ctypes.byref(l), None) == 0:
|
2010-01-15 20:22:54 +03:00
|
|
|
err = self.GetLastError()
|
|
|
|
if err == 38 or err == 109: # ERROR_HANDLE_EOF || ERROR_BROKEN_PIPE
|
|
|
|
return ('', False)
|
|
|
|
else:
|
|
|
|
log.error("readWithTimeout got error: %d", err)
|
2010-01-25 23:37:58 +03:00
|
|
|
if l.value > 0:
|
2010-01-15 20:22:54 +03:00
|
|
|
# we're assuming that the output is line-buffered,
|
|
|
|
# which is not unreasonable
|
|
|
|
return (f.readline(), False)
|
2010-03-18 21:14:14 +03:00
|
|
|
time.sleep(0.01)
|
2009-10-14 00:56:24 +04:00
|
|
|
return ('', True)
|
2010-01-14 00:53:26 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
def isPidAlive(self, pid):
|
|
|
|
STILL_ACTIVE = 259
|
|
|
|
PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
|
2010-03-18 21:14:14 +03:00
|
|
|
pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
|
2010-01-15 20:22:54 +03:00
|
|
|
if not pHandle:
|
|
|
|
return False
|
2010-01-27 01:46:47 +03:00
|
|
|
pExitCode = ctypes.wintypes.DWORD()
|
2010-06-26 09:40:13 +04:00
|
|
|
ctypes.windll.kernel32.GetExitCodeProcess(pHandle, ctypes.byref(pExitCode))
|
2010-03-18 21:14:14 +03:00
|
|
|
ctypes.windll.kernel32.CloseHandle(pHandle)
|
2011-04-14 15:03:31 +04:00
|
|
|
return pExitCode.value == STILL_ACTIVE
|
2010-01-15 20:22:54 +03:00
|
|
|
|
|
|
|
def killPid(self, pid):
|
|
|
|
PROCESS_TERMINATE = 0x0001
|
2010-03-18 21:14:14 +03:00
|
|
|
pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, 0, pid)
|
2010-01-15 20:22:54 +03:00
|
|
|
if not pHandle:
|
2009-10-16 21:34:27 +04:00
|
|
|
return
|
2010-03-18 21:14:14 +03:00
|
|
|
success = ctypes.windll.kernel32.TerminateProcess(pHandle, 1)
|
|
|
|
ctypes.windll.kernel32.CloseHandle(pHandle)
|
2010-01-07 01:03:29 +03:00
|
|
|
|
2008-02-22 00:08:39 +03:00
|
|
|
else:
|
2010-01-15 20:22:54 +03:00
|
|
|
|
|
|
|
def readWithTimeout(self, f, timeout):
|
|
|
|
"""Try to read a line of output from the file object |f|. If no output
|
|
|
|
is received within |timeout| seconds, return a blank line.
|
|
|
|
Returns a tuple (line, did_timeout), where |did_timeout| is True
|
|
|
|
if the read timed out, and False otherwise."""
|
|
|
|
(r, w, e) = select.select([f], [], [], timeout)
|
|
|
|
if len(r) == 0:
|
|
|
|
return ('', True)
|
|
|
|
return (f.readline(), False)
|
|
|
|
|
|
|
|
def isPidAlive(self, pid):
    """Return True if |pid| refers to a process that is still running.

    Uses the "null signal" trick plus a non-blocking waitpid() so that a
    zombie child (exited but not yet reaped) is reported as dead.
    NOTE(review): os.waitpid() only succeeds for direct children of this
    process; for unrelated PIDs it raises ECHILD, which is treated as
    "not alive" below.
    """
    try:
        # kill(pid, 0) checks for a valid PID without actually sending a signal
        # The method throws OSError if the PID is invalid, which we catch below.
        os.kill(pid, 0)

        # Wait on it to see if it's a zombie. This can throw OSError.ECHILD if
        # the process terminates before we get to this point.
        wpid, wstatus = os.waitpid(pid, os.WNOHANG)

        # waitpid() returns 0 while the child exists but has not changed
        # state, i.e. it is still running (not a reaped zombie).
        return wpid == 0
    except OSError, err:
        # Catch the errors we might expect from os.kill/os.waitpid,
        # and re-raise any others
        if err.errno == errno.ESRCH or err.errno == errno.ECHILD:
            return False
        raise
|
|
|
|
|
|
|
|
def killPid(self, pid):
    """Forcibly terminate process |pid| with an uncatchable SIGKILL."""
    os.kill(pid, signal.SIGKILL)
|
|
|
|
|
2011-11-02 18:56:35 +04:00
|
|
|
def dumpScreen(self, utilityPath):
|
|
|
|
self.haveDumpedScreen = True;
|
|
|
|
|
|
|
|
# Need to figure out what tool and whether it write to a file or stdout
|
|
|
|
if self.UNIXISH:
|
|
|
|
utility = [os.path.join(utilityPath, "screentopng")]
|
|
|
|
imgoutput = 'stdout'
|
|
|
|
elif self.IS_MAC:
|
|
|
|
utility = ['/usr/sbin/screencapture', '-C', '-x', '-t', 'png']
|
|
|
|
imgoutput = 'file'
|
|
|
|
elif self.IS_WIN32:
|
|
|
|
self.log.info("If you fixed bug 589668, you'd get a screenshot here")
|
|
|
|
return
|
|
|
|
|
|
|
|
# Run the capture correctly for the type of capture
|
|
|
|
try:
|
|
|
|
if imgoutput == 'file':
|
|
|
|
tmpfd, imgfilename = tempfile.mkstemp(prefix='mozilla-test-fail_')
|
|
|
|
os.close(tmpfd)
|
|
|
|
dumper = self.Process(utility + [imgfilename])
|
|
|
|
elif imgoutput == 'stdout':
|
|
|
|
dumper = self.Process(utility, bufsize=-1,
|
|
|
|
stdout=subprocess.PIPE, close_fds=True)
|
|
|
|
except OSError, err:
|
|
|
|
self.log.info("Failed to start %s for screenshot: %s",
|
|
|
|
utility[0], err.strerror)
|
|
|
|
return
|
|
|
|
|
|
|
|
# Check whether the capture utility ran successfully
|
|
|
|
dumper_out, dumper_err = dumper.communicate()
|
|
|
|
if dumper.returncode != 0:
|
|
|
|
self.log.info("%s exited with code %d", utility, dumper.returncode)
|
|
|
|
return
|
|
|
|
|
|
|
|
try:
|
|
|
|
if imgoutput == 'stdout':
|
|
|
|
image = dumper_out
|
|
|
|
elif imgoutput == 'file':
|
|
|
|
with open(imgfilename) as imgfile:
|
|
|
|
image = imgfile.read()
|
|
|
|
except IOError, err:
|
|
|
|
self.log.info("Failed to read image from %s", imgoutput)
|
|
|
|
|
|
|
|
import base64
|
|
|
|
encoded = base64.b64encode(image)
|
|
|
|
self.log.info("SCREENSHOT: data:image/png;base64,%s", encoded)
|
2010-08-23 05:27:26 +04:00
|
|
|
|
2010-03-21 09:08:49 +03:00
|
|
|
def killAndGetStack(self, proc, utilityPath, debuggerInfo):
    """Kill the process, preferrably in a way that gets us a stack trace."""
    # Capture the screen first: once the process is dead there is nothing to see.
    if not debuggerInfo and not self.haveDumpedScreen:
        self.dumpScreen(utilityPath)

    crashReportingPossible = self.CRASHREPORTER and not debuggerInfo
    if crashReportingPossible and self.UNIXISH:
        # ABRT will get picked up by Breakpad's signal handler
        os.kill(proc.pid, signal.SIGABRT)
        return
    if crashReportingPossible and self.IS_WIN32:
        # We should have a "crashinject" program in our utility path
        injector = os.path.normpath(os.path.join(utilityPath, "crashinject.exe"))
        if os.path.exists(injector):
            if subprocess.Popen([injector, str(proc.pid)]).wait() == 0:
                return
    #TODO: kill the process such that it triggers Breakpad on OS X (bug 525296)
    self.log.info("Can't trigger Breakpad, just killing process")
    proc.kill()
|
|
|
|
|
2010-06-26 01:47:19 +04:00
|
|
|
def waitForFinish(self, proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath):
    """ Look for timeout or crashes and return the status after the process terminates

    proc         -- the running application process; its stdout is pumped
                    line by line into self.log.
    utilityPath  -- directory with helper tools (stack fixers, screenshot).
    timeout      -- max seconds to wait for a single line of output.
    maxTime      -- max total seconds the application may run.
    startTime    -- datetime when the application was launched.
    debuggerInfo -- truthy when a debugger is attached (disables screenshots).
    symbolsPath  -- breakpad symbol directory, if available.
    """
    # stackFixerProcess: external child that rewrites the log stream
    # (Linux addr2line path). stackFixerFunction: in-process line filter
    # (breakpad-symbol / atos paths). At most one of the two is set.
    stackFixerProcess = None
    stackFixerFunction = None
    # didTimeout: no output arrived for |timeout| seconds.
    didTimeout = False
    # hitMaxTime: total run time exceeded |maxTime| seconds.
    hitMaxTime = False
    if proc.stdout is None:
        self.log.info("TEST-INFO: Not logging stdout or stderr due to debugger connection")
    else:
        logsource = proc.stdout

        if self.IS_DEBUG_BUILD and (self.IS_MAC or self.IS_LINUX) and symbolsPath and os.path.exists(symbolsPath):
            # Run each line through a function in fix_stack_using_bpsyms.py (uses breakpad symbol files)
            # This method is preferred for Tinderbox builds, since native symbols may have been stripped.
            sys.path.insert(0, utilityPath)
            import fix_stack_using_bpsyms as stackFixerModule
            stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line, symbolsPath)
            del sys.path[0]
        elif self.IS_DEBUG_BUILD and self.IS_MAC and False:
            # Run each line through a function in fix_macosx_stack.py (uses atos)
            # NOTE(review): deliberately disabled via "and False" — kept for reference.
            sys.path.insert(0, utilityPath)
            import fix_macosx_stack as stackFixerModule
            stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line)
            del sys.path[0]
        elif self.IS_DEBUG_BUILD and self.IS_LINUX:
            # Run logsource through fix-linux-stack.pl (uses addr2line)
            # This method is preferred for developer machines, so we don't have to run "make buildsymbols".
            stackFixerProcess = self.Process([self.PERL, os.path.join(utilityPath, "fix-linux-stack.pl")],
                                             stdin=logsource,
                                             stdout=subprocess.PIPE)
            logsource = stackFixerProcess.stdout

        # Pump the log one line at a time until EOF or a per-line timeout.
        (line, didTimeout) = self.readWithTimeout(logsource, timeout)
        while line != "" and not didTimeout:
            # Track the most recent test name so failures are attributed correctly.
            if "TEST-START" in line and "|" in line:
                self.lastTestSeen = line.split("|")[1].strip()
            if stackFixerFunction:
                line = stackFixerFunction(line)
            self.log.info(line.rstrip().decode("UTF-8", "ignore"))
            # Grab a screenshot on the first in-test timeout, while the app is still alive.
            if not debuggerInfo and not self.haveDumpedScreen and "TEST-UNEXPECTED-FAIL" in line and "Test timed out" in line:
                self.dumpScreen(utilityPath)

            (line, didTimeout) = self.readWithTimeout(logsource, timeout)
            if not hitMaxTime and maxTime and datetime.now() - startTime > timedelta(seconds = maxTime):
                # Kill the application, but continue reading from stack fixer so as not to deadlock on stackFixerProcess.wait().
                hitMaxTime = True
                self.log.info("TEST-UNEXPECTED-FAIL | %s | application ran for longer than allowed maximum time of %d seconds", self.lastTestSeen, int(maxTime))
                self.killAndGetStack(proc, utilityPath, debuggerInfo)
        if didTimeout:
            self.log.info("TEST-UNEXPECTED-FAIL | %s | application timed out after %d seconds with no output", self.lastTestSeen, int(timeout))
            self.killAndGetStack(proc, utilityPath, debuggerInfo)

    status = proc.wait()
    if status == 0:
        self.lastTestSeen = "Main app process exited normally"
    # A nonzero exit is only unexpected if we did not kill the app ourselves.
    if status != 0 and not didTimeout and not hitMaxTime:
        self.log.info("TEST-UNEXPECTED-FAIL | %s | Exited with code %d during test run", self.lastTestSeen, status)
    if stackFixerProcess is not None:
        fixerStatus = stackFixerProcess.wait()
        if fixerStatus != 0 and not didTimeout and not hitMaxTime:
            self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Stack fixer process exited with code %d during test run", fixerStatus)
    return status
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs):
    """ build the application command line

    Returns (executable, argument-list) ready for process launch.
    """
    executable = os.path.abspath(app)
    # Prefer 'app-bin' in case 'app' is a shell script.
    # We can remove this hack once bug 673899 etc are fixed.
    if self.IS_MAC and not self.IS_CAMINO and os.path.exists(executable + "-bin"):
        executable += "-bin"

    appArgs = []

    if debuggerInfo:
        # The debugger becomes the command; the app is one of its arguments.
        appArgs.extend(debuggerInfo["args"])
        appArgs.append(executable)
        executable = os.path.abspath(debuggerInfo["path"])

    if self.IS_MAC:
        appArgs.append("-foreground")

    if self.IS_CYGWIN:
        profileDirectory = commands.getoutput("cygpath -w \"" + profileDir + "/\"")
    else:
        profileDirectory = profileDir + "/"

    appArgs += ["-no-remote", "-profile", profileDirectory]
    if testURL is not None:
        if self.IS_CAMINO:
            appArgs += ["-url", testURL]
        else:
            appArgs.append(testURL)
    appArgs.extend(extraArgs)
    return executable, appArgs
|
|
|
|
|
|
|
|
def checkForZombies(self, processLog):
    """ Look for hung processes

    Reads the PID log written by the browser (one "launched child process N"
    line per child) and kills any child that is still alive after shutdown,
    logging a TEST-UNEXPECTED-FAIL for each one.
    """
    if not os.path.exists(processLog):
        self.log.info('INFO | automation.py | PID log not found: %s', processLog)
        return

    self.log.info('INFO | automation.py | Reading PID log: %s', processLog)
    childPidPattern = re.compile(r'launched child process (\d+)$')
    orphanCandidates = []
    logFile = open(processLog)
    for logLine in logFile:
        self.log.info(logLine.rstrip())
        match = childPidPattern.search(logLine)
        if match:
            orphanCandidates.append(int(match.group(1)))
    logFile.close()

    for childPid in orphanCandidates:
        self.log.info("INFO | automation.py | Checking for orphan process with PID: %d", childPid)
        if self.isPidAlive(childPid):
            self.log.info("TEST-UNEXPECTED-FAIL | automation.py | child process %d still alive after shutdown", childPid)
            self.killPid(childPid)
|
|
|
|
|
2011-09-21 18:27:16 +04:00
|
|
|
def checkForCrashes(self, profileDir, symbolsPath):
    """Scan the profile's minidump directory for crash dumps.

    Delegates to automationutils.checkForCrashes, attributing any crash
    found to |self.lastTestSeen|.
    """
    dumpDirectory = os.path.join(profileDir, "minidumps")
    automationutils.checkForCrashes(dumpDirectory, symbolsPath, self.lastTestSeen)
|
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
def runApp(self, testURL, env, app, profileDir, extraArgs,
           runSSLTunnel = False, utilityPath = None,
           xrePath = None, certPath = None,
           debuggerInfo = None, symbolsPath = None,
           timeout = -1, maxTime = None):
    """
    Run the app, log the duration it took to execute, return the status code.
    Kills the app if it runs for longer than |maxTime| seconds, or outputs nothing for |timeout| seconds.

    testURL      -- URL to load, or None.
    env          -- environment dict for the child (copied, not mutated).
    app          -- path to the application binary (or launcher script).
    profileDir   -- profile directory to run against.
    extraArgs    -- additional command-line arguments.
    runSSLTunnel -- start ssltunnel for https:// support (test builds only).
    debuggerInfo -- debugger descriptor dict, or None.
    Returns the application's exit status (or the certificate setup status
    if certificate integration fails).
    """
    # Fill in defaults that depend on instance configuration.
    if utilityPath == None:
        utilityPath = self.DIST_BIN
    if xrePath == None:
        xrePath = self.DIST_BIN
    if certPath == None:
        certPath = self.CERTS_SRC_DIR
    if timeout == -1:
        timeout = self.DEFAULT_TIMEOUT

    # copy env so we don't munge the caller's environment
    env = dict(env);
    env["NO_EM_RESTART"] = "1"
    # Temp file where the browser records child-process PIDs; scanned for
    # zombies after shutdown and deleted below.
    tmpfd, processLog = tempfile.mkstemp(suffix='pidlog')
    os.close(tmpfd)
    env["MOZ_PROCESS_LOG"] = processLog

    if self.IS_TEST_BUILD and runSSLTunnel:
        # create certificate database for the profile
        certificateStatus = self.fillCertificateDB(profileDir, certPath, utilityPath, xrePath)
        if certificateStatus != 0:
            self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Certificate integration failed")
            return certificateStatus

        # start ssltunnel to provide https:// URLs capability
        ssltunnel = os.path.join(utilityPath, "ssltunnel" + self.BIN_SUFFIX)
        ssltunnelProcess = self.Process([ssltunnel,
                                         os.path.join(profileDir, "ssltunnel.cfg")],
                                         env = self.environment(xrePath = xrePath))
        self.log.info("INFO | automation.py | SSL tunnel pid: %d", ssltunnelProcess.pid)

    cmd, args = self.buildCommandLine(app, debuggerInfo, profileDir, testURL, extraArgs)
    startTime = datetime.now()

    if debuggerInfo and debuggerInfo["interactive"]:
        # If an interactive debugger is attached, don't redirect output,
        # don't use timeouts, and don't capture ctrl-c.
        timeout = None
        maxTime = None
        outputPipe = None
        signal.signal(signal.SIGINT, lambda sigid, frame: None)
    else:
        outputPipe = subprocess.PIPE

    self.lastTestSeen = "automation.py"
    # stderr is folded into stdout so the log pump sees a single stream.
    proc = self.Process([cmd] + args,
                        env = self.environment(env, xrePath = xrePath,
                                               crashreporter = not debuggerInfo),
                        stdout = outputPipe,
                        stderr = subprocess.STDOUT)
    self.log.info("INFO | automation.py | Application pid: %d", proc.pid)

    status = self.waitForFinish(proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath)
    self.log.info("INFO | automation.py | Application ran for: %s", str(datetime.now() - startTime))

    # Do a final check for zombie child processes.
    self.checkForZombies(processLog)
    self.checkForCrashes(profileDir, symbolsPath)

    if os.path.exists(processLog):
        os.unlink(processLog)

    if self.IS_TEST_BUILD and runSSLTunnel:
        ssltunnelProcess.kill()

    return status
|
2010-06-24 13:32:01 +04:00
|
|
|
|
2011-10-14 19:45:58 +04:00
|
|
|
def getExtensionIDFromRDF(self, rdfSource):
|
|
|
|
"""
|
|
|
|
Retrieves the extension id from an install.rdf file (or string).
|
|
|
|
"""
|
|
|
|
from xml.dom.minidom import parse, parseString, Node
|
|
|
|
|
|
|
|
if isinstance(rdfSource, file):
|
|
|
|
document = parse(rdfSource)
|
|
|
|
else:
|
|
|
|
document = parseString(rdfSource)
|
|
|
|
|
|
|
|
# Find the <em:id> element. There can be multiple <em:id> tags
|
|
|
|
# within <em:targetApplication> tags, so we have to check this way.
|
|
|
|
for rdfChild in document.documentElement.childNodes:
|
|
|
|
if rdfChild.nodeType == Node.ELEMENT_NODE and rdfChild.tagName == "Description":
|
|
|
|
for descChild in rdfChild.childNodes:
|
|
|
|
if descChild.nodeType == Node.ELEMENT_NODE and descChild.tagName == "em:id":
|
|
|
|
return descChild.childNodes[0].data
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
2011-04-14 15:03:47 +04:00
|
|
|
def installExtension(self, extensionSource, profileDir, extensionID = None):
    """
    Copies an extension into the extensions directory of the given profile.
    extensionSource - the source location of the extension files. This can be either
                      a directory or a path to an xpi file.
    profileDir      - the profile directory we are copying into. We will create the
                      "extensions" directory there if it doesn't exist.
    extensionID     - the id of the extension to be used as the containing directory for the
                      extension, if extensionSource is a directory, i.e.
                      this is the name of the folder in the <profileDir>/extensions/<extensionID>

    Best-effort: every failure path logs an INFO message and returns
    without raising.
    """
    if not os.path.isdir(profileDir):
        self.log.info("INFO | automation.py | Cannot install extension, invalid profileDir at: %s", profileDir)
        return

    installRDFFilename = "install.rdf"

    # Extensions are placed under extensions/staged — presumably picked up
    # and installed by the add-on manager on next startup; confirm against
    # the consuming application's behavior.
    extensionsRootDir = os.path.join(profileDir, "extensions", "staged")
    if not os.path.isdir(extensionsRootDir):
        os.makedirs(extensionsRootDir)

    if os.path.isfile(extensionSource):
        # A file source is assumed to be an XPI (zip) archive.
        reader = automationutils.ZipFileReader(extensionSource)

        for filename in reader.namelist():
            # Sanity check the zip file.
            if os.path.isabs(filename):
                self.log.info("INFO | automation.py | Cannot install extension, bad files in xpi")
                return

            # We may need to dig the extensionID out of the zip file...
            if extensionID is None and filename == installRDFFilename:
                extensionID = self.getExtensionIDFromRDF(reader.read(filename))

        # We must know the extensionID now.
        if extensionID is None:
            self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
            return

        # Make the extension directory.
        extensionDir = os.path.join(extensionsRootDir, extensionID)
        os.mkdir(extensionDir)

        # Extract all files.
        reader.extractall(extensionDir)

    elif os.path.isdir(extensionSource):
        # A directory source is copied verbatim; the id comes from the
        # caller or from the directory's own install.rdf.
        if extensionID is None:
            filename = os.path.join(extensionSource, installRDFFilename)
            if os.path.isfile(filename):
                with open(filename, "r") as installRDF:
                    extensionID = self.getExtensionIDFromRDF(installRDF)

            if extensionID is None:
                self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
                return

        # Copy extension tree into its own directory.
        # "destination directory must not already exist".
        shutil.copytree(extensionSource, os.path.join(extensionsRootDir, extensionID))

    else:
        self.log.info("INFO | automation.py | Cannot install extension, invalid extensionSource at: %s", extensionSource)
|