2008-02-22 00:08:39 +03:00
|
|
|
#
|
|
|
|
# ***** BEGIN LICENSE BLOCK *****
|
|
|
|
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
|
|
|
#
|
|
|
|
# The contents of this file are subject to the Mozilla Public License Version
|
|
|
|
# 1.1 (the "License"); you may not use this file except in compliance with
|
|
|
|
# the License. You may obtain a copy of the License at
|
|
|
|
# http://www.mozilla.org/MPL/
|
|
|
|
#
|
|
|
|
# Software distributed under the License is distributed on an "AS IS" basis,
|
|
|
|
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
|
|
|
# for the specific language governing rights and limitations under the
|
|
|
|
# License.
|
|
|
|
#
|
|
|
|
# The Original Code is mozilla.org code.
|
|
|
|
#
|
|
|
|
# The Initial Developer of the Original Code is
|
|
|
|
# Mozilla Foundation.
|
|
|
|
# Portions created by the Initial Developer are Copyright (C) 2008
|
|
|
|
# the Initial Developer. All Rights Reserved.
|
|
|
|
#
|
|
|
|
# Contributor(s):
|
|
|
|
# Robert Sayre <sayrer@gmail.com>
|
|
|
|
# Jeff Walden <jwalden+bmo@mit.edu>
|
|
|
|
#
|
|
|
|
# Alternatively, the contents of this file may be used under the terms of
|
|
|
|
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
|
|
|
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
|
|
|
# in which case the provisions of the GPL or the LGPL are applicable instead
|
|
|
|
# of those above. If you wish to allow use of your version of this file only
|
|
|
|
# under the terms of either the GPL or the LGPL, and not to allow others to
|
|
|
|
# use your version of this file under the terms of the MPL, indicate your
|
|
|
|
# decision by deleting the provisions above and replace them with the notice
|
|
|
|
# and other provisions required by the GPL or the LGPL. If you do not delete
|
|
|
|
# the provisions above, a recipient may use your version of this file under
|
|
|
|
# the terms of any one of the MPL, the GPL or the LGPL.
|
|
|
|
#
|
|
|
|
# ***** END LICENSE BLOCK *****
|
|
|
|
|
2008-06-07 10:43:15 +04:00
|
|
|
import codecs
|
2010-01-07 03:45:46 +03:00
|
|
|
from datetime import datetime, timedelta
|
2008-02-22 00:08:39 +03:00
|
|
|
import itertools
|
2008-04-08 09:18:45 +04:00
|
|
|
import logging
|
2008-02-22 00:08:39 +03:00
|
|
|
import os
|
2008-06-07 10:43:15 +04:00
|
|
|
import re
|
2009-10-14 00:56:24 +04:00
|
|
|
import select
|
2009-01-12 22:23:28 +03:00
|
|
|
import shutil
|
2008-04-08 09:18:45 +04:00
|
|
|
import signal
|
2009-01-12 22:23:28 +03:00
|
|
|
import subprocess
|
2008-02-22 00:08:39 +03:00
|
|
|
import sys
|
2008-04-08 09:18:45 +04:00
|
|
|
import threading
|
2009-11-20 22:48:56 +03:00
|
|
|
import tempfile
|
2009-05-11 23:54:39 +04:00
|
|
|
|
2010-01-07 01:03:29 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
#expand _DIST_BIN = __XPC_BIN_PATH__
|
|
|
|
#expand _IS_WIN32 = len("__WIN32__") != 0
|
|
|
|
#expand _IS_MAC = __IS_MAC__ != 0
|
|
|
|
#expand _IS_LINUX = __IS_LINUX__ != 0
|
2008-02-22 00:08:39 +03:00
|
|
|
#ifdef IS_CYGWIN
|
2010-01-15 20:22:54 +03:00
|
|
|
#expand _IS_CYGWIN = __IS_CYGWIN__ == 1
|
2008-02-22 00:08:39 +03:00
|
|
|
#else
|
2010-01-15 20:22:54 +03:00
|
|
|
_IS_CYGWIN = False
|
2008-02-22 00:08:39 +03:00
|
|
|
#endif
|
2010-01-15 20:22:54 +03:00
|
|
|
#expand _IS_CAMINO = __IS_CAMINO__ != 0
|
|
|
|
#expand _BIN_SUFFIX = __BIN_SUFFIX__
|
|
|
|
#expand _PERL = __PERL__
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
#expand _DEFAULT_APP = "./" + __BROWSER_PATH__
|
|
|
|
#expand _CERTS_SRC_DIR = __CERTS_SRC_DIR__
|
|
|
|
#expand _IS_TEST_BUILD = __IS_TEST_BUILD__
|
|
|
|
#expand _IS_DEBUG_BUILD = __IS_DEBUG_BUILD__
|
|
|
|
#expand _CRASHREPORTER = __CRASHREPORTER__ == 1
|
2008-04-08 09:18:45 +04:00
|
|
|
|
|
|
|
#################
|
|
|
|
# PROFILE SETUP #
|
|
|
|
#################
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2008-06-07 10:43:15 +04:00
|
|
|
class SyntaxError(Exception):
|
|
|
|
"Signifies a syntax error on a particular line in server-locations.txt."
|
|
|
|
|
|
|
|
def __init__(self, lineno, msg = None):
|
|
|
|
self.lineno = lineno
|
|
|
|
self.msg = msg
|
|
|
|
|
|
|
|
def __str__(self):
|
|
|
|
s = "Syntax error on line " + str(self.lineno)
|
|
|
|
if self.msg:
|
|
|
|
s += ": %s." % self.msg
|
|
|
|
else:
|
|
|
|
s += "."
|
|
|
|
return s
|
|
|
|
|
|
|
|
|
|
|
|
class Location:
|
|
|
|
"Represents a location line in server-locations.txt."
|
|
|
|
|
|
|
|
def __init__(self, scheme, host, port, options):
|
|
|
|
self.scheme = scheme
|
|
|
|
self.host = host
|
|
|
|
self.port = port
|
|
|
|
self.options = options
|
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
class Automation(object):
|
2008-06-07 10:43:15 +04:00
|
|
|
"""
|
2010-01-15 20:22:54 +03:00
|
|
|
Runs the browser from a script, and provides useful utilities
|
|
|
|
for setting up the browser environment.
|
2008-06-07 10:43:15 +04:00
|
|
|
"""
|
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
DIST_BIN = _DIST_BIN
|
|
|
|
IS_WIN32 = _IS_WIN32
|
|
|
|
IS_MAC = _IS_MAC
|
|
|
|
IS_LINUX = _IS_LINUX
|
|
|
|
IS_CYGWIN = _IS_CYGWIN
|
|
|
|
IS_CAMINO = _IS_CAMINO
|
|
|
|
BIN_SUFFIX = _BIN_SUFFIX
|
|
|
|
PERL = _PERL
|
|
|
|
|
|
|
|
UNIXISH = not IS_WIN32 and not IS_MAC
|
|
|
|
|
|
|
|
DEFAULT_APP = _DEFAULT_APP
|
|
|
|
CERTS_SRC_DIR = _CERTS_SRC_DIR
|
|
|
|
IS_TEST_BUILD = _IS_TEST_BUILD
|
|
|
|
IS_DEBUG_BUILD = _IS_DEBUG_BUILD
|
|
|
|
CRASHREPORTER = _CRASHREPORTER
|
|
|
|
|
|
|
|
SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(sys.argv[0])))
|
|
|
|
sys.path.insert(0, SCRIPT_DIR)
|
|
|
|
automationutils = __import__('automationutils')
|
|
|
|
|
|
|
|
# timeout, in seconds
|
|
|
|
DEFAULT_TIMEOUT = 60.0
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
log = logging.getLogger()
|
|
|
|
|
|
|
|
def __init__(self):
|
|
|
|
|
|
|
|
# We use the logging system here primarily because it'll handle multiple
|
|
|
|
# threads, which is needed to process the output of the server and application
|
|
|
|
# processes simultaneously.
|
|
|
|
handler = logging.StreamHandler(sys.stdout)
|
|
|
|
self.log.setLevel(logging.INFO)
|
|
|
|
self.log.addHandler(handler)
|
|
|
|
|
|
|
|
@property
|
|
|
|
def __all__(self):
|
|
|
|
return [
|
|
|
|
"UNIXISH",
|
|
|
|
"IS_WIN32",
|
|
|
|
"IS_MAC",
|
|
|
|
"log",
|
|
|
|
"runApp",
|
|
|
|
"Process",
|
|
|
|
"addCommonOptions",
|
|
|
|
"initializeProfile",
|
|
|
|
"DIST_BIN",
|
|
|
|
"DEFAULT_APP",
|
|
|
|
"CERTS_SRC_DIR",
|
|
|
|
"environment",
|
|
|
|
"IS_TEST_BUILD",
|
|
|
|
"IS_DEBUG_BUILD",
|
|
|
|
"DEFAULT_TIMEOUT",
|
|
|
|
]
|
|
|
|
|
|
|
|
class Process(subprocess.Popen):
|
|
|
|
"""
|
|
|
|
Represents our view of a subprocess.
|
|
|
|
It adds a kill() method which allows it to be stopped explicitly.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def kill(self):
|
|
|
|
if Automation().IS_WIN32:
|
|
|
|
import platform
|
|
|
|
pid = "%i" % self.pid
|
|
|
|
if platform.release() == "2000":
|
|
|
|
# Windows 2000 needs 'kill.exe' from the
|
|
|
|
#'Windows 2000 Resource Kit tools'. (See bug 475455.)
|
|
|
|
try:
|
|
|
|
subprocess.Popen(["kill", "-f", pid]).wait()
|
|
|
|
except:
|
|
|
|
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Missing 'kill' utility to kill process with pid=%s. Kill it manually!", pid)
|
|
|
|
else:
|
|
|
|
# Windows XP and later.
|
|
|
|
subprocess.Popen(["taskkill", "/F", "/PID", pid]).wait()
|
|
|
|
else:
|
|
|
|
os.kill(self.pid, signal.SIGKILL)
|
|
|
|
|
|
|
|
def readLocations(self, locationsPath = "server-locations.txt"):
|
|
|
|
"""
|
|
|
|
Reads the locations at which the Mochitest HTTP server is available from
|
|
|
|
server-locations.txt.
|
|
|
|
"""
|
|
|
|
|
|
|
|
locationFile = codecs.open(locationsPath, "r", "UTF-8")
|
|
|
|
|
|
|
|
# Perhaps more detail than necessary, but it's the easiest way to make sure
|
|
|
|
# we get exactly the format we want. See server-locations.txt for the exact
|
|
|
|
# format guaranteed here.
|
|
|
|
lineRe = re.compile(r"^(?P<scheme>[a-z][-a-z0-9+.]*)"
|
2008-06-07 10:43:15 +04:00
|
|
|
r"://"
|
|
|
|
r"(?P<host>"
|
|
|
|
r"\d+\.\d+\.\d+\.\d+"
|
|
|
|
r"|"
|
|
|
|
r"(?:[a-z0-9](?:[-a-z0-9]*[a-z0-9])?\.)*"
|
|
|
|
r"[a-z](?:[-a-z0-9]*[a-z0-9])?"
|
|
|
|
r")"
|
|
|
|
r":"
|
|
|
|
r"(?P<port>\d+)"
|
|
|
|
r"(?:"
|
|
|
|
r"\s+"
|
2008-09-05 17:35:58 +04:00
|
|
|
r"(?P<options>\S+(?:,\S+)*)"
|
2008-06-07 10:43:15 +04:00
|
|
|
r")?$")
|
2010-01-15 20:22:54 +03:00
|
|
|
locations = []
|
|
|
|
lineno = 0
|
|
|
|
seenPrimary = False
|
|
|
|
for line in locationFile:
|
|
|
|
lineno += 1
|
|
|
|
if line.startswith("#") or line == "\n":
|
|
|
|
continue
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
match = lineRe.match(line)
|
|
|
|
if not match:
|
|
|
|
raise SyntaxError(lineno)
|
|
|
|
|
|
|
|
options = match.group("options")
|
|
|
|
if options:
|
|
|
|
options = options.split(",")
|
|
|
|
if "primary" in options:
|
|
|
|
if seenPrimary:
|
|
|
|
raise SyntaxError(lineno, "multiple primary locations")
|
|
|
|
seenPrimary = True
|
|
|
|
else:
|
|
|
|
options = []
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
locations.append(Location(match.group("scheme"), match.group("host"),
|
|
|
|
match.group("port"), options))
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
if not seenPrimary:
|
|
|
|
raise SyntaxError(lineno + 1, "missing primary location")
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
return locations
|
2008-06-07 10:43:15 +04:00
|
|
|
|
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
def initializeProfile(self, profileDir, extraPrefs = []):
|
|
|
|
"Sets up the standard testing profile."
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
# Start with a clean slate.
|
|
|
|
shutil.rmtree(profileDir, True)
|
|
|
|
os.mkdir(profileDir)
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
prefs = []
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
part = """\
|
2008-02-22 00:08:39 +03:00
|
|
|
user_pref("browser.dom.window.dump.enabled", true);
|
2008-05-14 17:27:47 +04:00
|
|
|
user_pref("dom.allow_scripts_to_close_windows", true);
|
2008-02-22 00:08:39 +03:00
|
|
|
user_pref("dom.disable_open_during_load", false);
|
|
|
|
user_pref("dom.max_script_run_time", 0); // no slow script dialogs
|
2009-05-06 03:11:19 +04:00
|
|
|
user_pref("dom.max_chrome_script_run_time", 0);
|
2009-10-22 21:49:21 +04:00
|
|
|
user_pref("dom.popup_maximum", -1);
|
2008-02-22 00:08:39 +03:00
|
|
|
user_pref("signed.applets.codebase_principal_support", true);
|
|
|
|
user_pref("security.warn_submit_insecure", false);
|
|
|
|
user_pref("browser.shell.checkDefaultBrowser", false);
|
2008-09-05 16:03:30 +04:00
|
|
|
user_pref("shell.checkDefaultClient", false);
|
2008-02-22 00:08:39 +03:00
|
|
|
user_pref("browser.warnOnQuit", false);
|
2008-03-04 10:24:26 +03:00
|
|
|
user_pref("accessibility.typeaheadfind.autostart", false);
|
2008-03-05 01:12:06 +03:00
|
|
|
user_pref("javascript.options.showInConsole", true);
|
2008-04-29 03:56:07 +04:00
|
|
|
user_pref("layout.debug.enable_data_xbl", true);
|
2008-05-06 00:43:44 +04:00
|
|
|
user_pref("browser.EULA.override", true);
|
2008-10-01 08:36:01 +04:00
|
|
|
user_pref("javascript.options.jit.content", true);
|
2008-09-10 08:13:23 +04:00
|
|
|
user_pref("gfx.color_management.force_srgb", true);
|
2009-01-13 08:52:00 +03:00
|
|
|
user_pref("network.manage-offline-status", false);
|
2009-05-06 08:30:39 +04:00
|
|
|
user_pref("test.mousescroll", true);
|
2009-01-15 22:19:15 +03:00
|
|
|
user_pref("security.default_personal_cert", "Select Automatically"); // Need to client auth test be w/o any dialogs
|
2009-02-24 22:46:51 +03:00
|
|
|
user_pref("network.http.prompt-temp-redirect", false);
|
2009-04-01 04:52:56 +04:00
|
|
|
user_pref("media.cache_size", 100);
|
2009-04-08 12:45:32 +04:00
|
|
|
user_pref("security.warn_viewing_mixed", false);
|
2008-05-06 21:52:26 +04:00
|
|
|
|
2010-03-09 19:55:39 +03:00
|
|
|
user_pref("geo.wifi.uri", "http://mochi.test:8888/tests/dom/tests/mochitest/geolocation/network_geolocation.sjs");
|
2009-08-15 03:12:09 +04:00
|
|
|
user_pref("geo.wifi.testing", true);
|
|
|
|
|
2008-05-06 21:52:26 +04:00
|
|
|
user_pref("camino.warn_when_closing", false); // Camino-only, harmless to others
|
2009-06-17 14:47:08 +04:00
|
|
|
|
|
|
|
// Make url-classifier updates so rare that they won't affect tests
|
|
|
|
user_pref("urlclassifier.updateinterval", 172800);
|
|
|
|
// Point the url-classifier to the local testing server for fast failures
|
2010-03-09 19:55:39 +03:00
|
|
|
user_pref("browser.safebrowsing.provider.0.gethashURL", "http://mochi.test:8888/safebrowsing-dummy/gethash");
|
|
|
|
user_pref("browser.safebrowsing.provider.0.keyURL", "http://mochi.test:8888/safebrowsing-dummy/newkey");
|
|
|
|
user_pref("browser.safebrowsing.provider.0.lookupURL", "http://mochi.test:8888/safebrowsing-dummy/lookup");
|
|
|
|
user_pref("browser.safebrowsing.provider.0.updateURL", "http://mochi.test:8888/safebrowsing-dummy/update");
|
2009-02-18 21:44:07 +03:00
|
|
|
"""
|
2009-02-18 16:31:31 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
prefs.append(part)
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
locations = self.readLocations()
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
# Grant God-power to all the privileged servers on which tests run.
|
|
|
|
privileged = filter(lambda loc: "privileged" in loc.options, locations)
|
|
|
|
for (i, l) in itertools.izip(itertools.count(1), privileged):
|
|
|
|
part = """
|
2008-02-22 00:08:39 +03:00
|
|
|
user_pref("capability.principal.codebase.p%(i)d.granted",
|
|
|
|
"UniversalXPConnect UniversalBrowserRead UniversalBrowserWrite \
|
|
|
|
UniversalPreferencesRead UniversalPreferencesWrite \
|
|
|
|
UniversalFileRead");
|
2008-06-07 10:43:15 +04:00
|
|
|
user_pref("capability.principal.codebase.p%(i)d.id", "%(origin)s");
|
2008-02-22 00:08:39 +03:00
|
|
|
user_pref("capability.principal.codebase.p%(i)d.subjectName", "");
|
2008-06-07 10:43:15 +04:00
|
|
|
""" % { "i": i,
|
|
|
|
"origin": (l.scheme + "://" + l.host + ":" + l.port) }
|
2010-01-15 20:22:54 +03:00
|
|
|
prefs.append(part)
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
# We need to proxy every server but the primary one.
|
|
|
|
origins = ["'%s://%s:%s'" % (l.scheme, l.host, l.port)
|
|
|
|
for l in filter(lambda l: "primary" not in l.options, locations)]
|
|
|
|
origins = ", ".join(origins)
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
pacURL = """data:text/plain,
|
2008-02-22 00:08:39 +03:00
|
|
|
function FindProxyForURL(url, host)
|
|
|
|
{
|
2008-06-07 10:43:15 +04:00
|
|
|
var origins = [%(origins)s];
|
|
|
|
var regex = new RegExp('^([a-z][-a-z0-9+.]*)' +
|
|
|
|
'://' +
|
|
|
|
'(?:[^/@]*@)?' +
|
|
|
|
'(.*?)' +
|
|
|
|
'(?::(\\\\\\\\d+))?/');
|
2008-02-22 00:08:39 +03:00
|
|
|
var matches = regex.exec(url);
|
|
|
|
if (!matches)
|
|
|
|
return 'DIRECT';
|
2008-06-07 10:43:15 +04:00
|
|
|
var isHttp = matches[1] == 'http';
|
2008-09-05 17:35:58 +04:00
|
|
|
var isHttps = matches[1] == 'https';
|
2008-06-07 10:43:15 +04:00
|
|
|
if (!matches[3])
|
2008-09-05 17:35:58 +04:00
|
|
|
{
|
|
|
|
if (isHttp) matches[3] = '80';
|
|
|
|
if (isHttps) matches[3] = '443';
|
|
|
|
}
|
|
|
|
|
2008-06-07 10:43:15 +04:00
|
|
|
var origin = matches[1] + '://' + matches[2] + ':' + matches[3];
|
|
|
|
if (origins.indexOf(origin) < 0)
|
|
|
|
return 'DIRECT';
|
|
|
|
if (isHttp)
|
2008-09-05 17:35:58 +04:00
|
|
|
return 'PROXY 127.0.0.1:8888';
|
|
|
|
if (isHttps)
|
|
|
|
return 'PROXY 127.0.0.1:4443';
|
2008-02-22 00:08:39 +03:00
|
|
|
return 'DIRECT';
|
2008-06-07 10:43:15 +04:00
|
|
|
}""" % { "origins": origins }
|
2010-01-15 20:22:54 +03:00
|
|
|
pacURL = "".join(pacURL.splitlines())
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
part = """
|
2008-02-22 00:08:39 +03:00
|
|
|
user_pref("network.proxy.type", 2);
|
|
|
|
user_pref("network.proxy.autoconfig_url", "%(pacURL)s");
|
2008-05-06 21:52:26 +04:00
|
|
|
|
|
|
|
user_pref("camino.use_system_proxy_settings", false); // Camino-only, harmless to others
|
2008-02-22 00:08:39 +03:00
|
|
|
""" % {"pacURL": pacURL}
|
2009-10-14 21:55:25 +04:00
|
|
|
prefs.append(part)
|
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
for v in extraPrefs:
|
|
|
|
thispref = v.split("=")
|
|
|
|
if len(thispref) < 2:
|
|
|
|
print "Error: syntax error in --setpref=" + v
|
|
|
|
sys.exit(1)
|
|
|
|
part = 'user_pref("%s", %s);\n' % (thispref[0], thispref[1])
|
|
|
|
prefs.append(part)
|
|
|
|
|
|
|
|
# write the preferences
|
|
|
|
prefsFile = open(profileDir + "/" + "user.js", "a")
|
|
|
|
prefsFile.write("".join(prefs))
|
|
|
|
prefsFile.close()
|
|
|
|
|
|
|
|
def addCommonOptions(self, parser):
|
|
|
|
"Adds command-line options which are common to mochitest and reftest."
|
|
|
|
|
|
|
|
parser.add_option("--setpref",
|
|
|
|
action = "append", type = "string",
|
|
|
|
default = [],
|
|
|
|
dest = "extraPrefs", metavar = "PREF=VALUE",
|
|
|
|
help = "defines an extra user preference")
|
|
|
|
|
|
|
|
def fillCertificateDB(self, profileDir, certPath, utilityPath, xrePath):
|
|
|
|
pwfilePath = os.path.join(profileDir, ".crtdbpw")
|
2008-09-05 17:35:58 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
pwfile = open(pwfilePath, "w")
|
|
|
|
pwfile.write("\n")
|
|
|
|
pwfile.close()
|
2008-09-05 17:35:58 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
# Create head of the ssltunnel configuration file
|
|
|
|
sslTunnelConfigPath = os.path.join(profileDir, "ssltunnel.cfg")
|
|
|
|
sslTunnelConfig = open(sslTunnelConfigPath, "w")
|
2008-09-05 17:35:58 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
sslTunnelConfig.write("httpproxy:1\n")
|
|
|
|
sslTunnelConfig.write("certdbdir:%s\n" % certPath)
|
|
|
|
sslTunnelConfig.write("forward:127.0.0.1:8888\n")
|
|
|
|
sslTunnelConfig.write("listen:*:4443:pgo server certificate\n")
|
|
|
|
|
|
|
|
# Configure automatic certificate and bind custom certificates, client authentication
|
|
|
|
locations = self.readLocations()
|
|
|
|
locations.pop(0)
|
|
|
|
for loc in locations:
|
|
|
|
if loc.scheme == "https" and "nocert" not in loc.options:
|
|
|
|
customCertRE = re.compile("^cert=(?P<nickname>[0-9a-zA-Z_ ]+)")
|
|
|
|
clientAuthRE = re.compile("^clientauth=(?P<clientauth>[a-z]+)")
|
|
|
|
for option in loc.options:
|
|
|
|
match = customCertRE.match(option)
|
|
|
|
if match:
|
|
|
|
customcert = match.group("nickname");
|
|
|
|
sslTunnelConfig.write("listen:%s:%s:4443:%s\n" %
|
|
|
|
(loc.host, loc.port, customcert))
|
|
|
|
|
|
|
|
match = clientAuthRE.match(option)
|
|
|
|
if match:
|
|
|
|
clientauth = match.group("clientauth");
|
|
|
|
sslTunnelConfig.write("clientauth:%s:%s:4443:%s\n" %
|
|
|
|
(loc.host, loc.port, clientauth))
|
|
|
|
|
|
|
|
sslTunnelConfig.close()
|
|
|
|
|
|
|
|
# Pre-create the certification database for the profile
|
|
|
|
env = self.environment(xrePath = xrePath)
|
|
|
|
certutil = os.path.join(utilityPath, "certutil" + self.BIN_SUFFIX)
|
|
|
|
pk12util = os.path.join(utilityPath, "pk12util" + self.BIN_SUFFIX)
|
|
|
|
|
|
|
|
status = self.Process([certutil, "-N", "-d", profileDir, "-f", pwfilePath], env = env).wait()
|
|
|
|
if status != 0:
|
|
|
|
return status
|
|
|
|
|
|
|
|
# Walk the cert directory and add custom CAs and client certs
|
|
|
|
files = os.listdir(certPath)
|
|
|
|
for item in files:
|
|
|
|
root, ext = os.path.splitext(item)
|
|
|
|
if ext == ".ca":
|
|
|
|
trustBits = "CT,,"
|
|
|
|
if root.endswith("-object"):
|
|
|
|
trustBits = "CT,,CT"
|
|
|
|
self.Process([certutil, "-A", "-i", os.path.join(certPath, item),
|
|
|
|
"-d", profileDir, "-f", pwfilePath, "-n", root, "-t", trustBits],
|
|
|
|
env = env).wait()
|
|
|
|
if ext == ".client":
|
|
|
|
self.Process([pk12util, "-i", os.path.join(certPath, item), "-w",
|
|
|
|
pwfilePath, "-d", profileDir],
|
|
|
|
env = env).wait()
|
|
|
|
|
|
|
|
os.unlink(pwfilePath)
|
|
|
|
return 0
|
|
|
|
|
|
|
|
def environment(self, env = None, xrePath = None, crashreporter = True):
|
|
|
|
if xrePath == None:
|
|
|
|
xrePath = self.DIST_BIN
|
|
|
|
if env == None:
|
|
|
|
env = dict(os.environ)
|
|
|
|
|
|
|
|
ldLibraryPath = os.path.abspath(os.path.join(self.SCRIPT_DIR, xrePath))
|
|
|
|
if self.UNIXISH or self.IS_MAC:
|
|
|
|
envVar = "LD_LIBRARY_PATH"
|
|
|
|
if self.IS_MAC:
|
|
|
|
envVar = "DYLD_LIBRARY_PATH"
|
|
|
|
else: # unixish
|
|
|
|
env['MOZILLA_FIVE_HOME'] = xrePath
|
|
|
|
if envVar in env:
|
|
|
|
ldLibraryPath = ldLibraryPath + ":" + env[envVar]
|
|
|
|
env[envVar] = ldLibraryPath
|
|
|
|
elif self.IS_WIN32:
|
|
|
|
env["PATH"] = env["PATH"] + ";" + ldLibraryPath
|
|
|
|
|
|
|
|
if crashreporter:
|
|
|
|
env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
|
|
|
|
env['MOZ_CRASHREPORTER'] = '1'
|
2010-01-14 01:44:49 +03:00
|
|
|
else:
|
2010-01-15 20:22:54 +03:00
|
|
|
env['MOZ_CRASHREPORTER_DISABLE'] = '1'
|
|
|
|
|
|
|
|
env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
|
|
|
|
env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'
|
|
|
|
return env
|
|
|
|
|
|
|
|
if IS_WIN32:
|
2010-02-03 05:57:15 +03:00
|
|
|
ctypes = __import__('ctypes')
|
|
|
|
wintypes = __import__('ctypes.wintypes')
|
|
|
|
time = __import__('time')
|
|
|
|
msvcrt = __import__('msvcrt')
|
2010-01-15 20:22:54 +03:00
|
|
|
PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
|
|
|
|
GetLastError = ctypes.windll.kernel32.GetLastError
|
|
|
|
|
|
|
|
def readWithTimeout(self, f, timeout):
|
|
|
|
"""Try to read a line of output from the file object |f|.
|
|
|
|
|f| must be a pipe, like the |stdout| member of a subprocess.Popen
|
|
|
|
object created with stdout=PIPE. If no output
|
|
|
|
is received within |timeout| seconds, return a blank line.
|
|
|
|
Returns a tuple (line, did_timeout), where |did_timeout| is True
|
|
|
|
if the read timed out, and False otherwise."""
|
|
|
|
if timeout is None:
|
|
|
|
# shortcut to allow callers to pass in "None" for no timeout.
|
|
|
|
return (f.readline(), False)
|
|
|
|
x = self.msvcrt.get_osfhandle(f.fileno())
|
|
|
|
l = self.ctypes.c_long()
|
|
|
|
done = self.time.time() + timeout
|
|
|
|
while self.time.time() < done:
|
|
|
|
if self.PeekNamedPipe(x, None, 0, None, self.ctypes.byref(l), None) == 0:
|
|
|
|
err = self.GetLastError()
|
|
|
|
if err == 38 or err == 109: # ERROR_HANDLE_EOF || ERROR_BROKEN_PIPE
|
|
|
|
return ('', False)
|
|
|
|
else:
|
|
|
|
log.error("readWithTimeout got error: %d", err)
|
2010-01-25 23:37:58 +03:00
|
|
|
if l.value > 0:
|
2010-01-15 20:22:54 +03:00
|
|
|
# we're assuming that the output is line-buffered,
|
|
|
|
# which is not unreasonable
|
|
|
|
return (f.readline(), False)
|
|
|
|
self.time.sleep(0.01)
|
2009-10-14 00:56:24 +04:00
|
|
|
return ('', True)
|
2010-01-14 00:53:26 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
def isPidAlive(self, pid):
|
|
|
|
STILL_ACTIVE = 259
|
|
|
|
PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
|
|
|
|
pHandle = self.ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
|
|
|
|
if not pHandle:
|
|
|
|
return False
|
2010-01-27 01:46:47 +03:00
|
|
|
pExitCode = ctypes.wintypes.DWORD()
|
2010-01-15 20:22:54 +03:00
|
|
|
self.ctypes.windll.kernel32.GetExitCodeProcess(pHandle, self.ctypes.byref(pExitCode))
|
|
|
|
self.ctypes.windll.kernel32.CloseHandle(pHandle)
|
|
|
|
if (pExitCode.value == STILL_ACTIVE):
|
|
|
|
return True
|
|
|
|
else:
|
2010-01-14 01:44:49 +03:00
|
|
|
return False
|
2010-01-15 20:22:54 +03:00
|
|
|
|
|
|
|
def killPid(self, pid):
|
|
|
|
PROCESS_TERMINATE = 0x0001
|
|
|
|
pHandle = self.ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, 0, pid)
|
|
|
|
if not pHandle:
|
2009-10-16 21:34:27 +04:00
|
|
|
return
|
2010-01-15 20:22:54 +03:00
|
|
|
success = self.ctypes.windll.kernel32.TerminateProcess(pHandle, 1)
|
|
|
|
self.ctypes.windll.kernel32.CloseHandle(pHandle)
|
2010-01-07 01:03:29 +03:00
|
|
|
|
2008-02-22 00:08:39 +03:00
|
|
|
else:
|
2010-01-15 20:22:54 +03:00
|
|
|
errno = __import__('errno')
|
|
|
|
|
|
|
|
def readWithTimeout(self, f, timeout):
|
|
|
|
"""Try to read a line of output from the file object |f|. If no output
|
|
|
|
is received within |timeout| seconds, return a blank line.
|
|
|
|
Returns a tuple (line, did_timeout), where |did_timeout| is True
|
|
|
|
if the read timed out, and False otherwise."""
|
|
|
|
(r, w, e) = select.select([f], [], [], timeout)
|
|
|
|
if len(r) == 0:
|
|
|
|
return ('', True)
|
|
|
|
return (f.readline(), False)
|
|
|
|
|
|
|
|
def isPidAlive(self, pid):
|
|
|
|
try:
|
|
|
|
# kill(pid, 0) checks for a valid PID without actually sending a signal
|
|
|
|
# The method throws OSError if the PID is invalid, which we catch below.
|
|
|
|
os.kill(pid, 0)
|
|
|
|
|
|
|
|
# Wait on it to see if it's a zombie. This can throw OSError.ECHILD if
|
|
|
|
# the process terminates before we get to this point.
|
|
|
|
wpid, wstatus = os.waitpid(pid, os.WNOHANG)
|
|
|
|
if wpid == 0:
|
|
|
|
return True
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
return False
|
|
|
|
except OSError, err:
|
|
|
|
# Catch the errors we might expect from os.kill/os.waitpid,
|
|
|
|
# and re-raise any others
|
|
|
|
if err.errno == self.errno.ESRCH or err.errno == self.errno.ECHILD:
|
|
|
|
return False
|
|
|
|
raise
|
|
|
|
|
|
|
|
def killPid(self, pid):
|
|
|
|
os.kill(pid, signal.SIGKILL)
|
|
|
|
|
|
|
|
def triggerBreakpad(self, proc, utilityPath):
|
|
|
|
"""Attempt to kill this process in a way that triggers Breakpad crash
|
|
|
|
reporting, if we know how for this platform. Otherwise just .kill() it."""
|
|
|
|
if self.CRASHREPORTER:
|
|
|
|
if self.UNIXISH:
|
2010-02-26 02:05:48 +03:00
|
|
|
# ABRT will get picked up by Breakpad's signal handler
|
|
|
|
os.kill(proc.pid, signal.SIGABRT)
|
2010-01-15 20:22:54 +03:00
|
|
|
return
|
|
|
|
elif self.IS_WIN32:
|
|
|
|
# We should have a "crashinject" program in our utility path
|
|
|
|
crashinject = os.path.normpath(os.path.join(utilityPath, "crashinject.exe"))
|
|
|
|
if os.path.exists(crashinject) and subprocess.Popen([crashinject, str(proc.pid)]).wait() == 0:
|
|
|
|
return
|
|
|
|
#TODO: kill the process such that it triggers Breakpad on OS X (bug 525296)
|
|
|
|
self.log.info("Can't trigger Breakpad, just killing process")
|
|
|
|
proc.kill()
|
|
|
|
|
2010-01-20 02:47:57 +03:00
|
|
|
def waitForFinish(self, proc, utilityPath, timeout, maxTime, startTime):
|
2010-01-19 22:45:04 +03:00
|
|
|
""" Look for timeout or crashes and return the status after the process terminates """
|
2010-01-15 20:22:54 +03:00
|
|
|
stackFixerProcess = None
|
2010-02-22 00:03:20 +03:00
|
|
|
stackFixerModule = None
|
2010-01-15 20:22:54 +03:00
|
|
|
didTimeout = False
|
2010-01-19 22:45:04 +03:00
|
|
|
if proc.stdout is None:
|
2010-01-15 20:22:54 +03:00
|
|
|
self.log.info("TEST-INFO: Not logging stdout or stderr due to debugger connection")
|
|
|
|
else:
|
|
|
|
logsource = proc.stdout
|
2010-02-22 00:03:20 +03:00
|
|
|
if self.IS_DEBUG_BUILD and self.IS_LINUX:
|
|
|
|
# Run logsource through fix-linux-stack.pl
|
|
|
|
stackFixerProcess = self.Process([self.PERL, os.path.join(utilityPath, "fix-linux-stack.pl")],
|
|
|
|
stdin=logsource,
|
|
|
|
stdout=subprocess.PIPE)
|
|
|
|
logsource = stackFixerProcess.stdout
|
|
|
|
|
|
|
|
if self.IS_DEBUG_BUILD and self.IS_MAC:
|
|
|
|
# Import fix_macosx_stack.py from utilityPath
|
|
|
|
sys.path.insert(0, utilityPath)
|
|
|
|
import fix_macosx_stack as stackFixerModule
|
|
|
|
del sys.path[0]
|
2010-01-15 20:22:54 +03:00
|
|
|
|
|
|
|
(line, didTimeout) = self.readWithTimeout(logsource, timeout)
|
|
|
|
hitMaxTime = False
|
|
|
|
while line != "" and not didTimeout:
|
2010-02-22 00:03:20 +03:00
|
|
|
if stackFixerModule:
|
|
|
|
line = stackFixerModule.fixSymbols(line)
|
2010-01-15 20:22:54 +03:00
|
|
|
self.log.info(line.rstrip())
|
|
|
|
(line, didTimeout) = self.readWithTimeout(logsource, timeout)
|
|
|
|
if not hitMaxTime and maxTime and datetime.now() - startTime > timedelta(seconds = maxTime):
|
|
|
|
# Kill the application, but continue reading from stack fixer so as not to deadlock on stackFixerProcess.wait().
|
|
|
|
hitMaxTime = True
|
|
|
|
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | application ran for longer than allowed maximum time of %d seconds", int(maxTime))
|
|
|
|
self.triggerBreakpad(proc, utilityPath)
|
|
|
|
if didTimeout:
|
|
|
|
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | application timed out after %d seconds with no output", int(timeout))
|
|
|
|
self.triggerBreakpad(proc, utilityPath)
|
|
|
|
|
|
|
|
status = proc.wait()
|
|
|
|
if status != 0 and not didTimeout and not hitMaxTime:
|
|
|
|
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Exited with code %d during test run", status)
|
|
|
|
if stackFixerProcess is not None:
|
|
|
|
fixerStatus = stackFixerProcess.wait()
|
|
|
|
if fixerStatus != 0 and not didTimeout and not hitMaxTime:
|
|
|
|
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Stack fixer process exited with code %d during test run", fixerStatus)
|
2010-01-19 22:45:04 +03:00
|
|
|
return status
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs):
|
|
|
|
""" build the application command line """
|
|
|
|
|
|
|
|
cmd = app
|
|
|
|
if self.IS_MAC and not self.IS_CAMINO and not cmd.endswith("-bin"):
|
|
|
|
cmd += "-bin"
|
|
|
|
cmd = os.path.abspath(cmd)
|
|
|
|
|
|
|
|
args = []
|
|
|
|
|
|
|
|
if debuggerInfo:
|
|
|
|
args.extend(debuggerInfo["args"])
|
|
|
|
args.append(cmd)
|
|
|
|
cmd = os.path.abspath(debuggerInfo["path"])
|
|
|
|
|
|
|
|
if self.IS_MAC:
|
|
|
|
args.append("-foreground")
|
|
|
|
|
|
|
|
if self.IS_CYGWIN:
|
|
|
|
profileDirectory = commands.getoutput("cygpath -w \"" + profileDir + "/\"")
|
|
|
|
else:
|
|
|
|
profileDirectory = profileDir + "/"
|
|
|
|
|
|
|
|
args.extend(("-no-remote", "-profile", profileDirectory))
|
|
|
|
if testURL is not None:
|
|
|
|
if self.IS_CAMINO:
|
|
|
|
args.extend(("-url", testURL))
|
|
|
|
else:
|
|
|
|
args.append((testURL))
|
|
|
|
args.extend(extraArgs)
|
|
|
|
return cmd, args
|
|
|
|
|
|
|
|
def checkForZombies(self, processLog):
|
|
|
|
""" Look for hung processes """
|
2010-01-15 20:22:54 +03:00
|
|
|
if not os.path.exists(processLog):
|
|
|
|
self.log.info('INFO | automation.py | PID log not found: %s', processLog)
|
|
|
|
else:
|
|
|
|
self.log.info('INFO | automation.py | Reading PID log: %s', processLog)
|
|
|
|
processList = []
|
|
|
|
pidRE = re.compile(r'launched child process (\d+)$')
|
|
|
|
processLogFD = open(processLog)
|
|
|
|
for line in processLogFD:
|
|
|
|
self.log.info(line.rstrip())
|
|
|
|
m = pidRE.search(line)
|
|
|
|
if m:
|
|
|
|
processList.append(int(m.group(1)))
|
|
|
|
processLogFD.close()
|
|
|
|
|
|
|
|
for processPID in processList:
|
|
|
|
self.log.info("INFO | automation.py | Checking for orphan process with PID: %d", processPID)
|
|
|
|
if self.isPidAlive(processPID):
|
|
|
|
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | child process %d still alive after shutdown", processPID)
|
|
|
|
self.killPid(processPID)
|
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
def runApp(self, testURL, env, app, profileDir, extraArgs,
|
|
|
|
runSSLTunnel = False, utilityPath = None,
|
|
|
|
xrePath = None, certPath = None,
|
|
|
|
debuggerInfo = None, symbolsPath = None,
|
|
|
|
timeout = -1, maxTime = None):
|
|
|
|
"""
|
|
|
|
Run the app, log the duration it took to execute, return the status code.
|
|
|
|
Kills the app if it runs for longer than |maxTime| seconds, or outputs nothing for |timeout| seconds.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if utilityPath == None:
|
|
|
|
utilityPath = self.DIST_BIN
|
|
|
|
if xrePath == None:
|
|
|
|
xrePath = self.DIST_BIN
|
|
|
|
if certPath == None:
|
|
|
|
certPath = self.CERTS_SRC_DIR
|
|
|
|
if timeout == -1:
|
|
|
|
timeout = self.DEFAULT_TIMEOUT
|
|
|
|
|
|
|
|
# copy env so we don't munge the caller's environment
|
|
|
|
env = dict(env);
|
|
|
|
env["NO_EM_RESTART"] = "1"
|
|
|
|
tmpfd, processLog = tempfile.mkstemp(suffix='pidlog')
|
|
|
|
os.close(tmpfd)
|
|
|
|
env["MOZ_PROCESS_LOG"] = processLog
|
|
|
|
|
|
|
|
if self.IS_TEST_BUILD and runSSLTunnel:
|
|
|
|
# create certificate database for the profile
|
|
|
|
certificateStatus = self.fillCertificateDB(profileDir, certPath, utilityPath, xrePath)
|
|
|
|
if certificateStatus != 0:
|
|
|
|
self.log.info("TEST-UNEXPECTED FAIL | automation.py | Certificate integration failed")
|
|
|
|
return certificateStatus
|
|
|
|
|
|
|
|
# start ssltunnel to provide https:// URLs capability
|
|
|
|
ssltunnel = os.path.join(utilityPath, "ssltunnel" + self.BIN_SUFFIX)
|
|
|
|
ssltunnelProcess = self.Process([ssltunnel,
|
|
|
|
os.path.join(profileDir, "ssltunnel.cfg")],
|
|
|
|
env = self.environment(xrePath = xrePath))
|
|
|
|
self.log.info("INFO | automation.py | SSL tunnel pid: %d", ssltunnelProcess.pid)
|
|
|
|
|
|
|
|
cmd, args = self.buildCommandLine(app, debuggerInfo, profileDir, testURL, extraArgs)
|
|
|
|
startTime = datetime.now()
|
|
|
|
|
|
|
|
# Don't redirect stdout and stderr if an interactive debugger is attached
|
|
|
|
if debuggerInfo and debuggerInfo["interactive"]:
|
|
|
|
outputPipe = None
|
|
|
|
else:
|
|
|
|
outputPipe = subprocess.PIPE
|
|
|
|
|
|
|
|
proc = self.Process([cmd] + args,
|
|
|
|
env = self.environment(env, xrePath = xrePath,
|
|
|
|
crashreporter = not debuggerInfo),
|
|
|
|
stdout = outputPipe,
|
|
|
|
stderr = subprocess.STDOUT)
|
|
|
|
self.log.info("INFO | automation.py | Application pid: %d", proc.pid)
|
|
|
|
|
2010-01-20 02:47:57 +03:00
|
|
|
status = self.waitForFinish(proc, utilityPath, timeout, maxTime, startTime)
|
2010-01-19 22:45:04 +03:00
|
|
|
self.log.info("INFO | automation.py | Application ran for: %s", str(datetime.now() - startTime))
|
|
|
|
|
|
|
|
# Do a final check for zombie child processes.
|
|
|
|
self.checkForZombies(processLog)
|
2010-01-15 20:22:54 +03:00
|
|
|
self.automationutils.checkForCrashes(os.path.join(profileDir, "minidumps"), symbolsPath)
|
|
|
|
|
|
|
|
if os.path.exists(processLog):
|
|
|
|
os.unlink(processLog)
|
|
|
|
|
|
|
|
if self.IS_TEST_BUILD and runSSLTunnel:
|
|
|
|
ssltunnelProcess.kill()
|
|
|
|
|
|
|
|
return status
|