2008-02-22 00:08:39 +03:00
|
|
|
#
|
2012-05-21 15:12:37 +04:00
|
|
|
# This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2011-10-14 19:45:58 +04:00
|
|
|
from __future__ import with_statement
|
2008-06-07 10:43:15 +04:00
|
|
|
import codecs
|
2008-02-22 00:08:39 +03:00
|
|
|
import itertools
|
2013-07-30 16:30:40 +04:00
|
|
|
import json
|
2008-04-08 09:18:45 +04:00
|
|
|
import logging
|
2008-02-22 00:08:39 +03:00
|
|
|
import os
|
2008-06-07 10:43:15 +04:00
|
|
|
import re
|
2009-10-14 00:56:24 +04:00
|
|
|
import select
|
2009-01-12 22:23:28 +03:00
|
|
|
import shutil
|
2008-04-08 09:18:45 +04:00
|
|
|
import signal
|
2009-01-12 22:23:28 +03:00
|
|
|
import subprocess
|
2008-02-22 00:08:39 +03:00
|
|
|
import sys
|
2008-04-08 09:18:45 +04:00
|
|
|
import threading
|
2009-11-20 22:48:56 +03:00
|
|
|
import tempfile
|
2010-08-20 03:12:46 +04:00
|
|
|
import sqlite3
|
2014-12-20 16:45:48 +03:00
|
|
|
import zipfile
|
2013-07-30 16:30:40 +04:00
|
|
|
from datetime import datetime, timedelta
|
2012-07-20 10:40:15 +04:00
|
|
|
from string import Template
|
2009-05-11 23:54:39 +04:00
|
|
|
|
2010-03-18 21:14:14 +03:00
|
|
|
SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(sys.argv[0])))
|
|
|
|
sys.path.insert(0, SCRIPT_DIR)
|
|
|
|
import automationutils
|
|
|
|
|
2013-03-21 17:19:34 +04:00
|
|
|
# --------------------------------------------------------------
|
|
|
|
# TODO: this is a hack for mozbase without virtualenv, remove with bug 849900
|
2013-07-30 16:30:40 +04:00
|
|
|
# These paths refer to relative locations to test.zip, not the OBJDIR or SRCDIR
|
|
|
|
# TODO: this is a hack for mozbase without virtualenv, remove with bug 849900
# These paths refer to relative locations to test.zip, not the OBJDIR or SRCDIR
here = os.path.dirname(os.path.realpath(__file__))
mozbase = os.path.realpath(os.path.join(os.path.dirname(here), 'mozbase'))

# Make every mozbase sub-package importable by appending each package
# directory to sys.path (skipping any that are already present).
if os.path.isdir(mozbase):
    for entry in os.listdir(mozbase):
        candidate = os.path.join(mozbase, entry)
        if candidate not in sys.path:
            sys.path.append(candidate)
|
2013-06-18 00:22:39 +04:00
|
|
|
|
|
|
|
import mozcrash
|
2013-07-30 16:30:40 +04:00
|
|
|
from mozprofile import Profile, Preferences
|
|
|
|
from mozprofile.permissions import ServerLocations
|
2013-06-18 00:22:39 +04:00
|
|
|
|
2013-03-21 17:19:34 +04:00
|
|
|
# ---------------------------------------------------------------
|
|
|
|
|
2013-05-07 19:19:46 +04:00
|
|
|
# Default preference file and web-app manifest shipped alongside this script.
_DEFAULT_PREFERENCE_FILE = os.path.join(SCRIPT_DIR, 'prefs_general.js')
_DEFAULT_APPS_FILE = os.path.join(SCRIPT_DIR, 'webapps_mochitest.json')

# Default host and ports for the mochitest HTTP / SSL / WebSocket servers.
_DEFAULT_WEB_SERVER = "127.0.0.1"
_DEFAULT_HTTP_PORT = 8888
_DEFAULT_SSL_PORT = 4443
_DEFAULT_WEBSOCKET_PORT = 9988

# App install-status constants, copied from nsIPrincipal.idl.
_APP_STATUS_NOT_INSTALLED = 0
_APP_STATUS_INSTALLED = 1
_APP_STATUS_PRIVILEGED = 2
_APP_STATUS_CERTIFIED = 3
|
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
# The lines below are build-time templates consumed by the Mozilla
# preprocessor: each "#expand" line has its __NAME__ placeholders replaced
# with configure-time values, and "#ifdef"/"#else"/"#endif" select lines.
# They only become plain Python after preprocessing.
#expand _DIST_BIN = __XPC_BIN_PATH__
#expand _IS_WIN32 = len("__WIN32__") != 0
#expand _IS_MAC = __IS_MAC__ != 0
#expand _IS_LINUX = __IS_LINUX__ != 0
#ifdef IS_CYGWIN
#expand _IS_CYGWIN = __IS_CYGWIN__ == 1
#else
_IS_CYGWIN = False
#endif
#expand _BIN_SUFFIX = __BIN_SUFFIX__

#expand _DEFAULT_APP = "./" + __BROWSER_PATH__
#expand _CERTS_SRC_DIR = __CERTS_SRC_DIR__
#expand _IS_TEST_BUILD = __IS_TEST_BUILD__
#expand _IS_DEBUG_BUILD = __IS_DEBUG_BUILD__
#expand _CRASHREPORTER = __CRASHREPORTER__ == 1
#expand _IS_ASAN = __IS_ASAN__ == 1

# Platform-specific modules: ctypes/msvcrt are needed for the Windows pipe
# polling and process-liveness helpers below; errno is needed to interpret
# os.kill / os.waitpid failures on other platforms.
if _IS_WIN32:
    import ctypes, ctypes.wintypes, time, msvcrt
else:
    import errno
|
|
|
|
|
|
|
|
|
2012-12-20 12:43:19 +04:00
|
|
|
def getGlobalLog():
    """Return the module-wide logger shared by the automation harness."""
    return _log
|
|
|
|
|
|
|
|
def resetGlobalLog(log):
    """Point the global logger at stream *log*, replacing all existing handlers."""
    # Detach every handler currently installed (iterate over a snapshot so
    # removal while looping is safe).
    for old_handler in list(_log.handlers):
        _log.removeHandler(old_handler)
    _log.setLevel(logging.INFO)
    _log.addHandler(logging.StreamHandler(log))
|
|
|
|
|
2010-03-18 23:13:33 +03:00
|
|
|
# We use the logging system here primarily because it'll handle multiple
# threads, which is needed to process the output of the server and application
# processes simultaneously.
_log = logging.getLogger()
# Route all harness output to stdout by default; callers may re-route it
# later via resetGlobalLog().
resetGlobalLog(sys.stdout)
|
2010-03-18 23:13:33 +03:00
|
|
|
|
|
|
|
|
2008-04-08 09:18:45 +04:00
|
|
|
#################
|
|
|
|
# PROFILE SETUP #
|
|
|
|
#################
|
2008-02-22 00:08:39 +03:00
|
|
|
|
2008-06-07 10:43:15 +04:00
|
|
|
class SyntaxError(Exception):
    """Signifies a syntax error on a particular line in server-locations.txt.

    NOTE: this intentionally shares its name with the builtin SyntaxError;
    code in this module that raises/catches it gets the local class.
    """

    def __init__(self, lineno, msg=None):
        # 1-based line number of the offending line, plus an optional detail.
        self.lineno = lineno
        self.msg = msg

    def __str__(self):
        detail = ": %s." % self.msg if self.msg else "."
        return "Syntax error on line %s%s" % (self.lineno, detail)
|
|
|
|
|
|
|
|
|
|
|
|
class Location:
    """Represents a location line in server-locations.txt.

    Fields are stored verbatim as parsed; in particular `port` remains the
    string matched from the configuration line.
    """

    def __init__(self, scheme, host, port, options):
        self.scheme = scheme
        self.host = host
        self.port = port
        # List of option strings (e.g. "primary", "cert=...", "nocert").
        self.options = options
|
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
class Automation(object):
    """
    Runs the browser from a script, and provides useful utilities
    for setting up the browser environment.
    """

    # Build-configuration constants filled in by the preprocessor at the top
    # of this file.
    DIST_BIN = _DIST_BIN
    IS_WIN32 = _IS_WIN32
    IS_MAC = _IS_MAC
    IS_LINUX = _IS_LINUX
    IS_CYGWIN = _IS_CYGWIN
    BIN_SUFFIX = _BIN_SUFFIX

    # "Unix-ish" here means: uses LD_LIBRARY_PATH-style library lookup
    # (i.e. neither Windows nor Mac).
    UNIXISH = not IS_WIN32 and not IS_MAC

    DEFAULT_APP = _DEFAULT_APP
    CERTS_SRC_DIR = _CERTS_SRC_DIR
    IS_TEST_BUILD = _IS_TEST_BUILD
    IS_DEBUG_BUILD = _IS_DEBUG_BUILD
    CRASHREPORTER = _CRASHREPORTER
    IS_ASAN = _IS_ASAN

    # timeout, in seconds
    DEFAULT_TIMEOUT = 60.0
    DEFAULT_WEB_SERVER = _DEFAULT_WEB_SERVER
    DEFAULT_HTTP_PORT = _DEFAULT_HTTP_PORT
    DEFAULT_SSL_PORT = _DEFAULT_SSL_PORT
    DEFAULT_WEBSOCKET_PORT = _DEFAULT_WEBSOCKET_PORT
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
def __init__(self):
    """Bind the shared module logger and reset per-run bookkeeping."""
    self.log = _log
    # Most recent test name parsed from "TEST-START | ..." output lines;
    # used in failure messages before any test has started.
    self.lastTestSeen = "automation.py"
    # Guard so at most one screenshot is captured per run (see dumpScreen).
    self.haveDumpedScreen = False
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2010-06-17 09:38:55 +04:00
|
|
|
def setServerInfo(self,
                  webServer = _DEFAULT_WEB_SERVER,
                  httpPort = _DEFAULT_HTTP_PORT,
                  sslPort = _DEFAULT_SSL_PORT,
                  webSocketPort = _DEFAULT_WEBSOCKET_PORT):
    """Record the host and ports at which the test web server is reachable."""
    self.webServer = webServer
    self.httpPort = httpPort
    self.sslPort = sslPort
    self.webSocketPort = webSocketPort
|
2010-03-13 00:53:37 +03:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
@property
def __all__(self):
    """Names historically exported for `from automation import *` consumers."""
    return [
        "UNIXISH",
        "IS_WIN32",
        "IS_MAC",
        "log",
        "runApp",
        "Process",
        "initializeProfile",
        "DIST_BIN",
        "DEFAULT_APP",
        "CERTS_SRC_DIR",
        "environment",
        "IS_TEST_BUILD",
        "IS_DEBUG_BUILD",
        "DEFAULT_TIMEOUT",
    ]
|
|
|
|
|
|
|
|
class Process(subprocess.Popen):
    """
    Represents our view of a subprocess.
    It adds a kill() method which allows it to be stopped explicitly.
    """

    def __init__(self,
                 args,
                 bufsize=0,
                 executable=None,
                 stdin=None,
                 stdout=None,
                 stderr=None,
                 preexec_fn=None,
                 close_fds=False,
                 shell=False,
                 cwd=None,
                 env=None,
                 universal_newlines=False,
                 startupinfo=None,
                 creationflags=0):
        # Log the full command line before launching so failures are traceable.
        _log.info("INFO | automation.py | Launching: %s", subprocess.list2cmdline(args))
        subprocess.Popen.__init__(self, args, bufsize, executable,
                                  stdin, stdout, stderr,
                                  preexec_fn, close_fds,
                                  shell, cwd, env,
                                  universal_newlines, startupinfo, creationflags)
        self.log = _log

    def kill(self):
        """Forcefully terminate this process (SIGKILL, taskkill, or kill.exe)."""
        if Automation().IS_WIN32:
            import platform
            pid = "%i" % self.pid
            if platform.release() == "2000":
                # Windows 2000 needs 'kill.exe' from the
                #'Windows 2000 Resource Kit tools'. (See bug 475455.)
                try:
                    subprocess.Popen(["kill", "-f", pid]).wait()
                # Fix: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit. Only a failure to spawn the
                # helper (OSError) should be reported here.
                except OSError:
                    self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Missing 'kill' utility to kill process with pid=%s. Kill it manually!", pid)
            else:
                # Windows XP and later.
                subprocess.Popen(["taskkill", "/F", "/PID", pid]).wait()
        else:
            os.kill(self.pid, signal.SIGKILL)
|
|
|
|
|
|
|
|
def readLocations(self, locationsPath = "server-locations.txt"):
    """
    Reads the locations at which the Mochitest HTTP server is available from
    server-locations.txt.

    Returns a list of Location objects. Raises SyntaxError (this module's
    class, not the builtin) on a malformed line, a duplicate "primary"
    option, or if no location is flagged "primary".
    """
    # Perhaps more detail than necessary, but it's the easiest way to make sure
    # we get exactly the format we want. See server-locations.txt for the exact
    # format guaranteed here.
    lineRe = re.compile(r"^(?P<scheme>[a-z][-a-z0-9+.]*)"
                        r"://"
                        r"(?P<host>"
                        r"\d+\.\d+\.\d+\.\d+"
                        r"|"
                        r"(?:[a-z0-9](?:[-a-z0-9]*[a-z0-9])?\.)*"
                        r"[a-z](?:[-a-z0-9]*[a-z0-9])?"
                        r")"
                        r":"
                        r"(?P<port>\d+)"
                        r"(?:"
                        r"\s+"
                        r"(?P<options>\S+(?:,\S+)*)"
                        r")?$")

    locationFile = codecs.open(locationsPath, "r", "UTF-8")
    try:
        locations = []
        lineno = 0
        seenPrimary = False
        for line in locationFile:
            lineno += 1
            # Skip comments and blank lines.
            if line.startswith("#") or line == "\n":
                continue

            match = lineRe.match(line)
            if not match:
                raise SyntaxError(lineno)

            options = match.group("options")
            if options:
                options = options.split(",")
                if "primary" in options:
                    if seenPrimary:
                        raise SyntaxError(lineno, "multiple primary locations")
                    seenPrimary = True
            else:
                options = []

            locations.append(Location(match.group("scheme"), match.group("host"),
                                      match.group("port"), options))
    finally:
        # Fix: the file handle was previously leaked (never closed, on both
        # the success and the exception paths).
        locationFile.close()

    if not seenPrimary:
        raise SyntaxError(lineno + 1, "missing primary location")

    return locations
|
2008-06-07 10:43:15 +04:00
|
|
|
|
2010-08-20 03:12:46 +04:00
|
|
|
def setupPermissionsDatabase(self, profileDir, permissions):
    """Create and populate <profileDir>/permissions.sqlite.

    permissions maps a permission type name to a list of (host, allow)
    pairs; a truthy allow stores ALLOW_ACTION (1), otherwise DENY_ACTION (2).

    Included for reftest compatibility;
    see https://bugzilla.mozilla.org/show_bug.cgi?id=688667
    """
    # Open database and create table
    permDB = sqlite3.connect(os.path.join(profileDir, "permissions.sqlite"))
    try:
        cursor = permDB.cursor()

        # Match the schema version nsPermissionManager expects.
        cursor.execute("PRAGMA user_version=4")

        # SQL copied from nsPermissionManager.cpp
        cursor.execute("""CREATE TABLE IF NOT EXISTS moz_hosts (
          id INTEGER PRIMARY KEY,
          host TEXT,
          type TEXT,
          permission INTEGER,
          expireType INTEGER,
          expireTime INTEGER,
          modificationTime INTEGER,
          appId INTEGER,
          isInBrowserElement INTEGER)""")

        # Insert desired permissions
        for perm, entries in permissions.items():
            for host, allow in entries:
                cursor.execute("INSERT INTO moz_hosts values(NULL, ?, ?, ?, 0, 0, 0, 0, 0)",
                               (host, perm, 1 if allow else 2))

        # Commit and close
        permDB.commit()
        cursor.close()
    finally:
        # Fix: the connection was previously left open (only the cursor was
        # closed), leaking the database handle.
        permDB.close()
|
|
|
|
|
2013-05-07 19:19:46 +04:00
|
|
|
def initializeProfile(self, profileDir,
                      extraPrefs=None,
                      useServerLocations=False,
                      prefsPath=_DEFAULT_PREFERENCE_FILE,
                      appsPath=_DEFAULT_APPS_FILE,
                      addons=None):
    """Sets up the standard testing profile.

    profileDir         -- directory in which the profile is created
    extraPrefs         -- iterable of "name=value" strings (--setpref style)
    useServerLocations -- if True, derive proxy/permission setup from
                          server-locations.txt; otherwise disable the proxy
    prefsPath          -- preference file to seed the profile with
    appsPath           -- JSON manifest of web apps to pre-install, if present
    addons             -- addons to install, passed straight to Profile

    Returns the mozprofile Profile object.
    """
    extraPrefs = extraPrefs or []

    # create the profile
    prefs = {}
    locations = None
    if useServerLocations:
        locations = ServerLocations()
        locations.read(os.path.abspath('server-locations.txt'), True)
    else:
        # No server locations configured: talk to the network directly.
        prefs['network.proxy.type'] = 0

    prefs.update(Preferences.read_prefs(prefsPath))

    # Layer --setpref-style overrides on top of the file-provided prefs.
    for v in extraPrefs:
        thispref = v.split("=", 1)
        if len(thispref) < 2:
            print "Error: syntax error in --setpref=" + v
            sys.exit(1)
        prefs[thispref[0]] = thispref[1]

    # Interpolate %(server)s placeholders in preference values with the real
    # host:port. Round-tripping the dict through JSON applies the %-format
    # to every value in one pass; Preferences.cast then restores the
    # natural types of the stringified values.
    interpolation = {"server": "%s:%s" % (self.webServer, self.httpPort)}
    prefs = json.loads(json.dumps(prefs) % interpolation)
    for pref in prefs:
        prefs[pref] = Preferences.cast(prefs[pref])

    # load apps
    apps = None
    if appsPath and os.path.exists(appsPath):
        with open(appsPath, 'r') as apps_file:
            apps = json.load(apps_file)

    proxy = {'remote': str(self.webServer),
             'http': str(self.httpPort),
             'https': str(self.sslPort),
             # use SSL port for legacy compatibility; see
             # - https://bugzilla.mozilla.org/show_bug.cgi?id=688667#c66
             # - https://bugzilla.mozilla.org/show_bug.cgi?id=899221
             # 'ws': str(self.webSocketPort)
             'ws': str(self.sslPort)
             }

    # return profile object
    profile = Profile(profile=profileDir,
                      addons=addons,
                      locations=locations,
                      preferences=prefs,
                      restore=False,
                      apps=apps,
                      proxy=proxy)
    return profile
|
2012-07-20 10:40:15 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
def fillCertificateDB(self, profileDir, certPath, utilityPath, xrePath):
    """Create the profile's NSS certificate DB and the ssltunnel config.

    Writes ssltunnel.cfg into profileDir, pre-creates the cert database
    with certutil, and imports every *.ca / *.client file found under
    certPath. Returns 0 on success, or the first failing certutil status.
    """
    # certutil requires a password file; an empty password (single newline)
    # is used for the test profile.
    pwfilePath = os.path.join(profileDir, ".crtdbpw")
    pwfile = open(pwfilePath, "w")
    pwfile.write("\n")
    pwfile.close()

    # Create head of the ssltunnel configuration file
    sslTunnelConfigPath = os.path.join(profileDir, "ssltunnel.cfg")
    sslTunnelConfig = open(sslTunnelConfigPath, "w")

    sslTunnelConfig.write("httpproxy:1\n")
    sslTunnelConfig.write("certdbdir:%s\n" % certPath)
    sslTunnelConfig.write("forward:127.0.0.1:%s\n" % self.httpPort)
    sslTunnelConfig.write("websocketserver:%s:%s\n" % (self.webServer, self.webSocketPort))
    sslTunnelConfig.write("listen:*:%s:pgo server certificate\n" % self.sslPort)

    # Configure automatic certificate and bind custom certificates, client authentication
    locations = self.readLocations()
    # The first entry is the primary location, already covered by the
    # wildcard "listen" line above.
    locations.pop(0)
    for loc in locations:
        if loc.scheme == "https" and "nocert" not in loc.options:
            # Per-location options: cert=<nickname>, clientauth=<mode>,
            # redir=<host>.
            customCertRE = re.compile("^cert=(?P<nickname>[0-9a-zA-Z_ ]+)")
            clientAuthRE = re.compile("^clientauth=(?P<clientauth>[a-z]+)")
            redirRE = re.compile("^redir=(?P<redirhost>[0-9a-zA-Z_ .]+)")
            for option in loc.options:
                match = customCertRE.match(option)
                if match:
                    customcert = match.group("nickname");
                    sslTunnelConfig.write("listen:%s:%s:%s:%s\n" %
                                          (loc.host, loc.port, self.sslPort, customcert))

                match = clientAuthRE.match(option)
                if match:
                    clientauth = match.group("clientauth");
                    sslTunnelConfig.write("clientauth:%s:%s:%s:%s\n" %
                                          (loc.host, loc.port, self.sslPort, clientauth))

                match = redirRE.match(option)
                if match:
                    redirhost = match.group("redirhost")
                    sslTunnelConfig.write("redirhost:%s:%s:%s:%s\n" %
                                          (loc.host, loc.port, self.sslPort, redirhost))

    sslTunnelConfig.close()

    # Pre-create the certification database for the profile
    env = self.environment(xrePath = xrePath)
    certutil = os.path.join(utilityPath, "certutil" + self.BIN_SUFFIX)
    pk12util = os.path.join(utilityPath, "pk12util" + self.BIN_SUFFIX)

    status = self.Process([certutil, "-N", "-d", profileDir, "-f", pwfilePath], env = env).wait()
    automationutils.printstatus(status, "certutil")
    if status != 0:
        return status

    # Walk the cert directory and add custom CAs and client certs
    files = os.listdir(certPath)
    for item in files:
        root, ext = os.path.splitext(item)
        if ext == ".ca":
            trustBits = "CT,,"
            # "-object" CAs are also trusted for object signing.
            if root.endswith("-object"):
                trustBits = "CT,,CT"
            status = self.Process([certutil, "-A", "-i", os.path.join(certPath, item),
                                   "-d", profileDir, "-f", pwfilePath, "-n", root, "-t", trustBits],
                                  env = env).wait()
            automationutils.printstatus(status, "certutil")
        if ext == ".client":
            status = self.Process([pk12util, "-i", os.path.join(certPath, item), "-w",
                                   pwfilePath, "-d", profileDir],
                                  env = env).wait()
            automationutils.printstatus(status, "pk12util")

    # The password file is only needed while running the NSS tools.
    os.unlink(pwfilePath)
    return 0
|
|
|
|
|
2014-06-20 20:08:30 +04:00
|
|
|
def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, dmdPath=None, lsanPath=None):
    """Build the environment dict used to launch the browser.

    env           -- base environment (defaults to a copy of os.environ)
    xrePath       -- XRE/binaries directory (defaults to DIST_BIN); added to
                     the platform's library search path
    crashreporter -- enable the crash reporter (disabled under a debugger)
    debugger      -- True when the app will run under a debugger
    dmdPath       -- directory holding the DMD replace-malloc library, if any
    lsanPath      -- accepted for interface compatibility; not used here

    Returns the (mutated) env dict.
    """
    if xrePath == None:
        xrePath = self.DIST_BIN
    if env == None:
        env = dict(os.environ)

    ldLibraryPath = os.path.abspath(os.path.join(SCRIPT_DIR, xrePath))
    dmdLibrary = None
    preloadEnvVar = None
    if self.UNIXISH or self.IS_MAC:
        envVar = "LD_LIBRARY_PATH"
        preloadEnvVar = "LD_PRELOAD"
        if self.IS_MAC:
            envVar = "DYLD_LIBRARY_PATH"
            dmdLibrary = "libdmd.dylib"
        else: # unixish
            env['MOZILLA_FIVE_HOME'] = xrePath
            dmdLibrary = "libdmd.so"
        # Prepend our path to any pre-existing library search path.
        if envVar in env:
            ldLibraryPath = ldLibraryPath + ":" + env[envVar]
        env[envVar] = ldLibraryPath
    elif self.IS_WIN32:
        env["PATH"] = env["PATH"] + ";" + str(ldLibraryPath)
        dmdLibrary = "dmd.dll"
        preloadEnvVar = "MOZ_REPLACE_MALLOC_LIB"

    # Inject the DMD replace-malloc library when requested and supported.
    if dmdPath and dmdLibrary and preloadEnvVar:
        env[preloadEnvVar] = os.path.join(dmdPath, dmdLibrary)

    # Crash reporting: collect minidumps locally but never submit them;
    # under a debugger the reporter is disabled entirely.
    if crashreporter and not debugger:
        env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
        env['MOZ_CRASHREPORTER'] = '1'
    else:
        env['MOZ_CRASHREPORTER_DISABLE'] = '1'

    # Crash on non-local network connections by default.
    # MOZ_DISABLE_NONLOCAL_CONNECTIONS can be set to "0" to temporarily
    # enable non-local connections for the purposes of local testing. Don't
    # override the user's choice here. See bug 1049688.
    env.setdefault('MOZ_DISABLE_NONLOCAL_CONNECTIONS', '1')

    # Suppress OS-level crash dialogs so a crash can't hang the run.
    env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
    env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'

    # Set WebRTC logging in case it is not set yet
    env.setdefault('NSPR_LOG_MODULES', 'signaling:5,mtransport:5,datachannel:5,jsep:5,MediaPipelineFactory:5')
    env.setdefault('R_LOG_LEVEL', '6')
    env.setdefault('R_LOG_DESTINATION', 'stderr')
    env.setdefault('R_LOG_VERBOSE', '1')

    # ASan specific environment stuff
    if self.IS_ASAN and (self.IS_LINUX or self.IS_MAC):
        # Symbolizer support
        llvmsym = os.path.join(xrePath, "llvm-symbolizer")
        if os.path.isfile(llvmsym):
            env["ASAN_SYMBOLIZER_PATH"] = llvmsym
            self.log.info("INFO | automation.py | ASan using symbolizer at %s", llvmsym)
        else:
            self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Failed to find ASan symbolizer at %s", llvmsym)

        try:
            # Second field of the second line of `free` output is total RAM
            # in KiB.
            totalMemory = int(os.popen("free").readlines()[1].split()[1])

            # Only 4 GB RAM or less available? Use custom ASan options to reduce
            # the amount of resources required to do the tests. Standard options
            # will otherwise lead to OOM conditions on the current test slaves.
            if totalMemory <= 1024 * 1024 * 4:
                self.log.info("INFO | automation.py | ASan running in low-memory configuration")
                env["ASAN_OPTIONS"] = "quarantine_size=50331648:malloc_context_size=5"
            else:
                self.log.info("INFO | automation.py | ASan running in default memory configuration")
        except OSError,err:
            self.log.info("Failed determine available memory, disabling ASan low-memory configuration: %s", err.strerror)
        except:
            # Deliberate best-effort fallback: any parse failure of the
            # `free` output just means we keep the default configuration.
            self.log.info("Failed determine available memory, disabling ASan low-memory configuration")

    return env
|
|
|
|
|
2013-08-30 22:27:23 +04:00
|
|
|
def killPid(self, pid):
    """Forcefully kill *pid*, logging (rather than raising) on failure.

    SIGKILL does not exist on Windows, so fall back to SIGTERM there
    (os.kill with SIGTERM terminates the process on Windows).
    """
    try:
        os.kill(pid, getattr(signal, "SIGKILL", signal.SIGTERM))
    except OSError:
        # Fix: this previously caught WindowsError, which is undefined on
        # non-Windows platforms — a failed os.kill there raised NameError
        # instead of being handled. WindowsError subclasses OSError, so
        # catching OSError covers both platforms.
        self.log.info("Failed to kill process %d." % pid)
|
2013-08-30 22:27:23 +04:00
|
|
|
|
2010-01-15 20:22:54 +03:00
|
|
|
# readWithTimeout/isPidAlive have platform-specific implementations,
# selected once at class-definition time based on the build platform.
if IS_WIN32:
    PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
    GetLastError = ctypes.windll.kernel32.GetLastError

    def readWithTimeout(self, f, timeout):
        """
        Try to read a line of output from the file object |f|. |f| must be a
        pipe, like the |stdout| member of a subprocess.Popen object created
        with stdout=PIPE. Returns a tuple (line, did_timeout), where |did_timeout|
        is True if the read timed out, and False otherwise. If no output is
        received within |timeout| seconds, returns a blank line.
        """
        if timeout is None:
            # timeout == 0 means "poll forever" in the loop condition below.
            timeout = 0

        x = msvcrt.get_osfhandle(f.fileno())
        l = ctypes.c_long()
        done = time.time() + timeout

        buffer = ""
        while timeout == 0 or time.time() < done:
            # PeekNamedPipe tells us how many bytes are available without
            # blocking; a zero return is an error.
            if self.PeekNamedPipe(x, None, 0, None, ctypes.byref(l), None) == 0:
                err = self.GetLastError()
                if err == 38 or err == 109: # ERROR_HANDLE_EOF || ERROR_BROKEN_PIPE
                    return ('', False)
                else:
                    self.log.error("readWithTimeout got error: %d", err)
            # read a character at a time, checking for eol. Return once we get there.
            index = 0
            while index < l.value:
                char = f.read(1)
                buffer += char
                if char == '\n':
                    return (buffer, False)
                index = index + 1
            time.sleep(0.01)
        return (buffer, True)

    def isPidAlive(self, pid):
        """Return True if process |pid| is still running (Windows)."""
        STILL_ACTIVE = 259
        PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
        pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
        if not pHandle:
            # OpenProcess failing is treated as "process gone".
            return False
        pExitCode = ctypes.wintypes.DWORD()
        ctypes.windll.kernel32.GetExitCodeProcess(pHandle, ctypes.byref(pExitCode))
        ctypes.windll.kernel32.CloseHandle(pHandle)
        # A live process reports the sentinel exit code STILL_ACTIVE.
        return pExitCode.value == STILL_ACTIVE
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2008-02-22 00:08:39 +03:00
|
|
|
else:

    def readWithTimeout(self, f, timeout):
        """Try to read a line of output from the file object |f|. If no output
        is received within |timeout| seconds, return a blank line.
        Returns a tuple (line, did_timeout), where |did_timeout| is True
        if the read timed out, and False otherwise."""
        # select() blocks until f is readable or the timeout elapses.
        (r, w, e) = select.select([f], [], [], timeout)
        if len(r) == 0:
            return ('', True)
        return (f.readline(), False)

    def isPidAlive(self, pid):
        """Return True if process |pid| is still running (POSIX)."""
        try:
            # kill(pid, 0) checks for a valid PID without actually sending a signal
            # The method throws OSError if the PID is invalid, which we catch below.
            os.kill(pid, 0)

            # Wait on it to see if it's a zombie. This can throw OSError.ECHILD if
            # the process terminates before we get to this point.
            wpid, wstatus = os.waitpid(pid, os.WNOHANG)
            # waitpid returns 0 while the child is still running.
            return wpid == 0
        except OSError, err:
            # Catch the errors we might expect from os.kill/os.waitpid,
            # and re-raise any others
            if err.errno == errno.ESRCH or err.errno == errno.ECHILD:
                return False
            raise
|
|
|
|
|
2011-11-02 18:56:35 +04:00
|
|
|
def dumpScreen(self, utilityPath):
    """Capture a screenshot via the screenshot utility, at most once per run."""
    if not self.haveDumpedScreen:
        # First failure this run: record the flag, then take the shot.
        self.haveDumpedScreen = True
        automationutils.dumpScreen(utilityPath)
    else:
        self.log.info("Not taking screenshot here: see the one that was previously logged")
|
2011-11-02 18:56:35 +04:00
|
|
|
|
2010-08-23 05:27:26 +04:00
|
|
|
|
2013-04-03 23:45:26 +04:00
|
|
|
def killAndGetStack(self, processPID, utilityPath, debuggerInfo):
    """Kill the process, preferrably in a way that gets us a stack trace.
    Also attempts to obtain a screenshot before killing the process."""
    # Skip the screenshot when a debugger is attached — it would interfere.
    if not debuggerInfo:
        self.dumpScreen(utilityPath)
    self.killAndGetStackNoScreenshot(processPID, utilityPath, debuggerInfo)
|
2010-08-23 05:27:26 +04:00
|
|
|
|
2012-12-20 12:43:19 +04:00
|
|
|
def killAndGetStackNoScreenshot(self, processPID, utilityPath, debuggerInfo):
    """Kill the process, preferrably in a way that gets us a stack trace."""
    if self.CRASHREPORTER and not debuggerInfo:
        if self.IS_WIN32:
            # We should have a "crashinject" program in our utility path
            crashinject = os.path.normpath(os.path.join(utilityPath, "crashinject.exe"))
            if os.path.exists(crashinject):
                status = subprocess.Popen([crashinject, str(processPID)]).wait()
                automationutils.printstatus(status, "crashinject")
                if status == 0:
                    return
        else:
            # ABRT will get picked up by Breakpad's signal handler
            os.kill(processPID, signal.SIGABRT)
            return
    # No crash-reporter route available; fall back to a plain kill.
    self.log.info("Can't trigger Breakpad, just killing process")
    self.killPid(processPID)
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2012-08-03 14:36:59 +04:00
|
|
|
def waitForFinish(self, proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath):
    """
    Look for timeout or crashes and return the status after the process terminates.

    proc         - the launched application process; proc.stdout is None when an
                   interactive debugger owns the output
    utilityPath  - directory containing the stack-fixer helper scripts
    timeout      - seconds of output silence tolerated before declaring a hang
    maxTime      - overall wall-clock budget for the run, in seconds (falsy = unlimited)
    startTime    - datetime captured at launch, compared against maxTime
    debuggerInfo - truthy when a debugger is attached; suppresses the timeout screenshot
    symbolsPath  - breakpad symbols directory; enables bpsyms-based stack fixing
    """
    # Per-line filter that symbolicates raw stack addresses in the output;
    # stays None when no fixer applies (opt build, no symbols, unknown OS).
    stackFixerFunction = None
    didTimeout = False
    hitMaxTime = False
    if proc.stdout is None:
        # Debugger owns stdout/stderr; we cannot monitor output, so the
        # timeout/stack-fixing machinery below is skipped entirely.
        self.log.info("TEST-INFO: Not logging stdout or stderr due to debugger connection")
    else:
        logsource = proc.stdout

        if self.IS_DEBUG_BUILD and symbolsPath and os.path.exists(symbolsPath):
            # Run each line through a function in fix_stack_using_bpsyms.py (uses breakpad symbol files)
            # This method is preferred for Tinderbox builds, since native symbols may have been stripped.
            sys.path.insert(0, utilityPath)
            import fix_stack_using_bpsyms as stackFixerModule
            stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line, symbolsPath)
            del sys.path[0]
        elif self.IS_DEBUG_BUILD and self.IS_MAC:
            # Run each line through a function in fix_macosx_stack.py (uses atos)
            sys.path.insert(0, utilityPath)
            import fix_macosx_stack as stackFixerModule
            stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line)
            del sys.path[0]
        elif self.IS_DEBUG_BUILD and self.IS_LINUX:
            # Run each line through a function in fix_linux_stack.py (uses addr2line)
            # This method is preferred for developer machines, so we don't have to run "make buildsymbols".
            sys.path.insert(0, utilityPath)
            import fix_linux_stack as stackFixerModule
            stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line)
            del sys.path[0]

        # With metro browser runs this script launches the metro test harness which launches the browser.
        # The metro test harness hands back the real browser process id via log output which we need to
        # pick up on and parse out. This variable tracks the real browser process id if we find it.
        # NOTE(review): no code in this method ever reassigns it from log output,
        # so it falls back to proc.pid below — confirm against subclasses.
        browserProcessId = -1

        # Pump output line by line; readWithTimeout flags a hang when nothing
        # arrives within |timeout| seconds.
        (line, didTimeout) = self.readWithTimeout(logsource, timeout)
        while line != "" and not didTimeout:
            if stackFixerFunction:
                line = stackFixerFunction(line)
            self.log.info(line.rstrip().decode("UTF-8", "ignore"))
            # Track the most recent test so failures can be attributed to it.
            if "TEST-START" in line and "|" in line:
                self.lastTestSeen = line.split("|")[1].strip()
            if not debuggerInfo and "TEST-UNEXPECTED-FAIL" in line and "Test timed out" in line:
                self.dumpScreen(utilityPath)

            (line, didTimeout) = self.readWithTimeout(logsource, timeout)

            # Enforce the overall wall-clock budget; only fire once (hitMaxTime)
            # and keep draining output so the crash stack gets logged.
            if not hitMaxTime and maxTime and datetime.now() - startTime > timedelta(seconds = maxTime):
                # Kill the application.
                hitMaxTime = True
                self.log.info("TEST-UNEXPECTED-FAIL | %s | application ran for longer than allowed maximum time of %d seconds", self.lastTestSeen, int(maxTime))
                self.killAndGetStack(proc.pid, utilityPath, debuggerInfo)
        if didTimeout:
            # Flush the partial line read before the hang was detected.
            if line:
                self.log.info(line.rstrip().decode("UTF-8", "ignore"))
            self.log.info("TEST-UNEXPECTED-FAIL | %s | application timed out after %d seconds with no output", self.lastTestSeen, int(timeout))
            if browserProcessId == -1:
                browserProcessId = proc.pid
            self.killAndGetStack(browserProcessId, utilityPath, debuggerInfo)

    status = proc.wait()
    automationutils.printstatus(status, "Main app process")
    if status == 0:
        self.lastTestSeen = "Main app process exited normally"
    # A nonzero exit is only unexpected if we did not already kill the app
    # ourselves for a timeout or for exceeding maxTime.
    if status != 0 and not didTimeout and not hitMaxTime:
        self.log.info("TEST-UNEXPECTED-FAIL | %s | Exited with code %d during test run", self.lastTestSeen, status)
    return status
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs):
    """Assemble the executable path and argument list used to launch the app.

    Returns (cmd, args): the binary to exec and its argument vector.
    When a debugger is requested, the debugger becomes the command and the
    application binary moves into the argument list.
    """
    binary = os.path.abspath(app)
    # Prefer 'app-bin' in case 'app' is a shell script.
    # We can remove this hack once bug 673899 etc are fixed.
    if self.IS_MAC and os.path.exists(binary + "-bin"):
        binary = binary + "-bin"

    argv = []

    if debuggerInfo:
        # Run under the debugger: its own flags come first, then the app.
        argv += debuggerInfo.args
        argv += [binary]
        binary = os.path.abspath(debuggerInfo.path)

    if self.IS_MAC:
        argv += ["-foreground"]

    # The profile path must end with a separator; on cygwin it also has to
    # be translated to a native Windows path.
    if self.IS_CYGWIN:
        profilePath = commands.getoutput("cygpath -w \"" + profileDir + "/\"")
    else:
        profilePath = profileDir + "/"

    argv += ["-no-remote", "-profile", profilePath]
    if testURL is not None:
        argv += [testURL]
    argv += list(extraArgs)

    return binary, argv
|
|
|
|
|
2013-04-03 23:45:26 +04:00
|
|
|
def checkForZombies(self, processLog, utilityPath, debuggerInfo):
    """
    Look for hung (zombie) child processes recorded in |processLog|.

    processLog   - path to the MOZ_PROCESS_LOG file written by the app,
                   containing "launched child process <pid>" lines
    utilityPath  - passed through to killAndGetStack for stack extraction
    debuggerInfo - passed through to killAndGetStack

    Returns True when a zombie was found (or the log is missing, which is
    also treated as a failure), False otherwise.
    """
    if not os.path.exists(processLog):
        self.log.info('Automation Error: PID log not found: %s', processLog)
        # Whilst no hung process was found, the run should still display as a failure
        return True

    foundZombie = False
    self.log.info('INFO | zombiecheck | Reading PID log: %s', processLog)
    processList = []
    pidRE = re.compile(r'launched child process (\d+)$')
    # Use a context manager so the log file is closed even if parsing raises
    # (the previous manual open/close leaked the handle on error).
    with open(processLog) as processLogFD:
        for line in processLogFD:
            self.log.info(line.rstrip())
            m = pidRE.search(line)
            if m:
                processList.append(int(m.group(1)))

    for processPID in processList:
        self.log.info("INFO | zombiecheck | Checking for orphan process with PID: %d", processPID)
        if self.isPidAlive(processPID):
            foundZombie = True
            self.log.info("TEST-UNEXPECTED-FAIL | zombiecheck | child process %d still alive after shutdown", processPID)
            self.killAndGetStack(processPID, utilityPath, debuggerInfo)

    return foundZombie
|
2010-01-15 20:22:54 +03:00
|
|
|
|
2013-04-12 19:18:14 +04:00
|
|
|
def checkForCrashes(self, minidumpDir, symbolsPath):
    """Delegate crash-dump detection to mozcrash for |minidumpDir|.

    The crash, if any, is attributed to the last test we saw starting.
    Returns whatever mozcrash.check_for_crashes reports.
    """
    lastTest = self.lastTestSeen
    return mozcrash.check_for_crashes(minidumpDir,
                                      symbolsPath,
                                      test_name=lastTest)
|
2011-09-21 18:27:16 +04:00
|
|
|
|
2010-01-19 22:45:04 +03:00
|
|
|
def runApp(self, testURL, env, app, profileDir, extraArgs,
           runSSLTunnel = False, utilityPath = None,
           xrePath = None, certPath = None,
           debuggerInfo = None, symbolsPath = None,
           timeout = -1, maxTime = None, onLaunch = None,
           detectShutdownLeaks = False, screenshotOnFail=False, testPath=None, bisectChunk=None):
    """
    Run the app, log the duration it took to execute, return the status code.
    Kills the app if it runs for longer than |maxTime| seconds, or outputs nothing for |timeout| seconds.

    testURL      - URL to open, appended to the command line (may be None)
    env          - environment dict for the child; copied, never mutated
    app          - path to the application binary
    profileDir   - profile directory to run against
    extraArgs    - extra command-line arguments appended after the URL
    runSSLTunnel - start ssltunnel for https:// support (test builds only)
    debuggerInfo - debugger descriptor; interactive debuggers disable
                   output capture and timeouts
    onLaunch     - optional callback fired right after the app is spawned

    NOTE(review): detectShutdownLeaks, screenshotOnFail, testPath and
    bisectChunk are accepted but not referenced in this method body —
    presumably consumed by overriding subclasses; confirm before removing.
    """

    # Fall back to instance-level defaults for any path not supplied.
    if utilityPath == None:
        utilityPath = self.DIST_BIN
    if xrePath == None:
        xrePath = self.DIST_BIN
    if certPath == None:
        certPath = self.CERTS_SRC_DIR
    if timeout == -1:
        timeout = self.DEFAULT_TIMEOUT

    # copy env so we don't munge the caller's environment
    env = dict(env);
    env["NO_EM_RESTART"] = "1"
    # The child records every process it launches into this log; it is
    # scanned for zombies after the run and deleted below.
    tmpfd, processLog = tempfile.mkstemp(suffix='pidlog')
    os.close(tmpfd)
    env["MOZ_PROCESS_LOG"] = processLog

    if self.IS_TEST_BUILD and runSSLTunnel:
        # create certificate database for the profile
        certificateStatus = self.fillCertificateDB(profileDir, certPath, utilityPath, xrePath)
        if certificateStatus != 0:
            self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Certificate integration failed")
            return certificateStatus

        # start ssltunnel to provide https:// URLs capability
        ssltunnel = os.path.join(utilityPath, "ssltunnel" + self.BIN_SUFFIX)
        ssltunnelProcess = self.Process([ssltunnel,
                                         os.path.join(profileDir, "ssltunnel.cfg")],
                                        env = self.environment(xrePath = xrePath))
        self.log.info("INFO | automation.py | SSL tunnel pid: %d", ssltunnelProcess.pid)

    cmd, args = self.buildCommandLine(app, debuggerInfo, profileDir, testURL, extraArgs)
    startTime = datetime.now()

    if debuggerInfo and debuggerInfo.interactive:
        # If an interactive debugger is attached, don't redirect output,
        # don't use timeouts, and don't capture ctrl-c.
        timeout = None
        maxTime = None
        outputPipe = None
        signal.signal(signal.SIGINT, lambda sigid, frame: None)
    else:
        outputPipe = subprocess.PIPE

    self.lastTestSeen = "automation.py"
    # stderr is merged into stdout so waitForFinish sees a single stream.
    proc = self.Process([cmd] + args,
                        env = self.environment(env, xrePath = xrePath,
                                               crashreporter = not debuggerInfo),
                        stdout = outputPipe,
                        stderr = subprocess.STDOUT)
    self.log.info("INFO | automation.py | Application pid: %d", proc.pid)

    if onLaunch is not None:
        # Allow callers to specify an onLaunch callback to be fired after the
        # app is launched.
        onLaunch()

    status = self.waitForFinish(proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath)
    self.log.info("INFO | automation.py | Application ran for: %s", str(datetime.now() - startTime))

    # Do a final check for zombie child processes.
    zombieProcesses = self.checkForZombies(processLog, utilityPath, debuggerInfo)

    crashed = self.checkForCrashes(os.path.join(profileDir, "minidumps"), symbolsPath)

    # A crash or a lingering child marks the run as failed even if the main
    # process exited cleanly.
    if crashed or zombieProcesses:
        status = 1

    if os.path.exists(processLog):
        os.unlink(processLog)

    if self.IS_TEST_BUILD and runSSLTunnel:
        ssltunnelProcess.kill()

    return status
|
2010-06-24 13:32:01 +04:00
|
|
|
|
2011-10-14 19:45:58 +04:00
|
|
|
def getExtensionIDFromRDF(self, rdfSource):
|
|
|
|
"""
|
|
|
|
Retrieves the extension id from an install.rdf file (or string).
|
|
|
|
"""
|
|
|
|
from xml.dom.minidom import parse, parseString, Node
|
|
|
|
|
|
|
|
if isinstance(rdfSource, file):
|
|
|
|
document = parse(rdfSource)
|
|
|
|
else:
|
|
|
|
document = parseString(rdfSource)
|
|
|
|
|
|
|
|
# Find the <em:id> element. There can be multiple <em:id> tags
|
|
|
|
# within <em:targetApplication> tags, so we have to check this way.
|
|
|
|
for rdfChild in document.documentElement.childNodes:
|
|
|
|
if rdfChild.nodeType == Node.ELEMENT_NODE and rdfChild.tagName == "Description":
|
|
|
|
for descChild in rdfChild.childNodes:
|
|
|
|
if descChild.nodeType == Node.ELEMENT_NODE and descChild.tagName == "em:id":
|
|
|
|
return descChild.childNodes[0].data
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
2011-04-14 15:03:47 +04:00
|
|
|
def installExtension(self, extensionSource, profileDir, extensionID = None):
    """
    Copies an extension into the extensions directory of the given profile.
    extensionSource - the source location of the extension files. This can be either
                      a directory or a path to an xpi file.
    profileDir - the profile directory we are copying into. We will create the
                 "extensions" directory there if it doesn't exist.
    extensionID - the id of the extension to be used as the containing directory for the
                  extension, if extensionSource is a directory, i.e.
                  this is the name of the folder in the <profileDir>/extensions/<extensionID>
    """
    if not os.path.isdir(profileDir):
        self.log.info("INFO | automation.py | Cannot install extension, invalid profileDir at: %s", profileDir)
        return

    installRDFFilename = "install.rdf"

    extensionsRootDir = os.path.join(profileDir, "extensions", "staged")
    if not os.path.isdir(extensionsRootDir):
        os.makedirs(extensionsRootDir)

    if os.path.isfile(extensionSource):
        # xpi file: unpack into the staging directory.
        # Use a `with` block so the zip handle is always closed — the
        # previous code leaked it, including on the early returns below.
        with zipfile.ZipFile(extensionSource, "r") as reader:
            for filename in reader.namelist():
                # Sanity check the zip file.
                if os.path.isabs(filename):
                    self.log.info("INFO | automation.py | Cannot install extension, bad files in xpi")
                    return

                # We may need to dig the extensionID out of the zip file...
                if extensionID is None and filename == installRDFFilename:
                    extensionID = self.getExtensionIDFromRDF(reader.read(filename))

            # We must know the extensionID now.
            if extensionID is None:
                self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
                return

            # Make the extension directory.
            extensionDir = os.path.join(extensionsRootDir, extensionID)
            os.mkdir(extensionDir)

            # Extract all files.
            reader.extractall(extensionDir)

    elif os.path.isdir(extensionSource):
        if extensionID is None:
            filename = os.path.join(extensionSource, installRDFFilename)
            if os.path.isfile(filename):
                with open(filename, "r") as installRDF:
                    extensionID = self.getExtensionIDFromRDF(installRDF)

            if extensionID is None:
                self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
                return

        # Copy extension tree into its own directory.
        # "destination directory must not already exist".
        shutil.copytree(extensionSource, os.path.join(extensionsRootDir, extensionID))

    else:
        self.log.info("INFO | automation.py | Cannot install extension, invalid extensionSource at: %s", extensionSource)
|
2013-01-04 05:37:26 +04:00
|
|
|
|
|
|
|
def elf_arm(self, filename):
    """Return True when |filename| is an ELF binary whose e_machine byte
    (offset 18) identifies ARM (EM_ARM == 40).

    Only the first 20 bytes are read: the ELF magic plus the byte at
    offset 18 (low byte of e_machine on little-endian ELF headers).
    """
    # Close the file deterministically; the original leaked the handle
    # until garbage collection.
    with open(filename, 'rb') as f:
        data = f.read(20)
    # A file starting with the magic but shorter than 19 bytes used to
    # raise IndexError; treat it as "not ARM ELF" instead.
    if len(data) < 19 or data[:4] != b"\x7fELF":
        return False
    machine = data[18]
    if not isinstance(machine, int):
        # Python 2: indexing bytes yields a 1-char str.
        machine = ord(machine)
    return machine == 40  # EM_ARM
|
|
|
|
|