2009-09-06 05:08:49 +04:00
|
|
|
#
|
2012-05-21 15:12:37 +04:00
|
|
|
# This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
2009-09-06 05:08:49 +04:00
|
|
|
|
2011-11-15 08:33:21 +04:00
|
|
|
from __future__ import with_statement
|
|
|
|
import glob, logging, os, platform, shutil, subprocess, sys, tempfile, urllib2, zipfile
|
2013-10-06 02:28:34 +04:00
|
|
|
import base64
|
2009-09-06 05:08:49 +04:00
|
|
|
import re
|
2014-03-26 14:14:51 +04:00
|
|
|
import os
|
2010-05-06 16:06:09 +04:00
|
|
|
from urlparse import urlparse
|
2013-11-21 00:53:08 +04:00
|
|
|
from operator import itemgetter
|
2014-03-26 14:14:51 +04:00
|
|
|
import signal
|
2009-07-22 23:24:54 +04:00
|
|
|
|
2013-09-23 18:47:48 +04:00
|
|
|
# Import mozinfo for platform detection; on platforms where it is not
# importable, synthesize a minimal stand-in object with the same attributes.
try:
  import mozinfo
except ImportError:
  # Stub out fake mozinfo since this is not importable on Android 4.0 Opt.
  # This should be fixed; see
  # https://bugzilla.mozilla.org/show_bug.cgi?id=650881
  mozinfo = type('mozinfo', (), dict(info={}))()
  mozinfo.isWin = mozinfo.isLinux = mozinfo.isUnix = mozinfo.isMac = False

  # TODO! FILE: localautomation :/
  # mapping from would-be mozinfo attr <-> sys.platform
  mapping = {'isMac': ['mac', 'darwin'],
             'isLinux': ['linux', 'linux2'],
             'isWin': ['win32', 'win64'],
             }
  # Invert the table into sys.platform value -> mozinfo attribute name.
  mapping = dict(sum([[(value, key) for value in values] for key, values in mapping.items()], []))
  attr = mapping.get(sys.platform)
  if attr:
    setattr(mozinfo, attr, True)
  if mozinfo.isLinux:
    # Linux is also a Unix, mirroring real mozinfo behaviour.
    mozinfo.isUnix = True
2009-07-22 23:24:54 +04:00
|
|
|
# Public API of this module; each name is defined below.
__all__ = [
  "ZipFileReader",
  "addCommonOptions",
  "dumpLeakLog",
  "isURL",
  "processLeakLog",
  "getDebuggerInfo",
  "DEBUGGER_INFO",
  "replaceBackSlashes",
  "wrapCommand",
  'KeyValueParseError',
  'parseKeyValue',
  'systemMemory',
  'environment',
  'dumpScreen',
  "ShutdownLeaks"
  ]
|
2009-07-22 23:24:54 +04:00
|
|
|
|
2009-10-20 03:12:09 +04:00
|
|
|
# Map of debugging programs to information about them, like default arguments
# and whether or not they are interactive.
# Keys are lower-cased debugger executable basenames (see getDebuggerInfo);
# per-debugger keys: "interactive" (bool), "args" (string of default args),
# and optionally "requiresEscapedArgs" (bool).
DEBUGGER_INFO = {
  # gdb requires that you supply the '--args' flag in order to pass arguments
  # after the executable name to the executable.
  "gdb": {
    "interactive": True,
    "args": "-q --args"
  },

  # cgdb is a curses front end to gdb and takes the same flags.
  "cgdb": {
    "interactive": True,
    "args": "-q --args"
  },

  "lldb": {
    "interactive": True,
    "args": "--",
    "requiresEscapedArgs": True
  },

  # valgrind doesn't explain much about leaks unless you set the
  # '--leak-check=full' flag. But there are a lot of objects that are
  # semi-deliberately leaked, so we set '--show-possibly-lost=no' to avoid
  # uninteresting output from those objects. We set '--smc-check==all-non-file'
  # and '--vex-iropt-register-updates=allregs-at-mem-access' so that valgrind
  # deals properly with JIT'd JavaScript code.
  "valgrind": {
    "interactive": False,
    "args": " ".join(["--leak-check=full",
                      "--show-possibly-lost=no",
                      "--smc-check=all-non-file",
                      "--vex-iropt-register-updates=allregs-at-mem-access"])
  }
}
|
|
|
|
|
2011-11-15 08:33:21 +04:00
|
|
|
class ZipFileReader(object):
  """
  Thin wrapper around zipfile.ZipFile that backfills extract()/extractall()
  for old Python (2.5) whose zipfile module lacks them.  Exposes only the
  operations the harness actually uses.
  """

  def __init__(self, filename):
    self._zipfile = zipfile.ZipFile(filename, "r")

  def __del__(self):
    self._zipfile.close()

  def _getnormalizedpath(self, path):
    """
    Return 'path' (or the current working directory when 'path' is None)
    normalized and user-expanded.  Asserts that the result is a directory.
    """
    target = os.curdir if path is None else path
    target = os.path.normpath(os.path.expanduser(target))
    assert os.path.isdir(target)
    return target

  def _extractname(self, name, path):
    """
    Extract archive member 'name' below directory 'path', creating any
    intermediate directories along the way.
    """
    destination = os.path.normpath(os.path.join(path, name))
    if name.endswith("/"):
      # Directory entry: only the directory itself needs creating.
      os.makedirs(destination)
      return
    parent = os.path.split(destination)[0]
    if not os.path.isdir(parent):
      os.makedirs(parent)
    with open(destination, "wb") as dest:
      dest.write(self._zipfile.read(name))

  def namelist(self):
    """Return the list of member names in the archive."""
    return self._zipfile.namelist()

  def read(self, name):
    """Return the raw contents of member 'name'."""
    return self._zipfile.read(name)

  def extract(self, name, path = None):
    """Extract member 'name' under 'path' (default: current directory)."""
    if hasattr(self._zipfile, "extract"):
      return self._zipfile.extract(name, path)

    # This will throw if name is not part of the zip file.
    self._zipfile.getinfo(name)

    self._extractname(name, self._getnormalizedpath(path))

  def extractall(self, path = None):
    """Extract every member under 'path' (default: current directory)."""
    if hasattr(self._zipfile, "extractall"):
      return self._zipfile.extractall(path)

    target = self._getnormalizedpath(path)

    for member in self._zipfile.namelist():
      self._extractname(member, target)
2009-07-22 23:24:54 +04:00
|
|
|
# Root logger shared by all helpers in this module.
log = logging.getLogger()
|
|
|
|
|
2010-05-06 16:06:09 +04:00
|
|
|
def isURL(thing):
  """Return True if |thing| looks like a URL."""
  # We want to download URLs like http://... but not Windows paths like c:\...
  # A drive letter parses as a one-character scheme, while real URL schemes
  # (http, https, ftp, ...) are at least two characters long.
  scheme = urlparse(thing).scheme
  return len(scheme) >= 2
|
2010-05-06 16:06:09 +04:00
|
|
|
|
2014-03-26 14:14:51 +04:00
|
|
|
# Python does not provide strsignal() even in the very latest 3.x.
# This is a reasonable fake.
def strsig(n):
  """Return a symbolic name for signal number *n* (e.g. "SIGTERM").

  Out-of-range numbers yield an "out-of-range signal, number N" string and
  unnamed in-range numbers yield "unrecognized signal, number N".

  The name table is built once and cached on the function itself; the
  original rebuilt the entire table on every call.
  """
  _sigtbl = getattr(strsig, "_sigtbl", None)
  if _sigtbl is None:
    # Signal numbers run 0 through NSIG-1; an array with NSIG members
    # has exactly that many slots
    _sigtbl = [None]*signal.NSIG
    for k in dir(signal):
      # Skip SIG_* constants and the SIGCLD/SIGPOLL aliases so the
      # canonical names (SIGCHLD/SIGIO) win.
      if k.startswith("SIG") and not k.startswith("SIG_") and k != "SIGCLD" and k != "SIGPOLL":
        _sigtbl[getattr(signal, k)] = k
    # Realtime signals mostly have no names
    if hasattr(signal, "SIGRTMIN") and hasattr(signal, "SIGRTMAX"):
      for r in range(signal.SIGRTMIN+1, signal.SIGRTMAX+1):
        _sigtbl[r] = "SIGRTMIN+" + str(r - signal.SIGRTMIN)
    # Fill in any remaining gaps
    for i in range(signal.NSIG):
      if _sigtbl[i] is None:
        _sigtbl[i] = "unrecognized signal, number " + str(i)
    strsig._sigtbl = _sigtbl
  if n < 0 or n >= signal.NSIG:
    return "out-of-range signal, number "+str(n)
  return _sigtbl[n]
|
|
|
|
|
|
|
|
def printstatus(status, name = ""):
  """Print a TEST-INFO line decoding the process exit *status* for *name*.

  On POSIX, *status* is the raw wait() status and is decoded with
  os.WIFEXITED/os.WIFSIGNALED; elsewhere it is printed as a hex code.
  """
  # 'status' is the exit status
  if os.name != 'posix':
    # Windows error codes are easier to look up if printed in hexadecimal
    if status < 0:
      status += 2**32
    print "TEST-INFO | %s: exit status %x\n" % (name, status)
  elif os.WIFEXITED(status):
    print "TEST-INFO | %s: exit %d\n" % (name, os.WEXITSTATUS(status))
  elif os.WIFSIGNALED(status):
    # The python stdlib doesn't appear to have strsignal(), alas
    print "TEST-INFO | {}: killed by {}".format(name,strsig(os.WTERMSIG(status)))
  else:
    # This is probably a can't-happen condition on Unix, but let's be defensive
    print "TEST-INFO | %s: undecodable exit status %04x\n" % (name, status)
|
|
|
|
|
2009-07-22 23:24:54 +04:00
|
|
|
def addCommonOptions(parser, defaults={}):
  """Add the command-line options shared by the automation harnesses.

  parser -- an optparse.OptionParser (anything with add_option()).
  defaults -- optional dict; only its 'SYMBOLS_PATH' entry is consulted, as
              the default value for --symbols-path.

  Note: the original implementation wrote SYMBOLS_PATH=None into *defaults*,
  which -- combined with the mutable default argument -- leaked state across
  calls and clobbered the caller's dict.  We now read it without mutating.
  """
  parser.add_option("--xre-path",
                    action = "store", type = "string", dest = "xrePath",
                    # individual scripts will set a sane default
                    default = None,
                    help = "absolute path to directory containing XRE (probably xulrunner)")
  parser.add_option("--symbols-path",
                    action = "store", type = "string", dest = "symbolsPath",
                    # defaults.get() returns None when the key is absent,
                    # matching the old behaviour without mutating the dict.
                    default = defaults.get('SYMBOLS_PATH'),
                    help = "absolute path to directory containing breakpad symbols, or the URL of a zip file containing symbols")
  parser.add_option("--debugger",
                    action = "store", dest = "debugger",
                    help = "use the given debugger to launch the application")
  parser.add_option("--debugger-args",
                    action = "store", dest = "debuggerArgs",
                    help = "pass the given args to the debugger _before_ "
                           "the application on the command line")
  parser.add_option("--debugger-interactive",
                    action = "store_true", dest = "debuggerInteractive",
                    help = "prevents the test harness from redirecting "
                           "stdout and stderr for interactive debuggers")
|
2009-07-22 23:24:54 +04:00
|
|
|
|
2009-10-20 03:12:09 +04:00
|
|
|
def getFullPath(directory, path):
  "Get an absolute path relative to 'directory'."
  expanded = os.path.expanduser(path)
  combined = os.path.join(directory, expanded)
  return os.path.normpath(combined)
|
|
|
|
|
|
|
|
def searchPath(directory, path):
  "Go one step beyond getFullPath and try the various folders in PATH"
  # Try looking in the current working directory first.
  candidate = getFullPath(directory, path)
  if os.path.isfile(candidate):
    return candidate

  # At this point we have to fail if a directory was given (to prevent cases
  # like './gdb' from matching '/usr/bin/./gdb').
  if os.path.dirname(path):
    return None

  for searchDir in os.environ['PATH'].split(os.pathsep):
    candidate = os.path.join(searchDir, path)
    if os.path.isfile(candidate):
      return candidate

  return None
|
|
|
|
|
|
|
|
def getDebuggerInfo(directory, debugger, debuggerArgs, debuggerInteractive = False):
  """Build the debugger description used to launch the application.

  directory -- base directory against which a relative *debugger* is
               resolved (via searchPath).
  debugger -- debugger executable name or path; falsy means no debugger.
  debuggerArgs -- optional whitespace-separated argument string that
                  overrides the defaults from DEBUGGER_INFO.
  debuggerInteractive -- when truthy, force the "interactive" flag on.

  Returns a dict with keys "path", "interactive", "args" (a list) and
  "requiresEscapedArgs", or None when *debugger* is falsy.  Exits the
  process when the debugger cannot be found.
  """

  debuggerInfo = None

  if debugger:
    debuggerPath = searchPath(directory, debugger)
    if not debuggerPath:
      print "Error: Path %s doesn't exist." % debugger
      sys.exit(1)

    debuggerName = os.path.basename(debuggerPath).lower()

    # NOTE(review): this inner helper shadows the enclosing function's name.
    # It performs a defaulted lookup of *type* in DEBUGGER_INFO for the
    # detected debugger.
    def getDebuggerInfo(type, default):
      if debuggerName in DEBUGGER_INFO and type in DEBUGGER_INFO[debuggerName]:
        return DEBUGGER_INFO[debuggerName][type]
      return default

    debuggerInfo = {
      "path": debuggerPath,
      "interactive" : getDebuggerInfo("interactive", False),
      "args": getDebuggerInfo("args", "").split(),
      "requiresEscapedArgs": getDebuggerInfo("requiresEscapedArgs", False)
    }

    # Explicit command-line settings override the table defaults.
    if debuggerArgs:
      debuggerInfo["args"] = debuggerArgs.split()
    if debuggerInteractive:
      debuggerInfo["interactive"] = debuggerInteractive

  return debuggerInfo
|
|
|
|
|
2009-09-06 05:08:49 +04:00
|
|
|
|
|
|
|
def dumpLeakLog(leakLogFile, filter = False):
  """Process the leak log, without parsing it.

  Use this function if you want the raw log only.
  Use it preferably with the |XPCOM_MEM_LEAK_LOG| environment variable.
  """

  # Don't warn (nor "info") if the log file is not there.
  if not os.path.exists(leakLogFile):
    return

  with open(leakLogFile, "r") as leakFd:
    contents = leakFd.read()

  # Only |XPCOM_MEM_LEAK_LOG| reports can be actually filtered out.
  # Only check whether an actual leak was reported.
  if filter and "0 TOTAL " not in contents:
    return

  # Simply copy the log.
  log.info(contents.rstrip("\n"))
|
|
|
|
|
2013-03-18 21:18:08 +04:00
|
|
|
def processSingleLeakFile(leakLogFileName, processType, leakThreshold):
  """Process a single leak log.

  Logs the leak table from |leakLogFileName| followed by a
  PASS/WARNING/TEST-UNEXPECTED-FAIL summary.  |processType| (e.g. 'plugin')
  tags the output for child processes; |leakThreshold| is the number of
  leaked bytes above which the run is reported as a failure.
  """

  #     |              |Per-Inst  Leaked|     |Total      Rem|
  #   0 |TOTAL         |        17     192| 419115886    2|
  # 833 |nsTimerImpl   |        60     120|     24726    2|
  # Matches one row of the leak table: name, per-instance size, bytes
  # leaked, and (skipping the total column) number leaked.
  lineRe = re.compile(r"^\s*\d+\s+(?P<name>\S+)\s+"
                      r"(?P<size>-?\d+)\s+(?P<bytesLeaked>-?\d+)\s+"
                      r"-?\d+\s+(?P<numLeaked>-?\d+)")

  processString = ""
  if processType:
    # eg 'plugin'
    processString = " %s process:" % processType

  crashedOnPurpose = False
  totalBytesLeaked = None
  leakAnalysis = []
  leakedObjectNames = []
  with open(leakLogFileName, "r") as leaks:
    for line in leaks:
      if line.find("purposefully crash") > -1:
        crashedOnPurpose = True
      matches = lineRe.match(line)
      if not matches:
        # eg: the leak table header row
        log.info(line.rstrip())
        continue
      name = matches.group("name")
      size = int(matches.group("size"))
      bytesLeaked = int(matches.group("bytesLeaked"))
      numLeaked = int(matches.group("numLeaked"))
      # Output the raw line from the leak log table if it is the TOTAL row,
      # or is for an object row that has been leaked.
      if numLeaked != 0 or name == "TOTAL":
        log.info(line.rstrip())
      # Analyse the leak log, but output later or it will interrupt the leak table
      if name == "TOTAL":
        totalBytesLeaked = bytesLeaked
      if size < 0 or bytesLeaked < 0 or numLeaked < 0:
        leakAnalysis.append("TEST-UNEXPECTED-FAIL | leakcheck |%s negative leaks caught!"
                            % processString)
        continue
      if name != "TOTAL" and numLeaked != 0:
        leakedObjectNames.append(name)
        leakAnalysis.append("TEST-INFO | leakcheck |%s leaked %d %s (%s bytes)"
                            % (processString, numLeaked, name, bytesLeaked))
  log.info('\n'.join(leakAnalysis))

  if totalBytesLeaked is None:
    # We didn't see a line with name 'TOTAL'
    if crashedOnPurpose:
      log.info("TEST-INFO | leakcheck |%s deliberate crash and thus no leak log"
               % processString)
    else:
      # TODO: This should be a TEST-UNEXPECTED-FAIL, but was changed to a warning
      # due to too many intermittent failures (see bug 831223).
      log.info("WARNING | leakcheck |%s missing output line for total leaks!"
               % processString)
    return

  if totalBytesLeaked == 0:
    log.info("TEST-PASS | leakcheck |%s no leaks detected!" % processString)
    return

  # totalBytesLeaked was seen and is non-zero.
  if totalBytesLeaked > leakThreshold:
    # Fail the run if we're over the threshold (which defaults to 0)
    prefix = "TEST-UNEXPECTED-FAIL"
  else:
    prefix = "WARNING"

  # Create a comma delimited string of the first N leaked objects found,
  # to aid with bug summary matching in TBPL. Note: The order of the objects
  # had no significance (they're sorted alphabetically).
  maxSummaryObjects = 5
  leakedObjectSummary = ', '.join(leakedObjectNames[:maxSummaryObjects])
  if len(leakedObjectNames) > maxSummaryObjects:
    leakedObjectSummary += ', ...'
  log.info("%s | leakcheck |%s %d bytes leaked (%s)"
           % (prefix, processString, totalBytesLeaked, leakedObjectSummary))
|
2009-11-10 22:16:12 +03:00
|
|
|
|
|
|
|
def processLeakLog(leakLogFile, leakThreshold = 0):
  """Process the leak log, including separate leak logs created
  by child processes.

  Use this function if you want an additional PASS/FAIL summary.
  It must be used with the |XPCOM_MEM_BLOAT_LOG| environment variable.
  """

  if not os.path.exists(leakLogFile):
    log.info("WARNING | leakcheck | refcount logging is off, so leaks can't be detected!")
    return

  if leakThreshold != 0:
    log.info("TEST-INFO | leakcheck | threshold set at %d bytes" % leakThreshold)

  (leakLogFileDir, leakFileBase) = os.path.split(leakLogFile)
  # Child-process logs share the base name plus a "_<type>_pid<N>" suffix;
  # the capture group extracts the process type (e.g. "plugin").
  fileNameRegExp = re.compile(r".*?_([a-z]*)_pid\d*$")
  if leakFileBase[-4:] == ".log":
    leakFileBase = leakFileBase[:-4]
    fileNameRegExp = re.compile(r".*?_([a-z]*)_pid\d*.log$")

  # Process the main log plus any child-process logs sitting next to it.
  for fileName in os.listdir(leakLogFileDir):
    if fileName.find(leakFileBase) != -1:
      thisFile = os.path.join(leakLogFileDir, fileName)
      processType = None
      m = fileNameRegExp.search(fileName)
      if m:
        processType = m.group(1)
      processSingleLeakFile(thisFile, processType, leakThreshold)
|
2010-03-13 01:57:29 +03:00
|
|
|
|
|
|
|
def replaceBackSlashes(input):
  """Return a copy of *input* with every backslash turned into a forward slash."""
  return "/".join(input.split('\\'))
|
2010-09-23 20:19:31 +04:00
|
|
|
|
|
|
|
def wrapCommand(cmd):
  """
  If running on OS X 10.5 or older, wrap |cmd| so that it will
  be executed as an i386 binary, in case it's a 32-bit/64-bit universal
  binary.

  |cmd| is a list of argv strings; it is returned unchanged on every
  other platform.
  """
  if platform.system() == "Darwin" and hasattr(platform, 'mac_ver'):
    # Compare the version numerically: the original compared the version
    # string lexicographically ('10.1' < '10.6'), which would misclassify
    # OS X 10.10 and later as older than 10.6.
    release = platform.mac_ver()[0]
    try:
      version = tuple(int(piece) for piece in release.split(".")[:2])
    except ValueError:
      # mac_ver() can return an empty/odd string; treat as "new enough".
      version = ()
    if version and version < (10, 6):
      return ["arch", "-arch", "i386"] + cmd
  # otherwise just execute the command normally
  return cmd
|
2013-09-23 18:47:48 +04:00
|
|
|
|
|
|
|
class KeyValueParseError(Exception):
  """Raised when a string of serialized key-value pairs fails to parse.

  The offending input strings are available on the ``errors`` attribute.
  """
  def __init__(self, msg, errors=()):
    super(KeyValueParseError, self).__init__(msg)
    self.errors = errors
|
|
|
|
|
|
|
|
def parseKeyValue(strings, separator='=', context='key, value: '):
  """
  parse string-serialized key-value pairs in the form of
  `key = value`. Returns a list of 2-tuples.
  Note that whitespace is not stripped.

  Raises KeyValueParseError (with the bad strings on .errors) when any
  input string lacks *separator*.
  """

  # syntax check
  missing = [string for string in strings if separator not in string]
  if missing:
    # The original format string had a single %s but two arguments, so
    # building the message raised TypeError instead of the intended
    # KeyValueParseError; concatenate context and the bad strings instead.
    raise KeyValueParseError("Error: syntax error in %s%s" % (context,
                                                              ','.join(missing)),
                             errors=missing)
  return [string.split(separator, 1) for string in strings]
|
|
|
|
|
|
|
|
def systemMemory():
  """
  Returns total system memory in kilobytes.
  Works only on unix-like platforms where `free` is in the path.
  """
  # Second line of `free` output is "Mem: <total> ..."; field 1 is the
  # total in kilobytes.
  lines = os.popen("free").readlines()
  fields = lines[1].split()
  return int(fields[1])
|
|
|
|
|
2013-11-13 23:47:41 +04:00
|
|
|
def environment(xrePath, env=None, crashreporter=True, debugger=False, dmdPath=None):
  """populate OS environment variables for mochitest

  xrePath -- absolute path to the XRE/library directory (asserted absolute).
  env -- base environment dict; defaults to a copy of os.environ.
  crashreporter -- enable the crash reporter (disabled when *debugger* is set).
  debugger -- True when running under a debugger.
  dmdPath -- directory containing the DMD replace-malloc library, or None.

  Returns the (possibly shared) *env* dict after mutation.
  """

  env = os.environ.copy() if env is None else env

  assert os.path.isabs(xrePath)

  ldLibraryPath = xrePath

  # Per-platform names: the library-path env var, the DMD library file,
  # and the env var used to preload/inject it.
  envVar = None
  dmdLibrary = None
  preloadEnvVar = None
  if mozinfo.isUnix:
    envVar = "LD_LIBRARY_PATH"
    env['MOZILLA_FIVE_HOME'] = xrePath
    dmdLibrary = "libdmd.so"
    preloadEnvVar = "LD_PRELOAD"
  elif mozinfo.isMac:
    envVar = "DYLD_LIBRARY_PATH"
    dmdLibrary = "libdmd.dylib"
    preloadEnvVar = "DYLD_INSERT_LIBRARIES"
  elif mozinfo.isWin:
    envVar = "PATH"
    dmdLibrary = "dmd.dll"
    preloadEnvVar = "MOZ_REPLACE_MALLOC_LIB"
  if envVar:
    # On Windows the existing PATH comes first; elsewhere the XRE path and
    # dmdPath are prepended.  Falsy entries (e.g. dmdPath=None) are dropped.
    envValue = ((env.get(envVar), str(ldLibraryPath))
                if mozinfo.isWin
                else (ldLibraryPath, dmdPath, env.get(envVar)))
    env[envVar] = os.path.pathsep.join([path for path in envValue if path])

  if dmdPath and dmdLibrary and preloadEnvVar:
    env['DMD'] = '1'
    env[preloadEnvVar] = os.path.join(dmdPath, dmdLibrary)

  # crashreporter
  env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
  env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'
  env['NS_TRACE_MALLOC_DISABLE_STACKS'] = '1'

  if crashreporter and not debugger:
    env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
    env['MOZ_CRASHREPORTER'] = '1'
  else:
    env['MOZ_CRASHREPORTER_DISABLE'] = '1'

  # Set WebRTC logging in case it is not set yet
  env.setdefault('NSPR_LOG_MODULES', 'signaling:5,mtransport:5,datachannel:5')
  env.setdefault('R_LOG_LEVEL', '6')
  env.setdefault('R_LOG_DESTINATION', 'stderr')
  env.setdefault('R_LOG_VERBOSE', '1')

  # ASan specific environment stuff
  asan = bool(mozinfo.info.get("asan"))
  if asan and (mozinfo.isLinux or mozinfo.isMac):
    try:
      # Symbolizer support
      llvmsym = os.path.join(xrePath, "llvm-symbolizer")
      if os.path.isfile(llvmsym):
        env["ASAN_SYMBOLIZER_PATH"] = llvmsym
        log.info("ASan using symbolizer at %s", llvmsym)

      totalMemory = systemMemory()

      # Only 4 GB RAM or less available? Use custom ASan options to reduce
      # the amount of resources required to do the tests. Standard options
      # will otherwise lead to OOM conditions on the current test slaves.
      message = "INFO | runtests.py | ASan running in %s configuration"
      if totalMemory <= 1024 * 1024 * 4:
        message = message % 'low-memory'
        env["ASAN_OPTIONS"] = "quarantine_size=50331648:malloc_context_size=5"
      else:
        message = message % 'default memory'
    except OSError,err:
      log.info("Failed determine available memory, disabling ASan low-memory configuration: %s", err.strerror)
    except:
      # Deliberate best-effort: any other failure just means we keep the
      # default ASan configuration.
      log.info("Failed determine available memory, disabling ASan low-memory configuration")
    else:
      log.info(message)

  return env
|
2013-10-06 02:28:34 +04:00
|
|
|
|
|
|
|
def dumpScreen(utilityPath):
  """dumps a screenshot of the entire screen to a directory specified by
  the MOZ_UPLOAD_DIR environment variable"""
  # NOTE(review): mozfile is imported but not used in this function body;
  # presumably a leftover or used for its import side effects -- confirm.
  import mozfile

  # Need to figure out which OS-dependent tool to use
  if mozinfo.isUnix:
    utility = [os.path.join(utilityPath, "screentopng")]
    utilityname = "screentopng"
  elif mozinfo.isMac:
    utility = ['/usr/sbin/screencapture', '-C', '-x', '-t', 'png']
    utilityname = "screencapture"
  elif mozinfo.isWin:
    utility = [os.path.join(utilityPath, "screenshot.exe")]
    utilityname = "screenshot"

  # Get dir where to write the screenshot file
  parent_dir = os.environ.get('MOZ_UPLOAD_DIR', None)
  if not parent_dir:
    log.info('Failed to retrieve MOZ_UPLOAD_DIR env var')
    return

  # Run the capture
  try:
    # Reserve a unique .png filename in the upload dir, then let the
    # utility overwrite it.
    tmpfd, imgfilename = tempfile.mkstemp(prefix='mozilla-test-fail-screenshot_', suffix='.png', dir=parent_dir)
    os.close(tmpfd)
    returncode = subprocess.call(utility + [imgfilename])
    printstatus(returncode, utilityname)
  except OSError, err:
    log.info("Failed to start %s for screenshot: %s",
             utility[0], err.strerror)
    return
|
|
|
|
|
2013-11-21 00:53:08 +04:00
|
|
|
class ShutdownLeaks(object):
  """
  Parses the mochitest run log when running a debug build, assigns all leaked
  DOM windows (that are still around after test suite shutdown, despite running
  the GC) to the tests that created them and prints leak statistics.
  """

  def __init__(self, logger):
    # logger: printf-style callable, invoked as logger(format, *args).
    self.logger = logger
    self.tests = []
    # "pid.serial" -> url for windows destroyed after shutdown
    self.leakedWindows = {}
    # "pid.id" keys for docShells destroyed after shutdown
    self.leakedDocShells = set()
    self.currentTest = None
    self.seenShutdown = False

  def log(self, line):
    """Consume one line of the run log, updating the tracking state."""
    # Window/docShell lines look like "++DOMWINDOW ..." / "--DOCSHELL ...";
    # the slice starting at 2 skips the ++/-- prefix.
    if line[2:11] == "DOMWINDOW":
      self._logWindow(line)
    elif line[2:10] == "DOCSHELL":
      self._logDocShell(line)
    elif line.startswith("TEST-START"):
      fileName = line.split(" ")[-1].strip().replace("chrome://mochitests/content/browser/", "")
      self.currentTest = {"fileName": fileName, "windows": set(), "docShells": set()}
    elif line.startswith("INFO TEST-END"):
      # don't track a test if no windows or docShells leaked
      if self.currentTest and (self.currentTest["windows"] or self.currentTest["docShells"]):
        self.tests.append(self.currentTest)
      self.currentTest = None
    elif line.startswith("INFO TEST-START | Shutdown"):
      self.seenShutdown = True

  def process(self):
    """Report every leaking test's windows and docShells, worst first."""
    for test in self._parseLeakingTests():
      for url, count in self._zipLeakedWindows(test["leakedWindows"]):
        self.logger("TEST-UNEXPECTED-FAIL | %s | leaked %d window(s) until shutdown [url = %s]", test["fileName"], count, url)

      if test["leakedDocShells"]:
        self.logger("TEST-UNEXPECTED-FAIL | %s | leaked %d docShell(s) until shutdown", test["fileName"], len(test["leakedDocShells"]))

  def _logWindow(self, line):
    """Track creation/destruction of a DOM window from a ++/--DOMWINDOW line."""
    created = line[:2] == "++"
    pid = self._parseValue(line, "pid")
    serial = self._parseValue(line, "serial")

    # log line has invalid format
    if not pid or not serial:
      self.logger("TEST-UNEXPECTED-FAIL | ShutdownLeaks | failed to parse line <%s>", line)
      return

    key = pid + "." + serial

    if self.currentTest:
      windows = self.currentTest["windows"]
      if created:
        windows.add(key)
      else:
        windows.discard(key)
    elif self.seenShutdown and not created:
      # Destroyed only after shutdown: record it (and its url) as leaked.
      self.leakedWindows[key] = self._parseValue(line, "url")

  def _logDocShell(self, line):
    """Track creation/destruction of a docShell from a ++/--DOCSHELL line."""
    created = line[:2] == "++"
    pid = self._parseValue(line, "pid")
    id = self._parseValue(line, "id")

    # log line has invalid format
    if not pid or not id:
      self.logger("TEST-UNEXPECTED-FAIL | ShutdownLeaks | failed to parse line <%s>", line)
      return

    key = pid + "." + id

    if self.currentTest:
      docShells = self.currentTest["docShells"]
      if created:
        docShells.add(key)
      else:
        docShells.discard(key)
    elif self.seenShutdown and not created:
      # Destroyed only after shutdown: record it as leaked.
      self.leakedDocShells.add(key)

  def _parseValue(self, line, name):
    """Return the value of the "[name = value]" field in *line*, or None."""
    match = re.search("\[%s = (.+?)\]" % name, line)
    if match:
      return match.group(1)
    return None

  def _parseLeakingTests(self):
    """Return the tests that leaked, sorted by leak count (descending)."""
    leakingTests = []

    for test in self.tests:
      # A window/docShell leaked if it was created by this test and is
      # still in the post-shutdown leak records.
      test["leakedWindows"] = [self.leakedWindows[id] for id in test["windows"] if id in self.leakedWindows]
      test["leakedDocShells"] = [id for id in test["docShells"] if id in self.leakedDocShells]
      test["leakCount"] = len(test["leakedWindows"]) + len(test["leakedDocShells"])

      if test["leakCount"]:
        leakingTests.append(test)

    return sorted(leakingTests, key=itemgetter("leakCount"), reverse=True)

  def _zipLeakedWindows(self, leakedWindows):
    """Return (url, count) pairs for *leakedWindows*, most frequent first."""
    counts = []
    counted = set()

    for url in leakedWindows:
      if not url in counted:
        counts.append((url, leakedWindows.count(url)))
        counted.add(url)

    return sorted(counts, key=itemgetter(1), reverse=True)
|