Mirror of https://github.com/mozilla/gecko-dev.git

Bug 812179 - Removed hacks for Python < 2.6 from config/ [r=ted]

Parent: 72663e9ea9
Commit: 999eeb2111
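The hunks that follow apply the same Python 2.6-era modernizations throughout config/: %-style string formatting becomes str.format(), the old "except ExcType, e" syntax becomes "except ExcType as e", print statements become print() calls (enabled by "from __future__ import print_function"), and a hand-rolled JSON serializer is replaced by the standard json module. A minimal before/after sketch of these idioms, for illustration only (the path and message below are made up, not taken from the patch):

# Pre-2.6 style removed by this patch:
#     print >>sys.stderr, "failed to create directory %s: %s" % (path, e)
#     except OSError, e: ...
# Post-2.6 style used instead:
from __future__ import print_function
import errno
import os
import sys

path = "some/build/dir"            # illustrative value
try:
    os.makedirs(path)
except OSError as e:               # new exception-handling syntax
    if e.errno != errno.EEXIST:
        raise
print("created (or found) directory: {0}".format(path), file=sys.stderr)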
@@ -218,7 +218,8 @@ class Expression:
self.offset = expression.offset
self.content = expression.content[:3]
def __str__(self):
return 'Unexpected content at offset %i, "%s"'%(self.offset, self.content)
return 'Unexpected content at offset {0}, "{1}"'.format(self.offset,
self.content)

class Context(dict):
"""

@@ -7,7 +7,6 @@ processing jar.mn files.

See the documentation for jar.mn on MDC for further details on the format.
'''

import sys
import os
import os.path
@@ -152,11 +151,12 @@ class JarMaker(object):
'..', 'chrome.manifest')

if self.useJarfileManifest:
self.updateManifest(jarPath + '.manifest', chromebasepath % '',
self.updateManifest(jarPath + '.manifest', chromebasepath.format(''),
register)
addEntriesToListFile(chromeManifest, ['manifest chrome/%s.manifest' % (os.path.basename(jarPath),)])
addEntriesToListFile(chromeManifest, ['manifest chrome/{0}.manifest'
.format(os.path.basename(jarPath))])
if self.useChromeManifest:
self.updateManifest(chromeManifest, chromebasepath % 'chrome/',
self.updateManifest(chromeManifest, chromebasepath.format('chrome/'),
register)

# If requested, add a root chrome manifest entry (assumed to be in the parent directory
@@ -258,9 +258,9 @@ class JarMaker(object):
'''

# chromebasepath is used for chrome registration manifests
# %s is getting replaced with chrome/ for chrome.manifest, and with
# {0} is getting replaced with chrome/ for chrome.manifest, and with
# an empty string for jarfile.manifest
chromebasepath = '%s' + os.path.basename(jarfile)
chromebasepath = '{0}' + os.path.basename(jarfile)
if self.outputFormat == 'jar':
chromebasepath = 'jar:' + chromebasepath + '.jar!'
chromebasepath += '/'
@@ -272,7 +272,7 @@ class JarMaker(object):
jarfilepath = jarfile + '.jar'
try:
os.makedirs(os.path.dirname(jarfilepath))
except OSError, error:
except OSError as error:
if error.errno != errno.EEXIST:
raise
jf = ZipFile(jarfilepath, 'a', lock = True)
@@ -345,7 +345,8 @@ class JarMaker(object):
if realsrc is None:
if jf is not None:
jf.close()
raise RuntimeError('File "%s" not found in %s' % (src, ', '.join(src_base)))
raise RuntimeError('File "{0}" not found in {1}'
.format(src, ', '.join(src_base)))
if m.group('optPreprocess'):
outf = outHelper.getOutput(out)
inf = open(realsrc)
@@ -401,7 +402,7 @@ class JarMaker(object):
# remove previous link or file
try:
os.remove(out)
except OSError, e:
except OSError as e:
if e.errno != errno.ENOENT:
raise
return open(out, 'wb')
@@ -411,7 +412,7 @@ class JarMaker(object):
if not os.path.isdir(outdir):
try:
os.makedirs(outdir)
except OSError, error:
except OSError as error:
if error.errno != errno.EEXIST:
raise
return out
@@ -425,7 +426,7 @@ class JarMaker(object):
# remove previous link or file
try:
os.remove(out)
except OSError, e:
except OSError as e:
if e.errno != errno.ENOENT:
raise
if sys.platform != "win32":

@@ -77,9 +77,9 @@ class Preprocessor:

def warnUnused(self, file):
if self.actionLevel == 0:
sys.stderr.write('%s: WARNING: no preprocessor directives found\n' % file)
sys.stderr.write('{0}: WARNING: no preprocessor directives found\n'.format(file))
elif self.actionLevel == 1:
sys.stderr.write('%s: WARNING: no useful preprocessor directives found\n' % file)
sys.stderr.write('{0}: WARNING: no useful preprocessor directives found\n'.format(file))
pass

def setLineEndings(self, aLE):
@@ -96,7 +96,9 @@ class Preprocessor:
"""
self.marker = aMarker
if aMarker:
self.instruction = re.compile('%s(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'%aMarker, re.U)
self.instruction = re.compile('{0}(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
.format(aMarker),
re.U)
self.comment = re.compile(aMarker, re.U)
else:
class NoMatch(object):
@@ -129,9 +131,9 @@ class Preprocessor:
self.writtenLines += 1
ln = self.context['LINE']
if self.writtenLines != ln:
self.out.write('//@line %(line)d "%(file)s"%(le)s'%{'line': ln,
'file': self.context['FILE'],
'le': self.LE})
self.out.write('//@line {line} "{file}"{le}'.format(line=ln,
file=self.context['FILE'],
le=self.LE))
self.writtenLines = ln
filteredLine = self.applyFilters(aLine)
if filteredLine != aLine:

@@ -7,6 +7,7 @@ if the entry does not already exist.

Usage: buildlist.py <filename> <entry> [<entry> ...]
'''
from __future__ import print_function

import sys
import os
@@ -20,14 +21,14 @@ def addEntriesToListFile(listFile, entries):
try:
if os.path.exists(listFile):
f = open(listFile)
existing = set([x.strip() for x in f.readlines()])
existing = set(x.strip() for x in f.readlines())
f.close()
else:
existing = set()
f = open(listFile, 'a')
for e in entries:
if e not in existing:
f.write("%s\n" % e)
f.write("{0}\n".format(e))
existing.add(e)
f.close()
finally:
@@ -35,6 +36,7 @@ def addEntriesToListFile(listFile, entries):

if __name__ == '__main__':
if len(sys.argv) < 3:
print >>sys.stderr, "Usage: buildlist.py <list file> <entry> [<entry> ...]"
print("Usage: buildlist.py <list file> <entry> [<entry> ...]",
file=sys.stderr)
sys.exit(1)
addEntriesToListFile(sys.argv[1], sys.argv[2:])

@@ -9,7 +9,7 @@
# not, an error message is printed, quoting ERROR_LOCATION, which should
# probably be the filename and line number of the erroneous call to
# check_source_count.py.

from __future__ import print_function
import sys
import os
import re
@@ -32,17 +32,26 @@ for f in files:
details[f] = num

if count == expected_count:
print "TEST-PASS | check_source_count.py %s | %d" % (search_string, expected_count)
print("TEST-PASS | check_source_count.py {0} | {1}"
.format(search_string, expected_count))

else:
print "TEST-UNEXPECTED-FAIL | check_source_count.py %s | " % (search_string),
print("TEST-UNEXPECTED-FAIL | check_source_count.py {0} | "
.format(search_string),
end='')
if count < expected_count:
print "There are fewer occurrences of /%s/ than expected. This may mean that you have removed some, but forgotten to account for it %s." % (search_string, error_location)
print("There are fewer occurrences of /{0}/ than expected. "
"This may mean that you have removed some, but forgotten to "
"account for it {1}.".format(search_string, error_location))
else:
print "There are more occurrences of /%s/ than expected. We're trying to prevent an increase in the number of %s's, using %s if possible. If it in unavoidable, you should update the expected count %s." % (search_string, search_string, replacement, error_location)
print("There are more occurrences of /{0}/ than expected. We're trying "
"to prevent an increase in the number of {1}'s, using {2} if "
"possible. If it is unavoidable, you should update the expected "
"count {3}.".format(search_string, search_string, replacement,
error_location))

print "Expected: %d; found: %d" % (expected_count, count)
print("Expected: {0}; found: {1}".format(expected_count, count))
for k in sorted(details):
print "Found %d occurences in %s" % (details[k],k)
print("Found {0} occurences in {1}".format(details[k],k))
sys.exit(-1)

@@ -26,7 +26,6 @@ ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} following these rules:
descriptor contains. And for each of these LIBS, also apply the same
rules.
'''
from __future__ import with_statement
import sys, os, errno
import expandlibs_config as conf

@@ -36,7 +35,7 @@ def ensureParentDir(file):
if dir and not os.path.exists(dir):
try:
os.makedirs(dir)
except OSError, error:
except OSError as error:
if error.errno != errno.EEXIST:
raise

@@ -90,7 +89,8 @@ class LibDescriptor(dict):

def __str__(self):
'''Serializes the lib descriptor'''
return '\n'.join('%s = %s' % (k, ' '.join(self[k])) for k in self.KEYS if len(self[k]))
return '\n'.join('{0} = {1}'.format(k, ' '.join(self[k]))
for k in self.KEYS if len(self[k]))

class ExpandArgs(list):
def __init__(self, args):
@@ -135,4 +135,4 @@ class ExpandLibsDeps(ExpandArgs):
return objs

if __name__ == '__main__':
print " ".join(ExpandArgs(sys.argv[1:]))
print(" ".join(ExpandArgs(sys.argv[1:])))

@@ -20,10 +20,11 @@ With the --symbol-order argument, followed by a file name, it will add the
relevant linker options to change the order in which the linker puts the
symbols appear in the resulting binary. Only works for ELF targets.
'''
from __future__ import with_statement
from __future__ import print_function
import sys
import os
from expandlibs import ExpandArgs, relativize, isObject, ensureParentDir, ExpandLibsDeps
from expandlibs import (ExpandArgs, relativize, isObject, ensureParentDir,
ExpandLibsDeps)
import expandlibs_config as conf
from optparse import OptionParser
import subprocess
@@ -92,10 +93,10 @@ class ExpandArgsMore(ExpandArgs):
if not len(objs): return
fd, tmp = tempfile.mkstemp(suffix=".list",dir=os.curdir)
if conf.EXPAND_LIBS_LIST_STYLE == "linkerscript":
content = ['INPUT("%s")\n' % obj for obj in objs]
content = ['INPUT("{0}")\n'.format(obj) for obj in objs]
ref = tmp
elif conf.EXPAND_LIBS_LIST_STYLE == "list":
content = ["%s\n" % obj for obj in objs]
content = ["{0}\n".format(obj) for obj in objs]
ref = "@" + tmp
else:
os.close(fd)
@@ -139,9 +140,13 @@ class ExpandArgsMore(ExpandArgs):
def _getOrderedSections(self, ordered_symbols):
'''Given an ordered list of symbols, returns the corresponding list
of sections following the order.'''
if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
finder = SectionFinder([arg for arg in self if isObject(arg) or os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
if conf.EXPAND_LIBS_ORDER_STYLE not in ['linkerscript',
'section-ordering-file']:
raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
.format(conf.EXPAND_LIBS_ORDER_STYLE))
finder = SectionFinder([arg for arg in self
if isObject(arg) or
os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
folded = self._getFoldedSections()
sections = set()
ordered_sections = []
@@ -182,32 +187,35 @@ class ExpandArgsMore(ExpandArgs):
linked_sections = [s for s in linked_sections if s in split_sections]

if conf.EXPAND_LIBS_ORDER_STYLE == 'section-ordering-file':
option = '-Wl,--section-ordering-file,%s'
option = '-Wl,--section-ordering-file,{0}'
content = sections
for linked_section in linked_sections:
content.extend(split_sections[linked_section])
content.append('%s.*' % linked_section)
content.append('{0}.*'.format(linked_section))
content.append(linked_section)

elif conf.EXPAND_LIBS_ORDER_STYLE == 'linkerscript':
option = '-Wl,-T,%s'
option = '-Wl,-T,{0}'
section_insert_before = dict(SECTION_INSERT_BEFORE)
for linked_section in linked_sections:
content.append('SECTIONS {')
content.append(' %s : {' % linked_section)
content.extend(' *(%s)' % s for s in split_sections[linked_section])
content.append(' }')
content.append('}')
content.append('INSERT BEFORE %s' % section_insert_before[linked_section])
content.append('SECTIONS {{')
content.append(' {0} : {{'.format(linked_section))
content.extend(' *({0})'
.format(s for s in split_sections[linked_section]))
content.append(' }}')
content.append('}}')
content.append('INSERT BEFORE {0}'
.format(section_insert_before[linked_section]))
else:
raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
.format(conf.EXPAND_LIBS_ORDER_STYLE))

fd, tmp = tempfile.mkstemp(dir=os.curdir)
f = os.fdopen(fd, "w")
f.write('\n'.join(content)+'\n')
f.close()
self.tmp.append(tmp)
self.append(option % tmp)
self.append(option.format(tmp))

class SectionFinder(object):
'''Instances of this class allow to map symbol names to sections in
@@ -216,15 +224,17 @@ class SectionFinder(object):
def __init__(self, objs):
'''Creates an instance, given a list of object files.'''
if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
.format(conf.EXPAND_LIBS_ORDER_STYLE))
self.mapping = {}
for obj in objs:
if not isObject(obj) and os.path.splitext(obj)[1] != conf.LIB_SUFFIX:
raise Exception('%s is not an object nor a static library' % obj)
raise Exception('{0} is not an object nor a static library'
.format(obj))
for symbol, section in SectionFinder._getSymbols(obj):
sym = SectionFinder._normalize(symbol)
if sym in self.mapping:
if not section in self.mapping[sym]:
if section not in self.mapping[sym]:
self.mapping[sym].append(section)
else:
self.mapping[sym] = [section]
@@ -268,11 +278,11 @@ class SectionFinder(object):
return syms

def print_command(out, args):
print >>out, "Executing: " + " ".join(args)
print("Executing: " + " ".join(args), file=out)
for tmp in [f for f in args.tmp if os.path.isfile(f)]:
print >>out, tmp + ":"
print(tmp + ":", file=out)
with open(tmp) as file:
print >>out, "".join([" " + l for l in file.readlines()])
print("".join([" " + l for l in file.readlines()]), file=out)
out.flush()

def main():
@@ -323,7 +333,10 @@ def main():
return
ensureParentDir(options.depend)
with open(options.depend, 'w') as depfile:
depfile.write("%s : %s\n" % (options.target, ' '.join(dep for dep in deps if os.path.isfile(dep) and dep != options.target)))
depfile.write("{0} : {1}\n"
.format(options.target, ' '.join(dep for dep in deps
if os.path.isfile(dep) and
dep != options.target)))


if __name__ == '__main__':

@@ -1,11 +1,10 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import print_function
'''Given a list of object files and library names, prints a library
descriptor to standard output'''

from __future__ import with_statement
import sys
import os
import expandlibs_config as conf
@@ -19,12 +18,12 @@ def generate(args):
if os.path.exists(arg):
desc['OBJS'].append(os.path.abspath(arg))
else:
raise Exception("File not found: %s" % arg)
raise Exception("File not found: {0}".format(arg))
elif os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
if os.path.exists(arg) or os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
desc['LIBS'].append(os.path.abspath(arg))
else:
raise Exception("File not found: %s" % arg)
raise Exception("File not found: {0}".format(arg))
return desc

if __name__ == '__main__':
@@ -40,8 +39,9 @@ if __name__ == '__main__':

ensureParentDir(options.output)
with open(options.output, 'w') as outfile:
print >>outfile, generate(args)
print(generate(args), file=outfile)
if options.depend:
ensureParentDir(options.depend)
with open(options.depend, 'w') as depfile:
depfile.write("%s : %s\n" % (options.output, ' '.join(ExpandLibsDeps(args))))
depfile.write("{0} : {1}\n".format(options.output,
' '.join(ExpandLibsDeps(args))))

@@ -2,7 +2,7 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import print_function

usage = """%prog: A test for OOM conditions in the shell.

@@ -65,7 +65,7 @@ def run(args, stdin=None):
stdout_worker.join()
stderr_worker.join()

except KeyboardInterrupt, e:
except KeyboardInterrupt as e:
sys.exit(-1)

stdout, stderr = stdout_worker.all, stderr_worker.all
@@ -101,7 +101,7 @@ def count_lines():

lines = []
for k,v in counts.items():
lines.append("%6d: %s" % (v,k))
lines.append("{0:6}: {1}".format(v, k))

lines.sort()

@@ -171,7 +171,7 @@ command_template = 'shell/js' \
+ ' -m -j -p' \
+ ' -e "const platform=\'darwin\'; const libdir=\'../jit-test/lib/\';"' \
+ ' -f ../jit-test/lib/prolog.js' \
+ ' -f %s'
+ ' -f {0}'


# Blacklists are things we don't want to see in our logs again (though we do
@@ -222,7 +222,7 @@ num_failures = 0
for f in files:

# Run it once to establish boundaries
command = (command_template + ' -O') % (f)
command = (command_template + ' -O').format(f)
out, err, exit = run(command)
max = re.match(".*OOM max count: (\d+).*", out, flags=re.DOTALL).groups()[0]
max = int(max)
@@ -232,11 +232,11 @@ for f in files:
for i in range(20, max):

if OPTIONS.regression == None:
print "Testing allocation %d/%d in %s" % (i,max,f)
print("Testing allocation {0}/{1} in {2}".format(i,max,f))
else:
sys.stdout.write('.') # something short for tinderbox, no space or \n

command = (command_template + ' -A %d') % (f, i)
command = (command_template + ' -A {0}').format(f, i)
out, err, exit = run(command)

# Success (5 is SM's exit code for controlled errors)
@@ -282,7 +282,9 @@ for f in files:
log.write ("\n")
log.write ("=========================================================================")
log.write ("\n")
log.write ("An allocation failure at\n\tallocation %d/%d in %s\n\tcauses problems (detected using bug 624094)" % (i, max, f))
log.write ("An allocation failure at\n\tallocation {0}/{1} in {2}\n\t"
"causes problems (detected using bug 624094)"
.format(i, max, f))
log.write ("\n")
log.write ("\n")

@@ -323,7 +325,7 @@ for f in files:
if OPTIONS.regression == None:
count_lines()

print '\n',
print()

# Do the actual regression check
if OPTIONS.regression != None:
@@ -331,12 +333,20 @@

if num_failures != expected_num_failures:

print "TEST-UNEXPECTED-FAIL |",
print("TEST-UNEXPECTED-FAIL |", end='')
if num_failures > expected_num_failures:
print "More out-of-memory errors were found (%s) than expected (%d). This probably means an allocation site has been added without a NULL-check. If this is unavoidable, you can account for it by updating Makefile.in." % (num_failures, expected_num_failures),
print("More out-of-memory errors were found ({0}) than expected ({1}). "
"This probably means an allocation site has been added without a "
"NULL-check. If this is unavoidable, you can account for it by "
"updating Makefile.in.".format(num_failures, expected_num_failures),
end='')
else:
print "Congratulations, you have removed %d out-of-memory error(s) (%d remain)! Please account for it by updating Makefile.in." % (expected_num_failures - num_failures, num_failures),
print("Congratulations, you have removed {0} out-of-memory error(s) "
"({1} remain)! Please account for it by updating Makefile.in."
.format(expected_num_failures - num_failures, num_failures),
end='')
sys.exit(-1)
else:
print 'TEST-PASS | find_OOM_errors | Found the expected number of OOM errors (%d)' % (expected_num_failures)
print('TEST-PASS | find_OOM_errors | Found the expected number of OOM '
'errors ({0})'.format(expected_num_failures))

@@ -1,7 +1,7 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import print_function
import os, re, string, sys

def find_in_path(file, searchpath):
@@ -19,7 +19,7 @@ def header_path(header, compiler):
return find_in_path(header, os.environ.get('INCLUDE', ''))
else:
# hope someone notices this ...
raise NotImplementedError, compiler
raise NotImplementedError(compiler)

def is_comment(line):
return re.match(r'\s*#.*', line)
@@ -48,9 +48,9 @@ def main(outdir, compiler, template_file, header_list_file):

if __name__ == '__main__':
if 5 != len(sys.argv):
print >>sys.stderr, """Usage:
python %s OUT_DIR ('msvc'|'gcc') TEMPLATE_FILE HEADER_LIST_FILE
"""% (sys.argv[0])
print("""Usage:
python {0} OUT_DIR ('msvc'|'gcc') TEMPLATE_FILE HEADER_LIST_FILE
""".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)

main(*sys.argv[1:])

@@ -36,7 +36,7 @@ class _MozTestResult(_TestResult):
_TestResult.addSuccess(self, test)
filename = inspect.getfile(test.__class__)
testname = test._testMethodName
self.stream.writeln("TEST-PASS | %s | %s" % (filename, testname))
self.stream.writeln("TEST-PASS | {0} | {1}".format(filename, testname))

def addError(self, test, err):
_TestResult.addError(self, test, err)
@@ -54,13 +54,13 @@ class _MozTestResult(_TestResult):
if not tb:
self.stream.writeln("TEST-UNEXPECTED-FAIL | NO TRACEBACK |")
_f, _ln, _t = inspect.getframeinfo(tb)[:3]
self.stream.writeln("TEST-UNEXPECTED-FAIL | %s | line %d, %s: %s" %
(_f, _ln, _t, value.message))
self.stream.writeln("TEST-UNEXPECTED-FAIL | {0} | line {1}, {2}: {3}"
.format(_f, _ln, _t, value.message))

def printErrorList(self):
for test, err in self.errors:
self.stream.writeln("ERROR: %s" % self.getDescription(test))
self.stream.writeln("%s" % err)
self.stream.writeln("ERROR: {0}".format(self.getDescription(test)))
self.stream.writeln("{0}".format(err))


class MozTestRunner(_TestRunner):

@@ -9,7 +9,7 @@
# a full build environment set up.
# The basic limitation is, it doesn't even try to link and ignores
# all related options.

from __future__ import print_function
from optparse import OptionParser
import os
import os.path
@@ -39,7 +39,7 @@ def _nsinstall_internal(argv):
# The remaining arguments are not used in our tree, thus they're not
# implented.
def BadArg(option, opt, value, parser):
parser.error('option not supported: %s' % opt)
parser.error('option not supported: {0}'.format(opt))

p.add_option('-C', action="callback", metavar="CWD",
callback=BadArg,
@@ -56,7 +56,8 @@ def _nsinstall_internal(argv):
try:
options.m = int(options.m, 8)
except:
sys.stderr.write('nsinstall: ' + options.m + ' is not a valid mode\n')
sys.stderr.write('nsinstall: {0} is not a valid mode\n'
.format(options.m))
return 1

# just create one directory?
@@ -64,7 +65,7 @@ def _nsinstall_internal(argv):
dir = os.path.abspath(dir)
if os.path.exists(dir):
if not os.path.isdir(dir):
print >> sys.stderr, ('nsinstall: %s is not a directory' % dir)
print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
return 1
if mode:
os.chmod(dir, mode)
@@ -75,11 +76,11 @@ def _nsinstall_internal(argv):
os.makedirs(dir, mode)
else:
os.makedirs(dir)
except Exception, e:
except Exception as e:
# We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
if try_again:
return maybe_create_dir(dir, mode, False)
print >> sys.stderr, ("nsinstall: failed to create directory %s: %s" % (dir, e))
print("nsinstall: failed to create directory {0}: {1}".format(dir, e))
return 1
else:
return 0

@@ -1,13 +1,14 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import print_function

import configobj, sys

try:
(file, section, key) = sys.argv[1:]
except ValueError:
print "Usage: printconfigsetting.py <file> <section> <setting>"
print("Usage: printconfigsetting.py <file> <section> <setting>")
sys.exit(1)

c = configobj.ConfigObj(file)
@@ -15,11 +16,11 @@ c = configobj.ConfigObj(file)
try:
s = c[section]
except KeyError:
print >>sys.stderr, "Section [%s] not found." % section
print("Section [{0}] not found.".format(section), file=sys.stderr)
sys.exit(1)

try:
print s[key]
print(s[key])
except KeyError:
print >>sys.stderr, "Key %s not found." % key
print("Key {0} not found.".format(key), file=sys.stderr)
sys.exit(1)

@@ -9,6 +9,7 @@
# 2.1a3pre > ""
# 3.2b4 > " 3.2 Beta 4"
# 3.2b4pre > ""
from __future__ import print_function

import sys
import re
@@ -17,7 +18,7 @@ def get_prerelease_suffix(version):
""" Returns the prerelease suffix from the version string argument """

def mfunc(m):
return " %s %s %s" % (m.group('prefix'),
return " {0} {1} {2}".format(m.group('prefix'),
{'a': 'Alpha', 'b': 'Beta'}[m.group('c')],
m.group('suffix'))
result, c = re.subn(r'^(?P<prefix>(\d+\.)*\d+)(?P<c>[ab])(?P<suffix>\d+)$',
@@ -27,4 +28,4 @@ def get_prerelease_suffix(version):
return result

if len(sys.argv) == 2:
print get_prerelease_suffix(sys.argv[1])
print(get_prerelease_suffix(sys.argv[1]))

@@ -1,3 +1,4 @@
from __future__ import print_function
import unittest

import os, sys, os.path, time, inspect
@@ -109,29 +110,29 @@ def is_symlink_to(dest, src):
class _TreeDiff(dircmp):
"""Helper to report rich results on difference between two directories.
"""
def _fillDiff(self, dc, rv, basepath="%s"):
rv['right_only'] += map(lambda l: basepath % l, dc.right_only)
rv['left_only'] += map(lambda l: basepath % l, dc.left_only)
rv['diff_files'] += map(lambda l: basepath % l, dc.diff_files)
rv['funny'] += map(lambda l: basepath % l, dc.common_funny)
rv['funny'] += map(lambda l: basepath % l, dc.funny_files)
def _fillDiff(self, dc, rv, basepath="{0}"):
rv['right_only'] += map(lambda l: basepath.format(l), dc.right_only)
rv['left_only'] += map(lambda l: basepath.format(l), dc.left_only)
rv['diff_files'] += map(lambda l: basepath.format(l), dc.diff_files)
rv['funny'] += map(lambda l: basepath.format(l), dc.common_funny)
rv['funny'] += map(lambda l: basepath.format(l), dc.funny_files)
for subdir, _dc in dc.subdirs.iteritems():
self._fillDiff(_dc, rv, basepath % (subdir + "/%s"))
self._fillDiff(_dc, rv, basepath.format(subdir + "/{0}"))
def allResults(self, left, right):
rv = {'right_only':[], 'left_only':[],
'diff_files':[], 'funny': []}
self._fillDiff(self, rv)
chunks = []
if rv['right_only']:
chunks.append('%s only in %s' % (', '.join(rv['right_only']),
chunks.append('{0} only in {1}'.format(', '.join(rv['right_only']),
right))
if rv['left_only']:
chunks.append('%s only in %s' % (', '.join(rv['left_only']),
chunks.append('{0} only in {1}'.format(', '.join(rv['left_only']),
left))
if rv['diff_files']:
chunks.append('%s differ' % ', '.join(rv['diff_files']))
chunks.append('{0} differ'.format(', '.join(rv['diff_files'])))
if rv['funny']:
chunks.append("%s don't compare" % ', '.join(rv['funny']))
chunks.append("{0} don't compare".format(', '.join(rv['funny'])))
return '; '.join(chunks)

class TestJarMaker(unittest.TestCase):
@@ -152,7 +153,7 @@ class TestJarMaker(unittest.TestCase):

def tearDown(self):
if self.debug:
print self.tmpdir
print(self.tmpdir)
elif sys.platform != "win32":
# can't clean up on windows
rmtree(self.tmpdir)
@@ -238,7 +239,7 @@ class TestJarMaker(unittest.TestCase):
srcbar = os.path.join(self.srcdir, 'bar')
destfoo = os.path.join(self.builddir, 'chrome', 'test', 'dir', 'foo')
self.assertTrue(is_symlink_to(destfoo, srcbar),
"%s is not a symlink to %s" % (destfoo, srcbar))
"{0} is not a symlink to {1}".format(destfoo, srcbar))


class Test_relativesrcdir(unittest.TestCase):

@@ -1,4 +1,3 @@
from __future__ import with_statement
import unittest

from StringIO import StringIO
@@ -33,9 +33,13 @@ class TestBuildList(unittest.TestCase):
lines = [line.rstrip() for line in f.readlines()]
f.close()
for line in lines:
self.assert_(len(l) > 0, "ran out of expected lines! (expected '%s', got '%s')" % (l, lines))
self.assert_(len(l) > 0,
"ran out of expected lines! (expected '{0}', got '{1}')"
.format(l, lines))
self.assertEqual(line, l.pop(0))
self.assert_(len(l) == 0, "not enough lines in file! (expected '%s', got '%s'" % (l, lines))
self.assert_(len(l) == 0,
"not enough lines in file! (expected '{0}',"
" got '{1}'".format(l, lines))

def test_basic(self):
"Test that addEntriesToListFile works when file doesn't exist."

@@ -1,4 +1,3 @@
from __future__ import with_statement
import subprocess
import unittest
import sys
@@ -72,21 +71,23 @@ class TestLibDescriptor(unittest.TestCase):
'''Test LibDescriptor's serialization'''
desc = LibDescriptor()
desc[LibDescriptor.KEYS[0]] = ['a', 'b']
self.assertEqual(str(desc), "%s = a b" % LibDescriptor.KEYS[0])
self.assertEqual(str(desc), "{0} = a b".format(LibDescriptor.KEYS[0]))
desc['unsupported-key'] = ['a']
self.assertEqual(str(desc), "%s = a b" % LibDescriptor.KEYS[0])
self.assertEqual(str(desc), "{0} = a b".format(LibDescriptor.KEYS[0]))
desc[LibDescriptor.KEYS[1]] = ['c', 'd', 'e']
self.assertEqual(str(desc), "%s = a b\n%s = c d e" % (LibDescriptor.KEYS[0], LibDescriptor.KEYS[1]))
self.assertEqual(str(desc),
"{0} = a b\n{1} = c d e"
.format(LibDescriptor.KEYS[0], LibDescriptor.KEYS[1]))
desc[LibDescriptor.KEYS[0]] = []
self.assertEqual(str(desc), "%s = c d e" % (LibDescriptor.KEYS[1]))
self.assertEqual(str(desc), "{0} = c d e".format(LibDescriptor.KEYS[1]))

def test_read(self):
'''Test LibDescriptor's initialization'''
desc_list = ["# Comment",
"%s = a b" % LibDescriptor.KEYS[1],
"{0} = a b".format(LibDescriptor.KEYS[1]),
"", # Empty line
"foo = bar", # Should be discarded
"%s = c d e" % LibDescriptor.KEYS[0]]
"{0} = c d e".format(LibDescriptor.KEYS[0])]
desc = LibDescriptor(desc_list)
self.assertEqual(desc[LibDescriptor.KEYS[1]], ['a', 'b'])
self.assertEqual(desc[LibDescriptor.KEYS[0]], ['c', 'd', 'e'])
@@ -229,7 +230,7 @@ class TestExpandArgsMore(TestExpandInit):
if config.EXPAND_LIBS_LIST_STYLE == "linkerscript":
self.assertNotEqual(args[3][0], '@')
filename = args[3]
content = ['INPUT("%s")' % relativize(f) for f in objs]
content = ['INPUT("{0}")'.format(relativize(f)) for f in objs]
with open(filename, 'r') as f:
self.assertEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
elif config.EXPAND_LIBS_LIST_STYLE == "list":
@@ -256,7 +257,7 @@ class TestExpandArgsMore(TestExpandInit):
self.assertRelEqual([os.path.splitext(arg)[1] for arg in args[len(ar_extract):]], [config.LIB_SUFFIX])
# Simulate AR_EXTRACT extracting one object file for the library
lib = os.path.splitext(os.path.basename(args[len(ar_extract)]))[0]
extracted[lib] = os.path.join(kargs['cwd'], "%s" % Obj(lib))
extracted[lib] = os.path.join(kargs['cwd'], "{0}".format(Obj(lib)))
self.touch([extracted[lib]])
subprocess.call = call

@@ -2,7 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import with_statement
import sys
import os
from mozunit import main, MockedOpen
@@ -122,7 +122,8 @@ class TestNsinstall(unittest.TestCase):
mode = 0600
os.chmod(testfile, mode)
testdir = self.mkdirs("testdir")
self.assertEqual(nsinstall(["-m", "%04o" % mode, testfile, testdir]), 0)
self.assertEqual(nsinstall(["-m", "{0:04o}"
.format(mode), testfile, testdir]), 0)
destfile = os.path.join(testdir, "testfile")
self.assert_(os.path.isfile(destfile))
self.assertEqual(os.stat(testfile).st_mode,

@@ -1,11 +1,10 @@
#!/usr/bin/env python
from __future__ import with_statement
import unittest
import os, sys, time, tempfile
import json, os, sys, time, tempfile
from StringIO import StringIO
import mozunit

from writemozinfo import build_dict, write_json, JsonValue, jsonify
from writemozinfo import build_dict, write_json

class TestBuildDict(unittest.TestCase):
def testMissing(self):
@@ -159,45 +158,6 @@ class TestBuildDict(unittest.TestCase):
'MOZ_CRASHREPORTER':'1'})
self.assertEqual(True, d['crashreporter'])

class TestJsonValue(unittest.TestCase):
def testNone(self):
self.assertEqual("null", repr(JsonValue(None)))

def testBool(self):
self.assertEqual("true", repr(JsonValue(True)))
self.assertEqual("false", repr(JsonValue(False)))

def testStr(self):
self.assertEqual("'abc'", repr(JsonValue("abc")))

def testInt(self):
self.assertEqual("100", repr(JsonValue(100)))

def testInvalid(self):
self.assertRaises(Exception, JsonValue, unicode("abc"))
self.assertRaises(Exception, JsonValue, 123.45)

def parse_json(j):
"""
Awful hack to parse a restricted subset of JSON strings into Python dicts.
"""
return eval(j, {'true':True,'false':False,'null':None})

class TestJsonify(unittest.TestCase):
"""
Test the jsonify function.
"""
def testBasic(self):
"""
Sanity check the set of accepted Python value types.
"""
j = parse_json(jsonify({'a':True,'b':False,'c':None,'d':100,'e':"abc"}))
self.assertEquals(True, j['a'])
self.assertEquals(False, j['b'])
self.assertEquals(None, j['c'])
self.assertEquals(100, j['d'])
self.assertEquals("abc", j['e'])

class TestWriteJson(unittest.TestCase):
"""
Test the write_json function.
@@ -217,7 +177,7 @@ class TestWriteJson(unittest.TestCase):
'TARGET_CPU':'i386',
'MOZ_WIDGET_TOOLKIT':'windows'})
with open(self.f) as f:
d = parse_json(f.read())
d = json.load(f)
self.assertEqual('win', d['os'])
self.assertEqual('x86', d['processor'])
self.assertEqual('windows', d['toolkit'])
@@ -231,7 +191,7 @@ class TestWriteJson(unittest.TestCase):
write_json(s, env={'OS_TARGET':'WINNT',
'TARGET_CPU':'i386',
'MOZ_WIDGET_TOOLKIT':'windows'})
d = parse_json(s.getvalue())
d = json.loads(s.getvalue())
self.assertEqual('win', d['os'])
self.assertEqual('x86', d['processor'])
self.assertEqual('windows', d['toolkit'])

@@ -65,7 +65,7 @@ def prod(*iterables):

def getid(descs):
'Convert a list of ints to a string.'
return reduce(lambda x,y: x+'%d%d'%tuple(y), descs,'')
return reduce(lambda x,y: x+'{0}{1}'.format(*tuple(y)), descs,'')


def getContent(length):
@@ -186,7 +186,7 @@ allfiles = reduce(lambda l,r:l+r,
[list(prod(*files[:(i+1)])) for i in xrange(len(leafs))])

for first in allfiles:
testbasename = 'test%s_' % getid(first)
testbasename = 'test{0}_'.format(getid(first))
test = [None, '_write' + getid(first), None]
for second in atomics:
test[0] = testbasename + getid([second])

@@ -1,4 +1,4 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# >>sys.stderr, This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

@@ -24,7 +24,7 @@ class LockFile(object):
try:
os.remove(self.lockfile)
break
except OSError, e:
except OSError as e:
if e.errno == errno.EACCES:
# another process probably has the file open, we'll retry.
# just a short sleep since we want to drop the lock ASAP
@@ -44,9 +44,9 @@ def lockFile(lockfile, max_wait = 600):
fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT)
# we created the lockfile, so we're the owner
break
except OSError, e:
if e.errno == errno.EEXIST or \
(sys.platform == "win32" and e.errno == errno.EACCES):
except OSError as e:
if (e.errno == errno.EEXIST or
(sys.platform == "win32" and e.errno == errno.EACCES)):
pass
else:
# should not occur
@@ -57,22 +57,21 @@ def lockFile(lockfile, max_wait = 600):
# and read its contents to report the owner PID
f = open(lockfile, "r")
s = os.stat(lockfile)
except EnvironmentError, e:
except EnvironmentError as e:
if e.errno == errno.ENOENT or e.errno == errno.EACCES:
# we didn't create the lockfile, so it did exist, but it's
# gone now. Just try again
continue
sys.exit("%s exists but stat() failed: %s" %
(lockfile, e.strerror))
sys.exit("{0} exists but stat() failed: {1}"
.format(lockfile, e.strerror))

# we didn't create the lockfile and it's still there, check
# its age
now = int(time.time())
if now - s[stat.ST_MTIME] > max_wait:
pid = f.readline().rstrip()
sys.exit("%s has been locked for more than " \
"%d seconds (PID %s)" % (lockfile, max_wait,
pid))
sys.exit("{0} has been locked for more than "
"{1} seconds (PID {2})".format(lockfile, max_wait, pid))

# it's not been locked too long, wait a while and retry
f.close()
@@ -82,7 +81,7 @@ def lockFile(lockfile, max_wait = 600):
# descriptor into a Python file object and record our PID in it

f = os.fdopen(fd, "w")
f.write("%d\n" % os.getpid())
f.write("{0}\n".format(os.getpid()))
f.close()
return LockFile(lockfile)

@@ -91,7 +90,7 @@ class pushback_iter(object):

This behaves like a regular iterable, just that you can call
iter.pushback(item)
to get the givem item as next item in the iteration.
to get the given item as next item in the iteration.
'''
def __init__(self, iterable):
self.it = iter(iterable)

@@ -9,8 +9,12 @@
# configuration, such as the target OS and CPU.
#
# The output file is intended to be used as input to the mozinfo package.
from __future__ import with_statement
import os, re, sys
from __future__ import print_function
import os
import re
import sys
import json


def build_dict(env=os.environ):
"""
@@ -71,32 +75,6 @@ def build_dict(env=os.environ):
d["crashreporter"] = 'MOZ_CRASHREPORTER' in env and env['MOZ_CRASHREPORTER'] == '1'
return d

#TODO: replace this with the json module when Python >= 2.6 is a requirement.
class JsonValue:
"""
A class to serialize Python values into JSON-compatible representations.
"""
def __init__(self, v):
if v is not None and not (isinstance(v,str) or isinstance(v,bool) or isinstance(v,int)):
raise Exception("Unhandled data type: %s" % type(v))
self.v = v
def __repr__(self):
if self.v is None:
return "null"
if isinstance(self.v,bool):
return str(self.v).lower()
return repr(self.v)

def jsonify(d):
"""
Return a JSON string of the dict |d|. Only handles a subset of Python
value types: bool, str, int, None.
"""
jd = {}
for k, v in d.iteritems():
jd[k] = JsonValue(v)
return repr(jd)

def write_json(file, env=os.environ):
"""
Write JSON data about the configuration specified in |env|
@@ -104,16 +82,17 @@ def write_json(file, env=os.environ):
See build_dict for information about what environment variables are used,
and what keys are produced.
"""
s = jsonify(build_dict(env))
build_conf = build_dict(env)
if isinstance(file, basestring):
with open(file, "w") as f:
f.write(s)
json.dump(build_conf, f)
else:
file.write(s)
json.dump(build_conf, file)


if __name__ == '__main__':
try:
write_json(sys.argv[1] if len(sys.argv) > 1 else sys.stdout)
except Exception, e:
print >>sys.stderr, str(e)
except Exception as e:
print(str(e), file=sys.stderr)
sys.exit(1)
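
The writemozinfo.py hunks above drop the hand-written JsonValue/jsonify helpers in favour of the standard json module, which is available from Python 2.6 onward. A minimal sketch of the replacement pattern (the file name and dictionary contents here are illustrative, not from the patch):

import json

build_conf = {'os': 'win', 'processor': 'x86', 'crashreporter': True}

# Serialize the configuration dict straight to a file...
with open("mozinfo.json", "w") as f:
    json.dump(build_conf, f)

# ...and parse it back without any custom serializer.
with open("mozinfo.json") as f:
    assert json.load(f)['os'] == 'win'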
|
||||
|
|
|
@ -218,7 +218,8 @@ class Expression:
|
|||
self.offset = expression.offset
|
||||
self.content = expression.content[:3]
|
||||
def __str__(self):
|
||||
return 'Unexpected content at offset %i, "%s"'%(self.offset, self.content)
|
||||
return 'Unexpected content at offset {0}, "{1}"'.format(self.offset,
|
||||
self.content)
|
||||
|
||||
class Context(dict):
|
||||
"""
|
||||
|
|
|
@ -77,9 +77,9 @@ class Preprocessor:
|
|||
|
||||
def warnUnused(self, file):
|
||||
if self.actionLevel == 0:
|
||||
sys.stderr.write('%s: WARNING: no preprocessor directives found\n' % file)
|
||||
sys.stderr.write('{0}: WARNING: no preprocessor directives found\n'.format(file))
|
||||
elif self.actionLevel == 1:
|
||||
sys.stderr.write('%s: WARNING: no useful preprocessor directives found\n' % file)
|
||||
sys.stderr.write('{0}: WARNING: no useful preprocessor directives found\n'.format(file))
|
||||
pass
|
||||
|
||||
def setLineEndings(self, aLE):
|
||||
|
@ -96,7 +96,9 @@ class Preprocessor:
|
|||
"""
|
||||
self.marker = aMarker
|
||||
if aMarker:
|
||||
self.instruction = re.compile('%s(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'%aMarker, re.U)
|
||||
self.instruction = re.compile('{0}(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
|
||||
.format(aMarker),
|
||||
re.U)
|
||||
self.comment = re.compile(aMarker, re.U)
|
||||
else:
|
||||
class NoMatch(object):
|
||||
|
@ -129,9 +131,9 @@ class Preprocessor:
|
|||
self.writtenLines += 1
|
||||
ln = self.context['LINE']
|
||||
if self.writtenLines != ln:
|
||||
self.out.write('//@line %(line)d "%(file)s"%(le)s'%{'line': ln,
|
||||
'file': self.context['FILE'],
|
||||
'le': self.LE})
|
||||
self.out.write('//@line {line} "{file}"{le}'.format(line=ln,
|
||||
file=self.context['FILE'],
|
||||
le=self.LE))
|
||||
self.writtenLines = ln
|
||||
filteredLine = self.applyFilters(aLine)
|
||||
if filteredLine != aLine:
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
# not, an error message is printed, quoting ERROR_LOCATION, which should
|
||||
# probably be the filename and line number of the erroneous call to
|
||||
# check_source_count.py.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
@ -32,17 +32,26 @@ for f in files:
|
|||
details[f] = num
|
||||
|
||||
if count == expected_count:
|
||||
print "TEST-PASS | check_source_count.py %s | %d" % (search_string, expected_count)
|
||||
print("TEST-PASS | check_source_count.py {0} | {1}"
|
||||
.format(search_string, expected_count))
|
||||
|
||||
else:
|
||||
print "TEST-UNEXPECTED-FAIL | check_source_count.py %s | " % (search_string),
|
||||
print("TEST-UNEXPECTED-FAIL | check_source_count.py {0} | "
|
||||
.format(search_string),
|
||||
end='')
|
||||
if count < expected_count:
|
||||
print "There are fewer occurrences of /%s/ than expected. This may mean that you have removed some, but forgotten to account for it %s." % (search_string, error_location)
|
||||
print("There are fewer occurrences of /{0}/ than expected. "
|
||||
"This may mean that you have removed some, but forgotten to "
|
||||
"account for it {1}.".format(search_string, error_location))
|
||||
else:
|
||||
print "There are more occurrences of /%s/ than expected. We're trying to prevent an increase in the number of %s's, using %s if possible. If it in unavoidable, you should update the expected count %s." % (search_string, search_string, replacement, error_location)
|
||||
print("There are more occurrences of /{0}/ than expected. We're trying "
|
||||
"to prevent an increase in the number of {1}'s, using {2} if "
|
||||
"possible. If it is unavoidable, you should update the expected "
|
||||
"count {3}.".format(search_string, search_string, replacement,
|
||||
error_location))
|
||||
|
||||
print "Expected: %d; found: %d" % (expected_count, count)
|
||||
print("Expected: {0}; found: {1}".format(expected_count, count))
|
||||
for k in sorted(details):
|
||||
print "Found %d occurences in %s" % (details[k],k)
|
||||
print("Found {0} occurences in {1}".format(details[k],k))
|
||||
sys.exit(-1)
|
||||
|
||||
|
|
|
@ -26,7 +26,6 @@ ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} following these rules:
|
|||
descriptor contains. And for each of these LIBS, also apply the same
|
||||
rules.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
import sys, os, errno
|
||||
import expandlibs_config as conf
|
||||
|
||||
|
@ -36,7 +35,7 @@ def ensureParentDir(file):
|
|||
if dir and not os.path.exists(dir):
|
||||
try:
|
||||
os.makedirs(dir)
|
||||
except OSError, error:
|
||||
except OSError as error:
|
||||
if error.errno != errno.EEXIST:
|
||||
raise
|
||||
|
||||
|
@ -90,7 +89,8 @@ class LibDescriptor(dict):
|
|||
|
||||
def __str__(self):
|
||||
'''Serializes the lib descriptor'''
|
||||
return '\n'.join('%s = %s' % (k, ' '.join(self[k])) for k in self.KEYS if len(self[k]))
|
||||
return '\n'.join('{0} = {1}'.format(k, ' '.join(self[k]))
|
||||
for k in self.KEYS if len(self[k]))
|
||||
|
||||
class ExpandArgs(list):
|
||||
def __init__(self, args):
|
||||
|
@ -135,4 +135,4 @@ class ExpandLibsDeps(ExpandArgs):
|
|||
return objs
|
||||
|
||||
if __name__ == '__main__':
|
||||
print " ".join(ExpandArgs(sys.argv[1:]))
|
||||
print(" ".join(ExpandArgs(sys.argv[1:])))
|
||||
|
|
|
@ -20,10 +20,11 @@ With the --symbol-order argument, followed by a file name, it will add the
|
|||
relevant linker options to change the order in which the linker puts the
|
||||
symbols appear in the resulting binary. Only works for ELF targets.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os
|
||||
from expandlibs import ExpandArgs, relativize, isObject, ensureParentDir, ExpandLibsDeps
|
||||
from expandlibs import (ExpandArgs, relativize, isObject, ensureParentDir,
|
||||
ExpandLibsDeps)
|
||||
import expandlibs_config as conf
|
||||
from optparse import OptionParser
|
||||
import subprocess
|
||||
|
@ -92,10 +93,10 @@ class ExpandArgsMore(ExpandArgs):
|
|||
if not len(objs): return
|
||||
fd, tmp = tempfile.mkstemp(suffix=".list",dir=os.curdir)
|
||||
if conf.EXPAND_LIBS_LIST_STYLE == "linkerscript":
|
||||
content = ['INPUT("%s")\n' % obj for obj in objs]
|
||||
content = ['INPUT("{0}")\n'.format(obj) for obj in objs]
|
||||
ref = tmp
|
||||
elif conf.EXPAND_LIBS_LIST_STYLE == "list":
|
||||
content = ["%s\n" % obj for obj in objs]
|
||||
content = ["{0}\n".format(obj) for obj in objs]
|
||||
ref = "@" + tmp
|
||||
else:
|
||||
os.close(fd)
|
||||
|
@ -139,9 +140,13 @@ class ExpandArgsMore(ExpandArgs):
|
|||
def _getOrderedSections(self, ordered_symbols):
|
||||
'''Given an ordered list of symbols, returns the corresponding list
|
||||
of sections following the order.'''
|
||||
if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
|
||||
raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
|
||||
finder = SectionFinder([arg for arg in self if isObject(arg) or os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
|
||||
if conf.EXPAND_LIBS_ORDER_STYLE not in ['linkerscript',
|
||||
'section-ordering-file']:
|
||||
raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
|
||||
.format(conf.EXPAND_LIBS_ORDER_STYLE))
|
||||
finder = SectionFinder([arg for arg in self
|
||||
if isObject(arg) or
|
||||
os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
|
||||
folded = self._getFoldedSections()
|
||||
sections = set()
|
||||
ordered_sections = []
|
||||
|
@ -182,32 +187,35 @@ class ExpandArgsMore(ExpandArgs):
|
|||
linked_sections = [s for s in linked_sections if s in split_sections]
|
||||
|
||||
if conf.EXPAND_LIBS_ORDER_STYLE == 'section-ordering-file':
|
||||
option = '-Wl,--section-ordering-file,%s'
|
||||
option = '-Wl,--section-ordering-file,{0}'
|
||||
content = sections
|
||||
for linked_section in linked_sections:
|
||||
content.extend(split_sections[linked_section])
|
||||
content.append('%s.*' % linked_section)
|
||||
content.append('{0}.*'.format(linked_section))
|
||||
content.append(linked_section)
|
||||
|
||||
elif conf.EXPAND_LIBS_ORDER_STYLE == 'linkerscript':
|
||||
option = '-Wl,-T,%s'
|
||||
option = '-Wl,-T,{0}'
|
||||
section_insert_before = dict(SECTION_INSERT_BEFORE)
|
||||
for linked_section in linked_sections:
|
||||
content.append('SECTIONS {')
|
||||
content.append(' %s : {' % linked_section)
|
||||
content.extend(' *(%s)' % s for s in split_sections[linked_section])
|
||||
content.append(' }')
|
||||
content.append('}')
|
||||
content.append('INSERT BEFORE %s' % section_insert_before[linked_section])
|
||||
content.append('SECTIONS {{')
|
||||
content.append(' {0} : {{'.format(linked_section))
|
||||
content.extend(' *({0})'
|
||||
.format(s for s in split_sections[linked_section]))
|
||||
content.append(' }}')
|
||||
content.append('}}')
|
||||
content.append('INSERT BEFORE {0}'
|
||||
.format(section_insert_before[linked_section]))
|
||||
else:
|
||||
raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
|
||||
raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
|
||||
.format(conf.EXPAND_LIBS_ORDER_STYLE))
|
||||
|
||||
fd, tmp = tempfile.mkstemp(dir=os.curdir)
|
||||
f = os.fdopen(fd, "w")
|
||||
f.write('\n'.join(content)+'\n')
|
||||
f.close()
|
||||
self.tmp.append(tmp)
|
||||
self.append(option % tmp)
|
||||
self.append(option.format(tmp))
|
||||
|
||||
class SectionFinder(object):
|
||||
'''Instances of this class allow to map symbol names to sections in
|
||||
|
@ -216,15 +224,17 @@ class SectionFinder(object):
|
|||
def __init__(self, objs):
|
||||
'''Creates an instance, given a list of object files.'''
|
||||
if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
|
||||
raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
|
||||
raise Exception('EXPAND_LIBS_ORDER_STYLE "{0}" is not supported'
|
||||
.format(conf.EXPAND_LIBS_ORDER_STYLE))
|
||||
self.mapping = {}
|
||||
for obj in objs:
|
||||
if not isObject(obj) and os.path.splitext(obj)[1] != conf.LIB_SUFFIX:
|
||||
raise Exception('%s is not an object nor a static library' % obj)
|
||||
raise Exception('{0} is not an object nor a static library'
|
||||
.format(obj))
|
||||
for symbol, section in SectionFinder._getSymbols(obj):
|
||||
sym = SectionFinder._normalize(symbol)
|
||||
if sym in self.mapping:
|
||||
if not section in self.mapping[sym]:
|
||||
if section not in self.mapping[sym]:
|
||||
self.mapping[sym].append(section)
|
||||
else:
|
||||
self.mapping[sym] = [section]
|
||||
|
@ -268,11 +278,11 @@ class SectionFinder(object):
|
|||
return syms
|
||||
|
||||
def print_command(out, args):
|
||||
print >>out, "Executing: " + " ".join(args)
|
||||
print("Executing: " + " ".join(args), file=out)
|
||||
for tmp in [f for f in args.tmp if os.path.isfile(f)]:
|
||||
print >>out, tmp + ":"
|
||||
print(tmp + ":", file=out)
|
||||
with open(tmp) as file:
|
||||
print >>out, "".join([" " + l for l in file.readlines()])
|
||||
print("".join([" " + l for l in file.readlines()]), file=out)
|
||||
out.flush()
|
||||
|
||||
def main():
|
||||
|
@ -323,7 +333,10 @@ def main():
|
|||
return
|
||||
ensureParentDir(options.depend)
|
||||
with open(options.depend, 'w') as depfile:
|
||||
depfile.write("%s : %s\n" % (options.target, ' '.join(dep for dep in deps if os.path.isfile(dep) and dep != options.target)))
|
||||
depfile.write("{0} : {1}\n"
|
||||
.format(options.target, ' '.join(dep for dep in deps
|
||||
if os.path.isfile(dep) and
|
||||
dep != options.target)))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -1,11 +1,10 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import print_function
'''Given a list of object files and library names, prints a library
descriptor to standard output'''

from __future__ import with_statement
import sys
import os
import expandlibs_config as conf

@ -19,12 +18,12 @@ def generate(args):
if os.path.exists(arg):
desc['OBJS'].append(os.path.abspath(arg))
else:
raise Exception("File not found: %s" % arg)
raise Exception("File not found: {0}".format(arg))
elif os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
if os.path.exists(arg) or os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
desc['LIBS'].append(os.path.abspath(arg))
else:
raise Exception("File not found: %s" % arg)
raise Exception("File not found: {0}".format(arg))
return desc

if __name__ == '__main__':

@ -40,8 +39,9 @@ if __name__ == '__main__':

ensureParentDir(options.output)
with open(options.output, 'w') as outfile:
print >>outfile, generate(args)
print(generate(args), file=outfile)
if options.depend:
ensureParentDir(options.depend)
with open(options.depend, 'w') as depfile:
depfile.write("%s : %s\n" % (options.output, ' '.join(ExpandLibsDeps(args))))
depfile.write("{0} : {1}\n".format(options.output,
' '.join(ExpandLibsDeps(args))))

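Most hunks in this commit are the same mechanical rewrite: old-style % interpolation becomes str.format(), which Python 2.6 introduced. A small equivalence check with placeholder values (nothing here is taken from the patched files):

name, count = 'chrome.manifest', 3
old = '%s has %d entries' % (name, count)
new = '{0} has {1} entries'.format(name, count)
assert old == new
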
@ -2,7 +2,7 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import print_function

usage = """%prog: A test for OOM conditions in the shell.

@ -65,7 +65,7 @@ def run(args, stdin=None):
stdout_worker.join()
stderr_worker.join()

except KeyboardInterrupt, e:
except KeyboardInterrupt as e:
sys.exit(-1)

stdout, stderr = stdout_worker.all, stderr_worker.all

@ -101,7 +101,7 @@ def count_lines():

lines = []
for k,v in counts.items():
lines.append("%6d: %s" % (v,k))
lines.append("{0:6}: {1}".format(v, k))

lines.sort()

@ -171,7 +171,7 @@ command_template = 'shell/js' \
+ ' -m -j -p' \
+ ' -e "const platform=\'darwin\'; const libdir=\'../jit-test/lib/\';"' \
+ ' -f ../jit-test/lib/prolog.js' \
+ ' -f %s'
+ ' -f {0}'


# Blacklists are things we don't want to see in our logs again (though we do

@ -222,7 +222,7 @@ num_failures = 0
for f in files:

# Run it once to establish boundaries
command = (command_template + ' -O') % (f)
command = (command_template + ' -O').format(f)
out, err, exit = run(command)
max = re.match(".*OOM max count: (\d+).*", out, flags=re.DOTALL).groups()[0]
max = int(max)

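The boundary pass above derives the allocation limit by scraping "OOM max count: N" out of the shell's output. A small sketch of that parsing against a made-up output blob:

import re

fake_out = "prelude text\nOOM max count: 137\ntrailing noise"  # assumed sample output
m = re.match(r".*OOM max count: (\d+).*", fake_out, flags=re.DOTALL)
limit = int(m.group(1))
assert limit == 137
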
@ -232,11 +232,11 @@ for f in files:
for i in range(20, max):

if OPTIONS.regression == None:
print "Testing allocation %d/%d in %s" % (i,max,f)
print("Testing allocation {0}/{1} in {2}".format(i,max,f))
else:
sys.stdout.write('.') # something short for tinderbox, no space or \n

command = (command_template + ' -A %d') % (f, i)
command = (command_template + ' -A {0}').format(f, i)
out, err, exit = run(command)

# Success (5 is SM's exit code for controlled errors)

@ -282,7 +282,9 @@ for f in files:
log.write ("\n")
log.write ("=========================================================================")
log.write ("\n")
log.write ("An allocation failure at\n\tallocation %d/%d in %s\n\tcauses problems (detected using bug 624094)" % (i, max, f))
log.write ("An allocation failure at\n\tallocation {0}/{1} in {2}\n\t"
"causes problems (detected using bug 624094)"
.format(i, max, f))
log.write ("\n")
log.write ("\n")

@ -323,7 +325,7 @@ for f in files:
if OPTIONS.regression == None:
count_lines()

print '\n',
print()

# Do the actual regression check
if OPTIONS.regression != None:

@ -331,12 +333,20 @@ if OPTIONS.regression != None:

if num_failures != expected_num_failures:

print "TEST-UNEXPECTED-FAIL |",
print("TEST-UNEXPECTED-FAIL |", end='')
if num_failures > expected_num_failures:
print "More out-of-memory errors were found (%s) than expected (%d). This probably means an allocation site has been added without a NULL-check. If this is unavoidable, you can account for it by updating Makefile.in." % (num_failures, expected_num_failures),
print("More out-of-memory errors were found ({0}) than expected ({1}). "
"This probably means an allocation site has been added without a "
"NULL-check. If this is unavoidable, you can account for it by "
"updating Makefile.in.".format(num_failures, expected_num_failures),
end='')
else:
print "Congratulations, you have removed %d out-of-memory error(s) (%d remain)! Please account for it by updating Makefile.in." % (expected_num_failures - num_failures, num_failures),
print("Congratulations, you have removed {0} out-of-memory error(s) "
"({1} remain)! Please account for it by updating Makefile.in."
.format(expected_num_failures - num_failures, num_failures),
end='')
sys.exit(-1)
else:
print 'TEST-PASS | find_OOM_errors | Found the expected number of OOM errors (%d)' % (expected_num_failures)
print('TEST-PASS | find_OOM_errors | Found the expected number of OOM '
'errors ({0})'.format(expected_num_failures))

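find_OOM_errors also converts print statements to the print() function: file= replaces the >>stream redirection and end='' replaces the trailing comma that used to suppress the newline. A minimal sketch of both idioms (not taken from the patched script):

from __future__ import print_function
import sys

# was: print >> sys.stderr, "message"
print("message", file=sys.stderr)
# was: print "no newline yet",   (trailing comma suppressed the newline)
print("no newline yet", end='')
print()  # finish the line
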
@ -9,7 +9,7 @@
# a full build environment set up.
# The basic limitation is, it doesn't even try to link and ignores
# all related options.

from __future__ import print_function
from optparse import OptionParser
import os
import os.path

@ -39,7 +39,7 @@ def _nsinstall_internal(argv):
# The remaining arguments are not used in our tree, thus they're not
# implemented.
def BadArg(option, opt, value, parser):
parser.error('option not supported: %s' % opt)
parser.error('option not supported: {0}'.format(opt))

p.add_option('-C', action="callback", metavar="CWD",
callback=BadArg,

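nsinstall registers the flags it does not support with an optparse callback that aborts parsing. A standalone sketch of that pattern with a single made-up flag, omitting the rest of the option table:

from optparse import OptionParser

def BadArg(option, opt, value, parser):
    # Reject any option routed through this callback.
    parser.error('option not supported: {0}'.format(opt))

p = OptionParser()
p.add_option('-C', action="callback", callback=BadArg, metavar="CWD")
# p.parse_args(['-C'])  # would print the error and exit
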
@ -56,7 +56,8 @@ def _nsinstall_internal(argv):
try:
options.m = int(options.m, 8)
except:
sys.stderr.write('nsinstall: ' + options.m + ' is not a valid mode\n')
sys.stderr.write('nsinstall: {0} is not a valid mode\n'
.format(options.m))
return 1

# just create one directory?

@ -64,7 +65,7 @@ def _nsinstall_internal(argv):
dir = os.path.abspath(dir)
if os.path.exists(dir):
if not os.path.isdir(dir):
print >> sys.stderr, ('nsinstall: %s is not a directory' % dir)
print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
return 1
if mode:
os.chmod(dir, mode)

@ -75,11 +76,11 @@ def _nsinstall_internal(argv):
os.makedirs(dir, mode)
else:
os.makedirs(dir)
except Exception, e:
except Exception as e:
# We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
if try_again:
return maybe_create_dir(dir, mode, False)
print >> sys.stderr, ("nsinstall: failed to create directory %s: %s" % (dir, e))
print("nsinstall: failed to create directory {0}: {1}".format(dir, e))
return 1
else:
return 0
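
The directory-creation hunk keeps the bug 463411 workaround: makedirs can race with another process creating the same directory, so a failure gets one retry before nsinstall gives up. A hedged sketch of the same idea as a standalone helper (hypothetical name, and it checks errno directly instead of retrying blindly, so it is not the nsinstall code itself):

import errno
import os

def ensure_dir(path, mode=None):
    """Create path if needed; tolerate a concurrent creator."""
    try:
        if mode is not None:
            os.makedirs(path, mode)
        else:
            os.makedirs(path)
    except OSError as error:
        # Another process may have created it between our check and makedirs.
        if error.errno != errno.EEXIST or not os.path.isdir(path):
            raise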