Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1542963 - run './mach lint ... --fix' on mozbuild/mozbuild, undoes some black changes. r=#build
Lint python/mozbuild/{mozbuild,mozpack}. Ran './mach lint -l py2 -l flake8 -l shellcheck -l codespell -l yaml python/mozbuild/{mozbuild,mozpack}/ --fix' in order to undo some black changes and get closer to making this folder validate on every lint run.

Differential Revision: https://phabricator.services.mozilla.com/D26640

--HG--
extra : rebase_source : 6b69b6ebcac73835b752607f7b5b4429de7f95cf
extra : intermediate-source : 114355f83d36188fac592c8c6497242c71b27ad6
extra : source : 51fbdf63bbce8f75fac407de305f826dc775fcb5
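The lint invocation from the message, reflowed for readability. This is a sketch of how to reproduce the run; it assumes a gecko-dev/mozilla-central checkout as the working directory and a shell with brace expansion (e.g. bash or zsh):

    # Run the py2, flake8, shellcheck, codespell and yaml linters over both
    # mozbuild packages; --fix rewrites the files in place.
    ./mach lint -l py2 -l flake8 -l shellcheck -l codespell -l yaml \
        python/mozbuild/{mozbuild,mozpack}/ --fix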
This commit is contained in:
Parent
8eba83f0f6
Commit
c078e63bbc

@@ -17,32 +17,33 @@ from mozbuild.util import (
    lock_file,
)


def addEntriesToListFile(listFile, entries):
    """Given a file |listFile| containing one entry per line,
    add each entry in |entries| to the file, unless it is already
    present."""
    ensureParentDir(listFile)
    lock = lock_file(listFile + ".lck")
    try:
        if os.path.exists(listFile):
            f = open(listFile)
            existing = set(x.strip() for x in f.readlines())
            f.close()
        else:
            existing = set()
        for e in entries:
            if e not in existing:
                existing.add(e)
        with open(listFile, 'wb') as f:
            f.write("\n".join(sorted(existing))+"\n")
    finally:
        lock = None


def main(args):
    if len(args) < 2:
        print("Usage: buildlist.py <list file> <entry> [<entry> ...]",
              file=sys.stderr)
        return 1

    return addEntriesToListFile(args[0], args[1:])

@@ -280,13 +280,14 @@ def check_networking(binary):
        s = 'TEST-UNEXPECTED-FAIL | check_networking | {} | Identified {} ' + \
            'networking function(s) being imported in the rust static library ({})'
        print(s.format(basename, len(bad_occurences_names),
                       ",".join(sorted(bad_occurences_names))),
              file=sys.stderr)
        retcode = 1
    elif buildconfig.substs.get('MOZ_AUTOMATION'):
        print('TEST-PASS | check_networking | {}'.format(basename))
    return retcode


def checks(target, binary):
    # The clang-plugin is built as target but is really a host binary.
    # Cheat and pretend we were passed the right argument.

@@ -345,7 +346,7 @@ def main(args):

    if options.networking and options.host:
        print('--networking is only valid with --target',
              file=sys.stderr)
        return 1

    if options.networking:

@@ -11,6 +11,7 @@ import shutil
import sys
import os


def dump_symbols(target, tracking_file, count_ctors=False):
    # Our tracking file, if present, will contain path(s) to the previously generated
    # symbols. Remove them in this case so we don't simply accumulate old symbols

@@ -59,7 +60,7 @@ def dump_symbols(target, tracking_file, count_ctors=False):
    os.environ['OBJCOPY'] = objcopy

    args = ([buildconfig.substs['PYTHON'], os.path.join(buildconfig.topsrcdir, 'toolkit',
                                                        'crashreporter', 'tools', 'symbolstore.py')] +
            sym_store_args +
            ['-s', buildconfig.topsrcdir, dump_syms_bin, os.path.join(buildconfig.topobjdir,
                                                                      'dist',

@@ -73,6 +74,7 @@ def dump_symbols(target, tracking_file, count_ctors=False):
        fh.write(out_files)
        fh.flush()


def main(argv):
    parser = argparse.ArgumentParser(
        usage="Usage: dumpsymbols.py <library or program> <tracking file>")

@@ -13,6 +13,7 @@ import mozpack.path as mozpath
import buildconfig
from mozbuild.base import BuildEnvironmentNotFoundException


def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
    tmpdir = tempfile.mkdtemp(prefix='tmp')
    try:

@@ -30,7 +31,8 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
    except BuildEnvironmentNotFoundException:
        # configure hasn't been run, just use the default
        sevenz = '7z'
-    subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx', '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])
+    subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx',
+                           '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])

    with open(package, 'wb') as o:
        for i in [final_sfx, tagfile, mozpath.join(tmpdir, 'app.7z')]:

@@ -41,6 +43,7 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
    shutil.move('core', pkg_dir)
    shutil.rmtree(tmpdir)


def main(args):
    if len(args) != 4:
        print('Usage: exe_7z_archive.py <pkg_dir> <tagfile> <sfx_package> <package> <use_upx>',

@@ -50,5 +53,6 @@ def main(args):
    archive_exe(args[0], args[1], args[2], args[3], args[4])
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))

@@ -8,10 +8,12 @@ import shutil
import sys
import subprocess


def extract_exe(package, target):
    subprocess.check_call(['7z', 'x', package, 'core'])
    shutil.move('core', target)


def main(args):
    if len(args) != 2:
        print('Usage: exe_7z_extract.py <package> <target>',

@@ -21,5 +23,6 @@ def main(args):
    extract_exe(args[0], args[1])
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))

@@ -116,5 +116,6 @@ def main(argv):
        return 1
    return ret


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))

@@ -14,27 +14,29 @@ output_file = sys.argv[3]
output = open(output_file, 'w')

with open(sys.argv[1]) as f:
    searchinfo = json.load(f)

# If we have a locale, use it, otherwise use the default
if locale in searchinfo["locales"]:
    localeSearchInfo = searchinfo["locales"][locale]
else:
    localeSearchInfo = {}
    localeSearchInfo["default"] = searchinfo["default"]


def validateDefault(key):
    if (not key in searchinfo["default"]):
        print >>sys.stderr, "Error: Missing default %s in list.json" % (key)
        sys.exit(1)


-validateDefault("searchDefault");
-validateDefault("visibleDefaultEngines");
+validateDefault("searchDefault")
+validateDefault("visibleDefaultEngines")

# If the selected locale doesn't have a searchDefault,
# use the global one.
if not "searchDefault" in localeSearchInfo["default"]:
    localeSearchInfo["default"]["searchDefault"] = searchinfo["default"]["searchDefault"]

# If the selected locale doesn't have a searchOrder,
# use the global one if present.

@@ -45,23 +47,24 @@ if not "searchOrder" in localeSearchInfo["default"] and "searchOrder" in searchinfo:
# If we have region overrides, enumerate through them
# and add the additional regions to the locale information.
if "regionOverrides" in searchinfo:
    regionOverrides = searchinfo["regionOverrides"]

    for region in regionOverrides:
        # Only add a new engine list if there is an engine that is overridden
        enginesToOverride = set(regionOverrides[region].keys())
        if region in localeSearchInfo and "visibleDefaultEngines" in localeSearchInfo[region]:
            visibleDefaultEngines = localeSearchInfo[region]["visibleDefaultEngines"]
        else:
            visibleDefaultEngines = localeSearchInfo["default"]["visibleDefaultEngines"]
        if set(visibleDefaultEngines) & enginesToOverride:
            if region not in localeSearchInfo:
                localeSearchInfo[region] = {}
-            localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(visibleDefaultEngines)
+            localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(
+                visibleDefaultEngines)
            for i, engine in enumerate(localeSearchInfo[region]["visibleDefaultEngines"]):
                if engine in regionOverrides[region]:
                    localeSearchInfo[region]["visibleDefaultEngines"][i] = regionOverrides[region][engine]

output.write(json.dumps(localeSearchInfo, ensure_ascii=False).encode('utf8'))

-output.close();
+output.close()

@@ -102,7 +102,8 @@ def main(output, *args, **kwargs):
    def add_names(names, defaults={}):
        for name in names:
            site = copy.deepcopy(defaults)
-            site.update(properties.get_dict('browser.suggestedsites.{name}'.format(name=name), required_keys=('title', 'url', 'bgcolor')))
+            site.update(properties.get_dict('browser.suggestedsites.{name}'.format(
+                name=name), required_keys=('title', 'url', 'bgcolor')))
            site['imageurl'] = image_url_template.format(name=name)
            sites.append(site)

@@ -116,7 +117,7 @@ def main(output, *args, **kwargs):
            matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
            if not matches:
                raise Exception("Could not find drawable in '{resources}' for '{name}'"
                                .format(resources=resources, name=name))
            else:
                if opts.verbose:
                    print("Found {len} drawables in '{resources}' for '{name}': {matches}"

@@ -129,12 +130,14 @@ def main(output, *args, **kwargs):
        ('browser.suggestedsites.restricted.list', {'restricted': True}),
    ]
    if opts.verbose:
-        print('Reading {len} suggested site lists: {lists}'.format(len=len(lists), lists=[list_name for list_name, _ in lists]))
+        print('Reading {len} suggested site lists: {lists}'.format(
+            len=len(lists), lists=[list_name for list_name, _ in lists]))

    for (list_name, list_item_defaults) in lists:
        names = properties.get_list(list_name)
        if opts.verbose:
-            print('Reading {len} suggested sites from {list}: {names}'.format(len=len(names), list=list_name, names=names))
+            print('Reading {len} suggested sites from {list}: {names}'.format(
+                len=len(names), list=list_name, names=names))
        add_names(names, list_item_defaults)

    # We must define at least one site -- that's what the fallback is for.

@@ -7,9 +7,11 @@ from __future__ import print_function
import sys
import subprocess


def make_unzip(package):
    subprocess.check_call(['unzip', package])


def main(args):
    if len(args) != 1:
        print('Usage: make_unzip.py <package>',

@@ -19,5 +21,6 @@ def main(args):
    make_unzip(args[0])
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))

@@ -7,9 +7,11 @@ from __future__ import print_function
import sys
import subprocess


def make_zip(source, package):
    subprocess.check_call(['zip', '-r9D', package, source, '-x', '\*/.mkdir.done'])


def main(args):
    if len(args) != 2:
        print('Usage: make_zip.py <source> <package>',

@@ -19,5 +21,6 @@ def main(args):
    make_zip(args[0], args[1])
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))

@@ -10,24 +10,24 @@ engines = []
locale = sys.argv[2]

with open(sys.argv[1]) as f:
    searchinfo = json.load(f)

# Get a list of the engines from the locale or the default
engines = set()
if locale in searchinfo["locales"]:
    for region, table in searchinfo["locales"][locale].iteritems():
        if "visibleDefaultEngines" in table:
            engines.update(table["visibleDefaultEngines"])

if not engines:
    engines.update(searchinfo["default"]["visibleDefaultEngines"])

# Get additional engines from regionOverrides
for region, overrides in searchinfo["regionOverrides"].iteritems():
    for originalengine, replacement in overrides.iteritems():
        if originalengine in engines:
            # We add the engine because we still need the original
            engines.add(replacement)

# join() will take an iterable, not just a list.
print('\n'.join(engines))

@@ -70,7 +70,7 @@ def package_fennec_apk(inputs=[], omni_ja=None,
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
-            raise ValueError('File %s not found (looked for %s)' % \
+            raise ValueError('File %s not found (looked for %s)' %
                             (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)

@@ -23,6 +23,7 @@ def main(argv):
    args = parser.parse_args(argv)

    objdir_abspath = mozpath.abspath(buildconfig.topobjdir)

    def is_valid_entry(entry):
        if isinstance(entry[1], BaseFile):
            entry_abspath = mozpath.abspath(entry[1].path)

@@ -15,10 +15,11 @@ def generate(output, *args):
    pp.handleCommandLine(list(args), True)
    return set(pp.includes)


def main(args):
    pp = Preprocessor()
    pp.handleCommandLine(args, True)


if __name__ == "__main__":
    main(sys.argv[1:])

@@ -55,6 +55,7 @@ def process_define_file(output, input):
            # via the command line, which raises a mass of macro
            # redefinition warnings. Just handle those macros
            # specially here.

            def define_for_name(name, val):
                define = "#define {name} {val}".format(name=name, val=val)
                if name in ('WINVER', '_WIN32_WINNT'):

@@ -29,8 +29,8 @@ COMPLETE = 'Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; ' \


def process_manifest(destdir, paths, track,
                     no_symlinks=False,
                     defines={}):

    if os.path.exists(track):
        # We use the same format as install manifests for the tracking

@@ -44,15 +44,15 @@ def process_manifest(destdir, paths, track,
        for p, f in finder.find(dest):
            remove_unaccounted.add(p, dummy_file)

-        remove_empty_directories=True
-        remove_all_directory_symlinks=True
+        remove_empty_directories = True
+        remove_all_directory_symlinks = True

    else:
        # If tracking is enabled and there is no file, we don't want to
        # be removing anything.
        remove_unaccounted = False
-        remove_empty_directories=False
-        remove_all_directory_symlinks=False
+        remove_empty_directories = False
+        remove_all_directory_symlinks = False

    manifest = InstallManifest()
    for path in paths:

@@ -64,9 +64,9 @@ def process_manifest(destdir, paths, track,
        copier, defines_override=defines, link_policy=link_policy
    )
    result = copier.copy(destdir,
                         remove_unaccounted=remove_unaccounted,
                         remove_all_directory_symlinks=remove_all_directory_symlinks,
                         remove_empty_directories=remove_empty_directories)

    if track:
        # We should record files that we actually copied.

@@ -83,21 +83,21 @@ def main(argv):
    parser.add_argument('destdir', help='Destination directory.')
    parser.add_argument('manifests', nargs='+', help='Path to manifest file(s).')
    parser.add_argument('--no-symlinks', action='store_true',
                        help='Do not install symbolic links. Always copy files')
    parser.add_argument('--track', metavar="PATH", required=True,
                        help='Use installed files tracking information from the given path.')
    parser.add_argument('-D', action=DefinesAction,
                        dest='defines', metavar="VAR[=VAL]",
                        help='Define a variable to override what is specified in the manifest')

    args = parser.parse_args(argv)

    start = time.time()

    result = process_manifest(args.destdir, args.manifests,
                              track=args.track,
                              no_symlinks=args.no_symlinks,
                              defines=args.defines)

    elapsed = time.time() - start

@@ -109,5 +109,6 @@ def main(argv):
        rm_files=result.removed_files_count,
        rm_dirs=result.removed_directories_count))


if __name__ == '__main__':
    main(sys.argv[1:])

@@ -12,6 +12,7 @@ from mozpack.files import FileFinder
from mozpack.mozjar import JarWriter
import mozpack.path as mozpath


def make_archive(archive_name, base, exclude, include):
    compress = ['**/*.sym']
    finder = FileFinder(base, ignore=exclude)

@@ -27,11 +28,13 @@ def make_archive(archive_name, base, exclude, include):
            writer.add(p.encode('utf-8'), f, mode=f.mode,
                       compress=should_compress, skip_duplicates=True)


def main(argv):
    parser = argparse.ArgumentParser(description='Produce a symbols archive')
    parser.add_argument('archive', help='Which archive to generate')
    parser.add_argument('base', help='Base directory to package')
-    parser.add_argument('--full-archive', action='store_true', help='Generate a full symbol archive')
+    parser.add_argument('--full-archive', action='store_true',
+                        help='Generate a full symbol archive')

    args = parser.parse_args(argv)

@@ -47,5 +50,6 @@ def main(argv):

    make_archive(args.archive, args.base, excludes, includes)


if __name__ == '__main__':
    main(sys.argv[1:])

@@ -642,7 +642,7 @@ for k, v in ARCHIVE_FILES.items():
        continue

    ignores = set(itertools.chain(*(e.get('ignore', [])
                                    for e in ARCHIVE_FILES['common'])))

    if not any(p.startswith('%s/' % k) for p in ignores):
        raise Exception('"common" ignore list probably should contain %s' % k)

@@ -1300,5 +1300,6 @@ def main(argv, _skip_logging=False):

    return 0 if process_command(options, args) else 1


if __name__ == "__main__":  # pragma: no cover
    sys.exit(main(sys.argv))

@@ -9,6 +9,7 @@ import subprocess
import sys
import os


def parse_outputs(crate_output, dep_outputs, pass_l_flag):
    env = {}
    args = []

@@ -59,6 +60,7 @@ def parse_outputs(crate_output, dep_outputs, pass_l_flag):

    return env, args


def wrap_rustc(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('--crate-out', nargs='?')

@@ -73,5 +75,6 @@ def wrap_rustc(args):
    os.environ.update(new_env)
    return subprocess.Popen(args.cmd + new_args, cwd=args.cwd).wait()


if __name__ == '__main__':
    sys.exit(wrap_rustc(sys.argv[1:]))

@@ -15,69 +15,76 @@ import os
from glob import glob
import manifestparser


def getIniTests(testdir):
    mp = manifestparser.ManifestParser(strict=False)
    mp.read(os.path.join(testdir, 'xpcshell.ini'))
    return mp.tests


def verifyDirectory(initests, directory):
    files = glob(os.path.join(os.path.abspath(directory), "test_*"))
    for f in files:
        if (not os.path.isfile(f)):
            continue

        name = os.path.basename(f)
        if name.endswith('.in'):
            name = name[:-3]

        if not name.endswith('.js'):
            continue

        found = False
        for test in initests:
            if os.path.join(os.path.abspath(directory), name) == test['path']:
                found = True
                break

        if not found:
-            print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (
-                name, os.path.join(directory, 'xpcshell.ini'))
+            print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (name, os.path.join(directory, 'xpcshell.ini'))
            sys.exit(1)


def verifyIniFile(initests, directory):
    files = glob(os.path.join(os.path.abspath(directory), "test_*"))
    for test in initests:
        name = test['path'].split('/')[-1]

        found = False
        for f in files:

            fname = f.split('/')[-1]
            if fname.endswith('.in'):
                fname = '.in'.join(fname.split('.in')[:-1])

            if os.path.join(os.path.abspath(directory), fname) == test['path']:
                found = True
                break

        if not found:
-            print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (
-                name, directory)
+            print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (name, directory)
            sys.exit(1)


def main(argv):
    if len(argv) < 2:
        print >>sys.stderr, "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]"
        sys.exit(1)

    topsrcdir = argv[0]
    for d in argv[1:]:
        # xpcshell-unpack is a copy of xpcshell sibling directory and in the Makefile
        # we copy all files (including xpcshell.ini from the sibling directory.
        if d.endswith('toolkit/mozapps/extensions/test/xpcshell-unpack'):
            continue

        initests = getIniTests(d)
        verifyDirectory(initests, d)
        verifyIniFile(initests, d)


if __name__ == '__main__':
    main(sys.argv[1:])

@@ -87,32 +87,33 @@ def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('--cache-dir',
                        help='Directory in which to find or write cached lexer data.')
    parser.add_argument('--depsdir',
                        help='Directory in which to write dependency files.')
    parser.add_argument('--bindings-conf',
                        help='Path to the WebIDL binding configuration file.')
    parser.add_argument('--input-dir', dest='input_dirs',
                        action='append', default=[],
                        help='Directory(ies) in which to find source .idl files.')
    parser.add_argument('headerdir',
                        help='Directory in which to write header files.')
    parser.add_argument('xpcrsdir',
                        help='Directory in which to write rust xpcom binding files.')
    parser.add_argument('xptdir',
                        help='Directory in which to write xpt file.')
    parser.add_argument('module',
                        help='Final module name to use for linked output xpt file.')
    parser.add_argument('idls', nargs='+',
                        help='Source .idl file(s).')
    parser.add_argument('-I', dest='incpath', action='append', default=[],
                        help='Extra directories where to look for included .idl files.')

    args = parser.parse_args(argv)
    incpath = [os.path.join(topsrcdir, p) for p in args.incpath]
    process(args.input_dirs, incpath, args.bindings_conf, args.cache_dir,
            args.headerdir, args.xpcrsdir, args.xptdir, args.depsdir, args.module,
            args.idls)


if __name__ == '__main__':
    main(sys.argv[1:])

@@ -5,6 +5,7 @@
import os
import sqlite3 as lite


class Node(object):

    def __init__(self, graph, node_id):

@@ -39,11 +40,12 @@ class Node(object):
        return os.path.join(parent.get_path(graph), self.name)

    def calculate_mtime(self):
-        if self.type == 0: # only files have meaningful costs
+        if self.type == 0:  # only files have meaningful costs
            return sum(x.mtime for x in self.cmds)
        else:
            return None


class Graph(object):

    def __init__(self, path=None, connect=None):

@@ -67,7 +69,7 @@ class Graph(object):
        self.connect.close()

    def query_arg(self, q, arg):
-        assert isinstance(arg, tuple) #execute() requires tuple argument
+        assert isinstance(arg, tuple)  # execute() requires tuple argument
        cursor = self.connect.cursor()
        cursor.execute(q, arg)
        return cursor

@@ -112,7 +114,7 @@ class Graph(object):
        m, s = sec / 60, sec % 60
        print ("\n------ Summary for %s ------\
            \nTotal Build Time (mm:ss) = %d:%d\nNum Downstream Commands = %d"
               % (f, m, s, node.num_cmds))

    def populate(self):
        # make nodes for files with downstream commands

@@ -127,5 +129,4 @@ class Graph(object):
    def get_cost_dict(self):
        if self.results is None:
            self.populate()
-        return {k:v for k,v in self.results if v > 0}
+        return {k: v for k, v in self.results if v > 0}

@@ -17,31 +17,36 @@ PUSHLOG_CHUNK_SIZE = 500

URL = 'https://hg.mozilla.org/mozilla-central/json-pushes?'


def unix_epoch(date):
-    return (date - datetime(1970,1,1)).total_seconds()
+    return (date - datetime(1970, 1, 1)).total_seconds()


def unix_from_date(n, today):
    return unix_epoch(today - timedelta(days=n))


def get_lastpid(session):
    return session.get(URL+'&version=2').json()['lastpushid']


def get_pushlog_chunk(session, start, end):
    # returns pushes sorted by date
    res = session.get(URL+'version=1&startID={0}&\
        endID={1}&full=1'.format(start, end)).json()
-    return sorted(res.items(), key = lambda x: x[1]['date'])
+    return sorted(res.items(), key=lambda x: x[1]['date'])


def collect_data(session, date):
-    if date < 1206031764: #first push
-        raise Exception ("No pushes exist before March 20, 2008.")
+    if date < 1206031764:  # first push
+        raise Exception("No pushes exist before March 20, 2008.")
    lastpushid = get_lastpid(session)
    data = []
    start_id = lastpushid - PUSHLOG_CHUNK_SIZE
    end_id = lastpushid + 1
    while True:
        res = get_pushlog_chunk(session, start_id, end_id)
-        starting_date = res[0][1]['date'] # date of oldest push in chunk
+        starting_date = res[0][1]['date']  # date of oldest push in chunk
        dates = [x[1]['date'] for x in res]
        if starting_date < date:
            i = bisect.bisect_left(dates, date)

@@ -52,10 +57,12 @@ def collect_data(session, date):
        end_id = start_id + 1
        start_id = start_id - PUSHLOG_CHUNK_SIZE


def get_data(epoch):
    session = requests.Session()
    data = collect_data(session, epoch)
-    return {k:v for sublist in data for (k,v) in sublist}
+    return {k: v for sublist in data for (k, v) in sublist}


class Pushlog(object):

@@ -78,6 +85,7 @@ class Pushlog(object):
        keys.sort()
        return keys


class Push(object):

    def __init__(self, pid, p_dict):

@@ -85,6 +93,7 @@ class Push(object):
        self.date = p_dict['date']
        self.files = [f for x in p_dict['changesets'] for f in x['files']]


class Report(object):

    def __init__(self, days, path=None, cost_dict=None):

@@ -112,7 +121,7 @@ class Report(object):
            cost = costs.get(f)
            count = counts.get(f)
            if cost is not None:
-                res.append((f, cost, count, round(cost*count,3)))
+                res.append((f, cost, count, round(cost*count, 3)))
        return res

    def get_sorted_report(self, format):

@@ -143,7 +152,8 @@ class Report(object):
        res = self.get_sorted_report(format)
        if limit is not None:
            res = self.cut(limit, res)
-        for x in res: data.append(x)
+        for x in res:
+            data.append(x)
        if format == 'pretty':
            print (data)
        else:

@@ -160,4 +170,3 @@ class Report(object):
        with open(file_path, 'wb') as f:
            f.write(content)
        print ("Created report: %s" % file_path)

@@ -11,7 +11,8 @@ import time

# Builds before this build ID use the v0 version scheme. Builds after this
# build ID use the v1 version scheme.
-V1_CUTOFF = 20150801000000 # YYYYmmddHHMMSS
+V1_CUTOFF = 20150801000000  # YYYYmmddHHMMSS


def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
    base = int(str(buildid)[:10])

@@ -30,6 +31,7 @@ def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
        raise ValueError("Don't know how to compute android:versionCode "
                         "for CPU arch %s" % cpu_arch)


def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
    '''Generate a v1 android:versionCode.

@@ -134,6 +136,7 @@ def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):

    return version


def android_version_code(buildid, *args, **kwargs):
    base = int(str(buildid))
    if base < V1_CUTOFF:

@@ -166,9 +169,9 @@ def main(argv):

    args = parser.parse_args(argv)
    code = android_version_code(args.buildid,
                                cpu_arch=args.cpu_arch,
                                min_sdk=args.min_sdk,
                                max_sdk=args.max_sdk)
    print(code)
    return 0

@@ -77,7 +77,8 @@ from mozpack.mozjar import (
from mozpack.packager.unpack import UnpackFinder
import mozpack.path as mozpath

-NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 # Number of candidate pushheads to cache per parent changeset.
+# Number of candidate pushheads to cache per parent changeset.
+NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50

# Number of parent changesets to consider as possible pushheads.
# There isn't really such a thing as a reasonable default here, because we don't

@@ -313,8 +314,8 @@ class AndroidArtifactJob(ArtifactJob):

            dirname, basename = os.path.split(p)
            self.log(logging.INFO, 'artifact',
                     {'basename': basename},
                     'Adding {basename} to processed archive')

            basedir = 'bin'
            if not basename.endswith('.so'):

@@ -323,7 +324,8 @@ class AndroidArtifactJob(ArtifactJob):
                writer.add(basename.encode('utf-8'), f.open())

    def process_symbols_archive(self, filename, processed_filename):
-        ArtifactJob.process_symbols_archive(self, filename, processed_filename, skip_compressed=True)
+        ArtifactJob.process_symbols_archive(
+            self, filename, processed_filename, skip_compressed=True)

        if self._symbols_archive_suffix != 'crashreporter-symbols-full.zip':
            return

@@ -440,8 +442,8 @@ class MacArtifactJob(ArtifactJob):
        oldcwd = os.getcwd()
        try:
            self.log(logging.INFO, 'artifact',
                     {'tempdir': tempdir},
                     'Unpacking DMG into {tempdir}')
            if self._substs['HOST_OS_ARCH'] == 'Linux':
                # This is a cross build, use hfsplus and dmg tools to extract the dmg.
                os.chdir(tempdir)

@@ -486,8 +488,8 @@ class MacArtifactJob(ArtifactJob):
            for path in paths:
                for p, f in finder.find(path):
                    self.log(logging.INFO, 'artifact',
                             {'path': p},
                             'Adding {path} to processed archive')
                    destpath = mozpath.join('bin', os.path.basename(p))
                    writer.add(destpath.encode('utf-8'), f, mode=f.mode)

@@ -507,8 +509,8 @@ class MacArtifactJob(ArtifactJob):
            shutil.rmtree(tempdir)
        except (OSError, IOError):
            self.log(logging.WARN, 'artifact',
                     {'tempdir': tempdir},
                     'Unable to delete {tempdir}')
            pass

@@ -561,8 +563,8 @@ class WinArtifactJob(ArtifactJob):
            basename = mozpath.relpath(p, self.product)
            basename = mozpath.join('bin', basename)
            self.log(logging.INFO, 'artifact',
                     {'basename': basename},
                     'Adding {basename} to processed archive')
            writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
            added_entry = True

@@ -668,8 +670,8 @@ class CacheManager(object):
    def load_cache(self):
        if self._skip_cache:
            self.log(logging.INFO, 'artifact',
                     {},
                     'Skipping cache: ignoring load_cache!')
            return

        try:

@@ -681,25 +683,26 @@ class CacheManager(object):
            # exceptions, so it's not worth trying to be fine grained here.
            # We ignore any exception, so the cache is effectively dropped.
            self.log(logging.INFO, 'artifact',
                     {'filename': self._cache_filename, 'exception': repr(e)},
                     'Ignoring exception unpickling cache file {filename}: {exception}')
            pass

    def dump_cache(self):
        if self._skip_cache:
            self.log(logging.INFO, 'artifact',
                     {},
                     'Skipping cache: ignoring dump_cache!')
            return

        ensureParentDir(self._cache_filename)
-        pickle.dump(list(reversed(list(self._cache.items()))), open(self._cache_filename, 'wb'), -1)
+        pickle.dump(list(reversed(list(self._cache.items()))),
+                    open(self._cache_filename, 'wb'), -1)

    def clear_cache(self):
        if self._skip_cache:
            self.log(logging.INFO, 'artifact',
                     {},
                     'Skipping cache: ignoring clear_cache!')
            return

        with self:

@@ -712,11 +715,13 @@ class CacheManager(object):
    def __exit__(self, type, value, traceback):
        self.dump_cache()


class PushheadCache(CacheManager):
    '''Helps map tree/revision pairs to parent pushheads according to the pushlog.'''

    def __init__(self, cache_dir, log=None, skip_cache=False):
-        CacheManager.__init__(self, cache_dir, 'pushhead_cache', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
+        CacheManager.__init__(self, cache_dir, 'pushhead_cache',
+                              MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)

    @cachedmethod(operator.attrgetter('_cache'))
    def parent_pushhead_id(self, tree, revision):

@@ -743,11 +748,13 @@ class PushheadCache(CacheManager):
            p['changesets'][-1] for p in result['pushes'].values()
        ]


class TaskCache(CacheManager):
    '''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''

    def __init__(self, cache_dir, log=None, skip_cache=False):
-        CacheManager.__init__(self, cache_dir, 'artifact_url', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
+        CacheManager.__init__(self, cache_dir, 'artifact_url',
+                              MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)

    @cachedmethod(operator.attrgetter('_cache'))
    def artifacts(self, tree, job, artifact_job_class, rev):

@@ -782,7 +789,8 @@ class TaskCache(CacheManager):
        except KeyError:
            # Not all revisions correspond to pushes that produce the job we
            # care about; and even those that do may not have completed yet.
-            raise ValueError('Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
+            raise ValueError(
+                'Task for {namespace} does not exist (yet)!'.format(namespace=namespace))

        return taskId, list_artifacts(taskId)

@@ -820,13 +828,15 @@ class Artifacts(object):
                substs=self._substs)
        except KeyError:
            self.log(logging.INFO, 'artifact',
                     {'job': self._job},
                     'Unknown job {job}')
            raise KeyError("Unknown job")

        self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
-        self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
-        self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
+        self._artifact_cache = ArtifactCache(
+            self._cache_dir, log=self._log, skip_cache=self._skip_cache)
+        self._pushhead_cache = PushheadCache(
+            self._cache_dir, log=self._log, skip_cache=self._skip_cache)

    def log(self, *args, **kwargs):
        if self._log:

@@ -953,8 +963,8 @@ from remote. Please run `hg pull` and build again.
see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles""")

        self.log(logging.INFO, 'artifact',
                 {'len': len(last_revs)},
                 'hg suggested {len} candidate revisions')

        def to_pair(line):
            rev, node = line.split(':', 1)

@@ -1002,7 +1012,8 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code

    def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
        try:
-            taskId, artifacts = task_cache.artifacts(tree, job, self._artifact_job.__class__, pushhead)
+            taskId, artifacts = task_cache.artifacts(
+                tree, job, self._artifact_job.__class__, pushhead)
        except ValueError:
            return None

@@ -1023,32 +1034,32 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
    def install_from_file(self, filename, distdir):
        self.log(logging.INFO, 'artifact',
                 {'filename': filename},
                 'Installing from {filename}')

        # Do we need to post-process?
        processed_filename = filename + PROCESSED_SUFFIX

        if self._skip_cache and os.path.exists(processed_filename):
            self.log(logging.INFO, 'artifact',
                     {'path': processed_filename},
                     'Skipping cache: removing cached processed artifact {path}')
            os.remove(processed_filename)

        if not os.path.exists(processed_filename):
            self.log(logging.INFO, 'artifact',
                     {'filename': filename},
                     'Processing contents of {filename}')
            self.log(logging.INFO, 'artifact',
                     {'processed_filename': processed_filename},
                     'Writing processed {processed_filename}')
            self._artifact_job.process_artifact(filename, processed_filename)

        self._artifact_cache._persist_limit.register_file(processed_filename)

        self.log(logging.INFO, 'artifact',
                 {'processed_filename': processed_filename},
                 'Installing from processed {processed_filename}')

        # Copy all .so files, avoiding modification where possible.
        ensureParentDir(mozpath.join(distdir, '.dummy'))

@@ -1062,20 +1073,20 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
                    shutil.copyfileobj(zf.open(info), fh)
                file_existed, file_updated = fh.close()
                self.log(logging.INFO, 'artifact',
                         {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
                         '{updating} {filename}')
                if not file_existed or file_updated:
                    # Libraries and binaries may need to be marked executable,
                    # depending on platform.
-                    perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
-                    perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
+                    perms = info.external_attr >> 16  # See http://stackoverflow.com/a/434689.
+                    perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r.
                    os.chmod(n, perms)
        return 0

    def install_from_url(self, url, distdir):
        self.log(logging.INFO, 'artifact',
                 {'url': url},
                 'Installing from {url}')
        filename = self._artifact_cache.fetch(url)
        return self.install_from_file(filename, distdir)

@@ -1117,7 +1128,7 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
        try:
            if self._hg:
                revision = subprocess.check_output([self._hg, 'log', '--template', '{node}\n',
                                                    '-r', revset], cwd=self._topsrcdir).strip()
            elif self._git:
                revset = subprocess.check_output([
                    self._git, 'rev-parse', '%s^{commit}' % revset],

@@ -1165,7 +1176,8 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
            url = get_artifact_url(taskId, artifact_name)
            urls.append(url)
        if not urls:
-            raise ValueError('Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
+            raise ValueError(
+                'Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
        for url in urls:
            if self.install_from_url(url, distdir):
                return 1

@@ -1194,11 +1206,10 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code

        return self.install_from_recent(distdir)

    def clear_cache(self):
        self.log(logging.INFO, 'artifact',
                 {},
                 'Deleting cached artifacts and caches.')
        self._task_cache.clear_cache()
        self._artifact_cache.clear_cache()
        self._pushhead_cache.clear_cache()

@@ -57,6 +57,7 @@ from mozbuild.util import (
    mkdir,
)


class XPIDLManager(object):
    """Helps manage XPCOM IDLs in the context of the build system."""

@@ -102,6 +103,7 @@ class XPIDLManager(object):
        """
        return itertools.chain(*[m.stems() for m in self.modules.itervalues()])


class BinariesCollection(object):
    """Tracks state of binaries produced by the build."""

@@ -109,6 +111,7 @@ class BinariesCollection(object):
        self.shared_libraries = []
        self.programs = []


class CommonBackend(BuildBackend):
    """Holds logic common to all build backends."""

@@ -182,7 +185,8 @@ class CommonBackend(BuildBackend):
            return False

        elif isinstance(obj, Exports):
-            objdir_files = [f.full_path for path, files in obj.files.walk() for f in files if isinstance(f, ObjDirPath)]
+            objdir_files = [f.full_path for path, files in obj.files.walk()
+                            for f in files if isinstance(f, ObjDirPath)]
            if objdir_files:
                self._handle_generated_sources(objdir_files)
            return False

@@ -204,7 +208,6 @@ class CommonBackend(BuildBackend):
            self._handle_generated_sources(mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
                                           for stem in self._idl_manager.idl_stems())

        for config in self._configs:
            self.backend_input_files.add(config.source)

@@ -294,7 +297,7 @@ class CommonBackend(BuildBackend):
                seen_libs.add(lib)
                os_libs.append(lib)

-        return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs, \
+        return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs,
                shared_libs, os_libs, static_libs)

    def _make_list_file(self, kind, objdir, objs, name):

@@ -332,7 +335,8 @@ class CommonBackend(BuildBackend):
        return ref

    def _handle_generated_sources(self, files):
-        self._generated_sources.update(mozpath.relpath(f, self.environment.topobjdir) for f in files)
+        self._generated_sources.update(mozpath.relpath(
+            f, self.environment.topobjdir) for f in files)

    def _handle_webidl_collection(self, webidls):

@@ -411,7 +415,7 @@ class CommonBackend(BuildBackend):
                'so it cannot be built in unified mode."\n'
                '#undef INITGUID\n'
                '#endif')
-            f.write('\n'.join(includeTemplate % { "cppfile": s } for
+            f.write('\n'.join(includeTemplate % {"cppfile": s} for
                              s in source_filenames))

    def _write_unified_files(self, unified_source_mapping, output_directory,

@@ -118,7 +118,7 @@ class ConfigEnvironment(object):
    """

    def __init__(self, topsrcdir, topobjdir, defines=None,
                 non_global_defines=None, substs=None, source=None, mozconfig=None):

        if not source:
            source = mozpath.join(topobjdir, 'config.status')

@@ -148,10 +148,11 @@ class ConfigEnvironment(object):
        self.bin_suffix = self.substs.get('BIN_SUFFIX', '')

        global_defines = [name for name in self.defines
                          if not name in self.non_global_defines]
        self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
                                             shell_quote(self.defines[name]).replace('$', '$$'))
                                             for name in sorted(global_defines)])

        def serialize(name, obj):
            if isinstance(obj, StringTypes):
                return obj

@@ -159,9 +160,9 @@ class ConfigEnvironment(object):
                return ' '.join(obj)
            raise Exception('Unhandled type %s for %s', type(obj), str(name))
        self.substs['ALLSUBSTS'] = '\n'.join(sorted(['%s = %s' % (name,
                                             serialize(name, self.substs[name])) for name in self.substs if self.substs[name]]))
        self.substs['ALLEMPTYSUBSTS'] = '\n'.join(sorted(['%s =' % name
                                                  for name in self.substs if not self.substs[name]]))

        self.substs = ReadOnlyDict(self.substs)

@@ -214,7 +215,7 @@ class ConfigEnvironment(object):
        config = BuildConfig.from_config_status(path)

        return ConfigEnvironment(config.topsrcdir, config.topobjdir,
                                 config.defines, config.non_global_defines, config.substs, path)


class PartialConfigDict(object):

@@ -224,6 +225,7 @@ class PartialConfigDict(object):
    similar for substs), where the value of FOO is delay-loaded until it is
    needed.
    """

    def __init__(self, config_statusd, typ, environ_override=False):
        self._dict = {}
        self._datadir = mozpath.join(config_statusd, typ)

@@ -338,6 +340,7 @@ class PartialConfigEnvironment(object):
    intended to be used instead of the defines structure from config.status so
    that scripts can depend directly on its value.
    """

    def __init__(self, topobjdir):
        config_statusd = mozpath.join(topobjdir, 'config.statusd')
        self.substs = PartialConfigDict(config_statusd, 'substs', environ_override=True)

@@ -353,8 +356,8 @@ class PartialConfigEnvironment(object):
            if name not in config['non_global_defines']
        ]
        acdefines = ' '.join(['-D%s=%s' % (name,
                              shell_quote(config['defines'][name]).replace('$', '$$'))
                              for name in sorted(global_defines)])
        substs['ACDEFINES'] = acdefines

        all_defines = OrderedDict()

@@ -26,6 +26,7 @@ from mozbuild.base import ExecutionSummary
# Open eclipse:
# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace


class CppEclipseBackend(CommonBackend):
"""Backend that generates Cpp Eclipse project files.
"""

@@ -81,7 +82,8 @@ class CppEclipseBackend(CommonBackend):
# Note that unlike VS, Eclipse' indexer seem to crawl the headers and
# isn't picky about the local includes.
if isinstance(obj, ComputedFlags):
args = self._args_for_dirs.setdefault('tree/' + reldir, {'includes': [], 'defines': []})
args = self._args_for_dirs.setdefault(
'tree/' + reldir, {'includes': [], 'defines': []})
# use the same args for any objdirs we include:
if reldir == 'dom/bindings':
self._args_for_dirs.setdefault('generated-webidl', args)

@@ -105,7 +107,8 @@ class CppEclipseBackend(CommonBackend):
def consume_finished(self):
settings_dir = os.path.join(self._project_dir, '.settings')
launch_dir = os.path.join(self._project_dir, 'RunConfigurations')
workspace_settings_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
workspace_settings_dir = os.path.join(
self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')

for dir_name in [self._project_dir, settings_dir, launch_dir, workspace_settings_dir, self._workspace_lang_dir]:
try:

@@ -129,22 +132,25 @@ class CppEclipseBackend(CommonBackend):
workspace_language_path = os.path.join(self._workspace_lang_dir, 'language.settings.xml')
with open(workspace_language_path, 'wb') as fh:
workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
workspace_lang_settings = workspace_lang_settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
workspace_lang_settings = workspace_lang_settings.replace(
"@COMPILER_FLAGS@", self._cxx + " " + self._cppflags)
fh.write(workspace_lang_settings)

self._write_launch_files(launch_dir)

core_resources_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.resources.prefs')
core_resources_prefs_path = os.path.join(
workspace_settings_dir, 'org.eclipse.core.resources.prefs')
with open(core_resources_prefs_path, 'wb') as fh:
fh.write(STATIC_CORE_RESOURCES_PREFS);
fh.write(STATIC_CORE_RESOURCES_PREFS)

core_runtime_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
core_runtime_prefs_path = os.path.join(
workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
with open(core_runtime_prefs_path, 'wb') as fh:
fh.write(STATIC_CORE_RUNTIME_PREFS);
fh.write(STATIC_CORE_RUNTIME_PREFS)

ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.ui.prefs')
with open(ui_prefs_path, 'wb') as fh:
fh.write(STATIC_UI_PREFS);
fh.write(STATIC_UI_PREFS)

cdt_ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.ui.prefs')
cdt_ui_prefs = STATIC_CDT_UI_PREFS

@@ -155,10 +161,11 @@ class CppEclipseBackend(CommonBackend):
XML_PREF_TEMPLATE = """<setting id\="@PREF_NAME@" value\="@PREF_VAL@"/>\\n"""
for line in FORMATTER_SETTINGS.splitlines():
[pref, val] = line.split("=")
cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@", pref).replace("@PREF_VAL@", val)
cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@",
pref).replace("@PREF_VAL@", val)
cdt_ui_prefs += "</profile>\\n</profiles>\\n"
with open(cdt_ui_prefs_path, 'wb') as fh:
fh.write(cdt_ui_prefs);
fh.write(cdt_ui_prefs)

cdt_core_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.core.prefs')
with open(cdt_core_prefs_path, 'wb') as fh:

@@ -168,11 +175,11 @@ class CppEclipseBackend(CommonBackend):
# as the active formatter all its prefs are set in this prefs file,
# so we need add those now:
cdt_core_prefs += FORMATTER_SETTINGS
fh.write(cdt_core_prefs);
fh.write(cdt_core_prefs)

editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs");
editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs")
with open(editor_prefs_path, 'wb') as fh:
fh.write(EDITOR_SETTINGS);
fh.write(EDITOR_SETTINGS)

# Now import the project into the workspace
self._import_project()

@@ -208,7 +215,7 @@ class CppEclipseBackend(CommonBackend):
def _write_noindex(self):
noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
with open(noindex_path, 'wb') as fh:
fh.write(NOINDEX_TEMPLATE);
fh.write(NOINDEX_TEMPLATE)

def _remove_noindex(self):
# Below we remove the config file that temporarily disabled the indexer

@@ -257,7 +264,8 @@ class CppEclipseBackend(CommonBackend):
dirsettings_template = LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER

# Add OS_COMPILE_CXXFLAGS args (same as OS_COMPILE_CFLAGS):
dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/mozilla-config.h'))
dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(
self.environment.topobjdir, 'dist/include/mozilla-config.h'))
dirsettings_template += add_define('MOZILLA_CLIENT', '1')

# Add EXTRA_INCLUDES args:

@@ -304,7 +312,7 @@ class CppEclipseBackend(CommonBackend):
# netwerk/sctp/src uses -U__APPLE__ on Mac
# XXX We should make this code smart enough to remove existing defines.
continue
d = d[2:] # get rid of leading "-D"
d = d[2:]  # get rid of leading "-D"
name_value = d.split("=", 1)
name = name_value[0]
value = ""

@@ -314,7 +322,8 @@ class CppEclipseBackend(CommonBackend):
dirsettings += LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER
fh.write(dirsettings)

fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))
fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace(
"@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))

def _write_launch_files(self, launch_dir):
bin_dir = os.path.join(self.environment.topobjdir, 'dist')

@@ -334,21 +343,25 @@ class CppEclipseBackend(CommonBackend):
launch = launch.replace('@LAUNCH_ARGS@', '-P -no-remote')
fh.write(launch)

#TODO Add more launch configs (and delegate calls to mach)
# TODO Add more launch configs (and delegate calls to mach)

def _write_project(self, fh):
project = PROJECT_TEMPLATE;
project = PROJECT_TEMPLATE

project = project.replace('@PROJECT_NAME@', self._project_name)
project = project.replace('@PROJECT_TOPSRCDIR@', self.environment.topsrcdir)
project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(self.environment.topobjdir, "ipc", "ipdl"))
project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(self.environment.topobjdir, "dom", "bindings"))
project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(
self.environment.topobjdir, "ipc", "ipdl"))
project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(
self.environment.topobjdir, "dom", "bindings"))
fh.write(project)

def _write_cproject(self, fh):
cproject_header = CPROJECT_TEMPLATE_HEADER
cproject_header = cproject_header.replace('@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
cproject_header = cproject_header.replace('@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
cproject_header = cproject_header.replace(
'@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
cproject_header = cproject_header.replace(
'@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
fh.write(cproject_header)
fh.write(CPROJECT_TEMPLATE_FOOTER)

@@ -615,21 +628,21 @@ undoHistorySize=200
"""


STATIC_CORE_RESOURCES_PREFS="""eclipse.preferences.version=1
STATIC_CORE_RESOURCES_PREFS = """eclipse.preferences.version=1
refresh.enabled=true
"""

STATIC_CORE_RUNTIME_PREFS="""eclipse.preferences.version=1
STATIC_CORE_RUNTIME_PREFS = """eclipse.preferences.version=1
content-types/org.eclipse.cdt.core.cxxSource/file-extensions=mm
content-types/org.eclipse.core.runtime.xml/file-extensions=xul
content-types/org.eclipse.wst.jsdt.core.jsSource/file-extensions=jsm
"""

STATIC_UI_PREFS="""eclipse.preferences.version=1
STATIC_UI_PREFS = """eclipse.preferences.version=1
showIntro=false
"""

STATIC_CDT_CORE_PREFS="""eclipse.preferences.version=1
STATIC_CDT_CORE_PREFS = """eclipse.preferences.version=1
indexer.updatePolicy=0
"""

@@ -797,7 +810,7 @@ org.eclipse.cdt.core.formatter.tabulation.size=2
org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
"""

STATIC_CDT_UI_PREFS="""eclipse.preferences.version=1
STATIC_CDT_UI_PREFS = """eclipse.preferences.version=1
buildConsoleLines=10000
Console.limitConsoleOutput=false
ensureNewlineAtEOF=false

@@ -134,10 +134,12 @@ class FasterMakeBackend(CommonBackend, PartialBackend):

elif isinstance(obj, GeneratedFile):
if obj.outputs:
first_output = mozpath.relpath(mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir)
first_output = mozpath.relpath(mozpath.join(
obj.objdir, obj.outputs[0]), self.environment.topobjdir)
for o in obj.outputs[1:]:
fullpath = mozpath.join(obj.objdir, o)
self._generated_files_map[mozpath.relpath(fullpath, self.environment.topobjdir)] = first_output
self._generated_files_map[mozpath.relpath(
fullpath, self.environment.topobjdir)] = first_output
# We don't actually handle GeneratedFiles, we just need to know if
# we can build multiple of them from a single make invocation in the
# faster backend.

@@ -194,7 +196,6 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
mk.create_rule([target]).add_dependencies(
'$(TOPOBJDIR)/%s' % d for d in deps)


# This is not great, but it's better to have some dependencies on these Python files.
python_deps = [
'$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py',

@@ -208,7 +209,8 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
for (merge, ref_file, l10n_file) in deps:
rule = mk.create_rule([merge]).add_dependencies(
[ref_file, l10n_file] + python_deps)
rule.add_commands(['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
rule.add_commands(
['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
# Add a dummy rule for the l10n file since it might not exist.
mk.create_rule([l10n_file])

@@ -20,10 +20,11 @@ from mach.decorators import (
Command,
)


@CommandProvider
class MachCommands(MachCommandBase):
@Command('ide', category='devenv',
description='Generate a project and launch an IDE.')
description='Generate a project and launch an IDE.')
@CommandArgument('ide', choices=['eclipse', 'visualstudio'])
@CommandArgument('args', nargs=argparse.REMAINDER)
def eclipse(self, ide, args):

@@ -126,7 +126,7 @@ MOZBUILD_VARIABLES = [
b'TEST_DIRS',
b'TOOL_DIRS',
# XXX config/Makefile.in specifies this in a make invocation
#'USE_EXTENSION_MANIFEST',
# 'USE_EXTENSION_MANIFEST',
b'XPCSHELL_TESTS',
b'XPIDL_MODULE',
]

@@ -225,9 +225,9 @@ class BackendMakeFile(object):
self.fh.write('NONRECURSIVE_TARGETS += export\n')
self.fh.write('NONRECURSIVE_TARGETS_export += xpidl\n')
self.fh.write('NONRECURSIVE_TARGETS_export_xpidl_DIRECTORY = '
'$(DEPTH)/xpcom/xpidl\n')
'$(DEPTH)/xpcom/xpidl\n')
self.fh.write('NONRECURSIVE_TARGETS_export_xpidl_TARGETS += '
'export\n')
'export\n')

return self.fh.close()

@@ -248,6 +248,7 @@ class RecursiveMakeTraversal(object):
"""
SubDirectoryCategories = ['dirs', 'tests']
SubDirectoriesTuple = namedtuple('SubDirectories', SubDirectoryCategories)

class SubDirectories(SubDirectoriesTuple):
def __new__(self):
return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(self, [], [])

@@ -390,7 +391,7 @@ class RecursiveMakeBackend(CommonBackend):
self._test_manifests = {}

self.backend_input_files.add(mozpath.join(self.environment.topobjdir,
'config', 'autoconf.mk'))
'config', 'autoconf.mk'))

self._install_manifests = defaultdict(InstallManifest)
# The build system relies on some install manifests always existing

@@ -424,7 +425,7 @@ class RecursiveMakeBackend(CommonBackend):
if obj.objdir not in self._backend_files:
self._backend_files[obj.objdir] = \
BackendMakeFile(obj.srcdir, obj.objdir, obj.config,
obj.topsrcdir, self.environment.topobjdir, self.dry_run)
obj.topsrcdir, self.environment.topobjdir, self.dry_run)
return self._backend_files[obj.objdir]

def consume_object(self, obj):

@@ -609,19 +610,19 @@ class RecursiveMakeBackend(CommonBackend):
\t@$(TOUCH) $@

""".format(stub=stub_file,
output=first_output,
dep_file=dep_file,
inputs=' ' + ' '.join(inputs) if inputs else '',
flags=' ' + ' '.join(shell_quote(f) for f in obj.flags) if obj.flags else '',
backend=' backend.mk' if obj.flags else '',
# Locale repacks repack multiple locales from a single configured objdir,
# so standard mtime dependencies won't work properly when the build is re-run
# with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
# in this situation, so simply force the generation to run in that case.
force=force,
locale='--locale=$(AB_CD) ' if obj.localized else '',
script=obj.script,
method=obj.method))
output=first_output,
dep_file=dep_file,
inputs=' ' + ' '.join(inputs) if inputs else '',
flags=' ' + ' '.join(shell_quote(f) for f in obj.flags) if obj.flags else '',
backend=' backend.mk' if obj.flags else '',
# Locale repacks repack multiple locales from a single configured objdir,
# so standard mtime dependencies won't work properly when the build is re-run
# with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
# in this situation, so simply force the generation to run in that case.
force=force,
locale='--locale=$(AB_CD) ' if obj.localized else '',
script=obj.script,
method=obj.method))

elif isinstance(obj, JARManifest):
self._no_skip['libs'].add(backend_file.relobjdir)

@@ -784,7 +785,7 @@ class RecursiveMakeBackend(CommonBackend):
main, all_deps = \
self._traversal.compute_dependencies(filter)
for dir, deps in all_deps.items():
if deps is not None or (dir in self._idl_dirs \
if deps is not None or (dir in self._idl_dirs
and tier == 'export'):
rule = root_deps_mk.create_rule(['%s/%s' % (dir, tier)])
if deps:

@@ -795,8 +796,8 @@ class RecursiveMakeBackend(CommonBackend):
if main:
rule.add_dependencies('%s/%s' % (d, tier) for d in main)

all_compile_deps = reduce(lambda x,y: x|y,
self._compile_graph.values()) if self._compile_graph else set()
all_compile_deps = reduce(lambda x, y: x | y,
self._compile_graph.values()) if self._compile_graph else set()
# Include the following as dependencies of the top recursion target for
# compilation:
# - nodes that are not dependended upon by anything. Typically, this

@@ -902,8 +903,8 @@ class RecursiveMakeBackend(CommonBackend):

if include_curdir_build_rules:
makefile.add_statement('\n'
'# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
'# Help it out by explicitly specifiying dependencies.')
'# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
'# Help it out by explicitly specifiying dependencies.')
makefile.add_statement('all_absolute_unified_files := \\\n'
' $(addprefix $(CURDIR)/,$(%s))'
% unified_files_makefile_variable)

@@ -949,7 +950,7 @@ class RecursiveMakeBackend(CommonBackend):
stub = not os.path.exists(makefile_in)
if not stub:
self.log(logging.DEBUG, 'substitute_makefile',
{'path': makefile}, 'Substituting makefile: {path}')
{'path': makefile}, 'Substituting makefile: {path}')
self._makefile_in_count += 1

# In the export and libs tiers, we don't skip directories

@@ -961,7 +962,7 @@ class RecursiveMakeBackend(CommonBackend):
self._no_skip[tier].add(bf.relobjdir)
else:
self.log(logging.DEBUG, 'stub_makefile',
{'path': makefile}, 'Creating stub Makefile: {path}')
{'path': makefile}, 'Creating stub Makefile: {path}')

obj = self.Substitution()
obj.output_path = makefile

@@ -976,7 +977,7 @@ class RecursiveMakeBackend(CommonBackend):
# XPI_PKGNAME or INSTALL_EXTENSION_ID can't be skipped and
# must run during the 'tools' tier.
for t in (b'XPI_PKGNAME', b'INSTALL_EXTENSION_ID',
b'tools'):
b'tools'):
if t not in content:
continue
if t == b'tools' and not re.search('(?:^|\s)tools.*::', content, re.M):

@@ -984,13 +985,13 @@ class RecursiveMakeBackend(CommonBackend):
if objdir == self.environment.topobjdir:
continue
self._no_skip['tools'].add(mozpath.relpath(objdir,
self.environment.topobjdir))
self.environment.topobjdir))

# Directories with a Makefile containing a check target
# can't be skipped and must run during the 'check' tier.
if re.search('(?:^|\s)check.*::', content, re.M):
self._no_skip['check'].add(mozpath.relpath(objdir,
self.environment.topobjdir))
self.environment.topobjdir))

# Detect any Makefile.ins that contain variables on the
# moz.build-only list

@@ -1070,7 +1071,7 @@ class RecursiveMakeBackend(CommonBackend):
fh.write('DIRS := %s\n' % ' '.join(
relativize(backend_file.objdir, obj.dirs)))
self._traversal.add(backend_file.relobjdir,
dirs=relativize(self.environment.topobjdir, obj.dirs))
dirs=relativize(self.environment.topobjdir, obj.dirs))

# The directory needs to be registered whether subdirectories have been
# registered or not.

@@ -1094,7 +1095,8 @@ class RecursiveMakeBackend(CommonBackend):
if obj.target and not obj.is_custom():
backend_file.write('FINAL_TARGET = $(DEPTH)/%s\n' % (obj.target))
else:
backend_file.write('FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')
backend_file.write(
'FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')

if not obj.enabled:
backend_file.write('NO_DIST_INSTALL := 1\n')

@@ -1110,7 +1112,7 @@ class RecursiveMakeBackend(CommonBackend):

for module in manager.modules:
build_files.add_optional_exists(mozpath.join('.deps',
'%s.pp' % module))
'%s.pp' % module))

modules = manager.modules
xpt_modules = sorted(modules.keys())

@@ -1150,9 +1152,9 @@ class RecursiveMakeBackend(CommonBackend):

obj = self.Substitution()
obj.output_path = mozpath.join(self.environment.topobjdir, 'config',
'makefiles', 'xpidl', 'Makefile')
'makefiles', 'xpidl', 'Makefile')
obj.input_path = mozpath.join(self.environment.topsrcdir, 'config',
'makefiles', 'xpidl', 'Makefile.in')
'makefiles', 'xpidl', 'Makefile.in')
obj.topsrcdir = self.environment.topsrcdir
obj.topobjdir = self.environment.topobjdir
obj.config = self.environment

@@ -1236,7 +1238,7 @@ class RecursiveMakeBackend(CommonBackend):
# Much of the logic in this function could be moved to CommonBackend.
for source in obj.source_relpaths:
self.backend_input_files.add(mozpath.join(obj.topsrcdir,
source))
source))

# Don't allow files to be defined multiple times unless it is allowed.
# We currently allow duplicates for non-test files or test files if

@@ -1251,7 +1253,7 @@ class RecursiveMakeBackend(CommonBackend):
for base, pattern, dest in obj.pattern_installs:
try:
self._install_manifests['_test_files'].add_pattern_link(base,
pattern, dest)
pattern, dest)
except ValueError:
if not obj.dupe_manifest:
raise

@@ -1264,7 +1266,7 @@ class RecursiveMakeBackend(CommonBackend):
raise

m = self._test_manifests.setdefault(obj.flavor,
(obj.install_prefix, set()))
(obj.install_prefix, set()))
m[1].add(obj.manifest_obj_relpath)

try:

@@ -1295,7 +1297,8 @@ class RecursiveMakeBackend(CommonBackend):

def _process_per_source_flag(self, per_source_flag, backend_file):
for flag in per_source_flag.flags:
backend_file.write('%s_FLAGS += %s\n' % (mozpath.basename(per_source_flag.file_name), flag))
backend_file.write('%s_FLAGS += %s\n' %
(mozpath.basename(per_source_flag.file_name), flag))

def _process_computed_flags(self, computed_flags, backend_file):
for var, flags in computed_flags.get_flags():

@@ -1357,7 +1360,7 @@ class RecursiveMakeBackend(CommonBackend):
else:
target_name = obj.KIND
return '%s/%s' % (mozpath.relpath(obj.objdir,
self.environment.topobjdir), target_name)
self.environment.topobjdir), target_name)

def _process_linked_libraries(self, obj, backend_file):
def pretty_relpath(lib, name):

@@ -1414,7 +1417,7 @@ class RecursiveMakeBackend(CommonBackend):
# incorrect list file format to the host compiler as well as when
# creating an archive with AR, which doesn't understand list files.
if (objs == obj.objs and not isinstance(obj, (HostLibrary, StaticLibrary)) or
isinstance(obj, StaticLibrary) and obj.no_expand_lib):
isinstance(obj, StaticLibrary) and obj.no_expand_lib):
backend_file.write_once('%s_OBJS := %s\n' % (obj.name,
objs_ref))
if profile_gen_objs:

@@ -1651,7 +1654,7 @@ class RecursiveMakeBackend(CommonBackend):

def _write_manifests(self, dest, manifests):
man_dir = mozpath.join(self.environment.topobjdir, '_build_manifests',
dest)
dest)

for k, manifest in manifests.items():
with self._write_file(mozpath.join(man_dir, k)) as fh:

@@ -1688,7 +1691,7 @@ class RecursiveMakeBackend(CommonBackend):
pp.context.update(extra)
if not pp.context.get('autoconfmk', ''):
pp.context['autoconfmk'] = 'autoconf.mk'
pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n');
pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n')
pp.handleLine(b'DEPTH := @DEPTH@\n')
pp.handleLine(b'topobjdir := @topobjdir@\n')
pp.handleLine(b'topsrcdir := @top_srcdir@\n')

@@ -1732,18 +1735,18 @@ class RecursiveMakeBackend(CommonBackend):
rule.add_commands([
'$(RM) $@',
'$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
'$< -o $@)'
'$< -o $@)'
])

mk.add_statement('ALL_IPDLSRCS := %s %s' % (' '.join(sorted_nonstatic_ipdl_basenames),
' '.join(sorted_static_ipdl_sources)))
' '.join(sorted_static_ipdl_sources)))

self._add_unified_build_rules(mk, unified_ipdl_cppsrcs_mapping,
unified_files_makefile_variable='CPPSRCS')

# Preprocessed ipdl files are generated in ipdl_dir.
mk.add_statement('IPDLDIRS := %s %s' % (ipdl_dir, ' '.join(sorted(set(mozpath.dirname(p)
for p in sorted_static_ipdl_sources)))))
for p in sorted_static_ipdl_sources)))))

with self._write_file(mozpath.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:
mk.dump(ipdls, removal_guard=False)

@@ -1752,7 +1755,7 @@ class RecursiveMakeBackend(CommonBackend):
webidls, expected_build_output_files,
global_define_files):
include_dir = mozpath.join(self.environment.topobjdir, 'dist',
'include')
'include')
for f in expected_build_output_files:
if f.startswith(include_dir):
self._install_manifests['dist_include'].add_optional_exists(

@@ -1783,12 +1786,12 @@ class RecursiveMakeBackend(CommonBackend):
# which would modify content in the source directory.
'$(RM) $@',
'$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
'$< -o $@)'
'$< -o $@)'
])

self._add_unified_build_rules(mk,
unified_source_mapping,
unified_files_makefile_variable='unified_binding_cpp_files')
unified_source_mapping,
unified_files_makefile_variable='unified_binding_cpp_files')

webidls_mk = mozpath.join(bindings_dir, 'webidlsrcs.mk')
with self._write_file(webidls_mk) as fh:

@@ -332,9 +332,9 @@ class TupBackend(CommonBackend):
tiers.set_tiers(('tup',))
tiers.begin_tier('tup')
status = config.run_process(args=args,
line_handler=output.on_line,
ensure_exit_code=False,
append_env=self._get_mozconfig_env(config))
line_handler=output.on_line,
ensure_exit_code=False,
append_env=self._get_mozconfig_env(config))
tiers.finish_tier('tup')
if not status and self.environment.substs.get('MOZ_AUTOMATION'):
config.log_manager.enable_unstructured()

@@ -475,7 +475,6 @@ class TupBackend(CommonBackend):
# accurate once we start building libraries in their final locations.
inputs = objs + static_libs + shared_libs + [self._shlibs]


rust_linked = [l for l in prog.linked_libraries
if isinstance(l, RustLibrary)]

@@ -513,12 +512,10 @@ class TupBackend(CommonBackend):
display='LINK %o'
)


def _gen_host_programs(self, backend_file):
for p in backend_file.host_programs:
self._gen_host_program(backend_file, p)


def _gen_host_program(self, backend_file, prog):
_, _, _, _, extra_libs, _ = self._expand_libs(prog)
objs = prog.objs

@@ -559,7 +556,6 @@ class TupBackend(CommonBackend):
display='LINK %o'
)


def _gen_static_library(self, backend_file):
ar = [
backend_file.environment.substs['AR'],

@@ -584,7 +580,6 @@ class TupBackend(CommonBackend):
display='AR %o'
)


def consume_object(self, obj):
"""Write out build files necessary to build with tup."""

@@ -695,7 +690,7 @@ class TupBackend(CommonBackend):

with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value))
for (name, value) in sorted(self.environment.acdefines.iteritems())])
for (name, value) in sorted(self.environment.acdefines.iteritems())])
# TODO: AB_CD only exists in Makefiles at the moment.
acdefines_flags += ' -DAB_CD=en-US'

@@ -729,12 +724,13 @@ class TupBackend(CommonBackend):
# Ask the user to figure out where to run 'tup init' before
# continuing.
raise Exception("Please run `tup init --no-sync` in a common "
"ancestor directory of your objdir and srcdir, possibly "
"%s. To reduce file scanning overhead, this directory "
"should contain the fewest files possible that are not "
"necessary for this build." % tup_base_dir)
"ancestor directory of your objdir and srcdir, possibly "
"%s. To reduce file scanning overhead, this directory "
"should contain the fewest files possible that are not "
"necessary for this build." % tup_base_dir)
tup = self.environment.substs.get('TUP', 'tup')
self._cmd.run_process(cwd=tup_base_dir, log_name='tup', args=[tup, 'init', '--no-sync'])
self._cmd.run_process(cwd=tup_base_dir, log_name='tup',
args=[tup, 'init', '--no-sync'])

def _get_cargo_flags(self, obj):

@@ -984,16 +980,15 @@ class TupBackend(CommonBackend):
obj.name),
output_group)


for val in enumerate(invocations):
_process(*val)


def _gen_rust_rules(self, obj, backend_file):
cargo_flags = self._get_cargo_flags(obj)
cargo_env = self._get_cargo_env(obj, backend_file)

output_lines = []

def accumulate_output(line):
output_lines.append(line)

@@ -1014,7 +1009,6 @@ class TupBackend(CommonBackend):
self._gen_cargo_rules(obj, cargo_plan, cargo_env, output_group)
self.backend_input_files |= set(cargo_plan['inputs'])


def _process_generated_file(self, backend_file, obj):
if obj.script and obj.method:
backend_file.export_shell()

@@ -1025,8 +1019,8 @@ class TupBackend(CommonBackend):
obj.script,
obj.method,
obj.outputs[0],
'%s.pp' % obj.outputs[0], # deps file required
'unused', # deps target is required
'%s.pp' % obj.outputs[0],  # deps file required
'unused',  # deps target is required
])
full_inputs = [f.full_path for f in obj.inputs]
cmd.extend(full_inputs)

@@ -1163,7 +1157,8 @@ class TupBackend(CommonBackend):
output=mozpath.join(output_dir, output),
output_group=output_group)
else:
backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=output_group)
backend_file.symlink_rule(
f.full_path, output=f.target_basename, output_group=output_group)
else:
if (self.environment.is_artifact_build and
any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)):

@@ -1175,18 +1170,19 @@ class TupBackend(CommonBackend):
f.target_basename)
gen_backend_file = self._get_backend_file(f.context.relobjdir)
if gen_backend_file.requires_delay([f]):
gen_backend_file.delayed_installed_files.append((f.full_path, output, output_group))
gen_backend_file.delayed_installed_files.append(
(f.full_path, output, output_group))
else:
gen_backend_file.symlink_rule(f.full_path, output=output,
output_group=output_group)


def _process_final_target_pp_files(self, obj, backend_file):
for i, (path, files) in enumerate(obj.files.walk()):
self._add_features(obj.install_target, path)
for f in files:
self._preprocess(backend_file, f.full_path,
destdir=mozpath.join(self.environment.topobjdir, obj.install_target, path),
destdir=mozpath.join(self.environment.topobjdir,
obj.install_target, path),
target=f.target_basename)

def _process_computed_flags(self, obj, backend_file):

@@ -1315,7 +1311,8 @@ class TupBackend(CommonBackend):
cmd.extend(['-I%s' % d for d in ipdldirs])
cmd.extend(sorted_ipdl_sources)

outputs = ['IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']
outputs = ['IPCMessageTypeName.cpp', mozpath.join(
outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']

for filename in sorted_ipdl_sources:
filepath, ext = os.path.splitext(filename)

@@ -1379,4 +1376,5 @@ class TupBackend(CommonBackend):
backend_file.sources['.cpp'].extend(sorted(global_define_files))

test_backend_file = self._get_backend_file('dom/bindings/test')
test_backend_file.sources['.cpp'].extend(sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
test_backend_file.sources['.cpp'].extend(
sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))

@@ -34,21 +34,25 @@ from mozbuild.base import ExecutionSummary

MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'


def get_id(name):
return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()


def visual_studio_product_to_solution_version(version):
if version == '2017':
return '12.00', '15'
else:
raise Exception('Unknown version seen: %s' % version)


def visual_studio_product_to_platform_toolset_version(version):
if version == '2017':
return 'v141'
else:
raise Exception('Unknown version seen: %s' % version)


class VisualStudioBackend(CommonBackend):
"""Generate Visual Studio project files.

@@ -100,7 +104,7 @@ class VisualStudioBackend(CommonBackend):
elif isinstance(obj, UnifiedSources):
# XXX we should be letting CommonBackend.consume_object call this
# for us instead.
self._process_unified_sources(obj);
self._process_unified_sources(obj)

elif isinstance(obj, Library):
self._libs_to_paths[obj.basename] = reldir

@@ -133,9 +137,9 @@ class VisualStudioBackend(CommonBackend):
out_proj_dir = os.path.join(self._out_dir, self._projsubdir)

projects = self._write_projects_for_sources(self._libs_to_paths,
"library", out_proj_dir)
"library", out_proj_dir)
projects.update(self._write_projects_for_sources(self._progs_to_paths,
"binary", out_proj_dir))
"binary", out_proj_dir))

# Generate projects that can be used to build common targets.
for target in ('export', 'binaries', 'tools', 'full'):

@@ -145,15 +149,15 @@ class VisualStudioBackend(CommonBackend):
command += ' %s' % target

project_id = self._write_vs_project(out_proj_dir, basename, target,
build_command=command,
clean_command='$(SolutionDir)\\mach.bat build clean')
build_command=command,
clean_command='$(SolutionDir)\\mach.bat build clean')

projects[basename] = (project_id, basename, target)

# A project that can be used to regenerate the visual studio projects.
basename = 'target_vs'
project_id = self._write_vs_project(out_proj_dir, basename, 'visual-studio',
build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
projects[basename] = (project_id, basename, 'visual-studio')

# Write out a shared property file with common variables.

@@ -190,7 +194,7 @@ class VisualStudioBackend(CommonBackend):

headers = [t[0] for t in finder.find('*.h')]
headers = [os.path.normpath(os.path.join('$(TopSrcDir)',
path, f)) for f in headers]
path, f)) for f in headers]

includes = [
os.path.join('$(TopSrcDir)', path),

@@ -201,7 +205,7 @@ class VisualStudioBackend(CommonBackend):
includes.append('$(TopObjDir)\\dist\\include')

for v in ('NSPR_CFLAGS', 'NSS_CFLAGS', 'MOZ_JPEG_CFLAGS',
'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
if not config:
break

@@ -223,7 +227,7 @@ class VisualStudioBackend(CommonBackend):
else:
defines.append('%s=%s' % (k, v))

debugger=None
debugger = None
if prefix == 'binary':
if item.startswith(self.environment.substs['MOZ_APP_NAME']):
app_args = '-no-remote -profile $(TopObjDir)\\tmp\\profile-default'

@@ -236,12 +240,13 @@ class VisualStudioBackend(CommonBackend):
basename = '%s_%s' % (prefix, item)

project_id = self._write_vs_project(out_dir, basename, item,
includes=includes,
forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
defines=defines,
headers=headers,
sources=sources,
debugger=debugger)
includes=includes,
forced_includes=[
'$(TopObjDir)\\dist\\include\\mozilla-config.h'],
defines=defines,
headers=headers,
sources=sources,
debugger=debugger)

projects[basename] = (project_id, basename, item)

@@ -416,7 +421,7 @@ class VisualStudioBackend(CommonBackend):
fh.write(b'$env:%s = "%s"\r\n' % (k, v))

relpath = os.path.relpath(self.environment.topsrcdir,
self.environment.topobjdir).replace('\\', '/')
self.environment.topobjdir).replace('\\', '/')

fh.write(b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath)
fh.write(b'$bashargs = $bashargs + $args\r\n')

@@ -425,9 +430,9 @@ class VisualStudioBackend(CommonBackend):
fh.write(b'$procargs = "-c", $expanded\r\n')

fh.write(b'Start-Process -WorkingDirectory $env:TOPOBJDIR '
b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
b'-ArgumentList $procargs '
b'-Wait -NoNewWindow\r\n')
b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
b'-ArgumentList $procargs '
b'-Wait -NoNewWindow\r\n')

def _write_mach_batch(self, fh):
"""Write out a batch script that builds the tree.

@@ -445,12 +450,12 @@ class VisualStudioBackend(CommonBackend):
# relative paths, since munging c:\ to /c/ is slightly more
# complicated.
relpath = os.path.relpath(self.environment.topsrcdir,
self.environment.topobjdir).replace('\\', '/')
self.environment.topobjdir).replace('\\', '/')

# We go through mach because it has the logic for choosing the most
# appropriate build tool.
fh.write(b'"%%MOZILLABUILD%%\\msys\\bin\\bash" '
b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)
b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)

def _write_vs_project(self, out_dir, basename, name, **kwargs):
root = '%s.vcxproj' % basename

@@ -458,21 +463,21 @@ class VisualStudioBackend(CommonBackend):

with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
project_id, name = VisualStudioBackend.write_vs_project(fh,
self._version, project_id, name, **kwargs)
self._version, project_id, name, **kwargs)

with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' %
MSBUILD_NAMESPACE)
MSBUILD_NAMESPACE)
fh.write('</Project>\r\n')

return project_id

@staticmethod
def write_vs_project(fh, version, project_id, name, includes=[],
forced_includes=[], defines=[],
build_command=None, clean_command=None,
debugger=None, headers=[], sources=[]):
forced_includes=[], defines=[],
build_command=None, clean_command=None,
debugger=None, headers=[], sources=[]):

impl = getDOMImplementation()
doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)

@@ -510,7 +515,8 @@ class VisualStudioBackend(CommonBackend):
rn.appendChild(doc.createTextNode('mozilla'))

pts = pg.appendChild(doc.createElement('PlatformToolset'))
pts.appendChild(doc.createTextNode(visual_studio_product_to_platform_toolset_version(version)))
pts.appendChild(doc.createTextNode(
visual_studio_product_to_platform_toolset_version(version)))

i = project.appendChild(doc.createElement('Import'))
i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')

@@ -53,12 +53,14 @@ def ancestors(path):
break
path = newpath


def samepath(path1, path2):
if hasattr(os.path, 'samefile'):
return os.path.samefile(path1, path2)
return os.path.normcase(os.path.realpath(path1)) == \
os.path.normcase(os.path.realpath(path2))


class BadEnvironmentException(Exception):
"""Base class for errors raised when the build environment is not sane."""

@@ -69,6 +71,7 @@ class BuildEnvironmentNotFoundException(BadEnvironmentException):

class ObjdirMismatchException(BadEnvironmentException):
"""Raised when the current dir is an objdir and doesn't match the mozconfig."""

def __init__(self, objdir1, objdir2):
self.objdir1 = objdir1
self.objdir2 = objdir2

@@ -85,6 +88,7 @@ class MozbuildObject(ProcessExecutionMixin):
running processes, etc. This classes provides that functionality. Other
modules can inherit from this class to obtain this functionality easily.
"""

def __init__(self, topsrcdir, settings, log_manager, topobjdir=None,
mozconfig=MozconfigLoader.AUTODETECT):
"""Create a new Mozbuild object instance.

@@ -176,8 +180,8 @@ class MozbuildObject(ProcessExecutionMixin):

if topsrcdir == topobjdir:
raise BadEnvironmentException('The object directory appears '
'to be the same as your source directory (%s). This build '
'configuration is not supported.' % topsrcdir)
'to be the same as your source directory (%s). This build '
'configuration is not supported.' % topsrcdir)

# If we can't resolve topobjdir, oh well. We'll figure out when we need
# one.

@@ -191,7 +195,7 @@ class MozbuildObject(ProcessExecutionMixin):

if '@CONFIG_GUESS@' in topobjdir:
topobjdir = topobjdir.replace('@CONFIG_GUESS@',
self.resolve_config_guess())
self.resolve_config_guess())

if not os.path.isabs(topobjdir):
topobjdir = os.path.abspath(os.path.join(self.topsrcdir, topobjdir))

@@ -253,9 +257,10 @@ class MozbuildObject(ProcessExecutionMixin):
def virtualenv_manager(self):
if self._virtualenv_manager is None:
self._virtualenv_manager = VirtualenvManager(self.topsrcdir,
self.topobjdir, os.path.join(self.topobjdir, '_virtualenvs', 'init'),
sys.stdout, os.path.join(self.topsrcdir, 'build',
'virtualenv_packages.txt'))
self.topobjdir, os.path.join(
self.topobjdir, '_virtualenvs', 'init'),
sys.stdout, os.path.join(self.topsrcdir, 'build',
'virtualenv_packages.txt'))

return self._virtualenv_manager

@@ -490,7 +495,6 @@ class MozbuildObject(ProcessExecutionMixin):

return BuildReader(config, finder=finder)


@memoized_property
def python3(self):
"""Obtain info about a Python 3 executable.

@@ -542,10 +546,10 @@ class MozbuildObject(ProcessExecutionMixin):

if substs['OS_ARCH'] == 'Darwin':
if substs['MOZ_BUILD_APP'] == 'xulrunner':
stem = os.path.join(stem, 'XUL.framework');
stem = os.path.join(stem, 'XUL.framework')
else:
stem = os.path.join(stem, substs['MOZ_MACBUNDLE_NAME'], 'Contents',
'MacOS')
'MacOS')
elif where == 'default':
stem = os.path.join(stem, 'bin')

@@ -578,13 +582,14 @@ class MozbuildObject(ProcessExecutionMixin):
notifier = which.which('terminal-notifier')
except which.WhichError:
raise Exception('Install terminal-notifier to get '
'a notification when the build finishes.')
'a notification when the build finishes.')
self.run_process([notifier, '-title',
'Mozilla Build System', '-group', 'mozbuild',
'-message', msg], ensure_exit_code=False)
'Mozilla Build System', '-group', 'mozbuild',
'-message', msg], ensure_exit_code=False)
elif sys.platform.startswith('win'):
from ctypes import Structure, windll, POINTER, sizeof
from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT

class FLASHWINDOW(Structure):
_fields_ = [("cbSize", UINT),
("hwnd", HANDLE),

@@ -604,21 +609,21 @@ class MozbuildObject(ProcessExecutionMixin):
return

params = FLASHWINDOW(sizeof(FLASHWINDOW),
console,
FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
console,
FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
FlashWindowEx(params)
else:
try:
notifier = which.which('notify-send')
except which.WhichError:
raise Exception('Install notify-send (usually part of '
'the libnotify package) to get a notification when '
'the build finishes.')
'the libnotify package) to get a notification when '
'the build finishes.')
self.run_process([notifier, '--app-name=Mozilla Build System',
'Mozilla Build System', msg], ensure_exit_code=False)
'Mozilla Build System', msg], ensure_exit_code=False)
except Exception as e:
self.log(logging.WARNING, 'notifier-failed', {'error':
e.message}, 'Notification center failed: {error}')
e.message}, 'Notification center failed: {error}')

def _ensure_objdir_exists(self):
if os.path.isdir(self.statedir):

@@ -646,10 +651,10 @@ class MozbuildObject(ProcessExecutionMixin):
return PathArgument(arg, self.topsrcdir, self.topobjdir)

def _run_make(self, directory=None, filename=None, target=None, log=True,
srcdir=False, allow_parallel=True, line_handler=None,
append_env=None, explicit_env=None, ignore_errors=False,
ensure_exit_code=0, silent=True, print_directory=True,
pass_thru=False, num_jobs=0, keep_going=False):
srcdir=False, allow_parallel=True, line_handler=None,
append_env=None, explicit_env=None, ignore_errors=False,
ensure_exit_code=0, silent=True, print_directory=True,
pass_thru=False, num_jobs=0, keep_going=False):
"""Invoke make.

directory -- Relative directory to look for Makefile in.

@@ -791,11 +796,11 @@ class MozbuildObject(ProcessExecutionMixin):

if xcode_lisense_error:
raise Exception('Xcode requires accepting to the license agreement.\n'
'Please run Xcode and accept the license agreement.')
'Please run Xcode and accept the license agreement.')

if self._is_windows():
raise Exception('Could not find a suitable make implementation.\n'
'Please use MozillaBuild 1.9 or newer')
'Please use MozillaBuild 1.9 or newer')
else:
raise Exception('Could not find a suitable make implementation.')

@@ -820,13 +825,12 @@ class MozbuildObject(ProcessExecutionMixin):
"""

return cls(self.topsrcdir, self.settings, self.log_manager,
topobjdir=self.topobjdir)
topobjdir=self.topobjdir)

def _activate_virtualenv(self):
self.virtualenv_manager.ensure()
self.virtualenv_manager.activate()


def _set_log_level(self, verbose):
self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)

@@ -835,7 +839,8 @@ class MozbuildObject(ProcessExecutionMixin):
pipenv = os.path.join(self.virtualenv_manager.bin_path, 'pipenv')
if not os.path.exists(pipenv):
for package in ['certifi', 'pipenv', 'six', 'virtualenv', 'virtualenv-clone']:
path = os.path.normpath(os.path.join(self.topsrcdir, 'third_party/python', package))
path = os.path.normpath(os.path.join(
self.topsrcdir, 'third_party/python', package))
self.virtualenv_manager.install_pip_package(path, vendored=True)
return pipenv

@@ -861,10 +866,10 @@ class MachCommandBase(MozbuildObject):
detect_virtualenv_mozinfo = True
if hasattr(context, 'detect_virtualenv_mozinfo'):
detect_virtualenv_mozinfo = getattr(context,
'detect_virtualenv_mozinfo')
'detect_virtualenv_mozinfo')
try:
dummy = MozbuildObject.from_environment(cwd=context.cwd,
detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
topsrcdir = dummy.topsrcdir
topobjdir = dummy._topobjdir
if topobjdir:

@@ -881,12 +886,12 @@ class MachCommandBase(MozbuildObject):
pass
except ObjdirMismatchException as e:
print('Ambiguous object directory detected. We detected that '
'both %s and %s could be object directories. This is '
'typically caused by having a mozconfig pointing to a '
'different object directory from the current working '
'directory. To solve this problem, ensure you do not have a '
'default mozconfig in searched paths.' % (e.objdir1,
e.objdir2))
'both %s and %s could be object directories. This is '
'typically caused by having a mozconfig pointing to a '
'different object directory from the current working '
'directory. To solve this problem, ensure you do not have a '
'default mozconfig in searched paths.' % (e.objdir1,
e.objdir2))
sys.exit(1)

except MozconfigLoadException as e:

@@ -903,7 +908,7 @@ class MachCommandBase(MozbuildObject):
sys.exit(1)

MozbuildObject.__init__(self, topsrcdir, context.settings,
context.log_manager, topobjdir=topobjdir)
context.log_manager, topobjdir=topobjdir)

self._mach_context = context

@@ -54,4 +54,3 @@ def chunkify(things, this_chunk, chunks):
return things[start:end]
except TypeError:
return islice(things, start, end)

@@ -26,6 +26,8 @@ from manifest_handler import ChromeManifestHandler


_line_comment_re = re.compile('^//@line (\d+) "(.+)"$')


def generate_pp_info(path, topsrcdir):
with open(path) as fh:
# (start, end) -> (included_source, start)

@@ -57,6 +59,8 @@ def generate_pp_info(path, topsrcdir):

# This build backend is assuming the build to have happened already, as it is parsing
# built preprocessed files to generate data to map them to the original sources.


class ChromeMapBackend(CommonBackend):
def _init(self):
CommonBackend._init(self)

@@ -17,6 +17,7 @@ from mozpack.chrome.manifest import parse_manifest
import mozpack.path as mozpath
from manifest_handler import ChromeManifestHandler


class LcovRecord(object):
__slots__ = ("test_name",
"source_file",

@@ -30,6 +31,7 @@ class LcovRecord(object):
"lines",
"line_count",
"covered_line_count")

def __init__(self):
self.functions = {}
self.function_exec_counts = {}

@@ -72,6 +74,7 @@ class LcovRecord(object):
self.branch_count = len(self.branches)
self.covered_branch_count = len([c for c in self.branches.values() if c])


class RecordRewriter(object):
# Helper class for rewriting/spliting individual lcov records according
# to what the preprocessor did.

@@ -164,7 +167,8 @@ class RecordRewriter(object):
def rewrite_record(self, record, pp_info):
# Rewrite the lines in the given record according to preprocessor info
# and split to additional records when pp_info has included file info.
self._current_pp_info = dict([(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
self._current_pp_info = dict(
[(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
self._ranges = sorted(self._current_pp_info.keys())
self._additions = {}
self._rewrite_lines(record)

@@ -178,6 +182,7 @@ class RecordRewriter(object):
r.resummarize()
return generated_records


class LcovFile(object):
# Simple parser/pretty-printer for lcov format.
# lcov parsing based on http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php

@@ -404,6 +409,7 @@ class LcovFile(object):
class UrlFinderError(Exception):
pass


class UrlFinder(object):
# Given a "chrome://" or "resource://" url, uses data from the UrlMapBackend
# and install manifests to find a path to the source file and the corresponding

@@ -580,7 +586,8 @@ class UrlFinder(object):
return url_obj.path, None

dir_parts = parts[0].rsplit(app_name + '/', 1)
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist', 'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/')))
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/')))
elif '.xpi!' in url:
# This matching mechanism is quite brittle and based on examples seen in the wild.
# There's no rule to match the XPI name to the path in dist/xpi-stage.

@@ -590,7 +597,8 @@ class UrlFinder(object):
addon_name = addon_name[:-len('-test@mozilla.org')]
elif addon_name.endswith('@mozilla.org'):
addon_name = addon_name[:-len('@mozilla.org')]
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist', 'xpi-stage', addon_name, parts[1].lstrip('/')))
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
'xpi-stage', addon_name, parts[1].lstrip('/')))
elif url_obj.scheme == 'file' and os.path.isabs(url_obj.path):
path = url_obj.path
if not os.path.isfile(path):

@@ -607,6 +615,7 @@ class UrlFinder(object):
self._final_mapping[url] = result
return result


class LcovFileRewriter(object):
# Class for partial parses of LCOV format and rewriting to resolve urls
# and preprocessed file lines.

@@ -694,5 +703,6 @@ def main():

rewriter.rewrite_files(files, args.output_file, args.output_suffix)


if __name__ == '__main__':
main()

@@ -17,6 +17,7 @@ from mozpack.manifests import (
)
import mozpack.path as mozpath


def describe_install_manifest(manifest, dest_dir):
try:
manifest = InstallManifest(manifest)

@@ -75,5 +76,6 @@ def cli(args=sys.argv[1:]):

return package_coverage_data(args.root, args.output_file)


if __name__ == '__main__':
sys.exit(cli())

@@ -26,9 +26,9 @@ class Introspection(MachCommandBase):
"""Instropection commands."""

@Command('compileflags', category='devenv',
description='Display the compilation flags for a given source file')
description='Display the compilation flags for a given source file')
@CommandArgument('what', default=None,
help='Source file to display compilation flags for')
help='Source file to display compilation flags for')
def compileflags(self, what):
from mozbuild.util import resolve_target_to_make
from mozbuild.compilation import util

@@ -39,7 +39,7 @@ class Introspection(MachCommandBase):
path_arg = self._wrap_path_argument(what)

make_dir, make_target = resolve_target_to_make(self.topobjdir,
path_arg.relpath())
path_arg.relpath())

if make_dir is None and make_target is None:
return 1

@@ -176,8 +176,8 @@ class CompileDBBackend(CommonBackend):
if canonical_suffix not in self.COMPILERS:
return
db = self._db.setdefault((objdir, filename, unified),
cenv.substs[self.COMPILERS[canonical_suffix]].split() +
['-o', '/dev/null', '-c'])
cenv.substs[self.COMPILERS[canonical_suffix]].split() +
['-o', '/dev/null', '-c'])
reldir = reldir or mozpath.relpath(objdir, cenv.topobjdir)

def append_var(name):

@@ -5,14 +5,16 @@
import os
from mozbuild import shellutil


def check_top_objdir(topobjdir):
top_make = os.path.join(topobjdir, 'Makefile')
if not os.path.exists(top_make):
print('Your tree has not been built yet. Please run '
'|mach build| with no arguments.')
'|mach build| with no arguments.')
return False
return True


def get_build_vars(directory, cmd):
build_vars = {}

@@ -27,13 +29,14 @@ def get_build_vars(directory, cmd):
try:
old_logger = cmd.log_manager.replace_terminal_handler(None)
cmd._run_make(directory=directory, target='showbuild', log=False,
print_directory=False, allow_parallel=False, silent=True,
line_handler=on_line)
print_directory=False, allow_parallel=False, silent=True,
line_handler=on_line)
finally:
cmd.log_manager.replace_terminal_handler(old_logger)

return build_vars


def sanitize_cflags(flags):
# We filter out -Xclang arguments as clang based tools typically choke on
# passing these flags down to the clang driver. -Xclang tells the clang

@@ -87,22 +87,22 @@ class CompilerWarning(dict):
return func(self._cmpkey(), other._cmpkey())

def __eq__(self, other):
return self._compare(other, lambda s,o: s == o)
return self._compare(other, lambda s, o: s == o)

def __neq__(self, other):
return self._compare(other, lambda s,o: s != o)
return self._compare(other, lambda s, o: s != o)

def __lt__(self, other):
return self._compare(other, lambda s,o: s < o)
return self._compare(other, lambda s, o: s < o)

def __le__(self, other):
return self._compare(other, lambda s,o: s <= o)
return self._compare(other, lambda s, o: s <= o)

def __gt__(self, other):
return self._compare(other, lambda s,o: s > o)
return self._compare(other, lambda s, o: s > o)

def __ge__(self, other):
return self._compare(other, lambda s,o: s >= o)
return self._compare(other, lambda s, o: s >= o)

def __hash__(self):
"""Define so this can exist inside a set, etc."""

@@ -132,6 +132,7 @@ class WarningsDatabase(object):
Callers should periodically prune old, invalid warnings from the database
by calling prune(). A good time to do this is at the end of a build.
"""

def __init__(self):
"""Create an empty database."""
self._files = {}

@@ -304,6 +305,7 @@ class WarningsCollector(object):
output from the compiler. Therefore, it can maintain state to parse
multi-line warning messages.
"""

def __init__(self, cb, objdir=None):
"""Initialize a new collector.

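The hunks above only adjust lambda spacing, but they also show the pattern CompilerWarning uses: every rich comparison delegates to one comparison key through a shared _compare helper. A minimal standalone sketch of that idiom follows; the key fields here are illustrative assumptions, not the tree's actual tuple:

    class WarningSketch(dict):
        def _cmpkey(self):
            # Hypothetical key; the real class builds it from its own fields.
            return (self['filename'], self['line'], self['column'])

        def _compare(self, other, func):
            if not isinstance(other, WarningSketch):
                return NotImplemented
            return func(self._cmpkey(), other._cmpkey())

        def __eq__(self, other):
            return self._compare(other, lambda s, o: s == o)

        def __lt__(self, other):
            return self._compare(other, lambda s, o: s < o)

        def __hash__(self):
            # Defined so instances can live in sets and as dict keys.
            return hash(self._cmpkey())
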
@@ -80,14 +80,14 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,

if 'CONFIG_FILES' in os.environ:
raise Exception('Using the CONFIG_FILES environment variable is not '
'supported.')
'supported.')
if 'CONFIG_HEADERS' in os.environ:
raise Exception('Using the CONFIG_HEADERS environment variable is not '
'supported.')
'supported.')

if not os.path.isabs(topsrcdir):
raise Exception('topsrcdir must be defined as an absolute directory: '
'%s' % topsrcdir)
'%s' % topsrcdir)

default_backends = ['RecursiveMake']
default_backends = (substs or {}).get('BUILD_BACKENDS', ['RecursiveMake'])

@@ -112,8 +112,8 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
topobjdir = os.path.abspath('.')

env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
non_global_defines=non_global_defines, substs=substs,
source=source, mozconfig=mozconfig)
non_global_defines=non_global_defines, substs=substs,
source=source, mozconfig=mozconfig)

with FileAvoidWrite(os.path.join(topobjdir, 'mozinfo.json')) as f:
write_mozinfo(f, env, os.environ)

@@ -49,6 +49,7 @@ class ConfigureError(Exception):

class SandboxDependsFunction(object):
'''Sandbox-visible representation of @depends functions.'''

def __init__(self, unsandboxed):
self._or = unsandboxed.__or__
self._and = unsandboxed.__and__

@@ -233,6 +234,7 @@ class CombinedDependsFunction(DependsFunction):
def __ne__(self, other):
return not self == other


class SandboxedGlobal(dict):
'''Identifiable dict type for use as function global'''

@@ -357,10 +359,12 @@ class ConfigureSandbox(dict):
# that can't be converted to ascii. Make our log methods robust to this
# by detecting the encoding that a producer is likely to have used.
encoding = getpreferredencoding()

def wrapped_log_method(logger, key):
method = getattr(logger, key)
if not encoding:
return method

def wrapped(*args, **kwargs):
out_args = [
arg.decode(encoding) if isinstance(arg, str) else arg

@@ -661,7 +665,7 @@ class ConfigureSandbox(dict):
when = self._normalize_when(kwargs.get('when'), 'option')
args = [self._resolve(arg) for arg in args]
kwargs = {k: self._resolve(v) for k, v in kwargs.iteritems()
if k != 'when'}
if k != 'when'}
option = Option(*args, **kwargs)
if when:
self._conditions[option] = when

@@ -12,6 +12,7 @@ import subprocess
import sys
import re


def get_range_for(compilation_unit, debug_info):
'''Returns the range offset for a given compilation unit
in a given debug_info.'''

@@ -32,6 +33,7 @@ def get_range_for(compilation_unit, debug_info):
ranges = nfo.rsplit(None, 1)[1]
return None


def get_range_length(range, debug_ranges):
'''Returns the number of items in the range starting at the
given offset.'''

@@ -42,8 +44,9 @@ def get_range_length(range, debug_ranges):
length += 1
return length


def main(bin, compilation_unit):
p = subprocess.Popen(['objdump', '-W', bin], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
sections = re.split('\n(Contents of the|The section) ', out)
debug_info = [s for s in sections if s.startswith('.debug_info')]

@@ -21,12 +21,14 @@ import re

re_for_ld = re.compile('.*\((.*)\).*')


def parse_readelf_line(x):
"""Return the version from a readelf line that looks like:
0x00ec: Rev: 1 Flags: none Index: 8 Cnt: 2 Name: GLIBCXX_3.4.6
"""
return x.split(':')[-1].split('_')[-1].strip()


def parse_ld_line(x):
"""Parse a line from the output of ld -t. The output of gold is just
the full path, gnu ld prints "-lstdc++ (path)".

@@ -36,11 +38,13 @@ def parse_ld_line(x):
return t.groups()[0].strip()
return x.strip()


def split_ver(v):
"""Covert the string '1.2.3' into the list [1,2,3]
"""
return [int(x) for x in v.split('.')]


def cmp_ver(a, b):
"""Compare versions in the form 'a.b.c'
"""

@@ -49,17 +53,19 @@ def cmp_ver(a, b):
return i - j
return 0


def encode_ver(v):
"""Encode the version as a single number.
"""
t = split_ver(v)
return t[0] << 16 | t[1] << 8 | t[2]


def find_version(args):
"""Given a base command line for a compiler, find the version of the
libstdc++ it uses.
"""
args += ['-shared', '-Wl,-t']
args += ['-shared', '-Wl,-t']
p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
candidates = [x for x in p.stdout if 'libstdc++.so' in x]
candidates = [x for x in candidates if 'skipping incompatible' not in x]

@@ -77,9 +83,10 @@ candidates:
p = subprocess.Popen(['readelf', '-V', libstdcxx], stdout=subprocess.PIPE)
versions = [parse_readelf_line(x)
for x in p.stdout.readlines() if 'Name: GLIBCXX' in x]
last_version = sorted(versions, cmp = cmp_ver)[-1]
last_version = sorted(versions, cmp=cmp_ver)[-1]
return (last_version, encode_ver(last_version))


if __name__ == '__main__':
"""Given the value of environment variable CXX or HOST_CXX, find the
version of the libstdc++ it uses.

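For reference, encode_ver above packs a dotted version into a single integer, eight bits per component (assuming each component stays below 256), so encoded versions compare in the same order as the versions themselves. A quick worked check of the arithmetic:

    # 'GLIBCXX_3.4.6' parses to [3, 4, 6]:
    # 3 << 16 is 196608, 4 << 8 is 1024, plus 6 gives 197638 (0x030406).
    assert 3 << 16 | 4 << 8 | 6 == 0x030406 == 197638
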
@@ -237,7 +237,6 @@ class LintSandbox(ConfigureSandbox):
name, default))
self._raise_from(e, frame.f_back if frame else None)


def _check_help_for_option_with_func_default(self, option, *args, **kwargs):
default = kwargs['default']

@@ -285,6 +284,7 @@ class LintSandbox(ConfigureSandbox):

def imports_impl(self, _import, _from=None, _as=None):
wrapper = super(LintSandbox, self).imports_impl(_import, _from=_from, _as=_as)

def decorator(func):
self._has_imports.add(func)
return wrapper(func)

@@ -46,7 +46,7 @@ def disassemble_as_iter(co):
c = code[i]
op = ord(c)
opname = dis.opname[op]
i += 1;
i += 1
if op >= dis.HAVE_ARGUMENT:
arg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg
extended_arg = 0

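The loop above decodes CPython 2 bytecode by hand, which is why it needs ord() and manual extended-arg bookkeeping. Under Python 3 the standard dis module exposes the same information already decoded; a small sketch of the equivalent iteration (not what the tree uses, since this file still targets Python 2):

    import dis

    def disassemble_as_iter(co):
        # Instruction objects carry the opcode name and the decoded
        # argument, replacing the manual ord()/HAVE_ARGUMENT walk.
        for ins in dis.get_instructions(co):
            yield ins.opname, ins.argval, ins.offset
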
@@ -106,6 +106,7 @@ class PositiveOptionValue(OptionValue):
in the form of a tuple for when values are given to the option (in the form
--option=value[,value2...].
'''

def __nonzero__(self):
return True


@@ -424,6 +425,7 @@ class CommandLineHelper(object):
Extra options can be added afterwards through API calls. For those,
conflicting values will raise an exception.
'''

def __init__(self, environ=os.environ, argv=sys.argv):
self._environ = dict(environ)
self._args = OrderedDict()

@@ -14,6 +14,7 @@ from collections import deque
from contextlib import contextmanager
from distutils.version import LooseVersion


def getpreferredencoding():
# locale._parse_localename makes locale.getpreferredencoding
# return None when LC_ALL is C, instead of e.g. 'US-ASCII' or

@@ -29,6 +30,7 @@ def getpreferredencoding():
encoding = 'utf-8'
return encoding


class Version(LooseVersion):
'''A simple subclass of distutils.version.LooseVersion.
Adds attributes for `major`, `minor`, `patch` for the first three

@@ -40,13 +42,14 @@ class Version(LooseVersion):
v.minor == 2
v.patch == 0
'''

def __init__(self, version):
# Can't use super, LooseVersion's base class is not a new-style class.
LooseVersion.__init__(self, version)
# Take the first three integer components, stopping at the first
# non-integer and padding the rest with zeroes.
(self.major, self.minor, self.patch) = list(itertools.chain(
itertools.takewhile(lambda x:isinstance(x, int), self.version),
itertools.takewhile(lambda x: isinstance(x, int), self.version),
(0, 0, 0)))[:3]

def __cmp__(self, other):

@@ -71,6 +74,7 @@ class ConfigureOutputHandler(logging.Handler):
printed out. This feature is only enabled under the `queue_debug` context
manager.
'''

def __init__(self, stdout=sys.stdout, stderr=sys.stderr, maxlen=20):
super(ConfigureOutputHandler, self).__init__()

@@ -193,6 +197,7 @@ class LineIO(object):
'''File-like class that sends each line of the written data to a callback
(without carriage returns).
'''

def __init__(self, callback, errors='strict'):
self._callback = callback
self._buf = ''

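As the Version docstring above shows, only the leading run of integer components counts toward major/minor/patch; everything from the first non-integer component on falls back to the zero padding. A short usage check, assuming the import path matches the file being patched here (mozbuild/configure/util.py):

    from mozbuild.configure.util import Version

    v = Version('1.2b3')
    # takewhile() stops at the 'b' component, so patch comes from the padding.
    assert (v.major, v.minor, v.patch) == (1, 2, 0)
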
@@ -79,7 +79,7 @@ Preferences.


INSTALL_TESTS_CLOBBER = ''.join([TextWrapper().fill(line) + '\n' for line in
'''
'''
The build system was unable to install tests because the CLOBBER file has \
been updated. This means if you edited any test files, your changes may not \
be picked up until a full/clobber build is performed.

@@ -107,7 +107,7 @@ this is a clobber bug and not due to local changes.


BuildOutputResult = namedtuple('BuildOutputResult',
('warning', 'state_changed', 'message'))
('warning', 'state_changed', 'message'))


class TierStatus(object):

@@ -173,9 +173,9 @@ class TierStatus(object):
def add_resources_to_dict(self, entry, start=None, end=None, phase=None):
"""Helper function to append resource information to a dict."""
cpu_percent = self.resources.aggregate_cpu_percent(start=start,
end=end, phase=phase, per_cpu=False)
end=end, phase=phase, per_cpu=False)
cpu_times = self.resources.aggregate_cpu_times(start=start, end=end,
phase=phase, per_cpu=False)
phase=phase, per_cpu=False)
io = self.resources.aggregate_io(start=start, end=end, phase=phase)

if cpu_percent is None:

@@ -339,8 +339,8 @@ class BuildMonitor(MozbuildObject):
json.dump(self.resources.as_dict(), fh, indent=2)
except Exception as e:
self.log(logging.WARNING, 'build_resources_error',
{'msg': str(e)},
'Exception when writing resource usage file: {msg}')
{'msg': str(e)},
'Exception when writing resource usage file: {msg}')

def _get_finder_cpu_usage(self):
"""Obtain the CPU usage of the Finder app on OS X.

@@ -438,9 +438,9 @@ class BuildMonitor(MozbuildObject):
return None

cpu_percent = self.resources.aggregate_cpu_percent(phase=None,
per_cpu=False)
per_cpu=False)
cpu_times = self.resources.aggregate_cpu_times(phase=None,
per_cpu=False)
per_cpu=False)
io = self.resources.aggregate_io(phase=None)

o = dict(

@@ -462,9 +462,9 @@ class BuildMonitor(MozbuildObject):

for usage in self.resources.range_usage():
cpu_percent = self.resources.aggregate_cpu_percent(usage.start,
usage.end, per_cpu=False)
usage.end, per_cpu=False)
cpu_times = self.resources.aggregate_cpu_times(usage.start,
usage.end, per_cpu=False)
usage.end, per_cpu=False)

entry = dict(
start=usage.start,

@@ -474,11 +474,10 @@ class BuildMonitor(MozbuildObject):
)

self.tiers.add_resources_to_dict(entry, start=usage.start,
end=usage.end)
end=usage.end)

o['resources'].append(entry)


# If the imports for this file ran before the in-tree virtualenv
# was bootstrapped (for instance, for a clobber build in automation),
# psutil might not be available.

@@ -524,8 +523,8 @@ class BuildMonitor(MozbuildObject):
sin /= 1048576
sout /= 1048576
self.log(logging.WARNING, 'swap_activity',
{'sin': sin, 'sout': sout},
'Swap in/out (MB): {sin}/{sout}')
{'sin': sin, 'sout': sout},
'Swap in/out (MB): {sin}/{sout}')

def ccache_stats(self):
ccache_stats = None

@@ -548,6 +547,7 @@ class TerminalLoggingHandler(logging.Handler):
This class should probably live elsewhere, like the mach core. Consider
this a proving ground for its usefulness.
"""

def __init__(self):
logging.Handler.__init__(self)

@@ -683,7 +683,6 @@ class BuildOutputManager(OutputManager):
# collection child process hasn't been told to stop.
self.monitor.stop_resource_recording()


def on_line(self, line):
warning, state_changed, message = self.monitor.on_line(line)

@@ -744,7 +743,7 @@ class StaticAnalysisOutputManager(OutputManager):

if warning:
self.log(logging.INFO, 'compiler_warning', warning,
'Warning: {flag} in {filename}: {message}')
'Warning: {flag} in {filename}: {message}')

if relevant:
self.log(logging.INFO, 'build_output', {'line': line}, '{line}')

@@ -786,7 +785,7 @@ class CCacheStats(object):
STATS_KEYS = [
# (key, description)
# Refer to stats.c in ccache project for all the descriptions.
('stats_zeroed', 'stats zero time'), # Old name prior to ccache 3.4
('stats_zeroed', 'stats zero time'), # Old name prior to ccache 3.4
('stats_zeroed', 'stats zeroed'),
('stats_updated', 'stats updated'),
('cache_hit_direct', 'cache hit (direct)'),

@@ -1005,11 +1004,11 @@ class BuildDriver(MozbuildObject):

if directory is not None and not what:
print('Can only use -C/--directory with an explicit target '
'name.')
'name.')
return 1

if directory is not None:
disable_extra_make_dependencies=True
disable_extra_make_dependencies = True
directory = mozpath.normsep(directory)
if directory.startswith('/'):
directory = directory[1:]

@@ -1104,7 +1103,7 @@ class BuildDriver(MozbuildObject):
else:
make_dir, make_target = \
resolve_target_to_make(self.topobjdir,
path_arg.relpath())
path_arg.relpath())

if make_dir is None and make_target is None:
return 1

@@ -1146,10 +1145,11 @@ class BuildDriver(MozbuildObject):
# intelligent about encountering undefined state.
no_build_status = b'1' if make_dir is not None else b''
status = self._run_make(directory=make_dir, target=make_target,
line_handler=output.on_line, log=False, print_directory=False,
ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
append_env={b'NO_BUILDSTATUS_MESSAGES': no_build_status},
keep_going=keep_going)
line_handler=output.on_line, log=False, print_directory=False,
ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
append_env={
b'NO_BUILDSTATUS_MESSAGES': no_build_status},
keep_going=keep_going)

if status != 0:
break

@@ -1204,8 +1204,8 @@ class BuildDriver(MozbuildObject):
# in these directories.
pathToThirdparty = os.path.join(self.topsrcdir,
"tools",
"rewriting",
"ThirdPartyPaths.txt")
"rewriting",
"ThirdPartyPaths.txt")

if os.path.exists(pathToThirdparty):
with open(pathToThirdparty) as f:

@@ -1284,7 +1284,8 @@ class BuildDriver(MozbuildObject):
long_build = monitor.elapsed > 600

if long_build:
output.on_line('We know it took a while, but your build finally finished successfully!')
output.on_line(
'We know it took a while, but your build finally finished successfully!')
else:
output.on_line('Your build was successful!')

@@ -1299,7 +1300,7 @@ class BuildDriver(MozbuildObject):
app = self.substs['MOZ_BUILD_APP']
if app in ('browser', 'mobile/android'):
print('For more information on what to do now, see '
'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox')
'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox')
except Exception:
# Ignore Exceptions in case we can't find config.status (such
# as when doing OSX Universal builds)

@@ -1325,14 +1326,14 @@ class BuildDriver(MozbuildObject):
# Only print build status messages when we have an active
# monitor.
if not buildstatus_messages:
append_env[b'NO_BUILDSTATUS_MESSAGES'] = b'1'
append_env[b'NO_BUILDSTATUS_MESSAGES'] = b'1'
status = self._run_client_mk(target='configure',
line_handler=line_handler,
append_env=append_env)

if not status:
print('Configure complete!')
print('Be sure to run |mach build| to pick up any changes');
print('Be sure to run |mach build| to pick up any changes')

return status

@@ -16,7 +16,7 @@ from textwrap import TextWrapper


CLOBBER_MESSAGE = ''.join([TextWrapper().fill(line) + '\n' for line in
'''
'''
The CLOBBER file has been updated, indicating that an incremental build since \
your last build will probably not work. A full/clobber build is required.

@@ -39,6 +39,7 @@ Well, are ya? -- you can ignore this clobber requirement by running:
$ touch {clobber_file}
'''.splitlines()])


class Clobberer(object):
def __init__(self, topsrcdir, topobjdir):
"""Create a new object to manage clobbering the tree.

@@ -69,7 +70,7 @@ class Clobberer(object):

# Object directory clobber older than current is fine.
if os.path.getmtime(self.src_clobber) <= \
os.path.getmtime(self.obj_clobber):
os.path.getmtime(self.obj_clobber):

return False

@@ -182,8 +183,8 @@ class Clobberer(object):
if not allow_auto:
return True, False, \
self._message('Automatic clobbering is not enabled\n'
' (add "mk_add_options AUTOCLOBBER=1" to your '
'mozconfig).')
' (add "mk_add_options AUTOCLOBBER=1" to your '
'mozconfig).')

if cwd.startswith(self.topobjdir) and cwd != self.topobjdir:
return True, False, self._message(

@@ -204,4 +205,4 @@ class Clobberer(object):
lines = [' ' + line for line in self.clobber_cause()]

return CLOBBER_MESSAGE.format(clobber_reason='\n'.join(lines),
no_reason=' ' + reason, clobber_file=self.obj_clobber)
no_reason=' ' + reason, clobber_file=self.obj_clobber)

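Both clobber messages in this file are built with the same idiom: each physical line of a triple-quoted template is refilled separately, so intentional blank lines survive while long lines get rewrapped to the default width. A standalone sketch of the pattern (template text and path are made up for illustration):

    from textwrap import TextWrapper

    TEMPLATE = '''
    A deliberately long paragraph that will be rewrapped by TextWrapper to the
    default seventy column width no matter how it is laid out in the source.

    The {clobber_file} placeholder is filled in later with format().
    '''

    MESSAGE = ''.join([TextWrapper().fill(line) + '\n' for line in
                       TEMPLATE.splitlines()])
    print(MESSAGE.format(clobber_file='/tmp/objdir/CLOBBER'))
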
@@ -34,6 +34,7 @@ hour. Backup programs that rely on this feature may be affected.
https://technet.microsoft.com/en-us/library/cc785435.aspx
'''


class Doctor(object):
def __init__(self, srcdir, objdir, fix):
self.srcdir = mozpath.normpath(srcdir)

@@ -69,7 +70,7 @@ class Doctor(object):
denied = True
if denied:
print('run "mach doctor --fix" AS ADMIN to re-attempt fixing your system')
elif False: # elif fixable:
elif False: # elif fixable:
print('run "mach doctor --fix" as admin to attempt fixing your system')
return int(not good)

@@ -202,7 +203,7 @@ class Doctor(object):
status = 'GOOD'
desc = 'lastaccess disabled systemwide'
elif disablelastaccess == 0:
if False: # if self.fix:
if False: # if self.fix:
choice = self.prompt_bool(DISABLE_LASTACCESS_WIN)
if not choice:
return {'status': 'BAD, NOT FIXED',

@@ -16,6 +16,7 @@ if sys.version_info[0] == 3:
else:
str_type = basestring


class DotProperties:
r'''A thin representation of a key=value .properties file.'''

@@ -108,8 +108,8 @@ class Daemon(object):
['name', '.hg', 'wholename'],
['dirname', '.git'],
['name', '.git', 'wholename'],
],
],
],
],
'fields': ['name'],
}

(The diff for one file is not shown here because of its large size.)

@@ -192,6 +192,7 @@ class ComputedFlags(ContextDerived):
flags[dest_var].extend(value)
return flags.items()


class XPIDLModule(ContextDerived):
"""Describes an XPIDL module to be compiled."""

@@ -207,6 +208,7 @@ class XPIDLModule(ContextDerived):
self.name = name
self.idl_files = idl_files


class BaseDefines(ContextDerived):
"""Context derived container object for DEFINES/HOST_DEFINES,
which are OrderedDicts.

@@ -232,12 +234,15 @@ class BaseDefines(ContextDerived):
else:
self.defines.update(more_defines)


class Defines(BaseDefines):
pass


class HostDefines(BaseDefines):
pass


class WebIDLCollection(ContextDerived):
"""Collects WebIDL info referenced during the build."""

@@ -676,7 +681,7 @@ class StaticLibrary(Library):
)

def __init__(self, context, basename, real_name=None,
link_into=None, no_expand_lib=False):
link_into=None, no_expand_lib=False):
Library.__init__(self, context, basename, real_name)
self.link_into = link_into
self.no_expand_lib = no_expand_lib

@@ -708,8 +713,8 @@ class RustLibrary(StaticLibrary):
# many other things in the build system depend on that.
assert self.crate_type == 'staticlib'
self.lib_name = '%s%s%s' % (context.config.rust_lib_prefix,
basename.replace('-', '_'),
context.config.rust_lib_suffix)
basename.replace('-', '_'),
context.config.rust_lib_suffix)
self.dependencies = dependencies
self.features = features
self.target_dir = target_dir

@@ -892,8 +897,8 @@ class TestManifest(ContextDerived):
)

def __init__(self, context, path, manifest, flavor=None,
install_prefix=None, relpath=None, sources=(),
dupe_manifest=False):
install_prefix=None, relpath=None, sources=(),
dupe_manifest=False):
ContextDerived.__init__(self, context)

assert flavor in all_test_flavors()

@@ -1036,7 +1041,7 @@ class UnifiedSources(BaseSources):
unified_prefix = unified_prefix.replace('/', '_')

suffix = self.canonical_suffix[1:]
unified_prefix='Unified_%s_%s' % (suffix, unified_prefix)
unified_prefix = 'Unified_%s_%s' % (suffix, unified_prefix)
self.unified_source_mapping = list(group_unified_files(source_files,
unified_prefix=unified_prefix,
unified_suffix=suffix,

@@ -1100,6 +1105,7 @@ class FinalTargetPreprocessedFiles(ContextDerived):
ContextDerived.__init__(self, sandbox)
self.files = files


class LocalizedFiles(FinalTargetFiles):
"""Sandbox container object for LOCALIZED_FILES, which is a
HierarchicalStringList.

@@ -1187,12 +1193,14 @@ class GeneratedFile(ContextDerived):
'.inc',
'.py',
'.rs',
'node.stub', # To avoid VPATH issues with installing node files: https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
'android_apks', # We need to compile Java to generate JNI wrappers for native code compilation to consume.
'node.stub', # To avoid VPATH issues with installing node files: https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
# We need to compile Java to generate JNI wrappers for native code compilation to consume.
'android_apks',
'.profdata',
'.webidl'
)
self.required_for_compile = [f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]
self.required_for_compile = [
f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]


class ChromeManifestEntry(ContextDerived):

@ -185,7 +185,8 @@ class TreeMetadataEmitter(LoggingMixin):
|
|||
objs = list(emitfn(out))
|
||||
self._emitter_time += time.time() - start
|
||||
|
||||
for o in emit_objs(objs): yield o
|
||||
for o in emit_objs(objs):
|
||||
yield o
|
||||
|
||||
else:
|
||||
raise Exception('Unhandled output type: %s' % type(out))
|
||||
|
@ -196,7 +197,8 @@ class TreeMetadataEmitter(LoggingMixin):
|
|||
objs = list(self._emit_libs_derived(contexts))
|
||||
self._emitter_time += time.time() - start
|
||||
|
||||
for o in emit_objs(objs): yield o
|
||||
for o in emit_objs(objs):
|
||||
yield o
|
||||
|
||||
def _emit_libs_derived(self, contexts):
|
||||
|
||||
|
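The two hunks above unfold `for o in emit_objs(objs): yield o` onto separate lines purely to satisfy the linter; this file still targets Python 2, so that is as far as the cleanup can go. On Python 3 the loop collapses into generator delegation; a sketch for comparison, with names reused from the hunk for illustration only:

    def emit_objs(objs):
        for o in objs:
            yield o

    def emit(objs):
        # Python 3 equivalent of the unfolded two-line loop.
        yield from emit_objs(objs)

    assert list(emit([1, 2, 3])) == [1, 2, 3]
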
@@ -234,11 +236,11 @@ class TreeMetadataEmitter(LoggingMixin):
if isinstance(collection, WebIDLCollection):
# Test webidl sources are added here as a somewhat special
# case.
idl_sources[mozpath.join(root, 'test')] = [s for s in collection.all_test_cpp_basenames()]
idl_sources[mozpath.join(root, 'test')] = [
s for s in collection.all_test_cpp_basenames()]

yield collection


# Next do FINAL_LIBRARY linkage.
for lib in (l for libs in self._libs.values() for l in libs):
if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:

@@ -255,14 +257,14 @@ class TreeMetadataEmitter(LoggingMixin):
# both a static and a shared library in a directory, and having
# that as a FINAL_LIBRARY.
if len(set(type(l) for l in candidates)) == len(candidates) and \
len(set(l.objdir for l in candidates)) == 1:
len(set(l.objdir for l in candidates)) == 1:
for c in candidates:
c.link_library(lib)
else:
raise SandboxValidationError(
'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
'multiple places:\n %s' % (lib.link_into,
'\n '.join(l.objdir for l in candidates)),
'\n '.join(l.objdir for l in candidates)),
contexts[lib.objdir])

# ...and USE_LIBS linkage.

@@ -281,13 +283,13 @@ class TreeMetadataEmitter(LoggingMixin):
for lib in self._static_linking_shared:
if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
shared_libs = sorted(l.basename for l in lib.linked_libraries
if isinstance(l, SharedLibrary))
if isinstance(l, SharedLibrary))
raise SandboxValidationError(
'The static "%s" library is not used in a shared library '
'or a program, but USE_LIBS contains the following shared '
'library names:\n %s\n\nMaybe you can remove the '
'static "%s" library?' % (lib.basename,
'\n '.join(shared_libs), lib.basename),
'\n '.join(shared_libs), lib.basename),
contexts[lib.objdir])

# Propagate LIBRARY_DEFINES to all child libraries recursively.

@@ -305,7 +307,6 @@ class TreeMetadataEmitter(LoggingMixin):
propagate_defines(lib, lib.lib_defines)
yield lib


for lib in (l for libs in self._libs.values() for l in libs):
lib_defines = list(lib.lib_defines.get_defines())
if lib_defines:

@@ -325,7 +326,6 @@ class TreeMetadataEmitter(LoggingMixin):
for obj in self._binaries.values():
yield obj


LIBRARY_NAME_VAR = {
'host': 'HOST_LIBRARY_NAME',
'target': 'LIBRARY_NAME',

@@ -393,14 +393,14 @@ class TreeMetadataEmitter(LoggingMixin):
for d in self._external_paths:
if dir.startswith('%s/' % d):
candidates = [self._get_external_library(dir, name,
force_static)]
force_static)]
break

if not candidates:
raise SandboxValidationError(
'%s contains "%s", but there is no "%s" %s in %s.'
% (variable, path, name,
self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
self.LIBRARY_NAME_VAR[obj.KIND], dir), context)

if len(candidates) > 1:
# If there's more than one remaining candidate, it could be

@@ -423,11 +423,11 @@ class TreeMetadataEmitter(LoggingMixin):
raise SandboxValidationError(
'%s contains "static:%s", but there is no static '
'"%s" %s in %s.' % (variable, path, name,
self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
raise SandboxValidationError(
'%s contains "static:%s", but there is no static "%s" '
'%s in the tree' % (variable, name, name,
self.LIBRARY_NAME_VAR[obj.KIND]), context)
self.LIBRARY_NAME_VAR[obj.KIND]), context)

if not candidates:
raise SandboxValidationError(

@@ -437,23 +437,23 @@ class TreeMetadataEmitter(LoggingMixin):

elif len(candidates) > 1:
paths = (mozpath.join(l.relsrcdir, 'moz.build')
for l in candidates)
for l in candidates)
raise SandboxValidationError(
'%s contains "%s", which matches a %s defined in multiple '
'places:\n %s' % (variable, path,
self.LIBRARY_NAME_VAR[obj.KIND],
'\n '.join(paths)), context)
self.LIBRARY_NAME_VAR[obj.KIND],
'\n '.join(paths)), context)

elif force_static and not isinstance(candidates[0], StaticLibrary):
raise SandboxValidationError(
'%s contains "static:%s", but there is only a shared "%s" '
'in %s. You may want to add FORCE_STATIC_LIB=True in '
'%s/moz.build, or remove "static:".' % (variable, path,
name, candidates[0].relobjdir, candidates[0].relobjdir),
name, candidates[0].relobjdir, candidates[0].relobjdir),
context)

elif isinstance(obj, StaticLibrary) and isinstance(candidates[0],
SharedLibrary):
SharedLibrary):
self._static_linking_shared.add(obj)
obj.link_library(candidates[0])

@@ -485,25 +485,29 @@ class TreeMetadataEmitter(LoggingMixin):
# A simple version number.
if isinstance(values, (str, unicode)):
raise SandboxValidationError(
'%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
'%s %s of crate %s does not list a path' % (
description, dep_crate_name, crate_name),
context)

dep_path = values.get('path', None)
if not dep_path:
raise SandboxValidationError(
'%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
'%s %s of crate %s does not list a path' % (
description, dep_crate_name, crate_name),
context)

# Try to catch the case where somebody listed a
# local path for development.
if os.path.isabs(dep_path):
raise SandboxValidationError(
'%s %s of crate %s has a non-relative path' % (description, dep_crate_name, crate_name),
'%s %s of crate %s has a non-relative path' % (
description, dep_crate_name, crate_name),
context)

if not os.path.exists(mozpath.join(context.config.topsrcdir, crate_dir, dep_path)):
raise SandboxValidationError(
'%s %s of crate %s refers to a non-existent path' % (description, dep_crate_name, crate_name),
'%s %s of crate %s refers to a non-existent path' % (
description, dep_crate_name, crate_name),
context)

def _rust_library(self, context, libname, static_args, cls=RustLibrary):

@@ -549,7 +553,6 @@ class TreeMetadataEmitter(LoggingMixin):
return cls(context, libname, cargo_file, crate_type, dependencies,
features, cargo_target_dir, **static_args)


def _handle_gn_dirs(self, context):
for target_dir in context.get('GN_DIRS', []):
context['DIRS'] += [target_dir]

@@ -569,10 +572,10 @@ class TreeMetadataEmitter(LoggingMixin):

yield GnProjectData(context, target_dir, gn_dir, non_unified_sources)


def _handle_linkables(self, context, passthru, generated_files):
linkables = []
host_linkables = []

def add_program(prog, var):
if var.startswith('HOST_'):
host_linkables.append(prog)

@@ -584,14 +587,14 @@ class TreeMetadataEmitter(LoggingMixin):
raise SandboxValidationError(
'Cannot use "%s" as %s name, '
'because it is already used in %s' % (program, kind,
self._binaries[program].relsrcdir), context)
self._binaries[program].relsrcdir), context)
for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
program = context.get(kind)
if program:
check_unique_binary(program, kind)
self._binaries[program] = cls(context, program)
self._linkage.append((context, self._binaries[program],
kind.replace('PROGRAM', 'USE_LIBS')))
kind.replace('PROGRAM', 'USE_LIBS')))
add_program(self._binaries[program], kind)

all_rust_programs = []

@@ -605,7 +608,7 @@ class TreeMetadataEmitter(LoggingMixin):

# Verify Rust program definitions.
if all_rust_programs:
config, cargo_file = self._parse_cargo_file(context);
config, cargo_file = self._parse_cargo_file(context)
bin_section = config.get('bin', None)
if not bin_section:
raise SandboxValidationError(

@@ -634,12 +637,12 @@ class TreeMetadataEmitter(LoggingMixin):
raise SandboxValidationError(
'Cannot use "%s" in %s, '
'because it is already used in %s' % (program, kind,
self._binaries[program].relsrcdir), context)
self._binaries[program].relsrcdir), context)
self._binaries[program] = cls(context, program,
is_unit_test=kind == 'CPP_UNIT_TESTS')
is_unit_test=kind == 'CPP_UNIT_TESTS')
self._linkage.append((context, self._binaries[program],
'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
else 'USE_LIBS'))
'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
else 'USE_LIBS'))
add_program(self._binaries[program], kind)

host_libname = context.get('HOST_LIBRARY_NAME')

@@ -648,7 +651,7 @@ class TreeMetadataEmitter(LoggingMixin):
if host_libname:
if host_libname == libname:
raise SandboxValidationError('LIBRARY_NAME and '
'HOST_LIBRARY_NAME must have a different value', context)
'HOST_LIBRARY_NAME must have a different value', context)

is_rust_library = context.get('IS_RUST_LIBRARY')
if is_rust_library:

@@ -779,7 +782,7 @@ class TreeMetadataEmitter(LoggingMixin):
raise SandboxValidationError(
'Path specified in SYMBOLS_FILE does not exist: %s '
'(resolved to %s)' % (symbols_file,
symbols_file.full_path), context)
symbols_file.full_path), context)
shared_args['symbols_file'] = True
else:
if symbols_file.target_basename not in generated_files:

@@ -803,8 +806,8 @@ class TreeMetadataEmitter(LoggingMixin):
if lib.defines:
defines = lib.defines.get_defines()
yield GeneratedFile(context, script,
'generate_symbols_file', lib.symbols_file,
[symbols_file], defines)
'generate_symbols_file', lib.symbols_file,
[symbols_file], defines)
if static_lib:
is_rust_library = context.get('IS_RUST_LIBRARY')
if is_rust_library:

@@ -818,7 +821,7 @@ class TreeMetadataEmitter(LoggingMixin):
if lib_defines:
if not libname:
raise SandboxValidationError('LIBRARY_DEFINES needs a '
'LIBRARY_NAME to take effect', context)
'LIBRARY_NAME to take effect', context)
lib.lib_defines.update(lib_defines)

# Only emit sources if we have linkables defined in the same context.

@@ -849,7 +852,7 @@ class TreeMetadataEmitter(LoggingMixin):
for f in context_srcs:
if f in seen_sources:
raise SandboxValidationError('Source file should only '
'be added to %s once: %s' % (symbol, f), context)
'be added to %s once: %s' % (symbol, f), context)
seen_sources.add(f)
full_path = f.full_path
if isinstance(f, SourcePath):

@@ -866,18 +869,18 @@ class TreeMetadataEmitter(LoggingMixin):
if flags.pgo_generate_only:
if not isinstance(f, Path):
raise SandboxValidationError('pgo_generate_only file'
'must not be a generated file: %s' % f, context)
'must not be a generated file: %s' % f, context)
if mozpath.splitext(f)[1] != '.cpp':
raise SandboxValidationError('pgo_generate_only file'
'must be a .cpp file: %s' % f, context)
'must be a .cpp file: %s' % f, context)
if flags.no_pgo:
raise SandboxValidationError('pgo_generate_only files'
'cannot be marked no_pgo: %s' % f, context)
'cannot be marked no_pgo: %s' % f, context)
pgo_generate_only.add(f)

if isinstance(f, SourcePath) and not os.path.exists(full_path):
raise SandboxValidationError('File listed in %s does not '
'exist: \'%s\'' % (symbol, full_path), context)
'exist: \'%s\'' % (symbol, full_path), context)

# UNIFIED_SOURCES only take SourcePaths, so there should be no
# generated source in here

@@ -891,7 +894,7 @@ class TreeMetadataEmitter(LoggingMixin):
if no_pgo:
if no_pgo_sources:
raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
'cannot be set at the same time', context)
'cannot be set at the same time', context)
passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
if no_pgo_sources:
passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

@@ -1003,7 +1006,6 @@ class TreeMetadataEmitter(LoggingMixin):
l.cxx_link = True
break


def emit_from_context(self, context):
"""Convert a Context to tree metadata objects.

@@ -1022,7 +1024,8 @@ class TreeMetadataEmitter(LoggingMixin):

# We always emit a directory traversal descriptor. This is needed by
# the recursive make backend.
for o in self._emit_directory_traversal_from_context(context): yield o
for o in self._emit_directory_traversal_from_context(context):
yield o

for obj in self._process_xpidl(context):
yield obj

@@ -1052,7 +1055,7 @@ class TreeMetadataEmitter(LoggingMixin):
if context.config.substs.get('OS_TARGET') == 'WINNT' and \
context['DELAYLOAD_DLLS']:
context['LDFLAGS'].extend([('-DELAYLOAD:%s' % dll)
for dll in context['DELAYLOAD_DLLS']])
for dll in context['DELAYLOAD_DLLS']])
context['OS_LIBS'].append('delayimp')

for v in ['CMFLAGS', 'CMMFLAGS']:

@@ -1077,7 +1080,7 @@ class TreeMetadataEmitter(LoggingMixin):
raise SandboxValidationError(
'Path specified in DEFFILE does not exist: %s '
'(resolved to %s)' % (deffile,
deffile.full_path), context)
deffile.full_path), context)
path = mozpath.relpath(deffile.full_path, context.objdir)
else:
path = deffile.target_basename

@@ -1138,7 +1141,7 @@ class TreeMetadataEmitter(LoggingMixin):

for path in context['CONFIGURE_SUBST_FILES']:
sub = self._create_substitution(ConfigFileSubstitution, context,
path)
path)
generated_files.add(str(sub.relpath))
yield sub

@@ -1185,17 +1188,17 @@ class TreeMetadataEmitter(LoggingMixin):
if not isinstance(local_include, ObjDirPath):
if not os.path.exists(full_path):
raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
'does not exist: %s (resolved to %s)' % (local_include,
full_path), context)
'does not exist: %s (resolved to %s)' % (local_include,
full_path), context)
if not os.path.isdir(full_path):
raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
'is a filename, but a directory is required: %s '
'(resolved to %s)' % (local_include, full_path), context)
'is a filename, but a directory is required: %s '
'(resolved to %s)' % (local_include, full_path), context)
if (full_path == context.config.topsrcdir or
full_path == context.config.topobjdir):
raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
'(%s) resolves to the topsrcdir or topobjdir (%s), which is '
'not allowed' % (local_include, full_path), context)
'(%s) resolves to the topsrcdir or topobjdir (%s), which is '
'not allowed' % (local_include, full_path), context)
include_obj = LocalInclude(context, local_include)
local_includes.append(include_obj.path.full_path)
yield include_obj

@@ -1207,7 +1210,8 @@ class TreeMetadataEmitter(LoggingMixin):
for obj in self._handle_linkables(context, passthru, generated_files):
yield obj

generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', '')) for k in self._binaries.keys()])
generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', ''))
for k in self._binaries.keys()])

components = []
for var, cls in (

@@ -1376,7 +1380,6 @@ class TreeMetadataEmitter(LoggingMixin):
if context.objdir in self._host_compile_dirs:
yield computed_host_flags


def _create_substitution(self, cls, context, path):
sub = cls(context)
sub.input_path = '%s.in' % path.full_path

@@ -1395,12 +1398,12 @@ class TreeMetadataEmitter(LoggingMixin):
if not xpidl_module:
if context['XPIDL_SOURCES']:
raise SandboxValidationError('XPIDL_MODULE must be defined if '
'XPIDL_SOURCES is defined.', context)
'XPIDL_SOURCES is defined.', context)
return

if not context['XPIDL_SOURCES']:
raise SandboxValidationError('XPIDL_MODULE cannot be defined '
'unless there are XPIDL_SOURCES', context)
'unless there are XPIDL_SOURCES', context)

if context['DIST_INSTALL'] is False:
self.log(logging.WARN, 'mozbuild_warning', dict(

@@ -1410,7 +1413,7 @@ class TreeMetadataEmitter(LoggingMixin):
for idl in context['XPIDL_SOURCES']:
if not os.path.exists(idl.full_path):
raise SandboxValidationError('File %s from XPIDL_SOURCES '
'does not exist' % idl.full_path, context)
'does not exist' % idl.full_path, context)

yield XPIDLModule(context, xpidl_module, context['XPIDL_SOURCES'])

@@ -1482,7 +1485,7 @@ class TreeMetadataEmitter(LoggingMixin):
path = manifest_path.full_path
manifest_dir = mozpath.dirname(path)
manifest_reldir = mozpath.dirname(mozpath.relpath(path,
context.config.topsrcdir))
context.config.topsrcdir))
manifest_sources = [mozpath.relpath(pth, context.config.topsrcdir)
for pth in mpmanifest.source_files]
install_prefix = mozpath.join(install_root, install_subdir)

@@ -1490,22 +1493,22 @@ class TreeMetadataEmitter(LoggingMixin):
try:
if not mpmanifest.tests:
raise SandboxValidationError('Empty test manifest: %s'
% path, context)
% path, context)

defaults = mpmanifest.manifest_defaults[os.path.normpath(path)]
obj = TestManifest(context, path, mpmanifest, flavor=flavor,
install_prefix=install_prefix,
relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
sources=manifest_sources,
dupe_manifest='dupe-manifest' in defaults)
install_prefix=install_prefix,
relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
sources=manifest_sources,
dupe_manifest='dupe-manifest' in defaults)

filtered = mpmanifest.tests

missing = [t['name'] for t in filtered if not os.path.exists(t['path'])]
if missing:
raise SandboxValidationError('Test manifest (%s) lists '
'test that does not exist: %s' % (
path, ', '.join(missing)), context)
'test that does not exist: %s' % (
path, ', '.join(missing)), context)

out_dir = mozpath.join(install_prefix, manifest_reldir)
if 'install-to-subdir' in defaults:

@@ -1529,8 +1532,8 @@ class TreeMetadataEmitter(LoggingMixin):
install_path[2:])),
install_path not in install_info.external_installs]):
raise SandboxValidationError('Error processing test '
'manifest %s: entry in support-files not present '
'in the srcdir: %s' % (path, install_path), context)
'manifest %s: entry in support-files not present '
'in the srcdir: %s' % (path, install_path), context)

obj.deferred_installs |= install_info.deferred_installs

@@ -1541,7 +1544,7 @@ class TreeMetadataEmitter(LoggingMixin):
# test package. They function as identifiers rather than files.
if package_tests:
manifest_relpath = mozpath.relpath(test['path'],
mozpath.dirname(test['manifest']))
mozpath.dirname(test['manifest']))
obj.installs[mozpath.normpath(test['path'])] = \
((mozpath.join(out_dir, manifest_relpath)), True)

@@ -1568,28 +1571,28 @@ class TreeMetadataEmitter(LoggingMixin):
del obj.installs[mozpath.join(manifest_dir, f)]
except KeyError:
raise SandboxValidationError('Error processing test '
'manifest %s: entry in generated-files not present '
'elsewhere in manifest: %s' % (path, f), context)
'manifest %s: entry in generated-files not present '
'elsewhere in manifest: %s' % (path, f), context)

yield obj
except (AssertionError, Exception):
raise SandboxValidationError('Error processing test '
'manifest file %s: %s' % (path,
'\n'.join(traceback.format_exception(*sys.exc_info()))),
context)
'manifest file %s: %s' % (path,
'\n'.join(traceback.format_exception(*sys.exc_info()))),
context)

def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
manifest_full_path = manifest_path.full_path
manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path,
context.config.topsrcdir))
context.config.topsrcdir))

# reftest manifests don't come from manifest parser. But they are
# similar enough that we can use the same emitted objects. Note
# that we don't perform any installs for reftests.
obj = TestManifest(context, manifest_full_path, manifest,
flavor=flavor, install_prefix='%s/' % flavor,
relpath=mozpath.join(manifest_reldir,
mozpath.basename(manifest_path)))
flavor=flavor, install_prefix='%s/' % flavor,
relpath=mozpath.join(manifest_reldir,
mozpath.basename(manifest_path)))

for test, source_manifest in sorted(manifest.tests):
obj.tests.append({

@@ -1608,7 +1611,7 @@ class TreeMetadataEmitter(LoggingMixin):
jar_manifests = context.get('JAR_MANIFESTS', [])
if len(jar_manifests) > 1:
raise SandboxValidationError('While JAR_MANIFESTS is a list, '
'it is currently limited to one value.', context)
'it is currently limited to one value.', context)

for path in jar_manifests:
yield JARManifest(context, path)

@@ -1620,8 +1623,8 @@ class TreeMetadataEmitter(LoggingMixin):
if os.path.exists(os.path.join(context.srcdir, 'jar.mn')):
if 'jar.mn' not in jar_manifests:
raise SandboxValidationError('A jar.mn exists but it '
'is not referenced in the moz.build file. '
'Please define JAR_MANIFESTS.', context)
'is not referenced in the moz.build file. '
'Please define JAR_MANIFESTS.', context)

def _emit_directory_traversal_from_context(self, context):
o = DirectoryTraversal(context)

@@ -1630,6 +1633,6 @@ class TreeMetadataEmitter(LoggingMixin):
# Some paths have a subconfigure, yet also have a moz.build. Those
# shouldn't end up in self._external_paths.
if o.objdir:
self._external_paths -= { o.relobjdir }
self._external_paths -= {o.relobjdir}

yield o

@@ -40,7 +40,7 @@ sys.modules['gyp.generator.mozbuild'] = sys.modules[__name__]
# chrome_src for the default includes, so go backwards from the pylib
# directory, which is the parent directory of gyp module.
chrome_src = mozpath.abspath(mozpath.join(mozpath.dirname(gyp.__file__),
'../../../../..'))
'../../../../..'))
script_dir = mozpath.join(chrome_src, 'build')


@@ -74,10 +74,11 @@ class GypContext(TemplateContext):
relobjdir is the object directory that will be used for this context,
relative to the topobjdir defined in the ConfigEnvironment.
"""

def __init__(self, config, relobjdir):
self._relobjdir = relobjdir
TemplateContext.__init__(self, template='Gyp',
allowed_variables=VARIABLES, config=config)
allowed_variables=VARIABLES, config=config)


def handle_actions(actions, context, action_overrides):

@@ -88,10 +89,12 @@ def handle_actions(actions, context, action_overrides):
raise RuntimeError('GYP action %s not listed in action_overrides' % name)
outputs = action['outputs']
if len(outputs) > 1:
raise NotImplementedError('GYP actions with more than one output not supported: %s' % name)
raise NotImplementedError(
'GYP actions with more than one output not supported: %s' % name)
output = outputs[0]
if not output.startswith(idir):
raise NotImplementedError('GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
raise NotImplementedError(
'GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
output = output[len(idir):]
context['GENERATED_FILES'] += [output]
g = context['GENERATED_FILES'][output]

@@ -104,7 +107,8 @@ def handle_copies(copies, context):
for copy in copies:
dest = copy['destination']
if not dest.startswith(dist):
raise NotImplementedError('GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
raise NotImplementedError(
'GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
dest_paths = dest[len(dist):].split('/')
exports = context['EXPORTS']
while dest_paths:

@@ -130,8 +134,8 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
# directory. Since several targets can be in a given build_file,
# separate them in subdirectories using the build_file basename
# and the target_name.
reldir = mozpath.relpath(mozpath.dirname(build_file),
mozpath.dirname(path))
reldir = mozpath.relpath(mozpath.dirname(build_file),
mozpath.dirname(path))
subdir = '%s_%s' % (
mozpath.splitext(mozpath.basename(build_file))[0],
target_name,

@@ -155,12 +159,13 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
target_conf = spec['configurations'][c]

if 'actions' in spec:
handle_actions(spec['actions'], context, action_overrides)
handle_actions(spec['actions'], context, action_overrides)
if 'copies' in spec:
handle_copies(spec['copies'], context)
handle_copies(spec['copies'], context)

use_libs = []
libs = []

def add_deps(s):
for t in s.get('dependencies', []) + s.get('dependencies_original', []):
ty = targets[t]['type']

@@ -171,7 +176,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
if ty in ('static_library', 'none'):
add_deps(targets[t])
libs.extend(spec.get('libraries', []))
#XXX: this sucks, but webrtc breaks with this right now because
# XXX: this sucks, but webrtc breaks with this right now because
# it builds a library called 'gtest' and we just get lucky
# that it isn't in USE_LIBS by that name anywhere.
if no_chromium:

@@ -179,20 +184,20 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,

os_libs = []
for l in libs:
if l.startswith('-'):
os_libs.append(l)
elif l.endswith('.lib'):
os_libs.append(l[:-4])
elif l:
# For library names passed in from moz.build.
use_libs.append(os.path.basename(l))
if l.startswith('-'):
os_libs.append(l)
elif l.endswith('.lib'):
os_libs.append(l[:-4])
elif l:
# For library names passed in from moz.build.
use_libs.append(os.path.basename(l))

if spec['type'] == 'none':
if not ('actions' in spec or 'copies' in spec):
continue
if not ('actions' in spec or 'copies' in spec):
continue
elif spec['type'] in ('static_library', 'shared_library', 'executable'):
# Remove leading 'lib' from the target_name if any, and use as
# library name.
# Remove leading 'lib' from the target_name if any, and use as
# library name.
name = spec['target_name']
if spec['type'] in ('static_library', 'shared_library'):
if name.startswith('lib'):

@@ -222,9 +227,9 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
ext = mozpath.splitext(f)[-1]
extensions.add(ext)
if f.startswith('$INTERMEDIATE_DIR/'):
s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
else:
s = SourcePath(context, f)
s = SourcePath(context, f)
if ext == '.h':
continue
if ext == '.def':

@@ -282,7 +287,8 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
if include.startswith('/'):
resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
elif not include.startswith(('!', '%')):
resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
resolved = mozpath.abspath(mozpath.join(
mozpath.dirname(build_file), include))
if not include.startswith(('!', '%')) and not os.path.exists(resolved):
continue
context['LOCAL_INCLUDES'] += [include]

@@ -325,17 +331,17 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
raise NotImplementedError('Unsupported gyp target type: %s' % spec['type'])

if not no_chromium:
# Add some features to all contexts. Put here in case LOCAL_INCLUDES
# order matters.
context['LOCAL_INCLUDES'] += [
'!/ipc/ipdl/_ipdlheaders',
'/ipc/chromium/src',
'/ipc/glue',
]
# These get set via VC project file settings for normal GYP builds.
if config.substs['OS_TARGET'] == 'WINNT':
context['DEFINES']['UNICODE'] = True
context['DEFINES']['_UNICODE'] = True
# Add some features to all contexts. Put here in case LOCAL_INCLUDES
# order matters.
context['LOCAL_INCLUDES'] += [
'!/ipc/ipdl/_ipdlheaders',
'/ipc/chromium/src',
'/ipc/glue',
]
# These get set via VC project file settings for normal GYP builds.
if config.substs['OS_TARGET'] == 'WINNT':
context['DEFINES']['UNICODE'] = True
context['DEFINES']['_UNICODE'] = True
context['COMPILE_FLAGS']['OS_INCLUDES'] = []

for key, value in gyp_dir_attrs.sandbox_vars.items():

@@ -368,6 +374,7 @@ class GypProcessor(object):
gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
from moz.build.
"""

def __init__(self, config, gyp_dir_attrs, path, output, executor,
action_overrides, non_unified_sources):
self._path = path

@ -21,6 +21,7 @@ import mozpack.path as mozpath

TOPSRCDIR = os.path.abspath(os.path.join(__file__, '../../../../../'))


class InvalidPathException(Exception):
"""Represents an error due to an invalid path."""

@ -28,11 +29,11 @@ class InvalidPathException(Exception):
@CommandProvider
class MozbuildFileCommands(MachCommandBase):
@Command('mozbuild-reference', category='build-dev',
description='View reference documentation on mozbuild files.')
description='View reference documentation on mozbuild files.')
@CommandArgument('symbol', default=None, nargs='*',
help='Symbol to view help on. If not specified, all will be shown.')
help='Symbol to view help on. If not specified, all will be shown.')
@CommandArgument('--name-only', '-n', default=False, action='store_true',
help='Print symbol names only.')
help='Print symbol names only.')
def reference(self, symbol, name_only=False):
# mozbuild.sphinx imports some Sphinx modules, so we need to be sure
# the optional Sphinx package is installed.

@ -284,7 +285,6 @@ class MozbuildFileCommands(MachCommandBase):
print(e.message)
return 1


def _get_files_info(self, paths, rev=None):
reader = self.mozbuild_reader(config_mode='empty', vcs_revision=rev)

@ -328,7 +328,6 @@ class MozbuildFileCommands(MachCommandBase):

return reader.files_info(allpaths)


@SubCommand('file-info', 'schedules',
'Show the combined SCHEDULES for the files listed.')
@CommandArgument('paths', nargs='+',

@ -80,7 +80,6 @@ from mozbuild.base import ExecutionSummary
from concurrent.futures.process import ProcessPoolExecutor



if sys.version_info.major == 2:
text_type = unicode
type_type = types.TypeType

@ -106,6 +105,7 @@ class EmptyConfig(object):
This variation is needed because CONFIG uses .get() to access members.
Without it, None (instead of our EmptyValue types) would be returned.
"""

def get(self, key, default=None):
return self[key]

@ -182,6 +182,7 @@ class MozbuildSandbox(Sandbox):
metadata is a dict of metadata that can be used during the sandbox
evaluation.
"""

def __init__(self, context, metadata={}, finder=default_finder):
assert isinstance(context, Context)

@ -241,7 +242,7 @@ class MozbuildSandbox(Sandbox):
# protection, so it is omitted.
if not is_read_allowed(path, self._context.config):
raise SandboxLoadError(self._context.source_stack,
sys.exc_info()[2], illegal_path=path)
sys.exc_info()[2], illegal_path=path)

Sandbox.exec_file(self, path)

@ -290,14 +291,14 @@ class MozbuildSandbox(Sandbox):

if not inspect.isfunction(func):
raise Exception('`template` is a function decorator. You must '
'use it as `@template` preceding a function declaration.')
'use it as `@template` preceding a function declaration.')

name = func.func_name

if name in self.templates:
raise KeyError(
'A template named "%s" was already declared in %s.' % (name,
self.templates[name].path))
self.templates[name].path))

if name.islower() or name.isupper() or name[0].islower():
raise NameError('Template function names must be CamelCase.')

@ -320,6 +321,7 @@ class MozbuildSandbox(Sandbox):
The wrapper function does type coercion on the function arguments
"""
func, args_def, doc = function_def

def function(*args):
def coerce(arg, type):
if not isinstance(arg, type):

@ -416,7 +418,7 @@ class TemplateFunction(object):
# actually never calls __getitem__ and __setitem__, so we need to
# modify the AST so that accesses to globals are properly directed
# to a dict.
self._global_name = b'_data' # AST wants str for this, not unicode
self._global_name = b'_data' # AST wants str for this, not unicode
# In case '_data' is a name used for a variable in the function code,
# prepend more underscores until we find an unused name.
while (self._global_name in code.co_names or

@ -463,6 +465,7 @@ class TemplateFunction(object):
"""AST Node Transformer to rewrite variable accesses to go through
a dict.
"""

def __init__(self, sandbox, global_name):
self._sandbox = sandbox
self._global_name = global_name

@ -491,6 +494,7 @@ class TemplateFunction(object):

class SandboxValidationError(Exception):
"""Represents an error encountered when validating sandbox results."""

def __init__(self, message, context):
Exception.__init__(self, message)
self.context = context

@ -532,9 +536,10 @@ class BuildReaderError(Exception):
MozbuildSandbox has over Sandbox (e.g. the concept of included files -
which affect error messages, of course).
"""

def __init__(self, file_stack, trace, sandbox_exec_error=None,
sandbox_load_error=None, validation_error=None, other_error=None,
sandbox_called_error=None):
sandbox_load_error=None, validation_error=None, other_error=None,
sandbox_called_error=None):

self.file_stack = file_stack
self.trace = trace

@ -559,7 +564,7 @@ class BuildReaderError(Exception):
return self.file_stack[-2]

if self.sandbox_error is not None and \
len(self.sandbox_error.file_stack):
len(self.sandbox_error.file_stack):
return self.sandbox_error.file_stack[-1]

return self.file_stack[-1]

@ -602,7 +607,7 @@ class BuildReaderError(Exception):
s.write('\n')

for l in traceback.format_exception(type(self.other), self.other,
self.trace):
self.trace):
s.write(unicode(l))

return s.getvalue()

@ -760,7 +765,7 @@ class BuildReaderError(Exception):

if inner.args[2] in DEPRECATION_HINTS:
s.write('%s\n' %
textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip())
textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip())
return

s.write('Please change the file to not use this variable.\n')

@ -802,7 +807,7 @@ class BuildReaderError(Exception):
s.write(' %s\n' % inner.args[4].__name__)
else:
for t in inner.args[4]:
s.write( ' %s\n' % t.__name__)
s.write(' %s\n' % t.__name__)
s.write('\n')
s.write('Change the file to write a value of the appropriate type ')
s.write('and try again.\n')

@ -1053,23 +1058,23 @@ class BuildReader(object):

except SandboxCalledError as sce:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], sandbox_called_error=sce)
sys.exc_info()[2], sandbox_called_error=sce)

except SandboxExecutionError as se:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], sandbox_exec_error=se)
sys.exc_info()[2], sandbox_exec_error=se)

except SandboxLoadError as sle:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], sandbox_load_error=sle)
sys.exc_info()[2], sandbox_load_error=sle)

except SandboxValidationError as ve:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], validation_error=ve)
sys.exc_info()[2], validation_error=ve)

except Exception as e:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], other_error=e)
sys.exc_info()[2], other_error=e)

def _read_mozbuild(self, path, config, descend, metadata):
path = mozpath.normpath(path)

@ -1127,7 +1132,7 @@ class BuildReader(object):
for v in ('input', 'variables'):
if not getattr(gyp_dir, v):
raise SandboxValidationError('Missing value for '
'GYP_DIRS["%s"].%s' % (target_dir, v), context)
'GYP_DIRS["%s"].%s' % (target_dir, v), context)

# The make backend assumes contexts for sub-directories are
# emitted after their parent, so accumulate the gyp contexts.

@ -1140,7 +1145,7 @@ class BuildReader(object):
source = SourcePath(context, s)
if not self.finder.get(source.full_path):
raise SandboxValidationError('Cannot find %s.' % source,
context)
context)
non_unified_sources.add(source)
action_overrides = {}
for action, script in gyp_dir.action_overrides.iteritems():

@ -1189,7 +1194,7 @@ class BuildReader(object):
if not is_read_allowed(child_path, context.config):
raise SandboxValidationError(
'Attempting to process file outside of allowed paths: %s' %
child_path, context)
child_path, context)

if not descend:
continue

@ -1283,6 +1288,7 @@ class BuildReader(object):
# Exporting doesn't work reliably in tree traversal mode. Override
# the function to no-op.
functions = dict(FUNCTIONS)

def export(sandbox):
return lambda varname: None
functions['export'] = tuple([export] + list(FUNCTIONS['export'][1:]))

@ -1337,6 +1343,7 @@ class BuildReader(object):
# times (once for every path in a directory that doesn't have any
# test metadata). So, we cache the function call.
defaults_cache = {}

def test_defaults_for_path(ctxs):
key = tuple(ctx.current_path or ctx.main_path for ctx in ctxs)

@ -1394,7 +1401,8 @@ class BuildReader(object):
test_manifest_contexts = set(
['%s_MANIFESTS' % key for key in TEST_MANIFESTS] +
['%s_MANIFESTS' % flavor.upper() for flavor in REFTEST_FLAVORS] +
['%s_MANIFESTS' % flavor.upper().replace('-', '_') for flavor in WEB_PLATFORM_TESTS_FLAVORS]
['%s_MANIFESTS' % flavor.upper().replace('-', '_')
for flavor in WEB_PLATFORM_TESTS_FLAVORS]
)

result_context = Files(Context())

@ -53,6 +53,7 @@ class SandboxExecutionError(SandboxError):
This is a simple container exception. It's purpose is to capture state
so something else can report on it.
"""

def __init__(self, file_stack, exc_type, exc_value, trace):
SandboxError.__init__(self, file_stack)

@ -69,6 +70,7 @@ class SandboxLoadError(SandboxError):
a file. If so, the file_stack will be non-empty and the file that caused
the load will be on top of the stack.
"""

def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
SandboxError.__init__(self, file_stack)

@ -155,7 +157,7 @@ class Sandbox(dict):
source = self._finder.get(path).read()
except Exception as e:
raise SandboxLoadError(self._context.source_stack,
sys.exc_info()[2], read_error=path)
sys.exc_info()[2], read_error=path)

self.exec_source(source, path)

@ -288,7 +290,7 @@ class Sandbox(dict):
raise KeyError('global_ns', 'reassign', key)

if (key not in self._context and isinstance(value, (list, dict))
and not value):
and not value):
raise KeyError('Variable %s assigned an empty value.' % key)

self._context[key] = value

@ -78,7 +78,8 @@ class MozbuildWriter(object):
self.write('\n')
self.write(self.indent + key)
self.write(' += [\n ' + self.indent)
self.write((',\n ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
self.write(
(',\n ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
self.write('\n')
self.write_ln(']')

@ -112,7 +113,6 @@ class MozbuildWriter(object):
if not wrote_ln:
self.write_ln("%s[%s] = %s" % subst_vals)


def write_condition(self, values):
def mk_condition(k, v):
if not v:

@ -90,7 +90,8 @@ class JarInfo(object):
self.entries = []


class DeprecatedJarManifest(Exception): pass
class DeprecatedJarManifest(Exception):
pass


class JarManifestParser(object):

@ -197,7 +198,7 @@ class JarMaker(object):
'''

def __init__(self, outputFormat='flat', useJarfileManifest=True,
useChromeManifest=False):
useChromeManifest=False):

self.outputFormat = outputFormat
self.useJarfileManifest = useJarfileManifest

@ -224,10 +225,10 @@ class JarMaker(object):

p = self.pp.getCommandLineParser(unescapeDefines=True)
p.add_option('-f', type='choice', default='jar',
choices=('jar', 'flat', 'symlink'),
help='fileformat used for output',
metavar='[jar, flat, symlink]',
)
choices=('jar', 'flat', 'symlink'),
help='fileformat used for output',
metavar='[jar, flat, symlink]',
)
p.add_option('-v', action='store_true', dest='verbose',
help='verbose output')
p.add_option('-q', action='store_false', dest='verbose',

@ -238,13 +239,12 @@ class JarMaker(object):
p.add_option('-s', type='string', action='append', default=[],
help='source directory')
p.add_option('-t', type='string', help='top source directory')
p.add_option('-c', '--l10n-src', type='string', action='append'
, help='localization directory')
p.add_option('-c', '--l10n-src', type='string',
action='append', help='localization directory')
p.add_option('--l10n-base', type='string', action='store',
help='base directory to be used for localization (requires relativesrcdir)'
)
p.add_option('--locale-mergedir', type='string', action='store'
,
p.add_option('--locale-mergedir', type='string', action='store',
help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
)
p.add_option('--relativesrcdir', type='string',

@ -292,18 +292,18 @@ class JarMaker(object):
chromeDir = \
os.path.basename(os.path.dirname(os.path.normpath(chromeManifest)))
logging.info("adding '%s' entry to root chrome manifest appid=%s"
% (chromeDir, self.rootManifestAppId))
% (chromeDir, self.rootManifestAppId))
addEntriesToListFile(rootChromeManifest,
['manifest %s/chrome.manifest application=%s'
% (chromeDir,
self.rootManifestAppId)])
self.rootManifestAppId)])

def updateManifest(self, manifestPath, chromebasepath, register):
'''updateManifest replaces the % in the chrome registration entries
with the given chrome base path, and updates the given manifest file.
'''
myregister = dict.fromkeys(map(lambda s: s.replace('%',
chromebasepath), register))
chromebasepath), register))
addEntriesToListFile(manifestPath, myregister.iterkeys())

def makeJar(self, infile, jardir):

@ -314,7 +314,7 @@ class JarMaker(object):
'''

# making paths absolute, guess srcdir if file and add to sourcedirs
_normpath = lambda p: os.path.normpath(os.path.abspath(p))
def _normpath(p): return os.path.normpath(os.path.abspath(p))
self.topsourcedir = _normpath(self.topsourcedir)
self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
if self.localedirs:

@ -348,7 +348,7 @@ class JarMaker(object):
if self.l10nmerge or not self.l10nbase:
# add en-US if we merge, or if it's not l10n
locdirs.append(os.path.join(self.topsourcedir,
relativesrcdir, 'en-US'))
relativesrcdir, 'en-US'))
return locdirs

def processJarSection(self, jarinfo, jardir):

@ -457,7 +457,7 @@ class JarMaker(object):
if jf is not None:
jf.close()
raise RuntimeError('File "{0}" not found in {1}'.format(src,
', '.join(src_base)))
', '.join(src_base)))

if out in self._seen_output:
raise RuntimeError('%s already added' % out)

@ -585,7 +585,7 @@ def main(args=None):
jm.l10nmerge = options.locale_mergedir
if jm.l10nmerge and not os.path.isdir(jm.l10nmerge):
logging.warning("WARNING: --locale-mergedir passed, but '%s' does not exist. "
"Ignore this message if the locale is complete." % jm.l10nmerge)
"Ignore this message if the locale is complete." % jm.l10nmerge)
elif options.locale_mergedir:
p.error('l10n-base required when using locale-mergedir')
jm.localedirs = options.l10n_src

File diff suppressed because it is too large

@ -62,6 +62,7 @@ class _SimpleOrderedSet(object):
It doesn't expose a complete API, and normalizes path separators
at insertion.
'''

def __init__(self):
self._list = []
self._set = set()

@ -95,6 +96,7 @@ class Rule(object):
command2
...
'''

def __init__(self, targets=[]):
self._targets = _SimpleOrderedSet()
self._dependencies = _SimpleOrderedSet()

@ -175,6 +177,7 @@ def read_dep_makefile(fh):
if rule:
raise Exception('Makefile finishes with a backslash. Expected more input.')


def write_dep_makefile(fh, target, deps):
'''
Write a Makefile containing only target's dependencies to the file handle

@ -306,6 +306,7 @@ def _schema_1_additional(filename, manifest, require_license_file=True):
class License(object):
"""Voluptuous validator which verifies the license(s) are valid as per our
whitelist."""

def __call__(self, values):
if isinstance(values, str):
values = [values]

@ -66,7 +66,7 @@ class MozconfigLoader(object):
\s* [?:]?= \s* # Assignment operator surrounded by optional
# spaces
(?P<value>.*$)''', # Everything else (likely the value)
re.VERBOSE)
re.VERBOSE)

# Default mozconfig files in the topsrcdir.
DEFAULT_TOPSRCDIR_PATHS = ('.mozconfig', 'mozconfig')

@ -145,7 +145,7 @@ class MozconfigLoader(object):
'does not exist in any of ' + ', '.join(potential_roots))

env_path = os.path.join(existing[0], env_path)
elif not os.path.exists(env_path): # non-relative path
elif not os.path.exists(env_path): # non-relative path
raise MozconfigFindException(
'MOZCONFIG environment variable refers to a path that '
'does not exist: ' + env_path)

@ -156,12 +156,12 @@ class MozconfigLoader(object):
'non-file: ' + env_path)

srcdir_paths = [os.path.join(self.topsrcdir, p) for p in
self.DEFAULT_TOPSRCDIR_PATHS]
self.DEFAULT_TOPSRCDIR_PATHS]
existing = [p for p in srcdir_paths if os.path.isfile(p)]

if env_path is None and len(existing) > 1:
raise MozconfigFindException('Multiple default mozconfig files '
'present. Remove all but one. ' + ', '.join(existing))
'present. Remove all but one. ' + ', '.join(existing))

path = None

@ -175,12 +175,12 @@ class MozconfigLoader(object):
return os.path.abspath(path)

deprecated_paths = [os.path.join(self.topsrcdir, s) for s in
self.DEPRECATED_TOPSRCDIR_PATHS]
self.DEPRECATED_TOPSRCDIR_PATHS]

home = env.get('HOME', None)
if home is not None:
deprecated_paths.extend([os.path.join(home, s) for s in
self.DEPRECATED_HOME_PATHS])
self.DEPRECATED_HOME_PATHS])

for path in deprecated_paths:
if os.path.exists(path):

@ -243,7 +243,7 @@ class MozconfigLoader(object):
# We need to capture stderr because that's where the shell sends
# errors if execution fails.
output = subprocess.check_output(command, stderr=subprocess.STDOUT,
cwd=self.topsrcdir, env=env)
cwd=self.topsrcdir, env=env)
except subprocess.CalledProcessError as e:
lines = e.output.splitlines()

@ -306,7 +306,7 @@ class MozconfigLoader(object):

# Environment variables also appear as shell variables, but that's
# uninteresting duplication of information. Filter them out.
filt = lambda x, y: {k: v for k, v in x.items() if k not in y}
def filt(x, y): return {k: v for k, v in x.items() if k not in y}
result['vars'] = diff_vars(
filt(parsed['vars_before'], parsed['env_before']),
filt(parsed['vars_after'], parsed['env_after'])

@ -58,7 +58,7 @@ def build_dict(config, env=os.environ):
# processor
p = substs["TARGET_CPU"]
# do some slight massaging for some values
#TODO: retain specific values in case someone wants them?
# TODO: retain specific values in case someone wants them?
if p.startswith("arm"):
p = "arm"
elif re.match("i[3-9]86", p):

@ -198,6 +198,7 @@ class Expression:
rv = not rv
return rv
# Helper function to evaluate __get_logical_and and __get_logical_or results

def eval_logical_op(tok):
left = opmap[tok[0].type](tok[0])
right = opmap[tok[2].type](tok[2])

@ -217,12 +218,13 @@ class Expression:
'defined': lambda tok: tok.value in context,
'int': lambda tok: tok.value}

return opmap[self.e.type](self.e);
return opmap[self.e.type](self.e)

class __AST(list):
"""
Internal class implementing Abstract Syntax Tree nodes
"""

def __init__(self, type):
self.type = type
super(self.__class__, self).__init__(self)

@ -231,11 +233,14 @@ class Expression:
"""
Internal class implementing Abstract Syntax Tree leafs
"""

def __init__(self, type, value):
self.value = value
self.type = type

def __str__(self):
return self.value.__str__()

def __repr__(self):
return self.value.__repr__()

@ -245,13 +250,16 @@ class Expression:
It has two members, offset and content, which give the offset of the
error and the offending content.
"""

def __init__(self, expression):
self.offset = expression.offset
self.content = expression.content[:3]

def __str__(self):
return 'Unexpected content at offset {0}, "{1}"'.format(self.offset,
self.content)


class Context(dict):
"""
This class holds variable values by subclassing dict, and while it

@ -266,6 +274,7 @@ class Context(dict):
to reflect the ambiguity between string literals and preprocessor
variables.
"""

def __getitem__(self, key):
if key in self:
return super(self.__class__, self).__getitem__(key)

@ -285,9 +294,9 @@ class Preprocessor:

def __init__(self, defines=None, marker='#'):
self.context = Context()
for k,v in {'FILE': '',
'LINE': 0,
'DIRECTORY': os.path.abspath('.')}.iteritems():
for k, v in {'FILE': '',
'LINE': 0,
'DIRECTORY': os.path.abspath('.')}.iteritems():
self.context[k] = v
try:
# Can import globally because of bootstrapping issues.

@ -341,7 +350,8 @@ class Preprocessor:
elif self.actionLevel == 1:
msg = 'no useful preprocessor directives found'
if msg:
class Fake(object): pass
class Fake(object):
pass
fake = Fake()
fake.context = {
'FILE': file,

@ -454,7 +464,7 @@ class Preprocessor:
self.actionLevel = 2
self.out.write(filteredLine)

def handleCommandLine(self, args, defaultToStdin = False):
def handleCommandLine(self, args, defaultToStdin=False):
"""
Parse a commandline into this parser.
Uses OptionParser internally, no args mean sys.argv[1:].

@ -505,9 +515,10 @@ class Preprocessor:
if options.output:
out.close()

def getCommandLineParser(self, unescapeDefines = False):
def getCommandLineParser(self, unescapeDefines=False):
escapedValue = re.compile('".*"$')
numberValue = re.compile('\d+$')

def handleD(option, opt, value, parser):
vals = value.split('=', 1)
if len(vals) == 1:

@ -518,12 +529,16 @@ class Preprocessor:
elif numberValue.match(vals[1]):
vals[1] = int(vals[1])
self.context[vals[0]] = vals[1]

def handleU(option, opt, value, parser):
del self.context[value]

def handleF(option, opt, value, parser):
self.do_filter(value)

def handleMarker(option, opt, value, parser):
self.setMarker(value)

def handleSilenceDirectiveWarnings(option, opt, value, parse):
self.setSilenceDirectiveWarnings(True)
p = OptionParser()

@ -534,7 +549,7 @@ class Preprocessor:
p.add_option('-F', action='callback', callback=handleF, type="string",
metavar="FILTER", help='Enable the specified filter')
p.add_option('-o', '--output', type="string", default=None,
metavar="FILENAME", help='Output to the specified file '+
metavar="FILENAME", help='Output to the specified file ' +
'instead of stdout')
p.add_option('--depend', type="string", default=None, metavar="FILENAME",
help='Generate dependencies in the given file')

@ -586,6 +601,7 @@ class Preprocessor:
except:
pass
self.context[m.group('name')] = val

def do_undef(self, args):
m = re.match('(?P<name>\w+)$', args, re.U)
if not m:

@ -593,9 +609,11 @@ class Preprocessor:
if args in self.context:
del self.context[args]
# Logic

def ensure_not_else(self):
if len(self.ifStates) == 0 or self.ifStates[-1] == 2:
sys.stderr.write('WARNING: bad nesting of #else in %s\n' % self.context['FILE'])

def do_if(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1

@ -619,6 +637,7 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass

def do_ifdef(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1

@ -634,6 +653,7 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass

def do_ifndef(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1

@ -649,51 +669,60 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass
def do_else(self, args, ifState = 2):

def do_else(self, args, ifState=2):
self.ensure_not_else()
hadTrue = self.ifStates[-1] == 0
self.ifStates[-1] = ifState # in-else
self.ifStates[-1] = ifState # in-else
if hadTrue:
self.disableLevel = 1
return
self.disableLevel = 0

def do_elif(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_if(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])

def do_elifdef(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_ifdef(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])

def do_elifndef(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_ifndef(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])

def do_endif(self, args):
if self.disableLevel > 0:
self.disableLevel -= 1
if self.disableLevel == 0:
self.ifStates.pop()
# output processing

def do_expand(self, args):
lst = re.split('__(\w+)__', args, re.U)
do_replace = False

def vsubst(v):
if v in self.context:
return str(self.context[v])
return ''
for i in range(1, len(lst), 2):
lst[i] = vsubst(lst[i])
lst.append('\n') # add back the newline
lst.append('\n') # add back the newline
self.write(reduce(lambda x, y: x+y, lst, ''))

def do_literal(self, args):
self.write(args + '\n')

def do_filter(self, args):
filters = [f for f in args.split(' ') if hasattr(self, 'filter_' + f)]
if len(filters) == 0:

@ -705,6 +734,7 @@ class Preprocessor:
filterNames.sort()
self.filters = [(fn, current[fn]) for fn in filterNames]
return

def do_unfilter(self, args):
filters = args.split(' ')
current = dict(self.filters)

@ -719,12 +749,14 @@ class Preprocessor:
#
# emptyLines
# Strips blank lines from the output.

def filter_emptyLines(self, aLine):
if aLine == '\n':
return ''
return aLine
# slashslash
# Strips everything after //

def filter_slashslash(self, aLine):
if (aLine.find('//') == -1):
return aLine

@ -734,10 +766,12 @@ class Preprocessor:
return aLine
# spaces
# Collapses sequences of spaces into a single space

def filter_spaces(self, aLine):
return re.sub(' +', ' ', aLine).strip(' ')
# substition
# helper to be used by both substition and attemptSubstitution

def filter_substitution(self, aLine, fatal=True):
def repl(matchobj):
varname = matchobj.group('VAR')

@ -747,9 +781,11 @@ class Preprocessor:
raise Preprocessor.Error(self, 'UNDEFINED_VAR', varname)
return matchobj.group(0)
return self.varsubst.sub(repl, aLine)

def filter_attemptSubstitution(self, aLine):
return self.filter_substitution(aLine, fatal=False)
# File ops

def do_include(self, args, filters=True):
"""
Preprocess a given file.

@ -806,15 +842,17 @@ class Preprocessor:
self.context['LINE'] = oldLine
self.context['DIRECTORY'] = oldDir
self.curdir = oldCurdir

def do_includesubst(self, args):
args = self.filter_substitution(args)
self.do_include(args)

def do_error(self, args):
raise Preprocessor.Error(self, 'Error: ', str(args))


def preprocess(includes=[sys.stdin], defines={},
output = sys.stdout,
output=sys.stdout,
marker='#'):
pp = Preprocessor(defines=defines,
marker=marker)

@ -11,6 +11,7 @@ import mozpack.path as mozpath
from mozpack.dmg import create_dmg
from application_ini import get_application_ini_value


def repackage_dmg(infile, output):

if not tarfile.is_tarfile(infile):

@ -16,16 +16,17 @@ _MSI_ARCH = {
'x86_64': 'x64',
}


def update_wsx(wfile, pvalues):

parsed = minidom.parse(wfile)

# construct a dictinary for the pre-processing options
# iterate over that list and add them to the wsx xml doc
for k,v in pvalues.items():
entry = parsed.createProcessingInstruction('define', k + ' = "' + v + '"')
root = parsed.firstChild
parsed.insertBefore(entry, root)
for k, v in pvalues.items():
entry = parsed.createProcessingInstruction('define', k + ' = "' + v + '"')
root = parsed.firstChild
parsed.insertBefore(entry, root)
# write out xml to new wfile
new_w_file = wfile + ".new"
fh = open(new_w_file, "wb")

@ -76,8 +77,8 @@ def repackage_msi(topsrcdir, wsx, version, locale, arch, setupexe, candle, light
try:
wsx_file = os.path.split(wsx)[1]
shutil.copy(wsx, tmpdir)
temp_wsx_file = os.path.join(tmpdir, wsx_file)
temp_wsx_file = mozpath.realpath(temp_wsx_file)
temp_wsx_file = os.path.join(tmpdir, wsx_file)
temp_wsx_file = mozpath.realpath(temp_wsx_file)
pre_values = {'Vendor': 'Mozilla',
'BrandFullName': 'Mozilla Firefox',
'Version': version,

@ -100,7 +101,7 @@ def repackage_msi(topsrcdir, wsx, version, locale, arch, setupexe, candle, light
'-out', wix_installer, wix_object_file]
subprocess.check_call(light_cmd, env=env)
os.remove(wix_object_file)
#mv file to output dir
# mv file to output dir
shutil.move(wix_installer, output)
finally:
os.chdir(old_cwd)

@ -22,6 +22,7 @@ def _tokens2re(**tokens):
# backslash, captured in the "escape" match group.
return re.compile('(?:%s|%s)' % (nonescaped, r'(?P<escape>\\\\)'))


UNQUOTED_TOKENS_RE = _tokens2re(
whitespace=r'[\t\r\n ]+',
quote=r'[\'"]',

@ -54,6 +55,7 @@ class _ClineSplitter(object):
Parses a given command line string and creates a list of command
and arguments, with wildcard expansion.
'''

def __init__(self, cline):
self.arg = None
self.cline = cline

@ -187,9 +187,9 @@ def setup(app):
# properly. We leverage the in-tree virtualenv for this.
topsrcdir = manager.topsrcdir
ve = VirtualenvManager(topsrcdir,
os.path.join(topsrcdir, 'dummy-objdir'),
os.path.join(app.outdir, '_venv'),
sys.stderr,
os.path.join(topsrcdir, 'build', 'virtualenv_packages.txt'))
os.path.join(topsrcdir, 'dummy-objdir'),
os.path.join(app.outdir, '_venv'),
sys.stderr,
os.path.join(topsrcdir, 'build', 'virtualenv_packages.txt'))
ve.ensure()
ve.activate()

@ -238,6 +238,7 @@ def get_build_attrs(attrs):
res['cpu_percent'] = int(round(usage['cpu_percent']))
return res


def filter_args(command, argv, paths):
'''
Given the full list of command-line arguments, remove anything up to and including `command`,

@ -4,7 +4,10 @@

import unittest

import os, sys, os.path, time
import os
import sys
import os.path
import time
from tempfile import mkdtemp
from shutil import rmtree
import mozunit

@ -13,77 +16,79 @@ from mozbuild.action.buildlist import addEntriesToListFile


class TestBuildList(unittest.TestCase):
"""
Unit tests for buildlist.py
"""
def setUp(self):
self.tmpdir = mkdtemp()
"""
Unit tests for buildlist.py
"""

def tearDown(self):
rmtree(self.tmpdir)
def setUp(self):
self.tmpdir = mkdtemp()

# utility methods for tests
def touch(self, file, dir=None):
if dir is None:
dir = self.tmpdir
f = os.path.join(dir, file)
open(f, 'w').close()
return f
def tearDown(self):
rmtree(self.tmpdir)

def assertFileContains(self, filename, l):
"""Assert that the lines in the file |filename| are equal
to the contents of the list |l|, in order."""
l = l[:]
f = open(filename, 'r')
lines = [line.rstrip() for line in f.readlines()]
f.close()
for line in lines:
self.assert_(len(l) > 0,
"ran out of expected lines! (expected '{0}', got '{1}')"
.format(l, lines))
self.assertEqual(line, l.pop(0))
self.assert_(len(l) == 0,
"not enough lines in file! (expected '{0}',"
" got '{1}'".format(l, lines))
# utility methods for tests
def touch(self, file, dir=None):
if dir is None:
dir = self.tmpdir
f = os.path.join(dir, file)
open(f, 'w').close()
return f

def test_basic(self):
"Test that addEntriesToListFile works when file doesn't exist."
testfile = os.path.join(self.tmpdir, "test.list")
l = ["a", "b", "c"]
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)
# ensure that attempting to add the same entries again doesn't change it
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)
def assertFileContains(self, filename, l):
"""Assert that the lines in the file |filename| are equal
to the contents of the list |l|, in order."""
l = l[:]
f = open(filename, 'r')
lines = [line.rstrip() for line in f.readlines()]
f.close()
for line in lines:
self.assert_(len(l) > 0,
"ran out of expected lines! (expected '{0}', got '{1}')"
.format(l, lines))
self.assertEqual(line, l.pop(0))
self.assert_(len(l) == 0,
"not enough lines in file! (expected '{0}',"
" got '{1}'".format(l, lines))

def test_append(self):
"Test adding new entries."
testfile = os.path.join(self.tmpdir, "test.list")
l = ["a", "b", "c"]
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)
l2 = ["x","y","z"]
addEntriesToListFile(testfile, l2)
l.extend(l2)
self.assertFileContains(testfile, l)
def test_basic(self):
"Test that addEntriesToListFile works when file doesn't exist."
testfile = os.path.join(self.tmpdir, "test.list")
l = ["a", "b", "c"]
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)
# ensure that attempting to add the same entries again doesn't change it
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)

def test_append_some(self):
"Test adding new entries mixed with existing entries."
testfile = os.path.join(self.tmpdir, "test.list")
l = ["a", "b", "c"]
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)
addEntriesToListFile(testfile, ["a", "x", "c", "z"])
self.assertFileContains(testfile, ["a", "b", "c", "x", "z"])
def test_append(self):
"Test adding new entries."
testfile = os.path.join(self.tmpdir, "test.list")
l = ["a", "b", "c"]
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)
l2 = ["x", "y", "z"]
addEntriesToListFile(testfile, l2)
l.extend(l2)
self.assertFileContains(testfile, l)

def test_append_some(self):
"Test adding new entries mixed with existing entries."
testfile = os.path.join(self.tmpdir, "test.list")
l = ["a", "b", "c"]
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)
addEntriesToListFile(testfile, ["a", "x", "c", "z"])
self.assertFileContains(testfile, ["a", "b", "c", "x", "z"])

def test_add_multiple(self):
"""Test that attempting to add the same entry multiple times results in
only one entry being added."""
testfile = os.path.join(self.tmpdir, "test.list")
addEntriesToListFile(testfile, ["a", "b", "a", "a", "b"])
self.assertFileContains(testfile, ["a", "b"])
addEntriesToListFile(testfile, ["c", "a", "c", "b", "c"])
self.assertFileContains(testfile, ["a", "b", "c"])

def test_add_multiple(self):
"""Test that attempting to add the same entry multiple times results in
only one entry being added."""
testfile = os.path.join(self.tmpdir, "test.list")
addEntriesToListFile(testfile, ["a","b","a","a","b"])
self.assertFileContains(testfile, ["a","b"])
addEntriesToListFile(testfile, ["c","a","c","b","c"])
self.assertFileContains(testfile, ["a","b","c"])

if __name__ == '__main__':
mozunit.main()
mozunit.main()

@ -21,6 +21,7 @@ test_data_path = mozpath.join(test_data_path, 'data', 'node')
def data(name):
return os.path.join(test_data_path, name)


TEST_SCRIPT = data("node-test-script.js")
NONEXISTENT_TEST_SCRIPT = data("non-existent-test-script.js")

@ -70,5 +70,6 @@ class TestGenerateManifest(TestWithTmpDir):
self.assertFalse(os.path.exists(self.tmppath('dest/foo/file2')))
self.assertFalse(os.path.exists(self.tmppath('dest/foo/file3')))


if __name__ == '__main__':
mozunit.main()

@ -20,40 +20,40 @@ CREATE_NORMAL_LINK = """CREATE TABLE normal_link
(from_id integer,
to_id integer, unique(from_id, to_id));"""

NODE_DATA = [(1, 0 ,2, -1, '.'),
(2, 100, 0, 1, 'Base64.cpp'),
(3, 200, 0, 1, 'nsArray.cpp'),
(4, 100, 0, 1, 'nsWildCard.h'),
(5, -1, 1, 9426, 'CDD Unified_cpp_xpcom_io0.cpp'),
(6, -1, 1, 5921, 'CXX Unified_cpp_xpcom_ds0.cpp'),
(7, -1, 1, 11077, 'CXX /builds/worker/workspace/build/src/dom/\
NODE_DATA = [(1, 0, 2, -1, '.'),
(2, 100, 0, 1, 'Base64.cpp'),
(3, 200, 0, 1, 'nsArray.cpp'),
(4, 100, 0, 1, 'nsWildCard.h'),
(5, -1, 1, 9426, 'CDD Unified_cpp_xpcom_io0.cpp'),
(6, -1, 1, 5921, 'CXX Unified_cpp_xpcom_ds0.cpp'),
(7, -1, 1, 11077, 'CXX /builds/worker/workspace/build/src/dom/\
plugins/base/snNPAPIPlugin.cpp'),
(8, -1, 1, 7677, 'CXX Unified_cpp_xpcom_io1.cpp'),
(9, -1, 1, 8672, 'CXX Unified_cpp_modules_libjar0.cpp'),
(10, -1, 4, 1, 'Unified_cpp_xpcom_io0.o'),
(11, -1, 4, 1, 'Unified_cpp_xpcom_dso.o'),
(12, -1, 4, 1, 'nsNPAPIPlugin.o'),
(13, -1, 4, 1, 'Unified_cpp_xpcom_io1.o'),
(14, -1, 4, 1, 'Unified_cpp_modules_libjar0.o'),
(15, -1, 1, 52975, 'LINK libxul.so'),
(16, -1, 4, 1, 'libxul.so'),
(17, -1, 1, 180, 'LINK libtestcrasher.so'),
(18, -1, 1, 944, 'python /builds/worker/workspace/build/src/toolkit/\
(8, -1, 1, 7677, 'CXX Unified_cpp_xpcom_io1.cpp'),
(9, -1, 1, 8672, 'CXX Unified_cpp_modules_libjar0.cpp'),
(10, -1, 4, 1, 'Unified_cpp_xpcom_io0.o'),
(11, -1, 4, 1, 'Unified_cpp_xpcom_dso.o'),
(12, -1, 4, 1, 'nsNPAPIPlugin.o'),
(13, -1, 4, 1, 'Unified_cpp_xpcom_io1.o'),
(14, -1, 4, 1, 'Unified_cpp_modules_libjar0.o'),
(15, -1, 1, 52975, 'LINK libxul.so'),
(16, -1, 4, 1, 'libxul.so'),
(17, -1, 1, 180, 'LINK libtestcrasher.so'),
(18, -1, 1, 944, 'python /builds/worker/workspace/build/src/toolkit/\
library/dependentlibs.py:gen_list -> [dependentlibs.list, \
dependentlibs.list.gtest, dependentlibs.list.pp]'),
(19, -1, 1, 348, 'LINK ../../dist/bin/plugin-container'),
(20, -1, 1, 342, 'LINK ../../../dist/bin/xpcshell'),
(21, -1, 4, 1, 'libtestcrasher.so'),
(22, -1, 4, 1, 'dependentlibs.list'),
(23, -1, 4, 1, 'dependentlibs.list.gtest'),
(24, -1, 4, 1, 'dependentlibs.list.pp'),
(25, -1, 4, 1, 'plugin-container'),
(26, -1, 4, 1, 'xpcshell'),
(27, -1, 6, 1, '<shlibs>'),
(28, 1, 0, 1, 'dummy node'),
(100, 300, 2, -1, 'io'),
(200, 300, 2, -1, 'ds'),
(300, 1, 2, -1, 'xpcom')]
(19, -1, 1, 348, 'LINK ../../dist/bin/plugin-container'),
(20, -1, 1, 342, 'LINK ../../../dist/bin/xpcshell'),
(21, -1, 4, 1, 'libtestcrasher.so'),
(22, -1, 4, 1, 'dependentlibs.list'),
(23, -1, 4, 1, 'dependentlibs.list.gtest'),
(24, -1, 4, 1, 'dependentlibs.list.pp'),
(25, -1, 4, 1, 'plugin-container'),
(26, -1, 4, 1, 'xpcshell'),
(27, -1, 6, 1, '<shlibs>'),
(28, 1, 0, 1, 'dummy node'),
(100, 300, 2, -1, 'io'),
(200, 300, 2, -1, 'ds'),
(300, 1, 2, -1, 'xpcom')]

NORMAL_LINK_DATA = [(2, 5), (3, 6), (4, 7), (4, 8), (4, 9), (5, 10), (6, 11),
(7, 12), (8, 13), (9, 14), (10, 15), (11, 15), (12, 15),

@ -63,6 +63,7 @@ NORMAL_LINK_DATA = [(2, 5), (3, 6), (4, 7), (4, 8), (4, 9), (5, 10), (6, 11),

PATH_TO_TEST_DB = ':memory:'


class TestGraph(unittest.TestCase):
@classmethod
def setUpClass(cls):

@ -93,10 +94,10 @@ class TestGraph(unittest.TestCase):
self.assertEqual(len(g.get_node(21).cmds), 0)
self.assertEqual(len(g.get_node(28).cmds), 0)
# one immediate command child
self.assertItemsEqual(g.get_node(2).get_cmd_ids(),[5] + libxul)
self.assertItemsEqual(g.get_node(3).get_cmd_ids(),[6] + libxul)
self.assertItemsEqual(g.get_node(2).get_cmd_ids(), [5] + libxul)
self.assertItemsEqual(g.get_node(3).get_cmd_ids(), [6] + libxul)
# multiple immediate command children
self.assertItemsEqual(g.get_node(4).get_cmd_ids(),[7, 8, 9] + libxul)
self.assertItemsEqual(g.get_node(4).get_cmd_ids(), [7, 8, 9] + libxul)
# node is not a file or command
self.assertItemsEqual(g.get_node(16).get_cmd_ids(), libxul[1:])
self.assertItemsEqual(g.get_node(11).get_cmd_ids(), libxul)

@ -131,5 +132,6 @@ class TestGraph(unittest.TestCase):
self.assertEqual(g.get_node(4).path, 'xpcom/io/nsWildCard.h')
self.assertEqual(g.get_node(28).path, 'dummy node')


if __name__ == '__main__':
mozunit.main()
mozunit.main()

@ -233,5 +233,6 @@ class TestBuild(unittest.TestCase):
'bin/app/modules/foo.jsm': 'foo.jsm\n',
})


if __name__ == '__main__':
main()

@ -2,7 +2,8 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os, posixpath
import os
import posixpath
from StringIO import StringIO
import unittest
from mozunit import main, MockedOpen

@ -39,15 +40,16 @@ class TestEnvironment(unittest.TestCase):
and ALLEMPTYSUBSTS.
'''
env = ConfigEnvironment('.', '.',
defines = { 'foo': 'bar', 'baz': 'qux 42',
'abc': "d'e'f", 'extra': 'foobar' },
non_global_defines = ['extra', 'ignore'],
substs = { 'FOO': 'bar', 'FOOBAR': '', 'ABC': 'def',
'bar': 'baz qux', 'zzz': '"abc def"',
'qux': '' })
defines={'foo': 'bar', 'baz': 'qux 42',
'abc': "d'e'f", 'extra': 'foobar'},
non_global_defines=['extra', 'ignore'],
substs={'FOO': 'bar', 'FOOBAR': '', 'ABC': 'def',
'bar': 'baz qux', 'zzz': '"abc def"',
'qux': ''})
# non_global_defines should be filtered out in ACDEFINES.
# Original order of the defines need to be respected in ACDEFINES
self.assertEqual(env.substs['ACDEFINES'], """-Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar""")
self.assertEqual(env.substs['ACDEFINES'],
"""-Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar""")
# Likewise for ALLSUBSTS, which also must contain ACDEFINES
self.assertEqual(env.substs['ALLSUBSTS'], '''ABC = def
ACDEFINES = -Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar

@ -25,15 +25,15 @@ class TestFasterMakeBackend(BackendTester):
"""Ensure the FasterMakeBackend works without error."""
env = self._consume('stub0', FasterMakeBackend)
self.assertTrue(os.path.exists(mozpath.join(env.topobjdir,
'backend.FasterMakeBackend')))
'backend.FasterMakeBackend')))
self.assertTrue(os.path.exists(mozpath.join(env.topobjdir,
'backend.FasterMakeBackend.in')))
'backend.FasterMakeBackend.in')))

def test_final_target_files_wildcard(self):
"""Ensure that wildcards in FINAL_TARGET_FILES work properly."""
env = self._consume('final-target-files-wildcard', FasterMakeBackend)
m = InstallManifest(path=mozpath.join(env.topobjdir,
'faster', 'install_dist_bin'))
'faster', 'install_dist_bin'))
self.assertEqual(len(m), 1)
reg = FileRegistry()
m.populate_registry(reg)

@ -30,6 +30,7 @@ from mozbuild.frontend.data import (
UnifiedSources,
)


class TestGnMozbuildWriter(BackendTester):

def setUp(self):

@ -82,7 +82,8 @@ class TestPartial(unittest.TestCase):
self.assertTrue(os.path.exists(path))

def _assert_deps(self, env, deps):
deps = sorted(['$(wildcard %s)' % (mozpath.join(env.topobjdir, 'config.statusd', d)) for d in deps])
deps = sorted(['$(wildcard %s)' %
(mozpath.join(env.topobjdir, 'config.statusd', d)) for d in deps])
self.assertEqual(sorted(env.get_dependencies()), deps)

def test_dependencies(self):

@ -107,7 +108,8 @@ class TestPartial(unittest.TestCase):

with self.assertRaises(KeyError):
x = env.substs['NON_EXISTENT']
self._assert_deps(env, ['defines/MOZ_FOO', 'defines/MOZ_BAR', 'substs/MOZ_SUBST_1', 'substs/NON_EXISTENT'])
self._assert_deps(env, ['defines/MOZ_FOO', 'defines/MOZ_BAR',
'substs/MOZ_SUBST_1', 'substs/NON_EXISTENT'])
self.assertEqual(env.substs.get('NON_EXISTENT'), None)

def test_set_subst(self):

@ -159,5 +161,6 @@ class TestPartial(unittest.TestCase):
self.assertEqual(mydefines['DEBUG'], '1')
self.assertEqual(mydefines['MOZ_FOO'], '1')


if __name__ == "__main__":
main()

@ -59,9 +59,10 @@ class TestRecursiveMakeTraversal(unittest.TestCase):
traversal.add('X')

parallels = set(('G', 'H', 'I', 'J', 'O', 'P', 'Q', 'R', 'U'))

def filter(current, subdirs):
return (current, [d for d in subdirs.dirs if d in parallels],
[d for d in subdirs.dirs if d not in parallels])
[d for d in subdirs.dirs if d not in parallels])

start, deps = traversal.compute_dependencies(filter)
self.assertEqual(start, ('X',))

@ -95,8 +96,8 @@ class TestRecursiveMakeTraversal(unittest.TestCase):

self.assertEqual(list(traversal.traverse('', filter)),
['', 'A', 'B', 'E', 'F', 'C', 'G', 'H', 'D', 'I',
'M', 'N', 'T', 'J', 'O', 'P', 'U', 'K', 'Q', 'R',
'V', 'L', 'S', 'W', 'X'])
'M', 'N', 'T', 'J', 'O', 'P', 'U', 'K', 'Q', 'R',
'V', 'L', 'S', 'W', 'X'])

self.assertEqual(list(traversal.traverse('C', filter)),
['C', 'G', 'H'])

@ -191,14 +192,15 @@ class TestRecursiveMakeTraversal(unittest.TestCase):
'J': ('',),
})


class TestRecursiveMakeBackend(BackendTester):
def test_basic(self):
"""Ensure the RecursiveMakeBackend works without error."""
env = self._consume('stub0', RecursiveMakeBackend)
self.assertTrue(os.path.exists(mozpath.join(env.topobjdir,
'backend.RecursiveMakeBackend')))
'backend.RecursiveMakeBackend')))
self.assertTrue(os.path.exists(mozpath.join(env.topobjdir,
'backend.RecursiveMakeBackend.in')))
'backend.RecursiveMakeBackend.in')))

def test_output_files(self):
"""Ensure proper files are generated."""

@ -385,7 +387,7 @@ class TestRecursiveMakeBackend(BackendTester):

# EXPORTS files should appear in the dist_include install manifest.
m = InstallManifest(path=mozpath.join(env.topobjdir,
'_build_manifests', 'install', 'dist_include'))
'_build_manifests', 'install', 'dist_include'))
self.assertEqual(len(m), 7)
self.assertIn('foo.h', m)
self.assertIn('mozilla/mozilla1.h', m)

@ -567,7 +569,7 @@ class TestRecursiveMakeBackend(BackendTester):

# EXPORTS files should appear in the dist_include install manifest.
m = InstallManifest(path=mozpath.join(env.topobjdir,
'_build_manifests', 'install', 'dist_include'))
'_build_manifests', 'install', 'dist_include'))
self.assertEqual(len(m), 8)
self.assertIn('foo.h', m)
self.assertIn('mozilla/mozilla1.h', m)

@ -605,7 +607,7 @@ class TestRecursiveMakeBackend(BackendTester):

# RESOURCE_FILES should appear in the dist_bin install manifest.
m = InstallManifest(path=os.path.join(env.topobjdir,
'_build_manifests', 'install', 'dist_bin'))
'_build_manifests', 'install', 'dist_bin'))
self.assertEqual(len(m), 10)
self.assertIn('res/foo.res', m)
self.assertIn('res/fonts/font1.ttf', m)

@ -637,7 +639,7 @@ class TestRecursiveMakeBackend(BackendTester):
"""Pattern matches in test manifests' support-files should be recorded."""
env = self._consume('test-manifests-written', RecursiveMakeBackend)
m = InstallManifest(path=mozpath.join(env.topobjdir,
'_build_manifests', 'install', '_test_files'))
'_build_manifests', 'install', '_test_files'))

# This is not the most robust test in the world, but it gets the job
# done.

@ -690,7 +692,7 @@ class TestRecursiveMakeBackend(BackendTester):

# Install manifests should contain entries.
install_dir = mozpath.join(env.topobjdir, '_build_manifests',
'install')
'install')
self.assertTrue(os.path.isfile(mozpath.join(install_dir, 'xpidl')))

m = InstallManifest(path=mozpath.join(install_dir, 'xpidl'))

@ -710,7 +712,7 @@ class TestRecursiveMakeBackend(BackendTester):
def test_test_support_files_tracked(self):
env = self._consume('test-support-binaries-tracked', RecursiveMakeBackend)
m = InstallManifest(path=mozpath.join(env.topobjdir,
'_build_manifests', 'install', '_tests'))
'_build_manifests', 'install', '_tests'))
self.assertEqual(len(m), 4)
self.assertIn('xpcshell/tests/mozbuildtest/test-library.dll', m)
self.assertIn('xpcshell/tests/mozbuildtest/test-one.exe', m)

@ -772,7 +774,8 @@ class TestRecursiveMakeBackend(BackendTester):
topsrcdir = env.topsrcdir.replace(os.sep, '/')

expected = [
"ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" % tuple([topsrcdir] * 4),
"ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" % tuple([
topsrcdir] * 4),
"CPPSRCS := UnifiedProtocols0.cpp",
"IPDLDIRS := %s %s/bar %s/foo" % (env.topobjdir, topsrcdir, topsrcdir),
]

@ -949,8 +952,8 @@ class TestRecursiveMakeBackend(BackendTester):
backend_path = mozpath.join(key, 'backend.mk')
lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
found = [str for str in lines if
str.startswith('FINAL_TARGET') or str.startswith('XPI_NAME') or
str.startswith('DIST_SUBDIR')]
str.startswith('FINAL_TARGET') or str.startswith('XPI_NAME') or
str.startswith('DIST_SUBDIR')]
self.assertEqual(found, expected_rules)

def test_final_target_pp_files(self):

@ -1149,7 +1152,7 @@ class TestRecursiveMakeBackend(BackendTester):
def test_test_manifests_duplicate_support_files(self):
"""Ensure duplicate support-files in test manifests work."""
env = self._consume('test-manifests-duplicate-support-files',
RecursiveMakeBackend)
RecursiveMakeBackend)

p = os.path.join(env.topobjdir, '_build_manifests', 'install', '_test_files')
m = InstallManifest(p)

@ -41,7 +41,7 @@ class TestVisualStudioBackend(BackendTester):
els = d.getElementsByTagName('NMakeForcedIncludes')
self.assertEqual(len(els), 1)
self.assertEqual(els[0].firstChild.nodeValue,
'$(TopObjDir)\\dist\\include\\mozilla-config.h')
'$(TopObjDir)\\dist\\include\\mozilla-config.h')

# LOCAL_INCLUDES get added to the include search path.
els = d.getElementsByTagName('NMakeIncludeSearchPath')

@@ -108,6 +108,7 @@ LH:2
 end_of_record
 """
 
+
 class TempFile():
     def __init__(self, content):
         self.file = NamedTemporaryFile(delete=False)
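Only TempFile's constructor line is visible here; the point of NamedTemporaryFile(delete=False) is that the file persists after close, so other code can reopen it by name. A plausible completion of the helper (the write and context-manager methods are assumptions, not the original code):

import os
from tempfile import NamedTemporaryFile


class TempFile(object):
    def __init__(self, content):
        # delete=False keeps the file on disk after close; we remove it ourselves.
        self.file = NamedTemporaryFile(delete=False)
        self.file.write(content)
        self.file.flush()

    def __enter__(self):
        return self.file.name

    def __exit__(self, *exc):
        self.file.close()
        os.unlink(self.file.name)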
@@ -142,6 +143,7 @@ class TestLcovParser(unittest.TestCase):
         output = self.parser_roundtrip(fn_with_multiple_commas)
         self.assertEqual(fn_with_multiple_commas, output)
 
+
 multiple_included_files = """//@line 1 "/src/dir/foo.js"
 bazfoobar
 //@line 2 "/src/dir/path/bar.js"
@@ -158,6 +160,7 @@ baz
 fin
 """
 
+
 class TestLineRemapping(unittest.TestCase):
     def setUp(self):
         chrome_map_file = os.path.join(buildconfig.topobjdir, 'chrome-map.json')
@@ -237,6 +240,7 @@ class TestLineRemapping(unittest.TestCase):
         # Rewrite preprocessed entries.
         lcov_file = lcov_rewriter.LcovFile([fpath])
         r_num = []
+
         def rewrite_source(s):
             r_num.append(1)
             return s, pp_remap
@@ -262,6 +266,7 @@ class TestLineRemapping(unittest.TestCase):
         self.assertEqual(original_covered_function_count,
                          sum(r.covered_function_count for r in records))
 
+
 class TestUrlFinder(unittest.TestCase):
     def setUp(self):
         chrome_map_file = os.path.join(buildconfig.topobjdir, 'chrome-map.json')
@@ -329,8 +334,10 @@ class TestUrlFinder(unittest.TestCase):
         omnijar_name = buildconfig.substs.get('OMNIJAR_NAME')
 
         paths = [
-            ('jar:file:///home/worker/workspace/build/application/' + app_name + '/' + omnijar_name + '!/components/MainProcessSingleton.js', 'path1'),
-            ('jar:file:///home/worker/workspace/build/application/' + app_name + '/browser/features/firefox@getpocket.com.xpi!/bootstrap.js', 'path4'),
+            ('jar:file:///home/worker/workspace/build/application/' + app_name +
+             '/' + omnijar_name + '!/components/MainProcessSingleton.js', 'path1'),
+            ('jar:file:///home/worker/workspace/build/application/' + app_name +
+             '/browser/features/firefox@getpocket.com.xpi!/bootstrap.js', 'path4'),
         ]
 
         url_finder = lcov_rewriter.UrlFinder(self._chrome_map_file, '', '', [])
@@ -356,31 +363,37 @@ class TestUrlFinder(unittest.TestCase):
     def test_chrome_resource_paths(self):
         paths = [
             # Path with default url prefix
-            ('resource://gre/modules/osfile/osfile_async_worker.js', ('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
+            ('resource://gre/modules/osfile/osfile_async_worker.js',
+             ('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
             # Path with url prefix that is in chrome map
-            ('resource://activity-stream/lib/PrefsFeed.jsm', ('browser/components/newtab/lib/PrefsFeed.jsm', None)),
+            ('resource://activity-stream/lib/PrefsFeed.jsm',
+             ('browser/components/newtab/lib/PrefsFeed.jsm', None)),
             # Path which is in url overrides
-            ('chrome://global/content/netError.xhtml', ('browser/base/content/aboutNetError.xhtml', None)),
+            ('chrome://global/content/netError.xhtml',
+             ('browser/base/content/aboutNetError.xhtml', None)),
             # Path which ends with > eval
             ('resource://gre/modules/osfile/osfile_async_worker.js line 3 > eval', None),
             # Path which ends with > Function
             ('resource://gre/modules/osfile/osfile_async_worker.js line 3 > Function', None),
             # Path which contains "->"
-            ('resource://gre/modules/addons/XPIProvider.jsm -> resource://gre/modules/osfile/osfile_async_worker.js', ('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
+            ('resource://gre/modules/addons/XPIProvider.jsm -> resource://gre/modules/osfile/osfile_async_worker.js',
+             ('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
             # Path with pp_info
             ('resource://gre/modules/AppConstants.jsm', ('toolkit/modules/AppConstants.jsm', {
                 '101,102': [
-                'toolkit/modules/AppConstants.jsm',
+                    'toolkit/modules/AppConstants.jsm',
                     135
                 ],
             })),
             # Path with query
-            ('resource://activity-stream/lib/PrefsFeed.jsm?q=0.9098419174803978', ('browser/components/newtab/lib/PrefsFeed.jsm', None)),
+            ('resource://activity-stream/lib/PrefsFeed.jsm?q=0.9098419174803978',
+             ('browser/components/newtab/lib/PrefsFeed.jsm', None)),
         ]
 
         url_finder = lcov_rewriter.UrlFinder(self._chrome_map_file, '', 'dist/bin/', [])
         for path, expected in paths:
            self.assertEqual(url_finder.rewrite_url(path), expected)
 
 
 if __name__ == '__main__':
     mozunit.main()
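UrlFinder maps the URLs that appear in coverage output back to source-tree paths, returning (path, pp_info) tuples, or None for synthetic frames such as "... line 3 > eval". A condensed usage sketch built from the constructor and rewrite_url calls above (the chrome-map filename is illustrative):

from mozbuild.codecoverage import lcov_rewriter

# Arguments as used in the tests: chrome map, app dir, prefix to strip, extra maps.
url_finder = lcov_rewriter.UrlFinder('chrome-map.json', '', 'dist/bin/', [])

# A known resource:// URL resolves to a source path (pp_info is None here).
print(url_finder.rewrite_url('resource://activity-stream/lib/PrefsFeed.jsm'))

# eval/Function frames have no source file, so the rewrite returns None.
print(url_finder.rewrite_url(
    'resource://gre/modules/osfile/osfile_async_worker.js line 3 > eval'))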
@@ -46,7 +46,7 @@ class MockConfig(object):
                  topsrcdir='/path/to/topsrcdir',
                  extra_substs={},
                  error_is_fatal=True,
-        ):
+                 ):
         self.topsrcdir = mozpath.abspath(topsrcdir)
         self.topobjdir = mozpath.abspath('/path/to/topobjdir')
 
@@ -27,11 +27,12 @@ MSVC_TESTS = [
     "conversion from 'double' to 'uint32_t', possible loss of data",
     'C:/mozilla-central/test/foo.cpp', 793, 'C4244',
     "'return' : conversion from 'double' to 'uint32_t', possible loss of "
-    'data')
+     'data')
 ]
 
 CURRENT_LINE = 1
 
+
 def get_warning():
     global CURRENT_LINE
 
@@ -45,6 +46,7 @@ def get_warning():
 
     return w
 
+
 class TestCompilerWarning(unittest.TestCase):
     def test_equivalence(self):
         w1 = CompilerWarning()
@@ -122,6 +124,7 @@ class TestCompilerWarning(unittest.TestCase):
         self.assertGreaterEqual(w2, w1)
         self.assertGreaterEqual(w1, w2)
 
+
 class TestWarningsParsing(unittest.TestCase):
     def test_clang_parsing(self):
         for source, filename, line, column, message, flag in CLANG_TESTS:
@@ -148,6 +151,7 @@ class TestWarningsParsing(unittest.TestCase):
         self.assertEqual(warning['flag'], flag)
         self.assertEqual(warning['message'], message)
 
+
 class TestWarningsDatabase(unittest.TestCase):
     def test_basic(self):
         db = WarningsDatabase()
@@ -31,6 +31,7 @@ def fake_short_path(path):
                     for p in mozpath.split(path))
     return path
 
+
 def ensure_exe_extension(path):
     if sys.platform.startswith('win'):
         return path + '.exe'
@@ -73,6 +74,7 @@ class ConfigureTestSandbox(ConfigureSandbox):
     This class is only meant to implement the minimal things to make
     moz.configure testing possible. As such, it takes shortcuts.
     '''
+
     def __init__(self, paths, config, environ, *args, **kwargs):
         self._search_path = environ.get('PATH', '').split(os.pathsep)
 
@@ -153,7 +155,6 @@ class ConfigureTestSandbox(ConfigureSandbox):
             def __call__(self, *args, **kwargs):
                 return self._func(*args, **kwargs)
 
-
         return ReadOnlyNamespace(
             create_unicode_buffer=self.create_unicode_buffer,
             windll=ReadOnlyNamespace(
@@ -233,7 +233,6 @@ class TestChecksConfigure(unittest.TestCase):
         self.assertEqual(config, {'FOO': self.KNOWN_A})
         self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)
 
-
     def test_check_prog_with_args(self):
         config, out, status = self.get_result(
             'check_prog("FOO", ("unknown", "known-b", "known c"))',
@@ -431,7 +430,8 @@ class TestChecksConfigure(unittest.TestCase):
                          'single element, or a string')
 
     def test_check_prog_with_path(self):
-        config, out, status = self.get_result('check_prog("A", ("known-a",), paths=["/some/path"])')
+        config, out, status = self.get_result(
+            'check_prog("A", ("known-a",), paths=["/some/path"])')
         self.assertEqual(status, 1)
         self.assertEqual(config, {})
         self.assertEqual(out, textwrap.dedent('''\
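What the test pins down: check_prog with an explicit paths= list only searches those directories, so a program that exists elsewhere still comes back not-found (status 1, nothing in config). A self-contained analogue of that search, not the mozbuild implementation:

import os


def find_prog(candidates, paths):
    # Return the first candidate that is an executable file in `paths`.
    for name in candidates:
        for d in paths:
            full = os.path.join(d, name)
            if os.path.isfile(full) and os.access(full, os.X_OK):
                return full
    return None


# /some/path contains no known-a, mirroring the failing check above.
print(find_prog(('known-a',), ['/some/path']))  # -> None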
@@ -675,7 +675,6 @@ class TestChecksConfigure(unittest.TestCase):
            *** to the full path to pkg-config.
            '''))
 
-
         config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')",
                                             extra_paths=extra_paths)
         self.assertEqual(status, 0)
@@ -21,6 +21,7 @@ from test_toolchain_helpers import FakeCompiler
 class BaseCompileChecks(unittest.TestCase):
     def get_mock_compiler(self, expected_test_content=None, expected_flags=None):
         expected_flags = expected_flags or []
+
         def mock_compiler(stdin, args):
             args, test_file = args[:-1], args[-1]
             self.assertIn('-c', args)
@@ -220,7 +221,7 @@ class TestHeaderChecks(BaseCompileChecks):
         config, out, status = self.do_compile_test(cmd)
         self.assertEqual(status, 0)
         self.assertEqual(out, '')
-        self.assertEqual(config, {'DEFINES':{}})
+        self.assertEqual(config, {'DEFINES': {}})
 
     def test_check_header_include(self):
         expected_test_content = textwrap.dedent('''\
@@ -247,16 +247,16 @@ class TestConfigure(unittest.TestCase):
             def foo():
                 import sys
             foo()'''),
-            sandbox
-        )
+              sandbox
+              )
 
         exec_(textwrap.dedent('''
             @template
             @imports('sys')
             def foo():
                 return sys'''),
-            sandbox
-        )
+              sandbox
+              )
 
         self.assertIs(sandbox['foo'](), sys)
 
@@ -265,8 +265,8 @@ class TestConfigure(unittest.TestCase):
             @imports(_from='os', _import='path')
             def foo():
                 return path'''),
-            sandbox
-        )
+              sandbox
+              )
 
         self.assertIs(sandbox['foo'](), os.path)
 
@@ -275,8 +275,8 @@ class TestConfigure(unittest.TestCase):
             @imports(_from='os', _import='path', _as='os_path')
             def foo():
                 return os_path'''),
-            sandbox
-        )
+              sandbox
+              )
 
         self.assertIs(sandbox['foo'](), os.path)
 
@@ -285,8 +285,8 @@ class TestConfigure(unittest.TestCase):
             @imports('__builtin__')
             def foo():
                 return __builtin__'''),
-            sandbox
-        )
+              sandbox
+              )
 
         import __builtin__
         self.assertIs(sandbox['foo'](), __builtin__)
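All of these hunks share one shape: a snippet is exec'd into a configure sandbox, @template makes the function callable at configure time, and @imports injects an otherwise-hidden module into its globals. A condensed sketch assembled from calls visible in this diff (the ConfigureSandbox arguments copy the TestLogSubprocessOutput hunk further down):

import sys
import textwrap
from StringIO import StringIO

from mozbuild.configure import ConfigureSandbox
from mozbuild.util import exec_

out = StringIO()
sandbox = ConfigureSandbox({}, {}, ['configure'], out, out)

exec_(textwrap.dedent('''
    @template
    @imports('sys')
    def foo():
        return sys'''),
      sandbox
      )

# The sandboxed foo() sees the real sys module only via @imports.
assert sandbox['foo']() is sys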
@@ -296,8 +296,8 @@ class TestConfigure(unittest.TestCase):
             @imports(_from='__builtin__', _import='open')
             def foo():
                 return open('%s')''' % os.devnull),
-            sandbox
-        )
+              sandbox
+              )
 
         f = sandbox['foo']()
         self.assertEquals(f.name, os.devnull)
@@ -310,8 +310,8 @@ class TestConfigure(unittest.TestCase):
             def foo():
                 import sys
                 return sys'''),
-            sandbox
-        )
+              sandbox
+              )
 
         self.assertIs(sandbox['foo'](), sys)
 
@@ -320,8 +320,8 @@ class TestConfigure(unittest.TestCase):
             @imports('__sandbox__')
             def foo():
                 return __sandbox__'''),
-            sandbox
-        )
+              sandbox
+              )
 
         self.assertIs(sandbox['foo'](), sandbox)
 
@@ -330,8 +330,8 @@ class TestConfigure(unittest.TestCase):
             @imports(_import='__sandbox__', _as='s')
             def foo():
                 return s'''),
-            sandbox
-        )
+              sandbox
+              )
 
         self.assertIs(sandbox['foo'](), sandbox)
 
@@ -348,8 +348,8 @@ class TestConfigure(unittest.TestCase):
                     return sys
                 return bar
             bar = foo()'''),
-            sandbox
-        )
+              sandbox
+              )
 
         with self.assertRaises(NameError) as e:
             sandbox._depends[sandbox['bar']].result()
@@ -377,8 +377,8 @@ class TestConfigure(unittest.TestCase):
                 return sys
             foo()
             foo()'''),
-            sandbox
-        )
+              sandbox
+              )
 
         self.assertEquals(len(imports), 1)
 
@@ -587,7 +587,7 @@ class TestConfigure(unittest.TestCase):
 
         config = get_config(['--enable-foo=a,b'])
         self.assertIn('BAR', config)
-        self.assertEquals(config['BAR'], PositiveOptionValue(('a','b')))
+        self.assertEquals(config['BAR'], PositiveOptionValue(('a', 'b')))
 
         with self.assertRaises(InvalidOptionError) as e:
             get_config(['--enable-foo=a,b', '--disable-bar'])
@@ -639,18 +639,18 @@ class TestConfigure(unittest.TestCase):
             mozpath.join(test_data_path, 'imply_option', 'imm.configure'))
 
         with self.assertRaisesRegexp(InvalidOptionError,
-            "--enable-foo' implied by 'imply_option at %s:7' conflicts with "
-            "'--disable-foo' from the command-line" % config_path):
+                                     "--enable-foo' implied by 'imply_option at %s:7' conflicts with "
+                                     "'--disable-foo' from the command-line" % config_path):
             get_config(['--disable-foo'])
 
         with self.assertRaisesRegexp(InvalidOptionError,
-            "--enable-bar=foo,bar' implied by 'imply_option at %s:16' conflicts"
-            " with '--enable-bar=a,b,c' from the command-line" % config_path):
+                                     "--enable-bar=foo,bar' implied by 'imply_option at %s:16' conflicts"
+                                     " with '--enable-bar=a,b,c' from the command-line" % config_path):
             get_config(['--enable-bar=a,b,c'])
 
         with self.assertRaisesRegexp(InvalidOptionError,
-            "--enable-baz=BAZ' implied by 'imply_option at %s:25' conflicts"
-            " with '--enable-baz=QUUX' from the command-line" % config_path):
+                                     "--enable-baz=BAZ' implied by 'imply_option at %s:25' conflicts"
+                                     " with '--enable-baz=QUUX' from the command-line" % config_path):
             get_config(['--enable-baz=QUUX'])
 
     def test_imply_option_failures(self):
@@ -250,7 +250,7 @@ class TestOption(unittest.TestCase):
         self.assertEquals(PositiveOptionValue(('c',)), value)
 
         value = option.get_value('--with-option=-b,+d')
-        self.assertEquals(PositiveOptionValue(('c','d')), value)
+        self.assertEquals(PositiveOptionValue(('c', 'd')), value)
 
         # Adding something that is in the default is fine
         value = option.get_value('--with-option=+b')
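The '-name,+name' syntax edits the option's default set: each -x removes an item and each +x appends one, so a default of ('b', 'c') plus '-b,+d' yields ('c', 'd'). A usage sketch; the default tuple is inferred from the assertions and is an assumption:

from mozbuild.configure.options import Option, PositiveOptionValue

option = Option('--with-option', nargs='*', default=('b', 'c'))

value = option.get_value('--with-option=-b,+d')
assert value == PositiveOptionValue(('c', 'd'))  # b removed, d appended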
@@ -876,13 +876,13 @@ class TestCommandLineHelper(unittest.TestCase):
         bar = Option('--bar',
                      possible_origins=('mozconfig',))
         with self.assertRaisesRegexp(InvalidOptionError,
-            "--bar can not be set by command-line. Values are accepted from: mozconfig"):
+                                     "--bar can not be set by command-line. Values are accepted from: mozconfig"):
             helper.handle(bar)
 
         baz = Option(env='BAZ',
                      possible_origins=('implied',))
         with self.assertRaisesRegexp(InvalidOptionError,
-            "BAZ=1 can not be set by environment. Values are accepted from: implied"):
+                                     "BAZ=1 can not be set by environment. Values are accepted from: implied"):
             helper.handle(baz)
 
 
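possible_origins whitelists where a value may come from; a command-line occurrence of an option that only accepts mozconfig values must raise InvalidOptionError with the message matched above. A compact sketch (the CommandLineHelper constructor arguments are an assumption based on its use elsewhere in these tests):

from mozbuild.configure.options import (
    CommandLineHelper,
    InvalidOptionError,
    Option,
)

helper = CommandLineHelper({}, ['configure', '--bar'])
bar = Option('--bar', possible_origins=('mozconfig',))

try:
    helper.handle(bar)
except InvalidOptionError as e:
    # --bar can not be set by command-line. Values are accepted from: mozconfig
    print(e)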
@@ -42,6 +42,7 @@ class CompilerPreprocessor(Preprocessor):
         # different handling than what our Preprocessor does out of the box.
         # Hack around it enough that the configure tests work properly.
         context = self.context
+
         def normalize_numbers(value):
             if isinstance(value, types.StringTypes):
                 if value[-1:] == 'L' and value[:-1].isdigit():
@@ -49,6 +50,7 @@ class CompilerPreprocessor(Preprocessor):
             return value
         # Our Preprocessor doesn't handle macros with parameters, so we hack
         # around that for __has_feature()-like things.
+
         def normalize_has_feature_or_builtin(expr):
             return self.HAS_FEATURE_OR_BUILTIN.sub(r'\1\2', expr)
         self.context = self.Context(
@@ -162,6 +164,7 @@ class FakeCompiler(dict):
 
     For convenience, FakeCompiler instances can be added (+) to one another.
     '''
+
     def __init__(self, *definitions):
         for definition in definitions:
             if all(not isinstance(d, dict) for d in definition.itervalues()):
@@ -82,7 +82,7 @@ class TestToolkitMozConfigure(BaseConfigureTest):
         self.assertEqual(get_value(environ={'MOZILLA_OFFICIAL': 1}), None)
 
         self.assertEqual(get_value(['--enable-release'],
-            environ={'MOZILLA_OFFICIAL': 1}), None)
+                                   environ={'MOZILLA_OFFICIAL': 1}), None)
 
         with self.assertRaises(InvalidOptionError):
             get_value(['--disable-release'],
@@ -57,7 +57,7 @@ class TestConfigureOutputHandler(unittest.TestCase):
         name = '%s.test_format' % self.__class__.__name__
         logger = logging.getLogger(name)
         logger.setLevel(logging.DEBUG)
-        handler = ConfigureOutputHandler(out, err)
+        handler = ConfigureOutputHandler(out, err)
         handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
         logger.addHandler(handler)
 
@@ -79,7 +79,7 @@ class TestConfigureOutputHandler(unittest.TestCase):
         name = '%s.test_continuation' % self.__class__.__name__
         logger = logging.getLogger(name)
         logger.setLevel(logging.DEBUG)
-        handler = ConfigureOutputHandler(out, out)
+        handler = ConfigureOutputHandler(out, out)
         handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
         logger.addHandler(handler)
 
@@ -138,7 +138,7 @@ class TestConfigureOutputHandler(unittest.TestCase):
         err = StringIO()
 
         logger.removeHandler(handler)
-        handler = ConfigureOutputHandler(out, err)
+        handler = ConfigureOutputHandler(out, err)
         handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
         logger.addHandler(handler)
 
@@ -167,7 +167,7 @@ class TestConfigureOutputHandler(unittest.TestCase):
         name = '%s.test_queue_debug' % self.__class__.__name__
         logger = logging.getLogger(name)
         logger.setLevel(logging.DEBUG)
-        handler = ConfigureOutputHandler(out, out, maxlen=3)
+        handler = ConfigureOutputHandler(out, out, maxlen=3)
         handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
         logger.addHandler(handler)
 
@@ -268,7 +268,7 @@ class TestConfigureOutputHandler(unittest.TestCase):
         name = '%s.test_queue_debug_reentrant' % self.__class__.__name__
         logger = logging.getLogger(name)
         logger.setLevel(logging.DEBUG)
-        handler = ConfigureOutputHandler(out, out, maxlen=10)
+        handler = ConfigureOutputHandler(out, out, maxlen=10)
         handler.setFormatter(logging.Formatter('%(levelname)s| %(message)s'))
         logger.addHandler(handler)
 
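Every test in this file wires the handler the same way: ConfigureOutputHandler is a logging.Handler that routes INFO to one stream and everything else to a second one, with maxlen bounding a queue of deferred debug records. A condensed sketch of that wiring, using only calls visible above:

import logging
from StringIO import StringIO

from mozbuild.configure.util import ConfigureOutputHandler

out = StringIO()
err = StringIO()

logger = logging.getLogger('sketch')
logger.setLevel(logging.DEBUG)

handler = ConfigureOutputHandler(out, err, maxlen=10)
handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
logger.addHandler(handler)

logger.info('checking for foo...')
print(out.getvalue())  # INFO lines land on the stdout-like stream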
@@ -424,7 +424,7 @@ class TestLogSubprocessOutput(unittest.TestCase):
         sandbox = ConfigureSandbox({}, {}, ['configure'], out, out)
 
         sandbox.include_file(mozpath.join(topsrcdir, 'build',
-            'moz.configure', 'util.configure'))
+                                          'moz.configure', 'util.configure'))
         sandbox.include_file(mozpath.join(topsrcdir, 'python', 'mozbuild',
                                           'mozbuild', 'test', 'configure',
                                           'data', 'subprocess.configure'))
@@ -474,6 +474,7 @@ class TestVersion(unittest.TestCase):
         self.assertEqual(v.minor, 0)
         self.assertEqual(v.patch, 0)
 
+
 class TestCheckCmdOutput(unittest.TestCase):
 
     def get_result(self, command='', paths=None):
@@ -483,7 +484,7 @@ class TestCheckCmdOutput(unittest.TestCase):
         sandbox = ConfigureTestSandbox(paths, config, {}, ['/bin/configure'],
                                        out, out)
         sandbox.include_file(mozpath.join(topsrcdir, 'build',
-            'moz.configure', 'util.configure'))
+                                          'moz.configure', 'util.configure'))
         status = 0
         try:
             exec_(command, sandbox)
@@ -291,5 +291,6 @@ class TestCcacheStats(unittest.TestCase):
         stat9 = CCacheStats(self.STAT9)
         self.assertTrue(stat9)
 
+
 if __name__ == '__main__':
     main()
@@ -123,7 +123,7 @@ class TestClobberer(unittest.TestCase):
         self.assertFalse(os.path.exists(dummy_path))
         self.assertTrue(os.path.exists(c.obj_clobber))
         self.assertGreaterEqual(os.path.getmtime(c.obj_clobber),
-            os.path.getmtime(c.src_clobber))
+                                os.path.getmtime(c.src_clobber))
 
     def test_objdir_is_srcdir(self):
         """If topobjdir is the topsrcdir, refuse to clobber."""
@@ -181,7 +181,6 @@ class TestClobberer(unittest.TestCase):
         self.assertFalse(performed)
         self.assertIn('Cannot clobber while the shell is inside', reason)
 
-
     def test_mozconfig_opt_in(self):
         """Auto clobber iff AUTOCLOBBER is in the environment."""
 
Some files were not shown because too many files changed in this diff.