Backed out 6 changesets (bug 1542963) for causing bug 1552400. a=backout

Backed out changeset 70fbe1a158ae (bug 1542963)
Backed out changeset a1a84e0feabb (bug 1542963)
Backed out changeset 14aa1bd254a4 (bug 1542963)
Backed out changeset 11a714f491d5 (bug 1542963)
Backed out changeset 2715bac40d2f (bug 1542963)
Backed out changeset 8f69c7eeb6fd (bug 1542963)

--HG--
extra : histedit_source : 5903adc5313d6af4fdafb40ae55aaa79856f3726
Narcis Beleuzu 2019-05-17 11:30:27 +03:00
Parent 1aace747e3
Commit 65e97bc31e
190 changed files with 2092 additions and 2820 deletions

View file

@ -25,6 +25,7 @@ exclude =
python/devtools/migrate-l10n/migrate/main.py,
python/l10n/fluent_migrations,
python/mozbuild/dumbmake,
python/mozbuild/mozbuild,
servo/components/style,
testing/jsshell/benchmark.py,
testing/marionette/mach_commands.py,
@ -68,7 +69,6 @@ exclude =
memory/moz.configure,
mobile/android/*.configure,
node_modules,
python/mozbuild/mozbuild/test/configure/data,
security/nss/,
testing/marionette/harness/marionette_harness/runner/mixins,
testing/marionette/harness/marionette_harness/tests,
@ -92,8 +92,6 @@ ignore =
per-file-ignores =
ipc/ipdl/*: F403, F405
# cpp_eclipse has a lot of multi-line embedded XML which exceeds line length
python/mozbuild/mozbuild/backend/cpp_eclipse.py: E501
testing/firefox-ui/**/__init__.py: F401
testing/marionette/**/__init__.py: F401
testing/mozharness/configs/*: E124, E127, E128, E131, E231, E261, E265, E266, E501, W391

View file

@ -17,33 +17,32 @@ from mozbuild.util import (
lock_file,
)
def addEntriesToListFile(listFile, entries):
"""Given a file |listFile| containing one entry per line,
add each entry in |entries| to the file, unless it is already
present."""
ensureParentDir(listFile)
lock = lock_file(listFile + ".lck")
try:
if os.path.exists(listFile):
f = open(listFile)
existing = set(x.strip() for x in f.readlines())
f.close()
else:
existing = set()
for e in entries:
if e not in existing:
existing.add(e)
with open(listFile, 'wb') as f:
f.write("\n".join(sorted(existing))+"\n")
finally:
del lock # Explicitly release the lock_file to free it
"""Given a file |listFile| containing one entry per line,
add each entry in |entries| to the file, unless it is already
present."""
ensureParentDir(listFile)
lock = lock_file(listFile + ".lck")
try:
if os.path.exists(listFile):
f = open(listFile)
existing = set(x.strip() for x in f.readlines())
f.close()
else:
existing = set()
for e in entries:
if e not in existing:
existing.add(e)
with open(listFile, 'wb') as f:
f.write("\n".join(sorted(existing))+"\n")
finally:
lock = None
def main(args):
if len(args) < 2:
print("Usage: buildlist.py <list file> <entry> [<entry> ...]",
file=sys.stderr)
file=sys.stderr)
return 1
return addEntriesToListFile(args[0], args[1:])
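
The `del lock` line restored above relies on CPython reference counting: dropping the last reference runs the lock object's destructor, which releases the lock immediately. A minimal sketch of the pattern, assuming a stand-in FileLock class in place of the real mozbuild.util.lock_file:

class FileLock:
    """Stand-in for the object returned by mozbuild.util.lock_file."""

    def __init__(self, path):
        self.handle = open(path, 'w')  # placeholder for an OS-level lock

    def __del__(self):
        self.handle.close()  # release happens when the object is destroyed

lock = FileLock('listfile.lck')
try:
    pass  # ... mutate the list file while holding the lock ...
finally:
    del lock  # drop the last reference so __del__ runs right away (CPython)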

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import print_function, unicode_literals
import argparse
import os
@ -280,14 +280,13 @@ def check_networking(binary):
s = 'TEST-UNEXPECTED-FAIL | check_networking | {} | Identified {} ' + \
'networking function(s) being imported in the rust static library ({})'
print(s.format(basename, len(bad_occurences_names),
",".join(sorted(bad_occurences_names))),
file=sys.stderr)
",".join(sorted(bad_occurences_names))),
file=sys.stderr)
retcode = 1
elif buildconfig.substs.get('MOZ_AUTOMATION'):
print('TEST-PASS | check_networking | {}'.format(basename))
return retcode
def checks(target, binary):
# The clang-plugin is built as target but is really a host binary.
# Cheat and pretend we were passed the right argument.
@ -346,7 +345,7 @@ def main(args):
if options.networking and options.host:
print('--networking is only valid with --target',
file=sys.stderr)
file=sys.stderr)
return 1
if options.networking:

View file

@ -4,8 +4,7 @@
# This action is used to generate the wpt manifest
from __future__ import absolute_import, print_function
import os
import sys
import buildconfig

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
# We invoke a Python program to dump our environment in order to get
# native paths printed on Windows so that these paths can be incorporated
# into Python configure's environment.

View file

@ -11,7 +11,6 @@ import shutil
import sys
import os
def dump_symbols(target, tracking_file, count_ctors=False):
# Our tracking file, if present, will contain path(s) to the previously generated
# symbols. Remove them in this case so we don't simply accumulate old symbols
@ -59,9 +58,8 @@ def dump_symbols(target, tracking_file, count_ctors=False):
if objcopy:
os.environ['OBJCOPY'] = objcopy
args = ([buildconfig.substs['PYTHON'],
os.path.join(buildconfig.topsrcdir, 'toolkit',
'crashreporter', 'tools', 'symbolstore.py')] +
args = ([buildconfig.substs['PYTHON'], os.path.join(buildconfig.topsrcdir, 'toolkit',
'crashreporter', 'tools', 'symbolstore.py')] +
sym_store_args +
['-s', buildconfig.topsrcdir, dump_syms_bin, os.path.join(buildconfig.topobjdir,
'dist',
@ -75,7 +73,6 @@ def dump_symbols(target, tracking_file, count_ctors=False):
fh.write(out_files)
fh.flush()
def main(argv):
parser = argparse.ArgumentParser(
usage="Usage: dumpsymbols.py <library or program> <tracking file>")

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import print_function
import os
import shutil
@ -13,7 +13,6 @@ import mozpack.path as mozpath
import buildconfig
from mozbuild.base import BuildEnvironmentNotFoundException
def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
tmpdir = tempfile.mkdtemp(prefix='tmp')
try:
@ -31,10 +30,7 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
except BuildEnvironmentNotFoundException:
# configure hasn't been run, just use the default
sevenz = '7z'
subprocess.check_call([
sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx',
'-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1',
'-mb0s1:2', '-mb0s2:3'])
subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx', '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])
with open(package, 'wb') as o:
for i in [final_sfx, tagfile, mozpath.join(tmpdir, 'app.7z')]:
@ -45,7 +41,6 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
shutil.move('core', pkg_dir)
shutil.rmtree(tmpdir)
def main(args):
if len(args) != 4:
print('Usage: exe_7z_archive.py <pkg_dir> <tagfile> <sfx_package> <package> <use_upx>',
@ -55,6 +50,5 @@ def main(args):
archive_exe(args[0], args[1], args[2], args[3], args[4])
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View file

@ -2,18 +2,16 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import print_function
import shutil
import sys
import subprocess
def extract_exe(package, target):
subprocess.check_call(['7z', 'x', package, 'core'])
shutil.move('core', target)
def main(args):
if len(args) != 2:
print('Usage: exe_7z_extract.py <package> <target>',
@ -23,6 +21,5 @@ def main(args):
extract_exe(args[0], args[1])
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View file

@ -69,7 +69,7 @@ def main(argv):
with FileAvoidWrite(args.output_file, mode='rb') as output:
try:
ret = module.__dict__[method](output, *args.additional_arguments, **kwargs)
except Exception:
except:
# Ensure that we don't overwrite the file if the script failed.
output.avoid_writing_to_file()
raise
@ -116,6 +116,5 @@ def main(argv):
return 1
return ret
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
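
The hunk above restores a bare `except:`, which also traps KeyboardInterrupt and SystemExit; the pre-backout `except Exception:` lets those propagate. A minimal sketch of the difference:

def run(fn):
    try:
        return fn()
    except Exception:         # unlike a bare `except:`, this lets
        print('cleaning up')  # KeyboardInterrupt/SystemExit escape
        raise

try:
    run(lambda: 1 / 0)
except ZeroDivisionError:
    print('caught ZeroDivisionError')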

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import sys
import json
import copy
@ -16,61 +14,54 @@ output_file = sys.argv[3]
output = open(output_file, 'w')
with open(sys.argv[1]) as f:
searchinfo = json.load(f)
searchinfo = json.load(f)
# If we have a locale, use it, otherwise use the default
if locale in searchinfo["locales"]:
localeSearchInfo = searchinfo["locales"][locale]
localeSearchInfo = searchinfo["locales"][locale]
else:
localeSearchInfo = {}
localeSearchInfo["default"] = searchinfo["default"]
localeSearchInfo = {}
localeSearchInfo["default"] = searchinfo["default"]
def validateDefault(key):
if key not in searchinfo["default"]:
print("Error: Missing default %s in list.json" % (key), file=sys.stderr)
sys.exit(1)
if (not key in searchinfo["default"]):
print >>sys.stderr, "Error: Missing default %s in list.json" % (key)
sys.exit(1)
validateDefault("searchDefault")
validateDefault("visibleDefaultEngines")
validateDefault("searchDefault");
validateDefault("visibleDefaultEngines");
# If the selected locale doesn't have a searchDefault,
# use the global one.
if "searchDefault" not in localeSearchInfo["default"]:
localeSearchInfo["default"]["searchDefault"] = searchinfo["default"]["searchDefault"]
if not "searchDefault" in localeSearchInfo["default"]:
localeSearchInfo["default"]["searchDefault"] = searchinfo["default"]["searchDefault"]
# If the selected locale doesn't have a searchOrder,
# use the global one if present.
# searchOrder is NOT required.
if (
"searchOrder" not in localeSearchInfo["default"]
and "searchOrder" in searchinfo["default"]
):
if not "searchOrder" in localeSearchInfo["default"] and "searchOrder" in searchinfo["default"]:
localeSearchInfo["default"]["searchOrder"] = searchinfo["default"]["searchOrder"]
# If we have region overrides, enumerate through them
# and add the additional regions to the locale information.
if "regionOverrides" in searchinfo:
regionOverrides = searchinfo["regionOverrides"]
regionOverrides = searchinfo["regionOverrides"]
for region in regionOverrides:
# Only add a new engine list if there is an engine that is overridden
enginesToOverride = set(regionOverrides[region].keys())
if region in localeSearchInfo and "visibleDefaultEngines" in localeSearchInfo[region]:
visibleDefaultEngines = localeSearchInfo[region]["visibleDefaultEngines"]
else:
visibleDefaultEngines = localeSearchInfo["default"]["visibleDefaultEngines"]
if set(visibleDefaultEngines) & enginesToOverride:
if region not in localeSearchInfo:
localeSearchInfo[region] = {}
localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(
visibleDefaultEngines)
for i, engine in enumerate(localeSearchInfo[region]["visibleDefaultEngines"]):
if engine in regionOverrides[region]:
localeSearchInfo[region]["visibleDefaultEngines"][i] = \
regionOverrides[region][engine]
for region in regionOverrides:
# Only add a new engine list if there is an engine that is overridden
enginesToOverride = set(regionOverrides[region].keys())
if region in localeSearchInfo and "visibleDefaultEngines" in localeSearchInfo[region]:
visibleDefaultEngines = localeSearchInfo[region]["visibleDefaultEngines"]
else:
visibleDefaultEngines = localeSearchInfo["default"]["visibleDefaultEngines"]
if set(visibleDefaultEngines) & enginesToOverride:
if region not in localeSearchInfo:
localeSearchInfo[region] = {}
localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(visibleDefaultEngines)
for i, engine in enumerate(localeSearchInfo[region]["visibleDefaultEngines"]):
if engine in regionOverrides[region]:
localeSearchInfo[region]["visibleDefaultEngines"][i] = regionOverrides[region][engine]
output.write(json.dumps(localeSearchInfo, ensure_ascii=False).encode('utf8'))
output.close()
output.close();
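
The regionOverrides loop above replaces any visible engine that has a region-specific override while keeping list order for reproducibility. A self-contained sketch of that merge with made-up engine names:

import copy

locale_info = {"default": {"visibleDefaultEngines": ["google", "ddg"]}}
region_overrides = {"US": {"google": "google-us"}}

for region, overrides in region_overrides.items():
    # Prefer the region's own engine list, falling back to the default.
    visible = locale_info.get(region, {}).get(
        "visibleDefaultEngines",
        locale_info["default"]["visibleDefaultEngines"])
    # Only add a region entry if some visible engine is actually overridden.
    if set(visible) & set(overrides):
        locale_info.setdefault(region, {})["visibleDefaultEngines"] = [
            overrides.get(e, e) for e in copy.deepcopy(visible)]

print(locale_info["US"]["visibleDefaultEngines"])  # ['google-us', 'ddg']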

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import sys

View file

@ -38,9 +38,13 @@ import os
from mozbuild.dotproperties import (
DotProperties,
)
from mozbuild.util import (
FileAvoidWrite,
)
from mozpack.files import (
FileFinder,
)
import mozpack.path as mozpath
def merge_properties(paths):
@ -88,8 +92,7 @@ def main(output, *args, **kwargs):
properties = merge_properties(sources)
# Keep these two in sync.
image_url_template = \
'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
drawables_template = 'drawable*/suggestedsites_{name}.*'
# Load properties corresponding to each site name and define their
@ -99,8 +102,7 @@ def main(output, *args, **kwargs):
def add_names(names, defaults={}):
for name in names:
site = copy.deepcopy(defaults)
site.update(properties.get_dict('browser.suggestedsites.{name}'.format(
name=name), required_keys=('title', 'url', 'bgcolor')))
site.update(properties.get_dict('browser.suggestedsites.{name}'.format(name=name), required_keys=('title', 'url', 'bgcolor')))
site['imageurl'] = image_url_template.format(name=name)
sites.append(site)
@ -114,13 +116,11 @@ def main(output, *args, **kwargs):
matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
if not matches:
raise Exception("Could not find drawable in '{resources}' for '{name}'"
.format(resources=resources, name=name))
.format(resources=resources, name=name))
else:
if opts.verbose:
print("Found {len} drawables in '{resources}' for '{name}': {matches}"
.format(len=len(matches), resources=resources,
name=name, matches=matches)
)
.format(len=len(matches), resources=resources, name=name, matches=matches))
# We want the lists to be ordered for reproducibility. Each list has a
# "default" JSON list item which will be extended by the properties read.
@ -129,14 +129,12 @@ def main(output, *args, **kwargs):
('browser.suggestedsites.restricted.list', {'restricted': True}),
]
if opts.verbose:
print('Reading {len} suggested site lists: {lists}'.format(
len=len(lists), lists=[list_name for list_name, _ in lists]))
print('Reading {len} suggested site lists: {lists}'.format(len=len(lists), lists=[list_name for list_name, _ in lists]))
for (list_name, list_item_defaults) in lists:
names = properties.get_list(list_name)
if opts.verbose:
print('Reading {len} suggested sites from {list}: {names}'.format(
len=len(names), list=list_name, names=names))
print('Reading {len} suggested sites from {list}: {names}'.format(len=len(names), list=list_name, names=names))
add_names(names, list_item_defaults)
# We must define at least one site -- that's what the fallback is for.

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import sys

View file

@ -8,7 +8,7 @@
# the locale directory, chrome registry entries and other information
# necessary to produce the complete manifest file for a language pack.
###
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import argparse
import sys
@ -26,6 +26,7 @@ from mozpack.chrome.manifest import (
)
from mozbuild.configure.util import Version
from mozbuild.preprocessor import Preprocessor
import buildconfig
def write_file(path, content):
@ -307,7 +308,7 @@ def get_version_maybe_buildid(min_version):
version = str(min_version)
buildid = os.environ.get('MOZ_BUILD_DATE')
if buildid and len(buildid) != 14:
print('Ignoring invalid MOZ_BUILD_DATE: %s' % buildid, file=sys.stderr)
print >>sys.stderr, 'Ignoring invalid MOZ_BUILD_DATE: %s' % buildid
buildid = None
if buildid:
version = version + "buildid" + buildid
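
get_version_maybe_buildid above only appends MOZ_BUILD_DATE when it is a 14-digit YYYYmmddHHMMSS build ID. A runnable sketch of that check (Python 3 print syntax):

import os
import sys

def get_version_maybe_buildid(min_version):
    version = str(min_version)
    buildid = os.environ.get('MOZ_BUILD_DATE')
    if buildid and len(buildid) != 14:
        print('Ignoring invalid MOZ_BUILD_DATE: %s' % buildid, file=sys.stderr)
        buildid = None
    if buildid:
        version = version + "buildid" + buildid
    return version

os.environ['MOZ_BUILD_DATE'] = '20190517113027'
print(get_version_maybe_buildid('67.0'))  # 67.0buildid20190517113027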

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import print_function
from mozpack import dmg

View file

@ -2,16 +2,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import print_function
import sys
import subprocess
def make_unzip(package):
subprocess.check_call(['unzip', package])
def main(args):
if len(args) != 1:
print('Usage: make_unzip.py <package>',
@ -21,6 +19,5 @@ def main(args):
make_unzip(args[0])
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View file

@ -2,16 +2,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import print_function
import sys
import subprocess
def make_zip(source, package):
subprocess.check_call(['zip', '-r9D', package, source, '-x', '\*/.mkdir.done'])
def main(args):
if len(args) != 2:
print('Usage: make_zip.py <source> <package>',
@ -21,6 +19,5 @@ def main(args):
make_zip(args[0], args[1])
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import sys
import json
@ -12,24 +10,24 @@ engines = []
locale = sys.argv[2]
with open(sys.argv[1]) as f:
searchinfo = json.load(f)
searchinfo = json.load(f)
# Get a list of the engines from the locale or the default
engines = set()
if locale in searchinfo["locales"]:
for region, table in searchinfo["locales"][locale].iteritems():
if "visibleDefaultEngines" in table:
engines.update(table["visibleDefaultEngines"])
for region, table in searchinfo["locales"][locale].iteritems():
if "visibleDefaultEngines" in table:
engines.update(table["visibleDefaultEngines"])
if not engines:
engines.update(searchinfo["default"]["visibleDefaultEngines"])
engines.update(searchinfo["default"]["visibleDefaultEngines"])
# Get additional engines from regionOverrides
for region, overrides in searchinfo["regionOverrides"].iteritems():
for originalengine, replacement in overrides.iteritems():
if originalengine in engines:
# We add the engine because we still need the original
engines.add(replacement)
for originalengine, replacement in overrides.iteritems():
if originalengine in engines:
# We add the engine because we still need the original
engines.add(replacement)
# join() will take an iterable, not just a list.
print('\n'.join(engines))

View file

@ -11,6 +11,7 @@ from __future__ import absolute_import, print_function
import argparse
import buildconfig
import os
import subprocess
import sys
from mozpack.copier import Jarrer
@ -69,7 +70,7 @@ def package_fennec_apk(inputs=[], omni_ja=None,
if verbose:
print('Packaging %s from %s' % (path, file.path))
if not os.path.exists(abspath):
raise ValueError('File %s not found (looked for %s)' %
raise ValueError('File %s not found (looked for %s)' % \
(file.path, abspath))
if jarrer.contains(path):
jarrer.remove(path)

View file

@ -5,6 +5,8 @@
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import json
import os.path
import sys
import buildconfig
@ -21,15 +23,13 @@ def main(argv):
args = parser.parse_args(argv)
objdir_abspath = mozpath.abspath(buildconfig.topobjdir)
def is_valid_entry(entry):
if isinstance(entry[1], BaseFile):
entry_abspath = mozpath.abspath(entry[1].path)
else:
entry_abspath = mozpath.abspath(entry[1])
if not entry_abspath.startswith(objdir_abspath):
print("Warning: omitting generated source [%s] from archive" % entry_abspath,
file=sys.stderr)
print("Warning: omitting generated source [%s] from archive" % entry_abspath, file=sys.stderr)
return False
return True

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import sys
@ -15,11 +15,10 @@ def generate(output, *args):
pp.handleCommandLine(list(args), True)
return set(pp.includes)
def main(args):
pp = Preprocessor()
pp.handleCommandLine(args, True)
if __name__ == "__main__":
main(sys.argv[1:])
main(sys.argv[1:])

View file

@ -10,6 +10,7 @@ import re
import sys
from buildconfig import topsrcdir, topobjdir
from mozbuild.backend.configenvironment import PartialConfigEnvironment
from mozbuild.util import FileAvoidWrite
import mozpack.path as mozpath
@ -50,12 +51,11 @@ def process_define_file(output, input):
raise Exception(
'`#define ALLDEFINES` is not allowed in a '
'CONFIGURE_DEFINE_FILE')
# WebRTC files like to define WINVER and _WIN32_WINNT
# via the command line, which raises a mass of macro
# redefinition warnings. Just handle those macros
# specially here.
def define_for_name(name, val):
"""WebRTC files like to define WINVER and _WIN32_WINNT
via the command line, which raises a mass of macro
redefinition warnings. Just handle those macros
specially here."""
define = "#define {name} {val}".format(name=name, val=val)
if name in ('WINVER', '_WIN32_WINNT'):
return '#if !defined({name})\n{define}\n#endif' \
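
define_for_name above guards WINVER-style macros so that a command-line definition wins over the configure default without redefinition warnings. A minimal completed sketch (the hunk above is cut off mid-statement, so the .format() call here is an assumed continuation):

def define_for_name(name, val):
    # WebRTC-style builds also define these on the command line, so guard
    # the configure default to avoid macro-redefinition warnings.
    define = "#define {name} {val}".format(name=name, val=val)
    if name in ('WINVER', '_WIN32_WINNT'):
        return '#if !defined({name})\n{define}\n#endif'.format(
            name=name, define=define)
    return define

print(define_for_name('WINVER', '0x0601'))
print(define_for_name('DEBUG', '1'))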

View file

@ -29,8 +29,8 @@ COMPLETE = 'Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; ' \
def process_manifest(destdir, paths, track,
no_symlinks=False,
defines={}):
no_symlinks=False,
defines={}):
if os.path.exists(track):
# We use the same format as install manifests for the tracking
@ -44,15 +44,15 @@ def process_manifest(destdir, paths, track,
for p, f in finder.find(dest):
remove_unaccounted.add(p, dummy_file)
remove_empty_directories = True
remove_all_directory_symlinks = True
remove_empty_directories=True
remove_all_directory_symlinks=True
else:
# If tracking is enabled and there is no file, we don't want to
# be removing anything.
remove_unaccounted = False
remove_empty_directories = False
remove_all_directory_symlinks = False
remove_empty_directories=False
remove_all_directory_symlinks=False
manifest = InstallManifest()
for path in paths:
@ -64,9 +64,9 @@ def process_manifest(destdir, paths, track,
copier, defines_override=defines, link_policy=link_policy
)
result = copier.copy(destdir,
remove_unaccounted=remove_unaccounted,
remove_all_directory_symlinks=remove_all_directory_symlinks,
remove_empty_directories=remove_empty_directories)
remove_unaccounted=remove_unaccounted,
remove_all_directory_symlinks=remove_all_directory_symlinks,
remove_empty_directories=remove_empty_directories)
if track:
# We should record files that we actually copied.
@ -83,21 +83,21 @@ def main(argv):
parser.add_argument('destdir', help='Destination directory.')
parser.add_argument('manifests', nargs='+', help='Path to manifest file(s).')
parser.add_argument('--no-symlinks', action='store_true',
help='Do not install symbolic links. Always copy files')
help='Do not install symbolic links. Always copy files')
parser.add_argument('--track', metavar="PATH", required=True,
help='Use installed files tracking information from the given path.')
help='Use installed files tracking information from the given path.')
parser.add_argument('-D', action=DefinesAction,
dest='defines', metavar="VAR[=VAL]",
help='Define a variable to override what is specified in the manifest')
dest='defines', metavar="VAR[=VAL]",
help='Define a variable to override what is specified in the manifest')
args = parser.parse_args(argv)
start = time.time()
result = process_manifest(args.destdir, args.manifests,
track=args.track,
no_symlinks=args.no_symlinks,
defines=args.defines)
track=args.track,
no_symlinks=args.no_symlinks,
defines=args.defines)
elapsed = time.time() - start
@ -109,6 +109,5 @@ def main(argv):
rm_files=result.removed_files_count,
rm_dirs=result.removed_directories_count))
if __name__ == '__main__':
main(sys.argv[1:])

View file

@ -12,7 +12,6 @@ from mozpack.files import FileFinder
from mozpack.mozjar import JarWriter
import mozpack.path as mozpath
def make_archive(archive_name, base, exclude, include):
compress = ['**/*.sym']
finder = FileFinder(base, ignore=exclude)
@ -28,13 +27,11 @@ def make_archive(archive_name, base, exclude, include):
writer.add(p.encode('utf-8'), f, mode=f.mode,
compress=should_compress, skip_duplicates=True)
def main(argv):
parser = argparse.ArgumentParser(description='Produce a symbols archive')
parser.add_argument('archive', help='Which archive to generate')
parser.add_argument('base', help='Base directory to package')
parser.add_argument('--full-archive', action='store_true',
help='Generate a full symbol archive')
parser.add_argument('--full-archive', action='store_true', help='Generate a full symbol archive')
args = parser.parse_args(argv)
@ -50,6 +47,5 @@ def main(argv):
make_archive(args.archive, args.base, excludes, includes)
if __name__ == '__main__':
main(sys.argv[1:])

View file

@ -642,7 +642,7 @@ for k, v in ARCHIVE_FILES.items():
continue
ignores = set(itertools.chain(*(e.get('ignore', [])
for e in ARCHIVE_FILES['common'])))
for e in ARCHIVE_FILES['common'])))
if not any(p.startswith('%s/' % k) for p in ignores):
raise Exception('"common" ignore list probably should contain %s' % k)

View file

@ -17,8 +17,6 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
from __future__ import absolute_import, print_function
# A manifest file specifies files in that directory that are stored
# elsewhere. This file should only list files in the same directory
# in which the manifest file resides and it should be called
@ -750,7 +748,7 @@ def _cache_checksum_matches(base_file, checksum):
log.info("Cache matches, avoiding extracting in '%s'" % base_file)
return True
return False
except IOError:
except IOError as e:
return False
@ -1024,7 +1022,7 @@ def _authorize(req, auth_file):
try:
auth_file_content = json.loads(auth_file_content)
is_taskcluster_auth = True
except Exception:
except:
pass
if is_taskcluster_auth:
@ -1302,6 +1300,5 @@ def main(argv, _skip_logging=False):
return 0 if process_command(options, args) else 1
if __name__ == "__main__": # pragma: no cover
sys.exit(main(sys.argv))

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import print_function
from mozpack import dmg

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import sys

View file

@ -9,7 +9,6 @@ import subprocess
import sys
import os
def parse_outputs(crate_output, dep_outputs, pass_l_flag):
env = {}
args = []
@ -60,7 +59,6 @@ def parse_outputs(crate_output, dep_outputs, pass_l_flag):
return env, args
def wrap_rustc(args):
parser = argparse.ArgumentParser()
parser.add_argument('--crate-out', nargs='?')
@ -75,6 +73,5 @@ def wrap_rustc(args):
os.environ.update(new_env)
return subprocess.Popen(args.cmd + new_args, cwd=args.cwd).wait()
if __name__ == '__main__':
sys.exit(wrap_rustc(sys.argv[1:]))

View file

@ -8,93 +8,76 @@ corresponding .ini file.
Usage: xpccheck.py <directory> [<directory> ...]
'''
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import sys
import os
from glob import glob
import manifestparser
def getIniTests(testdir):
mp = manifestparser.ManifestParser(strict=False)
mp.read(os.path.join(testdir, 'xpcshell.ini'))
return mp.tests
mp = manifestparser.ManifestParser(strict=False)
mp.read(os.path.join(testdir, 'xpcshell.ini'))
return mp.tests
def verifyDirectory(initests, directory):
files = glob(os.path.join(os.path.abspath(directory), "test_*"))
for f in files:
if (not os.path.isfile(f)):
continue
files = glob(os.path.join(os.path.abspath(directory), "test_*"))
for f in files:
if (not os.path.isfile(f)):
continue
name = os.path.basename(f)
if name.endswith('.in'):
name = name[:-3]
name = os.path.basename(f)
if name.endswith('.in'):
name = name[:-3]
if not name.endswith('.js'):
continue
found = False
for test in initests:
if os.path.join(os.path.abspath(directory), name) == test['path']:
found = True
break
if not found:
print(('TEST-UNEXPECTED-FAIL | xpccheck | test '
'%s is missing from test manifest %s!') % (
name,
os.path.join(directory, 'xpcshell.ini'),
),
file=sys.stderr,
)
sys.exit(1)
if not name.endswith('.js'):
continue
found = False
for test in initests:
if os.path.join(os.path.abspath(directory), name) == test['path']:
found = True
break
if not found:
print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (name, os.path.join(directory, 'xpcshell.ini'))
sys.exit(1)
def verifyIniFile(initests, directory):
files = glob(os.path.join(os.path.abspath(directory), "test_*"))
for test in initests:
name = test['path'].split('/')[-1]
files = glob(os.path.join(os.path.abspath(directory), "test_*"))
for test in initests:
name = test['path'].split('/')[-1]
found = False
for f in files:
found = False
for f in files:
fname = f.split('/')[-1]
if fname.endswith('.in'):
fname = '.in'.join(fname.split('.in')[:-1])
fname = f.split('/')[-1]
if fname.endswith('.in'):
fname = '.in'.join(fname.split('.in')[:-1])
if os.path.join(os.path.abspath(directory), fname) == test['path']:
found = True
break
if not found:
print(("TEST-UNEXPECTED-FAIL | xpccheck | found "
"%s in xpcshell.ini and not in directory '%s'") % (
name,
directory,
),
file=sys.stderr,
)
sys.exit(1)
if os.path.join(os.path.abspath(directory), fname) == test['path']:
found = True
break
if not found:
print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (name, directory)
sys.exit(1)
def main(argv):
if len(argv) < 2:
print("Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]",
file=sys.stderr)
sys.exit(1)
if len(argv) < 2:
print >>sys.stderr, "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]"
sys.exit(1)
for d in argv[1:]:
# xpcshell-unpack is a copy of xpcshell sibling directory and in the Makefile
# we copy all files (including xpcshell.ini from the sibling directory.
if d.endswith('toolkit/mozapps/extensions/test/xpcshell-unpack'):
continue
initests = getIniTests(d)
verifyDirectory(initests, d)
verifyIniFile(initests, d)
topsrcdir = argv[0]
for d in argv[1:]:
# xpcshell-unpack is a copy of xpcshell sibling directory and in the Makefile
# we copy all files (including xpcshell.ini from the sibling directory.
if d.endswith('toolkit/mozapps/extensions/test/xpcshell-unpack'):
continue
initests = getIniTests(d)
verifyDirectory(initests, d)
verifyIniFile(initests, d)
if __name__ == '__main__':
main(sys.argv[1:])
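
The backout above swaps `print(..., file=sys.stderr)` back to the Python-2-only `print >>sys.stderr, ...` statement form. The function form works on both interpreters when the __future__ import is present:

from __future__ import print_function  # no-op on Python 3, required on Python 2
import sys

print('TEST-UNEXPECTED-FAIL | xpccheck | example message', file=sys.stderr)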

View file

@ -7,12 +7,14 @@
# input IDL file(s). It's purpose is to directly support the build
# system. The API will change to meet the needs of the build system.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import argparse
import os
import sys
from io import BytesIO
from xpidl import jsonxpt
from buildconfig import topsrcdir
from xpidl.header import print_header
@ -85,33 +87,32 @@ def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument('--cache-dir',
help='Directory in which to find or write cached lexer data.')
help='Directory in which to find or write cached lexer data.')
parser.add_argument('--depsdir',
help='Directory in which to write dependency files.')
help='Directory in which to write dependency files.')
parser.add_argument('--bindings-conf',
help='Path to the WebIDL binding configuration file.')
help='Path to the WebIDL binding configuration file.')
parser.add_argument('--input-dir', dest='input_dirs',
action='append', default=[],
help='Directory(ies) in which to find source .idl files.')
parser.add_argument('headerdir',
help='Directory in which to write header files.')
help='Directory in which to write header files.')
parser.add_argument('xpcrsdir',
help='Directory in which to write rust xpcom binding files.')
help='Directory in which to write rust xpcom binding files.')
parser.add_argument('xptdir',
help='Directory in which to write xpt file.')
help='Directory in which to write xpt file.')
parser.add_argument('module',
help='Final module name to use for linked output xpt file.')
help='Final module name to use for linked output xpt file.')
parser.add_argument('idls', nargs='+',
help='Source .idl file(s).')
help='Source .idl file(s).')
parser.add_argument('-I', dest='incpath', action='append', default=[],
help='Extra directories where to look for included .idl files.')
help='Extra directories where to look for included .idl files.')
args = parser.parse_args(argv)
incpath = [os.path.join(topsrcdir, p) for p in args.incpath]
process(args.input_dirs, incpath, args.bindings_conf, args.cache_dir,
args.headerdir, args.xpcrsdir, args.xptdir, args.depsdir, args.module,
args.idls)
args.headerdir, args.xpcrsdir, args.xptdir, args.depsdir, args.module,
args.idls)
if __name__ == '__main__':
main(sys.argv[1:])

View file

@ -5,7 +5,7 @@
# This script creates a zip file, but will also strip any binaries
# it finds before adding them to the zip.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
from mozpack.files import FileFinder
from mozpack.copier import Jarrer

View file

@ -2,12 +2,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import os
import sqlite3 as lite
class Node(object):
def __init__(self, graph, node_id):
@ -42,12 +39,11 @@ class Node(object):
return os.path.join(parent.get_path(graph), self.name)
def calculate_mtime(self):
if self.type == 0: # only files have meaningful costs
if self.type == 0: # only files have meaningful costs
return sum(x.mtime for x in self.cmds)
else:
return None
class Graph(object):
def __init__(self, path=None, connect=None):
@ -71,7 +67,7 @@ class Graph(object):
self.connect.close()
def query_arg(self, q, arg):
assert isinstance(arg, tuple) # execute() requires tuple argument
assert isinstance(arg, tuple) #execute() requires tuple argument
cursor = self.connect.cursor()
cursor.execute(q, arg)
return cursor
@ -94,7 +90,7 @@ class Graph(object):
ret = self.query_arg('SELECT id FROM node \
WHERE dir=? AND name=?', (nodeid, part)).fetchone()
# fetchone should be ok bc dir and and name combo is unique
if ret is None:
if ret == None:
print ("\nCould not find id number for '%s'" % filepath)
return None
nodeid = ret[0]
@ -116,7 +112,7 @@ class Graph(object):
m, s = sec / 60, sec % 60
print ("\n------ Summary for %s ------\
\nTotal Build Time (mm:ss) = %d:%d\nNum Downstream Commands = %d"
% (f, m, s, node.num_cmds))
% (f, m, s, node.num_cmds))
def populate(self):
# make nodes for files with downstream commands
@ -131,4 +127,5 @@ class Graph(object):
def get_cost_dict(self):
if self.results is None:
self.populate()
return {k: v for k, v in self.results if v > 0}
return {k:v for k,v in self.results if v > 0}

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import bisect
import gzip
import json
@ -19,36 +17,31 @@ PUSHLOG_CHUNK_SIZE = 500
URL = 'https://hg.mozilla.org/mozilla-central/json-pushes?'
def unix_epoch(date):
return (date - datetime(1970, 1, 1)).total_seconds()
return (date - datetime(1970,1,1)).total_seconds()
def unix_from_date(n, today):
return unix_epoch(today - timedelta(days=n))
def get_lastpid(session):
return session.get(URL+'&version=2').json()['lastpushid']
def get_pushlog_chunk(session, start, end):
# returns pushes sorted by date
res = session.get(URL+'version=1&startID={0}&\
endID={1}&full=1'.format(start, end)).json()
return sorted(res.items(), key=lambda x: x[1]['date'])
return sorted(res.items(), key = lambda x: x[1]['date'])
def collect_data(session, date):
if date < 1206031764: # first push
raise Exception("No pushes exist before March 20, 2008.")
if date < 1206031764: #first push
raise Exception ("No pushes exist before March 20, 2008.")
lastpushid = get_lastpid(session)
data = []
start_id = lastpushid - PUSHLOG_CHUNK_SIZE
end_id = lastpushid + 1
while True:
res = get_pushlog_chunk(session, start_id, end_id)
starting_date = res[0][1]['date'] # date of oldest push in chunk
starting_date = res[0][1]['date'] # date of oldest push in chunk
dates = [x[1]['date'] for x in res]
if starting_date < date:
i = bisect.bisect_left(dates, date)
@ -59,12 +52,10 @@ def collect_data(session, date):
end_id = start_id + 1
start_id = start_id - PUSHLOG_CHUNK_SIZE
def get_data(epoch):
session = requests.Session()
data = collect_data(session, epoch)
return {k: v for sublist in data for (k, v) in sublist}
return {k:v for sublist in data for (k,v) in sublist}
class Pushlog(object):
@ -87,7 +78,6 @@ class Pushlog(object):
keys.sort()
return keys
class Push(object):
def __init__(self, pid, p_dict):
@ -95,7 +85,6 @@ class Push(object):
self.date = p_dict['date']
self.files = [f for x in p_dict['changesets'] for f in x['files']]
class Report(object):
def __init__(self, days, path=None, cost_dict=None):
@ -123,7 +112,7 @@ class Report(object):
cost = costs.get(f)
count = counts.get(f)
if cost is not None:
res.append((f, cost, count, round(cost*count, 3)))
res.append((f, cost, count, round(cost*count,3)))
return res
def get_sorted_report(self, format):
@ -154,8 +143,7 @@ class Report(object):
res = self.get_sorted_report(format)
if limit is not None:
res = self.cut(limit, res)
for x in res:
data.append(x)
for x in res: data.append(x)
if format == 'pretty':
print (data)
else:
@ -172,3 +160,4 @@ class Report(object):
with open(file_path, 'wb') as f:
f.write(content)
print ("Created report: %s" % file_path)

View file

@ -11,8 +11,7 @@ import time
# Builds before this build ID use the v0 version scheme. Builds after this
# build ID use the v1 version scheme.
V1_CUTOFF = 20150801000000 # YYYYmmddHHMMSS
V1_CUTOFF = 20150801000000 # YYYYmmddHHMMSS
def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
base = int(str(buildid)[:10])
@ -31,7 +30,6 @@ def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
raise ValueError("Don't know how to compute android:versionCode "
"for CPU arch %s" % cpu_arch)
def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
'''Generate a v1 android:versionCode.
@ -136,7 +134,6 @@ def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
return version
def android_version_code(buildid, *args, **kwargs):
base = int(str(buildid))
if base < V1_CUTOFF:
@ -169,9 +166,9 @@ def main(argv):
args = parser.parse_args(argv)
code = android_version_code(args.buildid,
cpu_arch=args.cpu_arch,
min_sdk=args.min_sdk,
max_sdk=args.max_sdk)
cpu_arch=args.cpu_arch,
min_sdk=args.min_sdk,
max_sdk=args.max_sdk)
print(code)
return 0
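
android_version_code above dispatches on V1_CUTOFF: build IDs are YYYYmmddHHMMSS integers, so a plain numeric comparison selects the scheme. A trimmed sketch with the v0/v1 generators stubbed out:

V1_CUTOFF = 20150801000000  # YYYYmmddHHMMSS

def android_version_code(buildid):
    # Stand-ins for the real v0/v1 generators, which also fold in the CPU
    # arch and SDK bounds when computing the numeric android:versionCode.
    if int(str(buildid)) < V1_CUTOFF:
        return 'v0 scheme'
    return 'v1 scheme'

print(android_version_code(20190517113027))  # v1 scheme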

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
# The values correspond to entries at
# https://tools.taskcluster.net/index/artifacts/#gecko.v2.mozilla-central.latest/gecko.v2.mozilla-central.latest
JOB_CHOICES = {

View file

@ -77,8 +77,7 @@ from mozpack.mozjar import (
from mozpack.packager.unpack import UnpackFinder
import mozpack.path as mozpath
# Number of candidate pushheads to cache per parent changeset.
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 # Number of candidate pushheads to cache per parent changeset.
# Number of parent changesets to consider as possible pushheads.
# There isn't really such a thing as a reasonable default here, because we don't
@ -314,8 +313,8 @@ class AndroidArtifactJob(ArtifactJob):
dirname, basename = os.path.split(p)
self.log(logging.INFO, 'artifact',
{'basename': basename},
'Adding {basename} to processed archive')
{'basename': basename},
'Adding {basename} to processed archive')
basedir = 'bin'
if not basename.endswith('.so'):
@ -324,8 +323,7 @@ class AndroidArtifactJob(ArtifactJob):
writer.add(basename.encode('utf-8'), f.open())
def process_symbols_archive(self, filename, processed_filename):
ArtifactJob.process_symbols_archive(
self, filename, processed_filename, skip_compressed=True)
ArtifactJob.process_symbols_archive(self, filename, processed_filename, skip_compressed=True)
if self._symbols_archive_suffix != 'crashreporter-symbols-full.zip':
return
@ -338,11 +336,9 @@ class AndroidArtifactJob(ArtifactJob):
if not filename.endswith('.gz'):
continue
# Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz"
# into "libxul.so.dbg".
# Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz" into "libxul.so.dbg".
#
# After running `settings append target.debug-file-search-paths $file`,
# where file=/path/to/topobjdir/dist/crashreporter-symbols,
# After `settings append target.debug-file-search-paths /path/to/topobjdir/dist/crashreporter-symbols`,
# Android Studio's lldb (7.0.0, at least) will find the ELF debug symbol files.
#
# There are other paths that will work but none seem more desireable. See
@ -351,8 +347,7 @@ class AndroidArtifactJob(ArtifactJob):
destpath = mozpath.join('crashreporter-symbols', basename)
self.log(logging.INFO, 'artifact',
{'destpath': destpath},
'Adding uncompressed ELF debug symbol file '
'{destpath} to processed archive')
'Adding uncompressed ELF debug symbol file {destpath} to processed archive')
writer.add(destpath.encode('utf-8'),
gzip.GzipFile(fileobj=reader[filename].uncompressed_data))
@ -445,8 +440,8 @@ class MacArtifactJob(ArtifactJob):
oldcwd = os.getcwd()
try:
self.log(logging.INFO, 'artifact',
{'tempdir': tempdir},
'Unpacking DMG into {tempdir}')
{'tempdir': tempdir},
'Unpacking DMG into {tempdir}')
if self._substs['HOST_OS_ARCH'] == 'Linux':
# This is a cross build, use hfsplus and dmg tools to extract the dmg.
os.chdir(tempdir)
@ -491,8 +486,8 @@ class MacArtifactJob(ArtifactJob):
for path in paths:
for p, f in finder.find(path):
self.log(logging.INFO, 'artifact',
{'path': p},
'Adding {path} to processed archive')
{'path': p},
'Adding {path} to processed archive')
destpath = mozpath.join('bin', os.path.basename(p))
writer.add(destpath.encode('utf-8'), f, mode=f.mode)
@ -512,8 +507,8 @@ class MacArtifactJob(ArtifactJob):
shutil.rmtree(tempdir)
except (OSError, IOError):
self.log(logging.WARN, 'artifact',
{'tempdir': tempdir},
'Unable to delete {tempdir}')
{'tempdir': tempdir},
'Unable to delete {tempdir}')
pass
@ -566,8 +561,8 @@ class WinArtifactJob(ArtifactJob):
basename = mozpath.relpath(p, self.product)
basename = mozpath.join('bin', basename)
self.log(logging.INFO, 'artifact',
{'basename': basename},
'Adding {basename} to processed archive')
{'basename': basename},
'Adding {basename} to processed archive')
writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
added_entry = True
@ -659,8 +654,7 @@ class CacheManager(object):
Provide simple logging.
'''
def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None,
log=None, skip_cache=False):
def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None, log=None, skip_cache=False):
self._skip_cache = skip_cache
self._cache = pylru.lrucache(cache_size, callback=cache_callback)
self._cache_filename = mozpath.join(cache_dir, cache_name + '-cache.pickle')
@ -674,8 +668,8 @@ class CacheManager(object):
def load_cache(self):
if self._skip_cache:
self.log(logging.INFO, 'artifact',
{},
'Skipping cache: ignoring load_cache!')
{},
'Skipping cache: ignoring load_cache!')
return
try:
@ -687,26 +681,25 @@ class CacheManager(object):
# exceptions, so it's not worth trying to be fine grained here.
# We ignore any exception, so the cache is effectively dropped.
self.log(logging.INFO, 'artifact',
{'filename': self._cache_filename, 'exception': repr(e)},
'Ignoring exception unpickling cache file {filename}: {exception}')
{'filename': self._cache_filename, 'exception': repr(e)},
'Ignoring exception unpickling cache file {filename}: {exception}')
pass
def dump_cache(self):
if self._skip_cache:
self.log(logging.INFO, 'artifact',
{},
'Skipping cache: ignoring dump_cache!')
{},
'Skipping cache: ignoring dump_cache!')
return
ensureParentDir(self._cache_filename)
pickle.dump(list(reversed(list(self._cache.items()))),
open(self._cache_filename, 'wb'), -1)
pickle.dump(list(reversed(list(self._cache.items()))), open(self._cache_filename, 'wb'), -1)
def clear_cache(self):
if self._skip_cache:
self.log(logging.INFO, 'artifact',
{},
'Skipping cache: ignoring clear_cache!')
{},
'Skipping cache: ignoring clear_cache!')
return
with self:
@ -719,13 +712,11 @@ class CacheManager(object):
def __exit__(self, type, value, traceback):
self.dump_cache()
class PushheadCache(CacheManager):
'''Helps map tree/revision pairs to parent pushheads according to the pushlog.'''
def __init__(self, cache_dir, log=None, skip_cache=False):
CacheManager.__init__(self, cache_dir, 'pushhead_cache',
MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
CacheManager.__init__(self, cache_dir, 'pushhead_cache', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
@cachedmethod(operator.attrgetter('_cache'))
def parent_pushhead_id(self, tree, revision):
@ -752,13 +743,11 @@ class PushheadCache(CacheManager):
p['changesets'][-1] for p in result['pushes'].values()
]
class TaskCache(CacheManager):
'''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''
def __init__(self, cache_dir, log=None, skip_cache=False):
CacheManager.__init__(self, cache_dir, 'artifact_url',
MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
CacheManager.__init__(self, cache_dir, 'artifact_url', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
@cachedmethod(operator.attrgetter('_cache'))
def artifacts(self, tree, job, artifact_job_class, rev):
@ -793,8 +782,7 @@ class TaskCache(CacheManager):
except KeyError:
# Not all revisions correspond to pushes that produce the job we
# care about; and even those that do may not have completed yet.
raise ValueError(
'Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
raise ValueError('Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
return taskId, list_artifacts(taskId)
@ -832,15 +820,13 @@ class Artifacts(object):
substs=self._substs)
except KeyError:
self.log(logging.INFO, 'artifact',
{'job': self._job},
'Unknown job {job}')
{'job': self._job},
'Unknown job {job}')
raise KeyError("Unknown job")
self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._artifact_cache = ArtifactCache(
self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._pushhead_cache = PushheadCache(
self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
def log(self, *args, **kwargs):
if self._log:
@ -964,12 +950,11 @@ class Artifacts(object):
There are no public revisions.
This can happen if the repository is created from bundle file and never pulled
from remote. Please run `hg pull` and build again.
see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles\
""")
see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles""")
self.log(logging.INFO, 'artifact',
{'len': len(last_revs)},
'hg suggested {len} candidate revisions')
{'len': len(last_revs)},
'hg suggested {len} candidate revisions')
def to_pair(line):
rev, node = line.split(':', 1)
@ -1010,16 +995,14 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
yield candidate_pushheads[rev], rev
if not count:
raise Exception(
'Could not find any candidate pushheads in the last {num} revisions.\n'
'Search started with {rev}, which must be known to Mozilla automation.\n\n'
'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))
raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n'
'Search started with {rev}, which must be known to Mozilla automation.\n\n'
'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))
def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
try:
taskId, artifacts = task_cache.artifacts(
tree, job, self._artifact_job.__class__, pushhead)
taskId, artifacts = task_cache.artifacts(tree, job, self._artifact_job.__class__, pushhead)
except ValueError:
return None
@ -1040,32 +1023,32 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
def install_from_file(self, filename, distdir):
self.log(logging.INFO, 'artifact',
{'filename': filename},
'Installing from {filename}')
{'filename': filename},
'Installing from {filename}')
# Do we need to post-process?
processed_filename = filename + PROCESSED_SUFFIX
if self._skip_cache and os.path.exists(processed_filename):
self.log(logging.INFO, 'artifact',
{'path': processed_filename},
'Skipping cache: removing cached processed artifact {path}')
{'path': processed_filename},
'Skipping cache: removing cached processed artifact {path}')
os.remove(processed_filename)
if not os.path.exists(processed_filename):
self.log(logging.INFO, 'artifact',
{'filename': filename},
'Processing contents of {filename}')
{'filename': filename},
'Processing contents of {filename}')
self.log(logging.INFO, 'artifact',
{'processed_filename': processed_filename},
'Writing processed {processed_filename}')
{'processed_filename': processed_filename},
'Writing processed {processed_filename}')
self._artifact_job.process_artifact(filename, processed_filename)
self._artifact_cache._persist_limit.register_file(processed_filename)
self.log(logging.INFO, 'artifact',
{'processed_filename': processed_filename},
'Installing from processed {processed_filename}')
{'processed_filename': processed_filename},
'Installing from processed {processed_filename}')
# Copy all .so files, avoiding modification where possible.
ensureParentDir(mozpath.join(distdir, '.dummy'))
@ -1079,21 +1062,20 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
shutil.copyfileobj(zf.open(info), fh)
file_existed, file_updated = fh.close()
self.log(logging.INFO, 'artifact',
{'updating': 'Updating' if file_updated else 'Not updating',
'filename': n},
'{updating} {filename}')
{'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
'{updating} {filename}')
if not file_existed or file_updated:
# Libraries and binaries may need to be marked executable,
# depending on platform.
perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
os.chmod(n, perms)
return 0
def install_from_url(self, url, distdir):
self.log(logging.INFO, 'artifact',
{'url': url},
'Installing from {url}')
{'url': url},
'Installing from {url}')
filename = self._artifact_cache.fetch(url)
return self.install_from_file(filename, distdir)
@ -1135,7 +1117,7 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
try:
if self._hg:
revision = subprocess.check_output([self._hg, 'log', '--template', '{node}\n',
'-r', revset], cwd=self._topsrcdir).strip()
'-r', revset], cwd=self._topsrcdir).strip()
elif self._git:
revset = subprocess.check_output([
self._git, 'rev-parse', '%s^{commit}' % revset],
@ -1183,8 +1165,7 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
url = get_artifact_url(taskId, artifact_name)
urls.append(url)
if not urls:
raise ValueError(
'Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
raise ValueError('Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
for url in urls:
if self.install_from_url(url, distdir):
return 1
@ -1213,10 +1194,11 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
return self.install_from_recent(distdir)
def clear_cache(self):
self.log(logging.INFO, 'artifact',
{},
'Deleting cached artifacts and caches.')
{},
'Deleting cached artifacts and caches.')
self._task_cache.clear_cache()
self._artifact_cache.clear_cache()
self._pushhead_cache.clear_cache()
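
install_from_file above restores POSIX permissions from each zip entry's external_attr, whose high 16 bits carry the mode bits (the stackoverflow.com/a/434689 trick cited in the code). A sketch over a hypothetical archive:

import stat
import zipfile

# 'artifact.zip' is a hypothetical path; point this at a real archive to run.
with zipfile.ZipFile('artifact.zip') as zf:
    for info in zf.infolist():
        perms = info.external_attr >> 16  # POSIX mode bits in the high word
        perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r
        print('%s -> %s' % (info.filename, oct(perms)))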

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
backends = {
'ChromeMap': 'mozbuild.codecoverage.chrome_map',
'CompileDB': 'mozbuild.compilation.database',

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
from abc import (
ABCMeta,

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
cargo_extra_outputs = {
'bindgen': [

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import itertools
import json
@ -33,6 +33,7 @@ from mozbuild.frontend.data import (
GnProjectData,
HostLibrary,
HostGeneratedSources,
HostRustLibrary,
IPDLCollection,
LocalizedPreprocessedFiles,
LocalizedFiles,
@ -52,10 +53,10 @@ from mozbuild.preprocessor import Preprocessor
from mozpack.chrome.manifest import parse_manifest_line
from mozbuild.util import (
group_unified_files,
mkdir,
)
class XPIDLManager(object):
"""Helps manage XPCOM IDLs in the context of the build system."""
@ -101,7 +102,6 @@ class XPIDLManager(object):
"""
return itertools.chain(*[m.stems() for m in self.modules.itervalues()])
class BinariesCollection(object):
"""Tracks state of binaries produced by the build."""
@ -109,7 +109,6 @@ class BinariesCollection(object):
self.shared_libraries = []
self.programs = []
class CommonBackend(BuildBackend):
"""Holds logic common to all build backends."""
@ -183,8 +182,7 @@ class CommonBackend(BuildBackend):
return False
elif isinstance(obj, Exports):
objdir_files = [f.full_path for path, files in obj.files.walk()
for f in files if isinstance(f, ObjDirPath)]
objdir_files = [f.full_path for path, files in obj.files.walk() for f in files if isinstance(f, ObjDirPath)]
if objdir_files:
self._handle_generated_sources(objdir_files)
return False
@ -203,9 +201,9 @@ class CommonBackend(BuildBackend):
if len(self._idl_manager.modules):
self._write_rust_xpidl_summary(self._idl_manager)
self._handle_idl_manager(self._idl_manager)
self._handle_generated_sources(
mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
for stem in self._idl_manager.idl_stems())
self._handle_generated_sources(mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
for stem in self._idl_manager.idl_stems())
for config in self._configs:
self.backend_input_files.add(config.source)
@ -296,7 +294,7 @@ class CommonBackend(BuildBackend):
seen_libs.add(lib)
os_libs.append(lib)
return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs,
return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs, \
shared_libs, os_libs, static_libs)
def _make_list_file(self, kind, objdir, objs, name):
@@ -334,8 +332,7 @@ class CommonBackend(BuildBackend):
return ref
def _handle_generated_sources(self, files):
self._generated_sources.update(mozpath.relpath(
f, self.environment.topobjdir) for f in files)
self._generated_sources.update(mozpath.relpath(f, self.environment.topobjdir) for f in files)
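Both spellings record each generated file objdir-relative. A quick illustration with hypothetical paths (mozpack.path mirrors os.path but always uses forward slashes):

    import mozpack.path as mozpath

    topobjdir = '/builds/worker/obj-firefox'
    header = '/builds/worker/obj-firefox/dist/include/nsIFoo.h'
    mozpath.relpath(header, topobjdir)  # 'dist/include/nsIFoo.h'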
def _handle_webidl_collection(self, webidls):
@@ -399,7 +396,7 @@ class CommonBackend(BuildBackend):
includeTemplate += (
'\n'
'#if defined(_WINDOWS_) && !defined(MOZ_WRAPPED_WINDOWS_H)\n'
'#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n' # noqa
'#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n'
'#error "%(cppfile)s included unwrapped windows.h"\n'
"#endif")
includeTemplate += (
@@ -414,7 +411,7 @@ class CommonBackend(BuildBackend):
'so it cannot be built in unified mode."\n'
'#undef INITGUID\n'
'#endif')
f.write('\n'.join(includeTemplate % {"cppfile": s} for
f.write('\n'.join(includeTemplate % { "cppfile": s } for
s in source_filenames))
def _write_unified_files(self, unified_source_mapping, output_directory,

View file

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import os
import sys
@@ -49,7 +49,7 @@ class BuildConfig(object):
# cache the compiled code as it can be reused
# we cache it the first time, or if the file changed
if path not in code_cache or code_cache[path][0] != mtime:
if not path in code_cache or code_cache[path][0] != mtime:
# Add config.status manually to sys.modules so it gets picked up by
# iter_modules_in_path() for automatic dependencies.
mod = ModuleType('config.status')
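The mtime comparison above is the entire cache policy: config.status is recompiled only when the file is new to the cache or has changed on disk. A minimal, self-contained sketch of the same idea (illustrative names, not the actual mozbuild code):

    import os
    from types import ModuleType

    code_cache = {}  # path -> (mtime, compiled code object)

    def exec_config_status(path):
        # Recompile only when the file is unseen or its mtime changed.
        mtime = os.path.getmtime(path)
        if path not in code_cache or code_cache[path][0] != mtime:
            with open(path) as fh:
                code_cache[path] = (mtime, compile(fh.read(), path, 'exec'))
        mod = ModuleType('config.status')
        exec(code_cache[path][1], mod.__dict__)
        return mod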
@@ -118,7 +118,7 @@ class ConfigEnvironment(object):
"""
def __init__(self, topsrcdir, topobjdir, defines=None,
non_global_defines=None, substs=None, source=None, mozconfig=None):
non_global_defines=None, substs=None, source=None, mozconfig=None):
if not source:
source = mozpath.join(topobjdir, 'config.status')
@@ -148,35 +148,20 @@ class ConfigEnvironment(object):
self.bin_suffix = self.substs.get('BIN_SUFFIX', '')
global_defines = [name for name in self.defines
if name not in self.non_global_defines]
self.substs["ACDEFINES"] = ' '.join(
[
'-D%s=%s' % (name, shell_quote(self.defines[name]).replace('$', '$$'))
for name in sorted(global_defines)
]
)
if not name in self.non_global_defines]
self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
shell_quote(self.defines[name]).replace('$', '$$'))
for name in sorted(global_defines)])
def serialize(name, obj):
if isinstance(obj, StringTypes):
return obj
if isinstance(obj, Iterable):
return ' '.join(obj)
raise Exception('Unhandled type %s for %s', type(obj), str(name))
self.substs['ALLSUBSTS'] = '\n'.join(
sorted([
'%s = %s' % (
name,
serialize(name, self.substs[name])
)
for name in self.substs if self.substs[name]
])
)
self.substs['ALLEMPTYSUBSTS'] = '\n'.join(
sorted([
'%s =' % name
for name in self.substs if not self.substs[name]
])
)
self.substs['ALLSUBSTS'] = '\n'.join(sorted(['%s = %s' % (name,
serialize(name, self.substs[name])) for name in self.substs if self.substs[name]]))
self.substs['ALLEMPTYSUBSTS'] = '\n'.join(sorted(['%s =' % name
for name in self.substs if not self.substs[name]]))
self.substs = ReadOnlyDict(self.substs)
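Both spellings of the ACDEFINES computation produce the same string: one -D flag per global define, shell-quoted, with $ doubled so make passes it through literally. A worked example, with shlex.quote standing in for mozbuild's shell_quote:

    import shlex

    defines = {'MOZILLA_CLIENT': 1, 'MOZ_DATA': '$(DIST)/data'}
    acdefines = ' '.join('-D%s=%s' % (name, shlex.quote(str(defines[name])).replace('$', '$$'))
                         for name in sorted(defines))
    # "-DMOZILLA_CLIENT=1 -DMOZ_DATA='$$(DIST)/data'"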
@@ -229,7 +214,7 @@ class ConfigEnvironment(object):
config = BuildConfig.from_config_status(path)
return ConfigEnvironment(config.topsrcdir, config.topobjdir,
config.defines, config.non_global_defines, config.substs, path)
config.defines, config.non_global_defines, config.substs, path)
class PartialConfigDict(object):
@@ -239,7 +224,6 @@ class PartialConfigDict(object):
similar for substs), where the value of FOO is delay-loaded until it is
needed.
"""
def __init__(self, config_statusd, typ, environ_override=False):
self._dict = {}
self._datadir = mozpath.join(config_statusd, typ)
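A minimal sketch of that delay-loading pattern, with one backing file per key, read on first access and memoized (hypothetical class, not the real PartialConfigDict):

    import os

    class LazyConfigDict(object):
        def __init__(self, datadir):
            self._datadir = datadir
            self._dict = {}

        def __getitem__(self, key):
            # Only touch the disk the first time a key is requested.
            if key not in self._dict:
                with open(os.path.join(self._datadir, key)) as fh:
                    self._dict[key] = fh.read().strip()
            return self._dict[key]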
@@ -354,7 +338,6 @@ class PartialConfigEnvironment(object):
intended to be used instead of the defines structure from config.status so
that scripts can depend directly on its value.
"""
def __init__(self, topobjdir):
config_statusd = mozpath.join(topobjdir, 'config.statusd')
self.substs = PartialConfigDict(config_statusd, 'substs', environ_override=True)
@@ -370,8 +353,8 @@ class PartialConfigEnvironment(object):
if name not in config['non_global_defines']
]
acdefines = ' '.join(['-D%s=%s' % (name,
shell_quote(config['defines'][name]).replace('$', '$$'))
for name in sorted(global_defines)])
shell_quote(config['defines'][name]).replace('$', '$$'))
for name in sorted(global_defines)])
substs['ACDEFINES'] = acdefines
all_defines = OrderedDict()

View file

@@ -2,18 +2,22 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import errno
import glob
import random
import os
import shutil
import subprocess
import types
from xml.sax.saxutils import quoteattr
import xml.etree.ElementTree as ET
from .common import CommonBackend
from ..frontend.data import (
ComputedFlags,
Defines,
)
from mozbuild.base import ExecutionSummary
@@ -22,7 +26,6 @@ from mozbuild.base import ExecutionSummary
# Open eclipse:
# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
class CppEclipseBackend(CommonBackend):
"""Backend that generates Cpp Eclipse project files.
"""
@@ -78,8 +81,7 @@ class CppEclipseBackend(CommonBackend):
# Note that unlike VS, Eclipse's indexer seems to crawl the headers and
# isn't picky about the local includes.
if isinstance(obj, ComputedFlags):
args = self._args_for_dirs.setdefault(
'tree/' + reldir, {'includes': [], 'defines': []})
args = self._args_for_dirs.setdefault('tree/' + reldir, {'includes': [], 'defines': []})
# use the same args for any objdirs we include:
if reldir == 'dom/bindings':
self._args_for_dirs.setdefault('generated-webidl', args)
@@ -103,8 +105,7 @@ class CppEclipseBackend(CommonBackend):
def consume_finished(self):
settings_dir = os.path.join(self._project_dir, '.settings')
launch_dir = os.path.join(self._project_dir, 'RunConfigurations')
workspace_settings_dir = os.path.join(
self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
workspace_settings_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
for dir_name in [self._project_dir, settings_dir, launch_dir, workspace_settings_dir, self._workspace_lang_dir]:
try:
@@ -128,25 +129,22 @@ class CppEclipseBackend(CommonBackend):
workspace_language_path = os.path.join(self._workspace_lang_dir, 'language.settings.xml')
with open(workspace_language_path, 'wb') as fh:
workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
workspace_lang_settings = workspace_lang_settings.replace(
"@COMPILER_FLAGS@", self._cxx + " " + self._cppflags)
workspace_lang_settings = workspace_lang_settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
fh.write(workspace_lang_settings)
self._write_launch_files(launch_dir)
core_resources_prefs_path = os.path.join(
workspace_settings_dir, 'org.eclipse.core.resources.prefs')
core_resources_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.resources.prefs')
with open(core_resources_prefs_path, 'wb') as fh:
fh.write(STATIC_CORE_RESOURCES_PREFS)
fh.write(STATIC_CORE_RESOURCES_PREFS);
core_runtime_prefs_path = os.path.join(
workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
core_runtime_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
with open(core_runtime_prefs_path, 'wb') as fh:
fh.write(STATIC_CORE_RUNTIME_PREFS)
fh.write(STATIC_CORE_RUNTIME_PREFS);
ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.ui.prefs')
with open(ui_prefs_path, 'wb') as fh:
fh.write(STATIC_UI_PREFS)
fh.write(STATIC_UI_PREFS);
cdt_ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.ui.prefs')
cdt_ui_prefs = STATIC_CDT_UI_PREFS
@@ -157,11 +155,10 @@ class CppEclipseBackend(CommonBackend):
XML_PREF_TEMPLATE = """<setting id\="@PREF_NAME@" value\="@PREF_VAL@"/>\\n"""
for line in FORMATTER_SETTINGS.splitlines():
[pref, val] = line.split("=")
cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@",
pref).replace("@PREF_VAL@", val)
cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@", pref).replace("@PREF_VAL@", val)
cdt_ui_prefs += "</profile>\\n</profiles>\\n"
with open(cdt_ui_prefs_path, 'wb') as fh:
fh.write(cdt_ui_prefs)
fh.write(cdt_ui_prefs);
cdt_core_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.core.prefs')
with open(cdt_core_prefs_path, 'wb') as fh:
@@ -171,11 +168,11 @@ class CppEclipseBackend(CommonBackend):
# as the active formatter all its prefs are set in this prefs file,
# so we need to add those now:
cdt_core_prefs += FORMATTER_SETTINGS
fh.write(cdt_core_prefs)
fh.write(cdt_core_prefs);
editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs")
editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs");
with open(editor_prefs_path, 'wb') as fh:
fh.write(EDITOR_SETTINGS)
fh.write(EDITOR_SETTINGS);
# Now import the project into the workspace
self._import_project()
@@ -191,7 +188,7 @@ class CppEclipseBackend(CommonBackend):
self._write_noindex()
try:
subprocess.check_call(
process = subprocess.check_call(
["eclipse", "-application", "-nosplash",
"org.eclipse.cdt.managedbuilder.core.headlessbuild",
"-data", self._workspace_dir, "-importAll", self._project_dir])
@@ -211,7 +208,7 @@ class CppEclipseBackend(CommonBackend):
def _write_noindex(self):
noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
with open(noindex_path, 'wb') as fh:
fh.write(NOINDEX_TEMPLATE)
fh.write(NOINDEX_TEMPLATE);
def _remove_noindex(self):
# Below we remove the config file that temporarily disabled the indexer
@@ -260,8 +257,7 @@ class CppEclipseBackend(CommonBackend):
dirsettings_template = LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER
# Add OS_COMPILE_CXXFLAGS args (same as OS_COMPILE_CFLAGS):
dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(
self.environment.topobjdir, 'dist/include/mozilla-config.h'))
dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/mozilla-config.h'))
dirsettings_template += add_define('MOZILLA_CLIENT', '1')
# Add EXTRA_INCLUDES args:
@@ -308,7 +304,7 @@ class CppEclipseBackend(CommonBackend):
# netwerk/sctp/src uses -U__APPLE__ on Mac
# XXX We should make this code smart enough to remove existing defines.
continue
d = d[2:] # get rid of leading "-D"
d = d[2:] # get rid of leading "-D"
name_value = d.split("=", 1)
name = name_value[0]
value = ""
@@ -318,8 +314,7 @@ class CppEclipseBackend(CommonBackend):
dirsettings += LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER
fh.write(dirsettings)
fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace(
"@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))
fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))
def _write_launch_files(self, launch_dir):
bin_dir = os.path.join(self.environment.topobjdir, 'dist')
@@ -339,25 +334,21 @@ class CppEclipseBackend(CommonBackend):
launch = launch.replace('@LAUNCH_ARGS@', '-P -no-remote')
fh.write(launch)
# TODO Add more launch configs (and delegate calls to mach)
#TODO Add more launch configs (and delegate calls to mach)
def _write_project(self, fh):
project = PROJECT_TEMPLATE
project = PROJECT_TEMPLATE;
project = project.replace('@PROJECT_NAME@', self._project_name)
project = project.replace('@PROJECT_TOPSRCDIR@', self.environment.topsrcdir)
project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(
self.environment.topobjdir, "ipc", "ipdl"))
project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(
self.environment.topobjdir, "dom", "bindings"))
project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(self.environment.topobjdir, "ipc", "ipdl"))
project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(self.environment.topobjdir, "dom", "bindings"))
fh.write(project)
def _write_cproject(self, fh):
cproject_header = CPROJECT_TEMPLATE_HEADER
cproject_header = cproject_header.replace(
'@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
cproject_header = cproject_header.replace(
'@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
cproject_header = cproject_header.replace('@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
cproject_header = cproject_header.replace('@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
fh.write(cproject_header)
fh.write(CPROJECT_TEMPLATE_FOOTER)
@@ -378,8 +369,8 @@ PROJECT_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
<buildCommand>
<name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
<triggers></triggers>
<arguments>
</arguments>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
@@ -477,7 +468,7 @@ CPROJECT_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="n
<folderInfo id="0.1674256904." name="/" resourcePath="">
<toolChain id="cdt.managedbuild.toolchain.gnu.cross.exe.debug.1276586933" name="Cross GCC" superClass="cdt.managedbuild.toolchain.gnu.cross.exe.debug">
<targetPlatform archList="all" binaryParser="" id="cdt.managedbuild.targetPlatform.gnu.cross.710759961" isAbstract="false" osList="all" superClass="cdt.managedbuild.targetPlatform.gnu.cross"/>
<builder arguments="--log-no-times build" buildPath="@PROJECT_TOPSRCDIR@" command="@MACH_COMMAND@" enableCleanBuild="false" incrementalBuildTarget="binaries" id="org.eclipse.cdt.build.core.settings.default.builder.1437267827" keepEnvironmentInBuildfile="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
<builder arguments="--log-no-times build" buildPath="@PROJECT_TOPSRCDIR@" command="@MACH_COMMAND@" enableCleanBuild="false" incrementalBuildTarget="binaries" id="org.eclipse.cdt.build.core.settings.default.builder.1437267827" keepEnvironmentInBuildfile="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
</toolChain>
</folderInfo>
"""
@@ -491,9 +482,9 @@ CPROJECT_TEMPLATE_FILEINFO = """ <fileInf
</fileInfo>
"""
CPROJECT_TEMPLATE_FOOTER = """
<sourceEntries>
<entry excluding="**/lib*|**/third_party/|tree/*.xcodeproj/|tree/.cargo/|tree/.vscode/|tree/build/|tree/extensions/|tree/gfx/angle/|tree/gfx/cairo/|tree/gfx/skia/skia/|tree/intl/icu/|tree/js/|tree/media/|tree/modules/freetype2|tree/modules/pdfium/|tree/netwerk/|tree/netwerk/sctp|tree/netwerk/srtp|tree/nsprpub/lib|tree/nsprpub/pr/src|tree/other-licenses/|tree/parser/|tree/python/|tree/security/nss/|tree/tools/" flags="VALUE_WORKSPACE_PATH" kind="sourcePath" name=""/>
</sourceEntries>
<sourceEntries>
<entry excluding="**/lib*|**/third_party/|tree/*.xcodeproj/|tree/.cargo/|tree/.vscode/|tree/build/|tree/extensions/|tree/gfx/angle/|tree/gfx/cairo/|tree/gfx/skia/skia/|tree/intl/icu/|tree/js/|tree/media/|tree/modules/freetype2|tree/modules/pdfium/|tree/netwerk/|tree/netwerk/sctp|tree/netwerk/srtp|tree/nsprpub/lib|tree/nsprpub/pr/src|tree/other-licenses/|tree/parser/|tree/python/|tree/security/nss/|tree/tools/" flags="VALUE_WORKSPACE_PATH" kind="sourcePath" name=""/>
</sourceEntries>
</configuration>
</storageModule>
<storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
@@ -535,38 +526,38 @@ WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" s
LANGUAGE_SETTINGS_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project>
<configuration id="0.1674256904" name="Default">
<extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
<provider class="org.eclipse.cdt.core.language.settings.providers.LanguageSettingsGenericProvider" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider" name="CDT User Setting Entries" prefer-non-shared="true" store-entries-with-project="true">
<language id="org.eclipse.cdt.core.g++">
<configuration id="0.1674256904" name="Default">
<extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
<provider class="org.eclipse.cdt.core.language.settings.providers.LanguageSettingsGenericProvider" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider" name="CDT User Setting Entries" prefer-non-shared="true" store-entries-with-project="true">
<language id="org.eclipse.cdt.core.g++">
"""
LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER = """ <resource project-relative-path="@RELATIVE_PATH@">
<entry kind="includeFile" name="@PREINCLUDE_FILE_PATH@">
<flag value="LOCAL"/>
</entry>
LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER = """ <resource project-relative-path="@RELATIVE_PATH@">
<entry kind="includeFile" name="@PREINCLUDE_FILE_PATH@">
<flag value="LOCAL"/>
</entry>
"""
LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE = """ <entry kind="includePath" name="@INCLUDE_PATH@">
<flag value="LOCAL"/>
</entry>
LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE = """ <entry kind="includePath" name="@INCLUDE_PATH@">
<flag value="LOCAL"/>
</entry>
"""
LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE = """ <entry kind="macro" name="@NAME@" value=@VALUE@/>
LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE = """ <entry kind="macro" name="@NAME@" value=@VALUE@/>
"""
LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER = """ </resource>
LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER = """ </resource>
"""
LANGUAGE_SETTINGS_TEMPLATE_FOOTER = """ </language>
</provider>
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-859273372804152468" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot; -std=c++11" prefer-non-shared="true" store-entries-with-project="true">
<language-scope id="org.eclipse.cdt.core.gcc"/>
<language-scope id="org.eclipse.cdt.core.g++"/>
</provider>
<provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
</extension>
</configuration>
LANGUAGE_SETTINGS_TEMPLATE_FOOTER = """ </language>
</provider>
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-859273372804152468" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot; -std=c++11" prefer-non-shared="true" store-entries-with-project="true">
<language-scope id="org.eclipse.cdt.core.gcc"/>
<language-scope id="org.eclipse.cdt.core.g++"/>
</provider>
<provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
</extension>
</configuration>
</project>
"""
@@ -624,21 +615,21 @@ undoHistorySize=200
"""
STATIC_CORE_RESOURCES_PREFS = """eclipse.preferences.version=1
STATIC_CORE_RESOURCES_PREFS="""eclipse.preferences.version=1
refresh.enabled=true
"""
STATIC_CORE_RUNTIME_PREFS = """eclipse.preferences.version=1
STATIC_CORE_RUNTIME_PREFS="""eclipse.preferences.version=1
content-types/org.eclipse.cdt.core.cxxSource/file-extensions=mm
content-types/org.eclipse.core.runtime.xml/file-extensions=xul
content-types/org.eclipse.wst.jsdt.core.jsSource/file-extensions=jsm
"""
STATIC_UI_PREFS = """eclipse.preferences.version=1
STATIC_UI_PREFS="""eclipse.preferences.version=1
showIntro=false
"""
STATIC_CDT_CORE_PREFS = """eclipse.preferences.version=1
STATIC_CDT_CORE_PREFS="""eclipse.preferences.version=1
indexer.updatePolicy=0
"""
@@ -806,7 +797,7 @@ org.eclipse.cdt.core.formatter.tabulation.size=2
org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
"""
STATIC_CDT_UI_PREFS = """eclipse.preferences.version=1
STATIC_CDT_UI_PREFS="""eclipse.preferences.version=1
buildConsoleLines=10000
Console.limitConsoleOutput=false
ensureNewlineAtEOF=false

View file

@@ -134,12 +134,10 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
elif isinstance(obj, GeneratedFile):
if obj.outputs:
first_output = mozpath.relpath(mozpath.join(
obj.objdir, obj.outputs[0]), self.environment.topobjdir)
first_output = mozpath.relpath(mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir)
for o in obj.outputs[1:]:
fullpath = mozpath.join(obj.objdir, o)
self._generated_files_map[mozpath.relpath(
fullpath, self.environment.topobjdir)] = first_output
self._generated_files_map[mozpath.relpath(fullpath, self.environment.topobjdir)] = first_output
# We don't actually handle GeneratedFiles, we just need to know if
# we can build multiple of them from a single make invocation in the
# faster backend.
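The mapping built above lets the faster backend route a dependency on any secondary output to the rule that produces the first one. For example (hypothetical paths):

    # A GeneratedFile in dom/bindings with outputs ['Foo.h', 'Foo.cpp'] yields
    #   _generated_files_map['dom/bindings/Foo.cpp'] = 'dom/bindings/Foo.h'
    # so depending on Foo.cpp reuses the single make invocation that writes Foo.h.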
@@ -196,6 +194,7 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
mk.create_rule([target]).add_dependencies(
'$(TOPOBJDIR)/%s' % d for d in deps)
# This is not great, but it's better to have some dependencies on these Python files.
python_deps = [
'$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py',
@@ -209,14 +208,7 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
for (merge, ref_file, l10n_file) in deps:
rule = mk.create_rule([merge]).add_dependencies(
[ref_file, l10n_file] + python_deps)
rule.add_commands(
[
'$(PYTHON) -m mozbuild.action.l10n_merge '
'--output {} --ref-file {} --l10n-file {}'.format(
merge, ref_file, l10n_file
)
]
)
rule.add_commands(['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
# Add a dummy rule for the l10n file since it might not exist.
mk.create_rule([l10n_file])
@@ -228,8 +220,7 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
'install_%s' % base.replace('/', '_'))) as fh:
install_manifest.write(fileobj=fh)
# For artifact builds only, write a single unified manifest
# for consumption by |mach watch|.
# For artifact builds only, write a single unified manifest for consumption by |mach watch|.
if self.environment.is_artifact_build:
unified_manifest = InstallManifest()
for base, install_manifest in self._install_manifests.iteritems():

View file

@@ -6,6 +6,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import argparse
import os
import sys
import subprocess
import which
@@ -19,11 +20,10 @@ from mach.decorators import (
Command,
)
@CommandProvider
class MachCommands(MachCommandBase):
@Command('ide', category='devenv',
description='Generate a project and launch an IDE.')
description='Generate a project and launch an IDE.')
@CommandArgument('ide', choices=['eclipse', 'visualstudio'])
@CommandArgument('args', nargs=argparse.REMAINDER)
def eclipse(self, ide, args):
@@ -57,12 +57,10 @@ class MachCommands(MachCommandBase):
if ide == 'eclipse':
eclipse_workspace_dir = self.get_eclipse_workspace_path()
subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
process = subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
elif ide == 'visualstudio':
visual_studio_workspace_dir = self.get_visualstudio_workspace_path()
subprocess.check_call(
['explorer.exe', visual_studio_workspace_dir]
)
process = subprocess.check_call(['explorer.exe', visual_studio_workspace_dir])
def get_eclipse_workspace_path(self):
from mozbuild.backend.cpp_eclipse import CppEclipseBackend

View file

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import logging
import os
@@ -51,6 +51,7 @@ from ..frontend.data import (
HostSources,
InstallationTarget,
JARManifest,
Library,
Linkable,
LocalInclude,
LocalizedFiles,
@@ -125,7 +126,7 @@ MOZBUILD_VARIABLES = [
b'TEST_DIRS',
b'TOOL_DIRS',
# XXX config/Makefile.in specifies this in a make invocation
# 'USE_EXTENSION_MANIFEST',
#'USE_EXTENSION_MANIFEST',
b'XPCSHELL_TESTS',
b'XPIDL_MODULE',
]
@@ -224,9 +225,9 @@ class BackendMakeFile(object):
self.fh.write('NONRECURSIVE_TARGETS += export\n')
self.fh.write('NONRECURSIVE_TARGETS_export += xpidl\n')
self.fh.write('NONRECURSIVE_TARGETS_export_xpidl_DIRECTORY = '
'$(DEPTH)/xpcom/xpidl\n')
'$(DEPTH)/xpcom/xpidl\n')
self.fh.write('NONRECURSIVE_TARGETS_export_xpidl_TARGETS += '
'export\n')
'export\n')
return self.fh.close()
@@ -247,7 +248,6 @@ class RecursiveMakeTraversal(object):
"""
SubDirectoryCategories = ['dirs', 'tests']
SubDirectoriesTuple = namedtuple('SubDirectories', SubDirectoryCategories)
class SubDirectories(SubDirectoriesTuple):
def __new__(self):
return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(self, [], [])
@@ -312,7 +312,7 @@ class RecursiveMakeTraversal(object):
if start_node != '':
deps[start_node] = prev_nodes
prev_nodes = (start_node,)
if start_node not in self._traversal:
if not start_node in self._traversal:
return prev_nodes
parallel_nodes = []
for node in parallel:
@@ -338,7 +338,7 @@ class RecursiveMakeTraversal(object):
current, parallel, sequential = self.call_filter(start, filter)
if current is not None:
yield start
if start not in self._traversal:
if not start in self._traversal:
return
for node in parallel:
for n in self.traverse(node, filter):
@@ -390,7 +390,7 @@ class RecursiveMakeBackend(CommonBackend):
self._test_manifests = {}
self.backend_input_files.add(mozpath.join(self.environment.topobjdir,
'config', 'autoconf.mk'))
'config', 'autoconf.mk'))
self._install_manifests = defaultdict(InstallManifest)
# The build system relies on some install manifests always existing
@@ -424,7 +424,7 @@ class RecursiveMakeBackend(CommonBackend):
if obj.objdir not in self._backend_files:
self._backend_files[obj.objdir] = \
BackendMakeFile(obj.srcdir, obj.objdir, obj.config,
obj.topsrcdir, self.environment.topobjdir, self.dry_run)
obj.topsrcdir, self.environment.topobjdir, self.dry_run)
return self._backend_files[obj.objdir]
def consume_object(self, obj):
@@ -603,30 +603,25 @@ class RecursiveMakeBackend(CommonBackend):
backend_file.write('GARBAGE += %s\n' % stub_file)
backend_file.write('EXTRA_MDDEPEND_FILES += %s\n' % dep_file)
backend_file.write((
"""{stub}: {script}{inputs}{backend}{force}
backend_file.write("""{stub}: {script}{inputs}{backend}{force}
\t$(REPORT_BUILD)
\t$(call py_action,file_generate,{locale}{script} """ # wrap for E501
"""{method} {output} $(MDDEPDIR)/{dep_file} {stub}{inputs}{flags})
\t$(call py_action,file_generate,{locale}{script} {method} {output} $(MDDEPDIR)/{dep_file} {stub}{inputs}{flags})
\t@$(TOUCH) $@
""").format(
stub=stub_file,
output=first_output,
dep_file=dep_file,
inputs=' ' + ' '.join(inputs) if inputs else '',
flags=' ' + ' '.join(shell_quote(f) for f in obj.flags) if obj.flags else '',
backend=' backend.mk' if obj.flags else '',
# Locale repacks repack multiple locales from a single configured objdir,
# so standard mtime dependencies won't work properly when the build is re-run
# with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
# in this situation, so simply force the generation to run in that case.
force=force,
locale='--locale=$(AB_CD) ' if obj.localized else '',
script=obj.script,
method=obj.method
)
)
""".format(stub=stub_file,
output=first_output,
dep_file=dep_file,
inputs=' ' + ' '.join(inputs) if inputs else '',
flags=' ' + ' '.join(shell_quote(f) for f in obj.flags) if obj.flags else '',
backend=' backend.mk' if obj.flags else '',
# Locale repacks repack multiple locales from a single configured objdir,
# so standard mtime dependencies won't work properly when the build is re-run
# with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
# in this situation, so simply force the generation to run in that case.
force=force,
locale='--locale=$(AB_CD) ' if obj.localized else '',
script=obj.script,
method=obj.method))
elif isinstance(obj, JARManifest):
self._no_skip['libs'].add(backend_file.relobjdir)
@@ -789,7 +784,7 @@ class RecursiveMakeBackend(CommonBackend):
main, all_deps = \
self._traversal.compute_dependencies(filter)
for dir, deps in all_deps.items():
if deps is not None or (dir in self._idl_dirs
if deps is not None or (dir in self._idl_dirs \
and tier == 'export'):
rule = root_deps_mk.create_rule(['%s/%s' % (dir, tier)])
if deps:
@@ -800,8 +795,8 @@ class RecursiveMakeBackend(CommonBackend):
if main:
rule.add_dependencies('%s/%s' % (d, tier) for d in main)
all_compile_deps = reduce(lambda x, y: x | y,
self._compile_graph.values()) if self._compile_graph else set()
all_compile_deps = reduce(lambda x,y: x|y,
self._compile_graph.values()) if self._compile_graph else set()
# Include the following as dependencies of the top recursion target for
# compilation:
# - nodes that are not dependended upon by anything. Typically, this
@@ -822,10 +817,8 @@ class RecursiveMakeBackend(CommonBackend):
# Directories containing rust compilations don't generally depend
# on other directories in the tree, so putting them first here will
# start them earlier in the build.
rule.add_dependencies(
chain((r for r in roots if mozpath.dirname(r) in self._rust_dirs),
(r for r in roots if mozpath.dirname(r) not in self._rust_dirs))
)
rule.add_dependencies(chain((r for r in roots if mozpath.dirname(r) in self._rust_dirs),
(r for r in roots if mozpath.dirname(r) not in self._rust_dirs)))
for target, deps in sorted(graph.items()):
if deps:
rule = root_deps_mk.create_rule([target])
@@ -908,8 +901,7 @@ class RecursiveMakeBackend(CommonBackend):
all_sources))
if include_curdir_build_rules:
makefile.add_statement(
'\n'
makefile.add_statement('\n'
'# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
'# Help it out by explicitly specifying dependencies.')
makefile.add_statement('all_absolute_unified_files := \\\n'
@@ -957,7 +949,7 @@ class RecursiveMakeBackend(CommonBackend):
stub = not os.path.exists(makefile_in)
if not stub:
self.log(logging.DEBUG, 'substitute_makefile',
{'path': makefile}, 'Substituting makefile: {path}')
{'path': makefile}, 'Substituting makefile: {path}')
self._makefile_in_count += 1
# In the export and libs tiers, we don't skip directories
@@ -969,7 +961,7 @@ class RecursiveMakeBackend(CommonBackend):
self._no_skip[tier].add(bf.relobjdir)
else:
self.log(logging.DEBUG, 'stub_makefile',
{'path': makefile}, 'Creating stub Makefile: {path}')
{'path': makefile}, 'Creating stub Makefile: {path}')
obj = self.Substitution()
obj.output_path = makefile
@@ -984,7 +976,7 @@ class RecursiveMakeBackend(CommonBackend):
# XPI_PKGNAME or INSTALL_EXTENSION_ID can't be skipped and
# must run during the 'tools' tier.
for t in (b'XPI_PKGNAME', b'INSTALL_EXTENSION_ID',
b'tools'):
b'tools'):
if t not in content:
continue
if t == b'tools' and not re.search('(?:^|\s)tools.*::', content, re.M):
@@ -992,13 +984,13 @@ class RecursiveMakeBackend(CommonBackend):
if objdir == self.environment.topobjdir:
continue
self._no_skip['tools'].add(mozpath.relpath(objdir,
self.environment.topobjdir))
self.environment.topobjdir))
# Directories with a Makefile containing a check target
# can't be skipped and must run during the 'check' tier.
if re.search('(?:^|\s)check.*::', content, re.M):
self._no_skip['check'].add(mozpath.relpath(objdir,
self.environment.topobjdir))
self.environment.topobjdir))
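The `(?:^|\s)check.*::` pattern matches a make double-colon `check` rule at the start of a line or after whitespace. A tiny illustration of what it accepts and rejects:

    import re

    pattern = re.compile(r'(?:^|\s)check.*::', re.M)
    pattern.search('check::\n\techo run tests\n') is not None   # True
    pattern.search('libs:: check-sync-dirs\n') is not None      # False: no '::' after 'check'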
# Detect any Makefile.ins that contain variables on the
# moz.build-only list
@@ -1075,10 +1067,10 @@ class RecursiveMakeBackend(CommonBackend):
return (mozpath.relpath(d.translated, base) for d in dirs)
if obj.dirs:
fh.write('DIRS := %s\n' % ' '.join(relativize(backend_file.objdir, obj.dirs)))
self._traversal.add(
backend_file.relobjdir, dirs=relativize(self.environment.topobjdir, obj.dirs)
)
fh.write('DIRS := %s\n' % ' '.join(
relativize(backend_file.objdir, obj.dirs)))
self._traversal.add(backend_file.relobjdir,
dirs=relativize(self.environment.topobjdir, obj.dirs))
# The directory needs to be registered whether subdirectories have been
# registered or not.
@@ -1102,10 +1094,7 @@ class RecursiveMakeBackend(CommonBackend):
if obj.target and not obj.is_custom():
backend_file.write('FINAL_TARGET = $(DEPTH)/%s\n' % (obj.target))
else:
backend_file.write(
'FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),'
'$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n'
)
backend_file.write('FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')
if not obj.enabled:
backend_file.write('NO_DIST_INSTALL := 1\n')
@@ -1120,7 +1109,8 @@ class RecursiveMakeBackend(CommonBackend):
self._install_manifests['dist_include'].add_optional_exists('%s.h' % stem)
for module in manager.modules:
build_files.add_optional_exists(mozpath.join('.deps', '%s.pp' % module))
build_files.add_optional_exists(mozpath.join('.deps',
'%s.pp' % module))
modules = manager.modules
xpt_modules = sorted(modules.keys())
@@ -1160,9 +1150,9 @@ class RecursiveMakeBackend(CommonBackend):
obj = self.Substitution()
obj.output_path = mozpath.join(self.environment.topobjdir, 'config',
'makefiles', 'xpidl', 'Makefile')
'makefiles', 'xpidl', 'Makefile')
obj.input_path = mozpath.join(self.environment.topsrcdir, 'config',
'makefiles', 'xpidl', 'Makefile.in')
'makefiles', 'xpidl', 'Makefile.in')
obj.topsrcdir = self.environment.topsrcdir
obj.topobjdir = self.environment.topobjdir
obj.config = self.environment
@@ -1246,7 +1236,7 @@ class RecursiveMakeBackend(CommonBackend):
# Much of the logic in this function could be moved to CommonBackend.
for source in obj.source_relpaths:
self.backend_input_files.add(mozpath.join(obj.topsrcdir,
source))
source))
# Don't allow files to be defined multiple times unless it is allowed.
# We currently allow duplicates for non-test files or test files if
@@ -1261,7 +1251,7 @@ class RecursiveMakeBackend(CommonBackend):
for base, pattern, dest in obj.pattern_installs:
try:
self._install_manifests['_test_files'].add_pattern_link(base,
pattern, dest)
pattern, dest)
except ValueError:
if not obj.dupe_manifest:
raise
@@ -1274,7 +1264,7 @@ class RecursiveMakeBackend(CommonBackend):
raise
m = self._test_manifests.setdefault(obj.flavor,
(obj.install_prefix, set()))
(obj.install_prefix, set()))
m[1].add(obj.manifest_obj_relpath)
try:
@@ -1305,14 +1295,12 @@ class RecursiveMakeBackend(CommonBackend):
def _process_per_source_flag(self, per_source_flag, backend_file):
for flag in per_source_flag.flags:
backend_file.write('%s_FLAGS += %s\n' %
(mozpath.basename(per_source_flag.file_name), flag))
backend_file.write('%s_FLAGS += %s\n' % (mozpath.basename(per_source_flag.file_name), flag))
def _process_computed_flags(self, computed_flags, backend_file):
for var, flags in computed_flags.get_flags():
backend_file.write(
'COMPUTED_%s += %s\n' % (var,
' '.join(make_quote(shell_quote(f)) for f in flags)))
backend_file.write('COMPUTED_%s += %s\n' % (var,
' '.join(make_quote(shell_quote(f)) for f in flags)))
def _process_non_default_target(self, libdef, target_name, backend_file):
backend_file.write("%s:: %s\n" % (libdef.output_category, target_name))
@@ -1369,13 +1357,14 @@ class RecursiveMakeBackend(CommonBackend):
else:
target_name = obj.KIND
return '%s/%s' % (mozpath.relpath(obj.objdir,
self.environment.topobjdir), target_name)
self.environment.topobjdir), target_name)
def _process_linked_libraries(self, obj, backend_file):
def pretty_relpath(lib, name):
return os.path.normpath(mozpath.join(mozpath.relpath(lib.objdir, obj.objdir),
name))
topobjdir = mozpath.normsep(obj.topobjdir)
# This will create the node even if there aren't any linked libraries.
build_target = self._build_target_for_obj(obj)
self._compile_graph[build_target]
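That bare subscript works because the compile graph is a default dict: merely looking up a key inserts it, which registers the build target as a node even when it has no edges yet. A sketch, with a plain defaultdict standing in for mozbuild's OrderedDefaultDict:

    from collections import defaultdict

    graph = defaultdict(set)
    graph['toolkit/library/target']   # the lookup alone creates the node
    dict(graph)                       # {'toolkit/library/target': set()}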
@@ -1425,7 +1414,7 @@ class RecursiveMakeBackend(CommonBackend):
# incorrect list file format to the host compiler as well as when
# creating an archive with AR, which doesn't understand list files.
if (objs == obj.objs and not isinstance(obj, (HostLibrary, StaticLibrary)) or
isinstance(obj, StaticLibrary) and obj.no_expand_lib):
isinstance(obj, StaticLibrary) and obj.no_expand_lib):
backend_file.write_once('%s_OBJS := %s\n' % (obj.name,
objs_ref))
if profile_gen_objs:
@@ -1530,8 +1519,8 @@ class RecursiveMakeBackend(CommonBackend):
if f.startswith('/') or isinstance(f, AbsolutePath):
basepath, wild = os.path.split(f.full_path)
if '*' in basepath:
raise Exception("Wildcards are only supported in the filename part"
" of srcdir-relative or absolute paths.")
raise Exception("Wildcards are only supported in the filename part of "
"srcdir-relative or absolute paths.")
install_manifest.add_pattern_link(basepath, wild, path)
else:
@@ -1662,7 +1651,7 @@ class RecursiveMakeBackend(CommonBackend):
def _write_manifests(self, dest, manifests):
man_dir = mozpath.join(self.environment.topobjdir, '_build_manifests',
dest)
dest)
for k, manifest in manifests.items():
with self._write_file(mozpath.join(man_dir, k)) as fh:
@@ -1699,7 +1688,7 @@ class RecursiveMakeBackend(CommonBackend):
pp.context.update(extra)
if not pp.context.get('autoconfmk', ''):
pp.context['autoconfmk'] = 'autoconf.mk'
pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n')
pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n');
pp.handleLine(b'DEPTH := @DEPTH@\n')
pp.handleLine(b'topobjdir := @topobjdir@\n')
pp.handleLine(b'topsrcdir := @top_srcdir@\n')
@@ -1743,19 +1732,18 @@ class RecursiveMakeBackend(CommonBackend):
rule.add_commands([
'$(RM) $@',
'$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
'$< -o $@)'
'$< -o $@)'
])
mk.add_statement('ALL_IPDLSRCS := %s %s' % (' '.join(sorted_nonstatic_ipdl_basenames),
' '.join(sorted_static_ipdl_sources)))
' '.join(sorted_static_ipdl_sources)))
self._add_unified_build_rules(mk, unified_ipdl_cppsrcs_mapping,
unified_files_makefile_variable='CPPSRCS')
# Preprocessed ipdl files are generated in ipdl_dir.
mk.add_statement(
'IPDLDIRS := %s %s' % (ipdl_dir, ' '.join(sorted(set(mozpath.dirname(p)
for p in sorted_static_ipdl_sources)))))
mk.add_statement('IPDLDIRS := %s %s' % (ipdl_dir, ' '.join(sorted(set(mozpath.dirname(p)
for p in sorted_static_ipdl_sources)))))
with self._write_file(mozpath.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:
mk.dump(ipdls, removal_guard=False)
@@ -1764,7 +1752,7 @@ class RecursiveMakeBackend(CommonBackend):
webidls, expected_build_output_files,
global_define_files):
include_dir = mozpath.join(self.environment.topobjdir, 'dist',
'include')
'include')
for f in expected_build_output_files:
if f.startswith(include_dir):
self._install_manifests['dist_include'].add_optional_exists(
@@ -1795,12 +1783,12 @@ class RecursiveMakeBackend(CommonBackend):
# which would modify content in the source directory.
'$(RM) $@',
'$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
'$< -o $@)'
'$< -o $@)'
])
self._add_unified_build_rules(mk,
unified_source_mapping,
unified_files_makefile_variable='unified_binding_cpp_files')
unified_source_mapping,
unified_files_makefile_variable='unified_binding_cpp_files')
webidls_mk = mozpath.join(bindings_dir, 'webidlsrcs.mk')
with self._write_file(webidls_mk) as fh:

View file

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import cPickle as pickle
from collections import defaultdict

View file

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import os
import gzip
@@ -12,9 +12,12 @@ import sys
import shutil
import mozpack.path as mozpath
from mozbuild import shellutil
from mozbuild.analyze.graph import Graph
from mozbuild.analyze.hg import Report
from mozbuild.base import MozbuildObject
from mozbuild.backend.base import PartialBackend, HybridBackend
from mozbuild.backend.recursivemake import RecursiveMakeBackend
from mozbuild.mozconfig import MozconfigLoader
from mozbuild.shellutil import quote as shell_quote
from mozbuild.util import OrderedDefaultDict
@@ -56,6 +59,7 @@ from ..frontend.data import (
)
from ..util import (
FileAvoidWrite,
expand_variables,
)
from ..frontend.context import (
AbsolutePath,
@@ -143,8 +147,7 @@ class BackendTupfile(object):
else:
caret_text = flags
self.write((': %(inputs)s%(extra_inputs)s |> %(display)s%(cmd)s |> '
'%(outputs)s%(output_group)s\n') % {
self.write(': %(inputs)s%(extra_inputs)s |> %(display)s%(cmd)s |> %(outputs)s%(output_group)s\n' % {
'inputs': ' '.join(inputs),
'extra_inputs': ' | ' + ' '.join(extra_inputs) if extra_inputs else '',
'display': '^%s^ ' % caret_text if caret_text else '',
@@ -269,8 +272,7 @@ class TupBackend(CommonBackend):
self._rust_cmds = set()
self._built_in_addons = set()
self._built_in_addons_file = \
'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'
self._built_in_addons_file = 'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'
def _output_group(self, label):
if label:
@@ -330,9 +332,9 @@ class TupBackend(CommonBackend):
tiers.set_tiers(('tup',))
tiers.begin_tier('tup')
status = config.run_process(args=args,
line_handler=output.on_line,
ensure_exit_code=False,
append_env=self._get_mozconfig_env(config))
line_handler=output.on_line,
ensure_exit_code=False,
append_env=self._get_mozconfig_env(config))
tiers.finish_tier('tup')
if not status and self.environment.substs.get('MOZ_AUTOMATION'):
config.log_manager.enable_unstructured()
@@ -473,6 +475,7 @@ class TupBackend(CommonBackend):
# accurate once we start building libraries in their final locations.
inputs = objs + static_libs + shared_libs + [self._shlibs]
rust_linked = [l for l in prog.linked_libraries
if isinstance(l, RustLibrary)]
@@ -510,10 +513,12 @@ class TupBackend(CommonBackend):
display='LINK %o'
)
def _gen_host_programs(self, backend_file):
for p in backend_file.host_programs:
self._gen_host_program(backend_file, p)
def _gen_host_program(self, backend_file, prog):
_, _, _, _, extra_libs, _ = self._expand_libs(prog)
objs = prog.objs
@@ -554,6 +559,7 @@ class TupBackend(CommonBackend):
display='LINK %o'
)
def _gen_static_library(self, backend_file):
ar = [
backend_file.environment.substs['AR'],
@@ -578,6 +584,7 @@ class TupBackend(CommonBackend):
display='AR %o'
)
def consume_object(self, obj):
"""Write out build files necessary to build with tup."""
@@ -670,13 +677,11 @@ class TupBackend(CommonBackend):
for objdir, backend_file in sorted(self._backend_files.items()):
backend_file.gen_sources_rules([self._installed_files])
for var, gen_method in (
(backend_file.shared_lib, self._gen_shared_library),
(backend_file.static_lib and backend_file.static_lib.no_expand_lib,
self._gen_static_library),
(backend_file.programs, self._gen_programs),
(backend_file.host_programs, self._gen_host_programs)
):
for var, gen_method in ((backend_file.shared_lib, self._gen_shared_library),
(backend_file.static_lib and backend_file.static_lib.no_expand_lib,
self._gen_static_library),
(backend_file.programs, self._gen_programs),
(backend_file.host_programs, self._gen_host_programs)):
if var:
backend_file.export_shell()
backend_file.export_icecc()
@@ -689,9 +694,8 @@ class TupBackend(CommonBackend):
pass
with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
acdefines_flags = ' '.join(
['-D%s=%s' % (name, shell_quote(value))
for (name, value) in sorted(self.environment.acdefines.iteritems())])
acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value))
for (name, value) in sorted(self.environment.acdefines.iteritems())])
# TODO: AB_CD only exists in Makefiles at the moment.
acdefines_flags += ' -DAB_CD=en-US'
@@ -725,13 +729,12 @@ class TupBackend(CommonBackend):
# Ask the user to figure out where to run 'tup init' before
# continuing.
raise Exception("Please run `tup init --no-sync` in a common "
"ancestor directory of your objdir and srcdir, possibly "
"%s. To reduce file scanning overhead, this directory "
"should contain the fewest files possible that are not "
"necessary for this build." % tup_base_dir)
"ancestor directory of your objdir and srcdir, possibly "
"%s. To reduce file scanning overhead, this directory "
"should contain the fewest files possible that are not "
"necessary for this build." % tup_base_dir)
tup = self.environment.substs.get('TUP', 'tup')
self._cmd.run_process(cwd=tup_base_dir, log_name='tup',
args=[tup, 'init', '--no-sync'])
self._cmd.run_process(cwd=tup_base_dir, log_name='tup', args=[tup, 'init', '--no-sync'])
def _get_cargo_flags(self, obj):
@@ -810,11 +813,13 @@ class TupBackend(CommonBackend):
# Enable link-time optimization for release builds.
cargo_library_flags = []
if not obj.config.substs.get('DEVELOPER_OPTIONS') and not obj.config.substs.get(
'MOZ_DEBUG_RUST'
):
if (not obj.config.substs.get('DEVELOPER_OPTIONS') and
not obj.config.substs.get('MOZ_DEBUG_RUST')):
cargo_library_flags += ['-C', 'lto']
rust_build_home = mozpath.join(self.environment.topobjdir,
'toolkit/library/rust')
def display_name(invocation):
output_str = ''
if invocation['outputs']:
@@ -868,9 +873,7 @@ class TupBackend(CommonBackend):
invocation['full-deps'] = set()
if os.path.basename(invocation['program']) in [
'build-script-build', 'build-script-main'
]:
if os.path.basename(invocation['program']) in ['build-script-build', 'build-script-main']:
out_dir = invocation['env']['OUT_DIR']
for output in cargo_extra_outputs.get(shortname, []):
outputs.append(os.path.join(out_dir, output))
@@ -981,15 +984,16 @@ class TupBackend(CommonBackend):
obj.name),
output_group)
for val in enumerate(invocations):
_process(*val)
def _gen_rust_rules(self, obj, backend_file):
cargo_flags = self._get_cargo_flags(obj)
cargo_env = self._get_cargo_env(obj, backend_file)
output_lines = []
def accumulate_output(line):
output_lines.append(line)
@@ -1010,6 +1014,7 @@ class TupBackend(CommonBackend):
self._gen_cargo_rules(obj, cargo_plan, cargo_env, output_group)
self.backend_input_files |= set(cargo_plan['inputs'])
def _process_generated_file(self, backend_file, obj):
if obj.script and obj.method:
backend_file.export_shell()
@@ -1020,8 +1025,8 @@ class TupBackend(CommonBackend):
obj.script,
obj.method,
obj.outputs[0],
'%s.pp' % obj.outputs[0], # deps file required
'unused', # deps target is required
'%s.pp' % obj.outputs[0], # deps file required
'unused', # deps target is required
])
full_inputs = [f.full_path for f in obj.inputs]
cmd.extend(full_inputs)
@@ -1120,9 +1125,8 @@ class TupBackend(CommonBackend):
if f.startswith('/') or isinstance(f, AbsolutePath):
basepath, wild = os.path.split(f.full_path)
if '*' in basepath:
raise Exception(
"Wildcards are only supported in the filename part of "
"srcdir-relative or absolute paths.")
raise Exception("Wildcards are only supported in the filename part of "
"srcdir-relative or absolute paths.")
# TODO: This is only needed for Windows, so we can
# skip this for now.
@@ -1151,6 +1155,7 @@ class TupBackend(CommonBackend):
finder = FileFinder(prefix)
for p, _ in finder.find(f.full_path[len(prefix):]):
install_dir = prefix[len(obj.srcdir) + 1:]
output = p
if f.target_basename and '*' not in f.target_basename:
output = mozpath.join(f.target_basename, output)
@@ -1158,12 +1163,10 @@ class TupBackend(CommonBackend):
output=mozpath.join(output_dir, output),
output_group=output_group)
else:
backend_file.symlink_rule(
f.full_path, output=f.target_basename, output_group=output_group)
backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=output_group)
else:
if (self.environment.is_artifact_build and
any(mozpath.match(f.target_basename, p)
for p in self._compile_env_gen_files)):
any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)):
# If we have an artifact build we never would have generated this file,
# so do not attempt to install it.
continue
@@ -1172,19 +1175,18 @@ class TupBackend(CommonBackend):
f.target_basename)
gen_backend_file = self._get_backend_file(f.context.relobjdir)
if gen_backend_file.requires_delay([f]):
gen_backend_file.delayed_installed_files.append(
(f.full_path, output, output_group))
gen_backend_file.delayed_installed_files.append((f.full_path, output, output_group))
else:
gen_backend_file.symlink_rule(f.full_path, output=output,
output_group=output_group)
def _process_final_target_pp_files(self, obj, backend_file):
for i, (path, files) in enumerate(obj.files.walk()):
self._add_features(obj.install_target, path)
for f in files:
self._preprocess(backend_file, f.full_path,
destdir=mozpath.join(self.environment.topobjdir,
obj.install_target, path),
destdir=mozpath.join(self.environment.topobjdir, obj.install_target, path),
target=f.target_basename)
def _process_computed_flags(self, obj, backend_file):
@@ -1313,8 +1315,7 @@ class TupBackend(CommonBackend):
cmd.extend(['-I%s' % d for d in ipdldirs])
cmd.extend(sorted_ipdl_sources)
outputs = ['IPCMessageTypeName.cpp', mozpath.join(
outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']
outputs = ['IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']
for filename in sorted_ipdl_sources:
filepath, ext = os.path.splitext(filename)
@@ -1378,5 +1379,4 @@ class TupBackend(CommonBackend):
backend_file.sources['.cpp'].extend(sorted(global_define_files))
test_backend_file = self._get_backend_file('dom/bindings/test')
test_backend_file.sources['.cpp'].extend(
sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
test_backend_file.sources['.cpp'].extend(sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))

View file

@@ -5,11 +5,12 @@
# This file contains a build backend for generating Visual Studio project
# files.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import errno
import os
import re
import types
import uuid
from xml.dom import getDOMImplementation
@@ -33,25 +34,21 @@ from mozbuild.base import ExecutionSummary
MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
def get_id(name):
return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
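uuid5 is a name-based (SHA-1) UUID, so the ID is a pure function of the project name; regenerating the Visual Studio files never churns project GUIDs. For instance:

    get_id('target_full') == get_id('target_full')   # True on every run, every machine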
def visual_studio_product_to_solution_version(version):
if version == '2017':
return '12.00', '15'
else:
raise Exception('Unknown version seen: %s' % version)
def visual_studio_product_to_platform_toolset_version(version):
if version == '2017':
return 'v141'
else:
raise Exception('Unknown version seen: %s' % version)
class VisualStudioBackend(CommonBackend):
"""Generate Visual Studio project files.
@@ -103,7 +100,7 @@ class VisualStudioBackend(CommonBackend):
elif isinstance(obj, UnifiedSources):
# XXX we should be letting CommonBackend.consume_object call this
# for us instead.
self._process_unified_sources(obj)
self._process_unified_sources(obj);
elif isinstance(obj, Library):
self._libs_to_paths[obj.basename] = reldir
@@ -136,9 +133,9 @@ class VisualStudioBackend(CommonBackend):
out_proj_dir = os.path.join(self._out_dir, self._projsubdir)
projects = self._write_projects_for_sources(self._libs_to_paths,
"library", out_proj_dir)
"library", out_proj_dir)
projects.update(self._write_projects_for_sources(self._progs_to_paths,
"binary", out_proj_dir))
"binary", out_proj_dir))
# Generate projects that can be used to build common targets.
for target in ('export', 'binaries', 'tools', 'full'):
@@ -147,16 +144,15 @@ class VisualStudioBackend(CommonBackend):
if target != 'full':
command += ' %s' % target
project_id = self._write_vs_project(
out_proj_dir, basename, target, build_command=command,
project_id = self._write_vs_project(out_proj_dir, basename, target,
build_command=command,
clean_command='$(SolutionDir)\\mach.bat build clean')
projects[basename] = (project_id, basename, target)
# A project that can be used to regenerate the visual studio projects.
basename = 'target_vs'
project_id = self._write_vs_project(
out_proj_dir, basename, 'visual-studio',
project_id = self._write_vs_project(out_proj_dir, basename, 'visual-studio',
build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
projects[basename] = (project_id, basename, 'visual-studio')
@@ -194,7 +190,7 @@ class VisualStudioBackend(CommonBackend):
headers = [t[0] for t in finder.find('*.h')]
headers = [os.path.normpath(os.path.join('$(TopSrcDir)',
path, f)) for f in headers]
path, f)) for f in headers]
includes = [
os.path.join('$(TopSrcDir)', path),
@@ -205,7 +201,7 @@ class VisualStudioBackend(CommonBackend):
includes.append('$(TopObjDir)\\dist\\include')
for v in ('NSPR_CFLAGS', 'NSS_CFLAGS', 'MOZ_JPEG_CFLAGS',
'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
if not config:
break
@@ -227,7 +223,7 @@ class VisualStudioBackend(CommonBackend):
else:
defines.append('%s=%s' % (k, v))
debugger = None
debugger=None
if prefix == 'binary':
if item.startswith(self.environment.substs['MOZ_APP_NAME']):
app_args = '-no-remote -profile $(TopObjDir)\\tmp\\profile-default'
@@ -239,8 +235,7 @@ class VisualStudioBackend(CommonBackend):
basename = '%s_%s' % (prefix, item)
project_id = self._write_vs_project(
out_dir, basename, item,
project_id = self._write_vs_project(out_dir, basename, item,
includes=includes,
forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
defines=defines,
@@ -421,7 +416,7 @@ class VisualStudioBackend(CommonBackend):
fh.write(b'$env:%s = "%s"\r\n' % (k, v))
relpath = os.path.relpath(self.environment.topsrcdir,
self.environment.topobjdir).replace('\\', '/')
self.environment.topobjdir).replace('\\', '/')
fh.write(b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath)
fh.write(b'$bashargs = $bashargs + $args\r\n')
@@ -430,9 +425,9 @@ class VisualStudioBackend(CommonBackend):
fh.write(b'$procargs = "-c", $expanded\r\n')
fh.write(b'Start-Process -WorkingDirectory $env:TOPOBJDIR '
b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
b'-ArgumentList $procargs '
b'-Wait -NoNewWindow\r\n')
b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
b'-ArgumentList $procargs '
b'-Wait -NoNewWindow\r\n')
def _write_mach_batch(self, fh):
"""Write out a batch script that builds the tree.
@@ -450,34 +445,34 @@ class VisualStudioBackend(CommonBackend):
# relative paths, since munging c:\ to /c/ is slightly more
# complicated.
relpath = os.path.relpath(self.environment.topsrcdir,
self.environment.topobjdir).replace('\\', '/')
self.environment.topobjdir).replace('\\', '/')
# We go through mach because it has the logic for choosing the most
# appropriate build tool.
fh.write(b'"%%MOZILLABUILD%%\\msys\\bin\\bash" '
b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)
b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)
def _write_vs_project(self, out_dir, basename, name, **kwargs):
root = '%s.vcxproj' % basename
project_id = get_id(basename.encode('utf-8'))
with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
project_id, name = VisualStudioBackend.write_vs_project(
fh, self._version, project_id, name, **kwargs)
project_id, name = VisualStudioBackend.write_vs_project(fh,
self._version, project_id, name, **kwargs)
with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' %
MSBUILD_NAMESPACE)
MSBUILD_NAMESPACE)
fh.write('</Project>\r\n')
return project_id
@staticmethod
def write_vs_project(fh, version, project_id, name, includes=[],
forced_includes=[], defines=[],
build_command=None, clean_command=None,
debugger=None, headers=[], sources=[]):
forced_includes=[], defines=[],
build_command=None, clean_command=None,
debugger=None, headers=[], sources=[]):
impl = getDOMImplementation()
doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)
@@ -515,8 +510,7 @@ class VisualStudioBackend(CommonBackend):
rn.appendChild(doc.createTextNode('mozilla'))
pts = pg.appendChild(doc.createElement('PlatformToolset'))
pts.appendChild(doc.createTextNode(
visual_studio_product_to_platform_toolset_version(version)))
pts.appendChild(doc.createTextNode(visual_studio_product_to_platform_toolset_version(version)))
i = project.appendChild(doc.createElement('Import'))
i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')

View file

@@ -37,6 +37,7 @@ from .mozconfig import (
)
from .pythonutil import find_python3_executable
from .util import (
ReadOnlyNamespace,
memoize,
memoized_property,
)
@@ -52,14 +53,12 @@ def ancestors(path):
break
path = newpath
def samepath(path1, path2):
if hasattr(os.path, 'samefile'):
return os.path.samefile(path1, path2)
return os.path.normcase(os.path.realpath(path1)) == \
os.path.normcase(os.path.realpath(path2))
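samepath is a small portability shim: os.path.samefile compares the underlying files via stat where it exists, and the fallback compares case-normalized, symlink-resolved paths. Usage on a POSIX system:

    samepath('/usr', '/usr/../usr')   # True: both resolve to the same directory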
class BadEnvironmentException(Exception):
"""Base class for errors raised when the build environment is not sane."""
@ -70,7 +69,6 @@ class BuildEnvironmentNotFoundException(BadEnvironmentException):
class ObjdirMismatchException(BadEnvironmentException):
"""Raised when the current dir is an objdir and doesn't match the mozconfig."""
def __init__(self, objdir1, objdir2):
self.objdir1 = objdir1
self.objdir2 = objdir2
@ -87,7 +85,6 @@ class MozbuildObject(ProcessExecutionMixin):
running processes, etc. This class provides that functionality. Other
modules can inherit from this class to obtain this functionality easily.
"""
def __init__(self, topsrcdir, settings, log_manager, topobjdir=None,
mozconfig=MozconfigLoader.AUTODETECT):
"""Create a new Mozbuild object instance.
@ -178,8 +175,7 @@ class MozbuildObject(ProcessExecutionMixin):
topobjdir = mozpath.normsep(os.path.normpath(topobjdir))
if topsrcdir == topobjdir:
raise BadEnvironmentException('The object directory appears '
'to be the same as your source directory (%s). This build '
'configuration is not supported.' % topsrcdir)
@ -195,7 +191,7 @@ class MozbuildObject(ProcessExecutionMixin):
if '@CONFIG_GUESS@' in topobjdir:
topobjdir = topobjdir.replace('@CONFIG_GUESS@',
self.resolve_config_guess())
if not os.path.isabs(topobjdir):
topobjdir = os.path.abspath(os.path.join(self.topsrcdir, topobjdir))
@ -256,13 +252,10 @@ class MozbuildObject(ProcessExecutionMixin):
@property
def virtualenv_manager(self):
if self._virtualenv_manager is None:
self._virtualenv_manager = VirtualenvManager(
self.topsrcdir,
self.topobjdir,
os.path.join(self.topobjdir, '_virtualenvs', 'init'),
sys.stdout,
os.path.join(self.topsrcdir, 'build', 'virtualenv_packages.txt')
)
return self._virtualenv_manager
@ -497,6 +490,7 @@ class MozbuildObject(ProcessExecutionMixin):
return BuildReader(config, finder=finder)
@memoized_property
def python3(self):
"""Obtain info about a Python 3 executable.
@ -548,10 +542,10 @@ class MozbuildObject(ProcessExecutionMixin):
if substs['OS_ARCH'] == 'Darwin':
if substs['MOZ_BUILD_APP'] == 'xulrunner':
stem = os.path.join(stem, 'XUL.framework')
else:
stem = os.path.join(stem, substs['MOZ_MACBUNDLE_NAME'], 'Contents',
'MacOS')
elif where == 'default':
stem = os.path.join(stem, 'bin')
@ -584,14 +578,13 @@ class MozbuildObject(ProcessExecutionMixin):
notifier = which.which('terminal-notifier')
except which.WhichError:
raise Exception('Install terminal-notifier to get '
'a notification when the build finishes.')
self.run_process([notifier, '-title',
'Mozilla Build System', '-group', 'mozbuild',
'-message', msg], ensure_exit_code=False)
elif sys.platform.startswith('win'):
from ctypes import Structure, windll, POINTER, sizeof
from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT
class FLASHWINDOW(Structure):
_fields_ = [("cbSize", UINT),
("hwnd", HANDLE),
@ -611,21 +604,21 @@ class MozbuildObject(ProcessExecutionMixin):
return
params = FLASHWINDOW(sizeof(FLASHWINDOW),
console,
FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
FlashWindowEx(params)
else:
try:
notifier = which.which('notify-send')
except which.WhichError:
raise Exception('Install notify-send (usually part of '
'the libnotify package) to get a notification when '
'the build finishes.')
self.run_process([notifier, '--app-name=Mozilla Build System',
'Mozilla Build System', msg], ensure_exit_code=False)
except Exception as e:
self.log(logging.WARNING, 'notifier-failed',
{'error': e.message}, 'Notification center failed: {error}')
def _ensure_objdir_exists(self):
if os.path.isdir(self.statedir):
@ -653,10 +646,10 @@ class MozbuildObject(ProcessExecutionMixin):
return PathArgument(arg, self.topsrcdir, self.topobjdir)
def _run_make(self, directory=None, filename=None, target=None, log=True,
srcdir=False, allow_parallel=True, line_handler=None,
append_env=None, explicit_env=None, ignore_errors=False,
ensure_exit_code=0, silent=True, print_directory=True,
pass_thru=False, num_jobs=0, keep_going=False):
"""Invoke make.
directory -- Relative directory to look for Makefile in.
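# A hypothetical invocation, to make the keywords above concrete (all
# argument values invented for illustration):
#   self._run_make(directory='browser/app', target='libs',
#                  num_jobs=4, silent=False, ensure_exit_code=False)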
@ -798,11 +791,11 @@ class MozbuildObject(ProcessExecutionMixin):
if xcode_lisense_error:
raise Exception('Xcode requires accepting the license agreement.\n'
'Please run Xcode and accept the license agreement.')
if self._is_windows():
raise Exception('Could not find a suitable make implementation.\n'
'Please use MozillaBuild 1.9 or newer')
else:
raise Exception('Could not find a suitable make implementation.')
@ -827,12 +820,13 @@ class MozbuildObject(ProcessExecutionMixin):
"""
return cls(self.topsrcdir, self.settings, self.log_manager,
topobjdir=self.topobjdir)
def _activate_virtualenv(self):
self.virtualenv_manager.ensure()
self.virtualenv_manager.activate()
def _set_log_level(self, verbose):
self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)
@ -841,8 +835,7 @@ class MozbuildObject(ProcessExecutionMixin):
pipenv = os.path.join(self.virtualenv_manager.bin_path, 'pipenv')
if not os.path.exists(pipenv):
for package in ['certifi', 'pipenv', 'six', 'virtualenv', 'virtualenv-clone']:
path = os.path.normpath(os.path.join(
self.topsrcdir, 'third_party/python', package))
self.virtualenv_manager.install_pip_package(path, vendored=True)
return pipenv
@ -868,10 +861,9 @@ class MachCommandBase(MozbuildObject):
detect_virtualenv_mozinfo = True
if hasattr(context, 'detect_virtualenv_mozinfo'):
detect_virtualenv_mozinfo = getattr(context,
'detect_virtualenv_mozinfo')
try:
dummy = MozbuildObject.from_environment(
cwd=context.cwd,
detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
topsrcdir = dummy.topsrcdir
topobjdir = dummy._topobjdir
@ -889,12 +881,12 @@ class MachCommandBase(MozbuildObject):
pass
except ObjdirMismatchException as e:
print('Ambiguous object directory detected. We detected that '
'both %s and %s could be object directories. This is '
'typically caused by having a mozconfig pointing to a '
'different object directory from the current working '
'directory. To solve this problem, ensure you do not have a '
'default mozconfig in searched paths.' % (e.objdir1,
e.objdir2))
sys.exit(1)
except MozconfigLoadException as e:
@ -911,7 +903,7 @@ class MachCommandBase(MozbuildObject):
sys.exit(1)
MozbuildObject.__init__(self, topsrcdir, context.settings,
context.log_manager, topobjdir=topobjdir)
self._mach_context = context

View file

@ -7,8 +7,6 @@
# of version 1.2. Its license (MPL2) is contained in repo root LICENSE file.
# Please make modifications there where possible.
from __future__ import absolute_import, print_function
from itertools import islice
@ -56,3 +54,4 @@ def chunkify(things, this_chunk, chunks):
return things[start:end]
except TypeError:
return islice(things, start, end)

View file

@ -2,11 +2,11 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from collections import defaultdict
import json
import os
import re
import urlparse
from mach.config import ConfigSettings
from mach.logging import LoggingManager
@ -22,12 +22,10 @@ from mozpack.files import PreprocessedFile
from mozpack.manifests import InstallManifest
import mozpack.path as mozpath
from .manifest_handler import ChromeManifestHandler
from manifest_handler import ChromeManifestHandler
_line_comment_re = re.compile('^//@line (\d+) "(.+)"$')
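# A minimal sketch of what this pattern matches (input string invented):
#   m = _line_comment_re.match('//@line 42 "/src/foo.js"')
#   m.groups() == ('42', '/src/foo.js')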
def generate_pp_info(path, topsrcdir):
with open(path) as fh:
# (start, end) -> (included_source, start)
@ -59,8 +57,6 @@ def generate_pp_info(path, topsrcdir):
# This build backend assumes the build has already happened, as it parses
# built preprocessed files to generate data mapping them back to the original sources.
class ChromeMapBackend(CommonBackend):
def _init(self):
CommonBackend._init(self)
@ -120,11 +116,9 @@ class ChromeMapBackend(CommonBackend):
# A map from url prefixes to objdir directories:
# { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... }
# A map of overrides.
# A map from objdir paths to sourcedir paths, and an object storing mapping
# information for preprocessed files:
# { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js":
# [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", {} ],
# ... }
# An object containing build configuration information.
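# Taken together, chrome-map.json presumably looks roughly like this
# (all entries invented for illustration):
# [{"chrome://mozapps/content/": ["dist/bin/chrome/toolkit/content/mozapps"]},
#  {"chrome://foo/content/bar.xul": "chrome://foo/content/baz.xul"},
#  {"dist/bin/.../aboutSessionRestore.js": ["$topsrcdir/.../aboutSessionRestore.js", {}]},
#  {"MOZ_BUILD_APP": "browser"}]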
outputfile = os.path.join(self.environment.topobjdir, 'chrome-map.json')
with self._write_file(outputfile) as fh:

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from argparse import ArgumentParser
import json
import os
@ -17,8 +15,7 @@ from six import viewitems
from mozpack.chrome.manifest import parse_manifest
import mozpack.path as mozpath
from .manifest_handler import ChromeManifestHandler
from manifest_handler import ChromeManifestHandler
class LcovRecord(object):
__slots__ = ("test_name",
@ -33,7 +30,6 @@ class LcovRecord(object):
"lines",
"line_count",
"covered_line_count")
def __init__(self):
self.functions = {}
self.function_exec_counts = {}
@ -68,16 +64,14 @@ class LcovRecord(object):
# Re-calculate summaries after generating or splitting a record.
self.function_count = len(self.functions.keys())
# Function records may have moved between files, so filter here.
self.function_exec_counts = {
fn_name: count for fn_name, count in viewitems(self.function_exec_counts)
if fn_name in self.functions.values()}
self.covered_function_count = len([c for c in self.function_exec_counts.values() if c])
self.line_count = len(self.lines)
self.covered_line_count = len([c for c, _ in self.lines.values() if c])
self.branch_count = len(self.branches)
self.covered_branch_count = len([c for c in self.branches.values() if c])
class RecordRewriter(object):
# Helper class for rewriting/splitting individual lcov records according
# to what the preprocessor did.
@ -170,8 +164,7 @@ class RecordRewriter(object):
def rewrite_record(self, record, pp_info):
# Rewrite the lines in the given record according to preprocessor info
# and split to additional records when pp_info has included file info.
self._current_pp_info = dict(
[(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
self._ranges = sorted(self._current_pp_info.keys())
self._additions = {}
self._rewrite_lines(record)
@ -185,7 +178,6 @@ class RecordRewriter(object):
r.resummarize()
return generated_records
class LcovFile(object):
# Simple parser/pretty-printer for lcov format.
# lcov parsing based on http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php
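# For orientation, a minimal lcov record (file name invented; the SF/DA/LF/LH
# keywords come from the format documented at the URL above):
#   SF:/src/foo.js
#   DA:10,1
#   LF:1
#   LH:1
#   end_of_record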
@ -412,7 +404,6 @@ class LcovFile(object):
class UrlFinderError(Exception):
pass
class UrlFinder(object):
# Given a "chrome://" or "resource://" url, uses data from the UrlMapBackend
# and install manifests to find a path to the source file and the corresponding
@ -578,10 +569,10 @@ class UrlFinder(object):
if app_name in url:
if omnijar_name in url:
# e.g. file:///home/worker/workspace/build/application/firefox/omni.ja!/components/MainProcessSingleton.js # noqa
parts = url_obj.path.split(omnijar_name + '!', 1)
elif '.xpi!' in url:
# e.g. file:///home/worker/workspace/build/application/firefox/browser/features/e10srollout@mozilla.org.xpi!/bootstrap.js # noqa
parts = url_obj.path.split('.xpi!', 1)
else:
# We don't know how to handle this jar: path, so return it to the
@ -589,10 +580,7 @@ class UrlFinder(object):
return url_obj.path, None
dir_parts = parts[0].rsplit(app_name + '/', 1)
url = mozpath.normpath(
mozpath.join(self.topobjdir, 'dist',
'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/'))
)
elif '.xpi!' in url:
# This matching mechanism is quite brittle and based on examples seen in the wild.
# There's no rule to match the XPI name to the path in dist/xpi-stage.
@ -602,8 +590,7 @@ class UrlFinder(object):
addon_name = addon_name[:-len('-test@mozilla.org')]
elif addon_name.endswith('@mozilla.org'):
addon_name = addon_name[:-len('@mozilla.org')]
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
'xpi-stage', addon_name, parts[1].lstrip('/')))
elif url_obj.scheme == 'file' and os.path.isabs(url_obj.path):
path = url_obj.path
if not os.path.isfile(path):
@ -620,12 +607,10 @@ class UrlFinder(object):
self._final_mapping[url] = result
return result
class LcovFileRewriter(object):
# Class for partial parses of LCOV format and rewriting to resolve urls
# and preprocessed file lines.
def __init__(self, chrome_map_path, appdir='dist/bin/browser/',
gredir='dist/bin/', extra_chrome_manifests=[]):
self.url_finder = UrlFinder(chrome_map_path, appdir, gredir, extra_chrome_manifests)
self.pp_rewriter = RecordRewriter()
@ -646,11 +631,9 @@ class LcovFileRewriter(object):
return None
source_file, pp_info = res
# We can't assert that the file exists here, because we don't have the source
# checkout available on test machines. We can bring back this assertion when
# bug 1432287 is fixed.
# assert os.path.isfile(source_file), "Couldn't find mapped source file %s at %s!" % (
# url, source_file)
found_valid[0] = True
@ -674,44 +657,28 @@ class LcovFileRewriter(object):
def main():
parser = ArgumentParser(
description="Given a set of gcov .info files produced "
"by spidermonkey's code coverage, re-maps file urls "
"back to source files and lines in preprocessed files "
"back to their original locations."
)
parser.add_argument(
"--chrome-map-path", default="chrome-map.json", help="Path to the chrome-map.json file."
)
parser.add_argument(
"--app-dir",
default="dist/bin/browser/",
help="Prefix of the appdir in use. This is used to map "
"urls starting with resource:///. It may differ by "
"app, but defaults to the valid value for firefox.",
)
parser.add_argument(
"--gre-dir",
default="dist/bin/",
help="Prefix of the gre dir in use. This is used to map "
"urls starting with resource://gre. It may differ by "
"app, but defaults to the valid value for firefox.",
)
parser.add_argument(
"--output-suffix", default=".out", help="The suffix to append to output files."
)
parser.add_argument(
"--extra-chrome-manifests",
nargs='+',
help="Paths to files containing extra chrome registration.",
)
parser.add_argument(
"--output-file",
default="",
help="The output file where the results are merged. Leave empty to make the rewriter not "
"merge files.",
)
parser.add_argument("files", nargs='+', help="The set of files to process.")
parser = ArgumentParser(description="Given a set of gcov .info files produced "
"by spidermonkey's code coverage, re-maps file urls "
"back to source files and lines in preprocessed files "
"back to their original locations.")
parser.add_argument("--chrome-map-path", default="chrome-map.json",
help="Path to the chrome-map.json file.")
parser.add_argument("--app-dir", default="dist/bin/browser/",
help="Prefix of the appdir in use. This is used to map "
"urls starting with resource:///. It may differ by "
"app, but defaults to the valid value for firefox.")
parser.add_argument("--gre-dir", default="dist/bin/",
help="Prefix of the gre dir in use. This is used to map "
"urls starting with resource://gre. It may differ by "
"app, but defaults to the valid value for firefox.")
parser.add_argument("--output-suffix", default=".out",
help="The suffix to append to output files.")
parser.add_argument("--extra-chrome-manifests", nargs='+',
help="Paths to files containing extra chrome registration.")
parser.add_argument("--output-file", default="",
help="The output file where the results are merged. Leave empty to make the rewriter not merge files.")
parser.add_argument("files", nargs='+',
help="The set of files to process.")
args = parser.parse_args()
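# A hypothetical command line matching the options defined above (script and
# input file names invented):
#   python lcov_rewriter.py --chrome-map-path chrome-map.json \
#       --output-suffix .out jsdcov_artifacts.info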
@ -727,6 +694,5 @@ def main():
rewriter.rewrite_files(files, args.output_file, args.output_suffix)
if __name__ == '__main__':
main()

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from collections import defaultdict
try:

View file

@ -5,7 +5,6 @@
from __future__ import absolute_import, print_function
import argparse
import errno
import sys
import json
import buildconfig
@ -18,7 +17,6 @@ from mozpack.manifests import (
)
import mozpack.path as mozpath
def describe_install_manifest(manifest, dest_dir):
try:
manifest = InstallManifest(manifest)
@ -77,6 +75,5 @@ def cli(args=sys.argv[1:]):
return package_coverage_data(args.root, args.output_file)
if __name__ == '__main__':
sys.exit(cli())

View file

@ -4,7 +4,9 @@
# This modules provides functionality for dealing with code completion.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import os
from mach.decorators import (
CommandArgument,
@ -24,9 +26,9 @@ class Introspection(MachCommandBase):
"""Instropection commands."""
@Command('compileflags', category='devenv',
description='Display the compilation flags for a given source file')
@CommandArgument('what', default=None,
help='Source file to display compilation flags for')
def compileflags(self, what):
from mozbuild.util import resolve_target_to_make
from mozbuild.compilation import util
@ -37,7 +39,7 @@ class Introspection(MachCommandBase):
path_arg = self._wrap_path_argument(what)
make_dir, make_target = resolve_target_to_make(self.topobjdir,
path_arg.relpath())
if make_dir is None and make_target is None:
return 1

View file

@ -4,8 +4,6 @@
# This modules provides functionality for dealing with code completion.
from __future__ import absolute_import, print_function
import os
import types
@ -15,8 +13,11 @@ from mozbuild.frontend.data import (
Sources,
GeneratedSources,
DirectoryTraversal,
Linkable,
LocalInclude,
PerSourceFlag,
VariablePassthru,
SimpleProgram,
)
from mozbuild.shellutil import (
quote as shell_quote,
@ -175,8 +176,8 @@ class CompileDBBackend(CommonBackend):
if canonical_suffix not in self.COMPILERS:
return
db = self._db.setdefault((objdir, filename, unified),
cenv.substs[self.COMPILERS[canonical_suffix]].split() +
['-o', '/dev/null', '-c'])
reldir = reldir or mozpath.relpath(objdir, cenv.topobjdir)
def append_var(name):

Просмотреть файл

@ -2,20 +2,17 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import os
from mozbuild import shellutil
def check_top_objdir(topobjdir):
top_make = os.path.join(topobjdir, 'Makefile')
if not os.path.exists(top_make):
print('Your tree has not been built yet. Please run '
'|mach build| with no arguments.')
return False
return True
def get_build_vars(directory, cmd):
build_vars = {}
@ -30,14 +27,13 @@ def get_build_vars(directory, cmd):
try:
old_logger = cmd.log_manager.replace_terminal_handler(None)
cmd._run_make(directory=directory, target='showbuild', log=False,
print_directory=False, allow_parallel=False, silent=True,
line_handler=on_line)
finally:
cmd.log_manager.replace_terminal_handler(old_logger)
return build_vars
def sanitize_cflags(flags):
# We filter out -Xclang arguments as clang based tools typically choke on
# passing these flags down to the clang driver. -Xclang tells the clang

View file

@ -4,7 +4,7 @@
# This modules provides functionality for dealing with compiler warnings.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import errno
import json
@ -87,22 +87,22 @@ class CompilerWarning(dict):
return func(self._cmpkey(), other._cmpkey())
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __hash__(self):
"""Define so this can exist inside a set, etc."""
@ -132,7 +132,6 @@ class WarningsDatabase(object):
Callers should periodically prune old, invalid warnings from the database
by calling prune(). A good time to do this is at the end of a build.
"""
def __init__(self):
"""Create an empty database."""
self._files = {}
@ -305,7 +304,6 @@ class WarningsCollector(object):
output from the compiler. Therefore, it can maintain state to parse
multi-line warning messages.
"""
def __init__(self, cb, objdir=None):
"""Initialize a new collector.

Просмотреть файл

@ -10,6 +10,7 @@ from __future__ import absolute_import, print_function
import logging
import os
import subprocess
import sys
import time
@ -79,14 +80,14 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
if 'CONFIG_FILES' in os.environ:
raise Exception('Using the CONFIG_FILES environment variable is not '
'supported.')
if 'CONFIG_HEADERS' in os.environ:
raise Exception('Using the CONFIG_HEADERS environment variable is not '
'supported.')
if not os.path.isabs(topsrcdir):
raise Exception('topsrcdir must be defined as an absolute directory: '
'%s' % topsrcdir)
default_backends = ['RecursiveMake']
default_backends = (substs or {}).get('BUILD_BACKENDS', ['RecursiveMake'])
@ -111,8 +112,8 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
topobjdir = os.path.abspath('.')
env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
non_global_defines=non_global_defines, substs=substs,
source=source, mozconfig=mozconfig)
with FileAvoidWrite(os.path.join(topobjdir, 'mozinfo.json')) as f:
write_mozinfo(f, env, os.environ)

View file

@ -49,7 +49,6 @@ class ConfigureError(Exception):
class SandboxDependsFunction(object):
'''Sandbox-visible representation of @depends functions.'''
def __init__(self, unsandboxed):
self._or = unsandboxed.__or__
self._and = unsandboxed.__and__
@ -234,7 +233,6 @@ class CombinedDependsFunction(DependsFunction):
def __ne__(self, other):
return not self == other
class SandboxedGlobal(dict):
'''Identifiable dict type for use as function global'''
@ -359,12 +357,10 @@ class ConfigureSandbox(dict):
# that can't be converted to ascii. Make our log methods robust to this
# by detecting the encoding that a producer is likely to have used.
encoding = getpreferredencoding()
def wrapped_log_method(logger, key):
method = getattr(logger, key)
if not encoding:
return method
def wrapped(*args, **kwargs):
out_args = [
arg.decode(encoding) if isinstance(arg, str) else arg
@ -665,7 +661,7 @@ class ConfigureSandbox(dict):
when = self._normalize_when(kwargs.get('when'), 'option')
args = [self._resolve(arg) for arg in args]
kwargs = {k: self._resolve(v) for k, v in kwargs.iteritems()
if k != 'when'}
option = Option(*args, **kwargs)
if when:
self._conditions[option] = when

View file

@ -6,13 +6,12 @@
# to a given compilation unit. This is used as a helper to find a bug in some
# versions of GNU ld.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import subprocess
import sys
import re
def get_range_for(compilation_unit, debug_info):
'''Returns the range offset for a given compilation unit
in a given debug_info.'''
@ -33,7 +32,6 @@ def get_range_for(compilation_unit, debug_info):
ranges = nfo.rsplit(None, 1)[1]
return None
def get_range_length(range, debug_ranges):
'''Returns the number of items in the range starting at the
given offset.'''
@ -44,9 +42,8 @@ def get_range_length(range, debug_ranges):
length += 1
return length
def main(bin, compilation_unit):
p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
sections = re.split('\n(Contents of the|The section) ', out)
debug_info = [s for s in sections if s.startswith('.debug_info')]

View file

@ -13,7 +13,7 @@
# will be used from shell, we just print the two assignments and evaluate
# them from shell.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import os
import subprocess
@ -21,14 +21,12 @@ import re
re_for_ld = re.compile('.*\((.*)\).*')
def parse_readelf_line(x):
"""Return the version from a readelf line that looks like:
0x00ec: Rev: 1 Flags: none Index: 8 Cnt: 2 Name: GLIBCXX_3.4.6
"""
return x.split(':')[-1].split('_')[-1].strip()
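# Sanity check against the docstring's sample line:
#   parse_readelf_line('0x00ec: Rev: 1 Flags: none Index: 8 Cnt: 2 Name: GLIBCXX_3.4.6')
#   -> '3.4.6'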
def parse_ld_line(x):
"""Parse a line from the output of ld -t. The output of gold is just
the full path, gnu ld prints "-lstdc++ (path)".
@ -38,13 +36,11 @@ def parse_ld_line(x):
return t.groups()[0].strip()
return x.strip()
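# Illustrative inputs for both linkers (paths invented):
#   parse_ld_line('-lstdc++ (/usr/lib/libstdc++.so)') -> '/usr/lib/libstdc++.so'
#   parse_ld_line('/usr/lib/libstdc++.so') -> '/usr/lib/libstdc++.so'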
def split_ver(v):
"""Covert the string '1.2.3' into the list [1,2,3]
"""
return [int(x) for x in v.split('.')]
def cmp_ver(a, b):
"""Compare versions in the form 'a.b.c'
"""
@ -53,19 +49,17 @@ def cmp_ver(a, b):
return i - j
return 0
def encode_ver(v):
"""Encode the version as a single number.
"""
t = split_ver(v)
return t[0] << 16 | t[1] << 8 | t[2]
def find_version(args):
"""Given a base command line for a compiler, find the version of the
libstdc++ it uses.
"""
args += ['-shared', '-Wl,-t']
p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
candidates = [x for x in p.stdout if 'libstdc++.so' in x]
candidates = [x for x in candidates if 'skipping incompatible' not in x]
@ -83,10 +77,9 @@ candidates:
p = subprocess.Popen(['readelf', '-V', libstdcxx], stdout=subprocess.PIPE)
versions = [parse_readelf_line(x)
for x in p.stdout.readlines() if 'Name: GLIBCXX' in x]
last_version = sorted(versions, cmp=cmp_ver)[-1]
return (last_version, encode_ver(last_version))
if __name__ == '__main__':
"""Given the value of environment variable CXX or HOST_CXX, find the
version of the libstdc++ it uses.

View file

@ -237,6 +237,7 @@ class LintSandbox(ConfigureSandbox):
name, default))
self._raise_from(e, frame.f_back if frame else None)
def _check_help_for_option_with_func_default(self, option, *args, **kwargs):
default = kwargs['default']
@ -284,7 +285,6 @@ class LintSandbox(ConfigureSandbox):
def imports_impl(self, _import, _from=None, _as=None):
wrapper = super(LintSandbox, self).imports_impl(_import, _from=_from, _as=_as)
def decorator(func):
self._has_imports.add(func)
return wrapper(func)

View file

@ -46,7 +46,7 @@ def disassemble_as_iter(co):
c = code[i]
op = ord(c)
opname = dis.opname[op]
i += 1
if op >= dis.HAVE_ARGUMENT:
arg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg
extended_arg = 0

View file

@ -106,7 +106,6 @@ class PositiveOptionValue(OptionValue):
in the form of a tuple for when values are given to the option (in the form
--option=value[,value2...].
'''
def __nonzero__(self):
return True
@ -425,7 +424,6 @@ class CommandLineHelper(object):
Extra options can be added afterwards through API calls. For those,
conflicting values will raise an exception.
'''
def __init__(self, environ=os.environ, argv=sys.argv):
self._environ = dict(environ)
self._args = OrderedDict()

View file

@ -14,7 +14,6 @@ from collections import deque
from contextlib import contextmanager
from distutils.version import LooseVersion
def getpreferredencoding():
# locale._parse_localename makes locale.getpreferredencoding
# return None when LC_ALL is C, instead of e.g. 'US-ASCII' or
@ -30,7 +29,6 @@ def getpreferredencoding():
encoding = 'utf-8'
return encoding
class Version(LooseVersion):
'''A simple subclass of distutils.version.LooseVersion.
Adds attributes for `major`, `minor`, `patch` for the first three
@ -42,14 +40,13 @@ class Version(LooseVersion):
v.minor == 2
v.patch == 0
'''
def __init__(self, version):
# Can't use super, LooseVersion's base class is not a new-style class.
LooseVersion.__init__(self, version)
# Take the first three integer components, stopping at the first
# non-integer and padding the rest with zeroes.
(self.major, self.minor, self.patch) = list(itertools.chain(
itertools.takewhile(lambda x: isinstance(x, int), self.version),
(0, 0, 0)))[:3]
def __cmp__(self, other):
@ -74,7 +71,6 @@ class ConfigureOutputHandler(logging.Handler):
printed out. This feature is only enabled under the `queue_debug` context
manager.
'''
def __init__(self, stdout=sys.stdout, stderr=sys.stderr, maxlen=20):
super(ConfigureOutputHandler, self).__init__()
@ -155,7 +151,7 @@ class ConfigureOutputHandler(logging.Handler):
stream.flush()
except (KeyboardInterrupt, SystemExit, IOError):
raise
except Exception:
self.handleError(record)
@contextmanager
@ -197,7 +193,6 @@ class LineIO(object):
'''File-like class that sends each line of the written data to a callback
(without carriage returns).
'''
def __init__(self, callback, errors='strict'):
self._callback = callback
self._buf = ''
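# A sketch of the intended behavior (assuming write() buffers partial lines
# as the class docstring describes):
#   out = LineIO(lambda line: print(line))
#   out.write('first\nsecond')  # callback sees 'first'; 'second' stays buffered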

View file

@ -2,10 +2,11 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import errno
import getpass
import glob
import io
import json
import logging
@ -78,7 +79,7 @@ Preferences.
INSTALL_TESTS_CLOBBER = ''.join([TextWrapper().fill(line) + '\n' for line in
'''
The build system was unable to install tests because the CLOBBER file has \
been updated. This means if you edited any test files, your changes may not \
be picked up until a full/clobber build is performed.
@ -106,7 +107,7 @@ this is a clobber bug and not due to local changes.
BuildOutputResult = namedtuple('BuildOutputResult',
('warning', 'state_changed', 'message'))
class TierStatus(object):
@ -172,9 +173,9 @@ class TierStatus(object):
def add_resources_to_dict(self, entry, start=None, end=None, phase=None):
"""Helper function to append resource information to a dict."""
cpu_percent = self.resources.aggregate_cpu_percent(start=start,
end=end, phase=phase, per_cpu=False)
cpu_times = self.resources.aggregate_cpu_times(start=start, end=end,
phase=phase, per_cpu=False)
io = self.resources.aggregate_io(start=start, end=end, phase=phase)
if cpu_percent is None:
@ -304,7 +305,7 @@ class BuildMonitor(MozbuildObject):
try:
warning = self._warnings_collector.process_line(line)
message = line
except Exception:
pass
return BuildOutputResult(warning, False, message)
@ -338,8 +339,8 @@ class BuildMonitor(MozbuildObject):
json.dump(self.resources.as_dict(), fh, indent=2)
except Exception as e:
self.log(logging.WARNING, 'build_resources_error',
{'msg': str(e)},
'Exception when writing resource usage file: {msg}')
def _get_finder_cpu_usage(self):
"""Obtain the CPU usage of the Finder app on OS X.
@ -437,9 +438,9 @@ class BuildMonitor(MozbuildObject):
return None
cpu_percent = self.resources.aggregate_cpu_percent(phase=None,
per_cpu=False)
cpu_times = self.resources.aggregate_cpu_times(phase=None,
per_cpu=False)
io = self.resources.aggregate_io(phase=None)
o = dict(
@ -461,9 +462,9 @@ class BuildMonitor(MozbuildObject):
for usage in self.resources.range_usage():
cpu_percent = self.resources.aggregate_cpu_percent(usage.start,
usage.end, per_cpu=False)
cpu_times = self.resources.aggregate_cpu_times(usage.start,
usage.end, per_cpu=False)
entry = dict(
start=usage.start,
@ -473,10 +474,11 @@ class BuildMonitor(MozbuildObject):
)
self.tiers.add_resources_to_dict(entry, start=usage.start,
end=usage.end)
o['resources'].append(entry)
# If the imports for this file ran before the in-tree virtualenv
# was bootstrapped (for instance, for a clobber build in automation),
# psutil might not be available.
@ -522,8 +524,8 @@ class BuildMonitor(MozbuildObject):
sin /= 1048576
sout /= 1048576
self.log(logging.WARNING, 'swap_activity',
{'sin': sin, 'sout': sout},
'Swap in/out (MB): {sin}/{sout}')
def ccache_stats(self):
ccache_stats = None
@ -546,7 +548,6 @@ class TerminalLoggingHandler(logging.Handler):
This class should probably live elsewhere, like the mach core. Consider
this a proving ground for its usefulness.
"""
def __init__(self):
logging.Handler.__init__(self)
@ -682,6 +683,7 @@ class BuildOutputManager(OutputManager):
# collection child process hasn't been told to stop.
self.monitor.stop_resource_recording()
def on_line(self, line):
warning, state_changed, message = self.monitor.on_line(line)
@ -742,7 +744,7 @@ class StaticAnalysisOutputManager(OutputManager):
if warning:
self.log(logging.INFO, 'compiler_warning', warning,
'Warning: {flag} in {filename}: {message}')
if relevant:
self.log(logging.INFO, 'build_output', {'line': line}, '{line}')
@ -784,7 +786,7 @@ class CCacheStats(object):
STATS_KEYS = [
# (key, description)
# Refer to stats.c in ccache project for all the descriptions.
('stats_zeroed', 'stats zero time'), # Old name prior to ccache 3.4
('stats_zeroed', 'stats zeroed'),
('stats_updated', 'stats updated'),
('cache_hit_direct', 'cache hit (direct)'),
@ -901,10 +903,7 @@ class CCacheStats(object):
return int(numeric * unit)
def hit_rate_message(self):
return ('ccache (direct) hit rate: {:.1%}; (preprocessed) hit rate: {:.1%};'
' miss rate: {:.1%}'.format(
*self.hit_rates()
))
def hit_rates(self):
direct = self._values['cache_hit_direct']
@ -1006,11 +1005,11 @@ class BuildDriver(MozbuildObject):
if directory is not None and not what:
print('Can only use -C/--directory with an explicit target '
'name.')
return 1
if directory is not None:
disable_extra_make_dependencies = True
directory = mozpath.normsep(directory)
if directory.startswith('/'):
directory = directory[1:]
@ -1105,7 +1104,7 @@ class BuildDriver(MozbuildObject):
else:
make_dir, make_target = \
resolve_target_to_make(self.topobjdir,
path_arg.relpath())
if make_dir is None and make_target is None:
return 1
@ -1146,12 +1145,10 @@ class BuildDriver(MozbuildObject):
# could potentially be fixed if the build monitor were more
# intelligent about encountering undefined state.
no_build_status = b'1' if make_dir is not None else b''
status = self._run_make(directory=make_dir, target=make_target,
line_handler=output.on_line, log=False, print_directory=False,
ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
append_env={b'NO_BUILDSTATUS_MESSAGES': no_build_status},
keep_going=keep_going)
if status != 0:
@ -1207,8 +1204,8 @@ class BuildDriver(MozbuildObject):
# in these directories.
pathToThirdparty = os.path.join(self.topsrcdir,
"tools",
"rewriting",
"ThirdPartyPaths.txt")
"rewriting",
"ThirdPartyPaths.txt")
if os.path.exists(pathToThirdparty):
with open(pathToThirdparty) as f:
@ -1287,8 +1284,7 @@ class BuildDriver(MozbuildObject):
long_build = monitor.elapsed > 600
if long_build:
output.on_line(
'We know it took a while, but your build finally finished successfully!')
else:
output.on_line('Your build was successful!')
@ -1302,10 +1298,8 @@ class BuildDriver(MozbuildObject):
print('To take your build for a test drive, run: |mach run|')
app = self.substs['MOZ_BUILD_APP']
if app in ('browser', 'mobile/android'):
print(
'For more information on what to do now, see '
'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox' # noqa
)
except Exception:
# Ignore Exceptions in case we can't find config.status (such
# as when doing OSX Universal builds)
@ -1331,14 +1325,14 @@ class BuildDriver(MozbuildObject):
# Only print build status messages when we have an active
# monitor.
if not buildstatus_messages:
append_env[b'NO_BUILDSTATUS_MESSAGES'] = b'1'
status = self._run_client_mk(target='configure',
line_handler=line_handler,
append_env=append_env)
if not status:
print('Configure complete!')
print('Be sure to run |mach build| to pick up any changes')
return status

View file

@ -16,7 +16,7 @@ from textwrap import TextWrapper
CLOBBER_MESSAGE = ''.join([TextWrapper().fill(line) + '\n' for line in
'''
The CLOBBER file has been updated, indicating that an incremental build since \
your last build will probably not work. A full/clobber build is required.
@ -39,7 +39,6 @@ Well, are ya? -- you can ignore this clobber requirement by running:
$ touch {clobber_file}
'''.splitlines()])
class Clobberer(object):
def __init__(self, topsrcdir, topobjdir):
"""Create a new object to manage clobbering the tree.
@ -70,7 +69,7 @@ class Clobberer(object):
# Object directory clobber older than current is fine.
if os.path.getmtime(self.src_clobber) <= \
os.path.getmtime(self.obj_clobber):
return False
@ -95,7 +94,7 @@ class Clobberer(object):
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
return p.wait() == 1 and p.stdout.read().startswith('winrm')
except Exception:
return False
def remove_objdir(self, full=True):
@ -183,8 +182,8 @@ class Clobberer(object):
if not allow_auto:
return True, False, \
self._message('Automatic clobbering is not enabled\n'
' (add "mk_add_options AUTOCLOBBER=1" to your '
'mozconfig).')
' (add "mk_add_options AUTOCLOBBER=1" to your '
'mozconfig).')
if cwd.startswith(self.topobjdir) and cwd != self.topobjdir:
return True, False, self._message(
@ -205,4 +204,4 @@ class Clobberer(object):
lines = [' ' + line for line in self.clobber_cause()]
return CLOBBER_MESSAGE.format(clobber_reason='\n'.join(lines),
no_reason=' ' + reason, clobber_file=self.obj_clobber)

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import os
import subprocess
@ -34,7 +34,6 @@ hour. Backup programs that rely on this feature may be affected.
https://technet.microsoft.com/en-us/library/cc785435.aspx
'''
class Doctor(object):
def __init__(self, srcdir, objdir, fix):
self.srcdir = mozpath.normpath(srcdir)
@ -70,7 +69,7 @@ class Doctor(object):
denied = True
if denied:
print('run "mach doctor --fix" AS ADMIN to re-attempt fixing your system')
elif False and fixable: # elif fixable: # 'and fixable' avoids flake8 error
print('run "mach doctor --fix" as admin to attempt fixing your system')
return int(not good)
@ -203,7 +202,7 @@ class Doctor(object):
status = 'GOOD'
desc = 'lastaccess disabled systemwide'
elif disablelastaccess == 0:
if False: # if self.fix:
choice = self.prompt_bool(DISABLE_LASTACCESS_WIN)
if not choice:
return {'status': 'BAD, NOT FIXED',

View file

@ -5,7 +5,7 @@
# This file contains utility functions for reading .properties files, like
# region.properties.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import codecs
import re
@ -16,7 +16,6 @@ if sys.version_info[0] == 3:
else:
str_type = basestring
class DotProperties:
r'''A thin representation of a key=value .properties file.'''
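# For flavor, a .properties file containing (keys invented):
#   browser.search.order.1=Google
#   browser.search.order.2=Bing
# would be exposed as {'browser.search.order.1': 'Google', ...}.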
@ -78,7 +77,7 @@ class DotProperties:
if k.startswith(prefix) and '.' not in k[len(prefix):])
for required_key in required_keys:
if required_key not in D:
raise ValueError('Required key %s not present' % required_key)
return D

View file

@ -1,6 +1,6 @@
#!/usr/bin/env python
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import print_function, unicode_literals
"""
This script converts the build system telemetry schema from voluptuous format to json-schema.

View file

@ -108,8 +108,8 @@ class Daemon(object):
['name', '.hg', 'wholename'],
['dirname', '.git'],
['name', '.git', 'wholename'],
],
],
'fields': ['name'],
}
@ -204,7 +204,7 @@ class Daemon(object):
while True:
try:
self.client.receive()
_watch_result = self.client.receive()
changed = self.changed_files()
if not changed:
@ -228,7 +228,7 @@ class Daemon(object):
except pywatchman.SocketTimeout:
# Let's check to see if we're still functional.
self.client.query('version')
_version = self.client.query('version')
except pywatchman.CommandError as e:
# Abstract away pywatchman errors.

The diff for this file is not shown because of its large size.

View file

@ -15,18 +15,19 @@ contains the code for converting executed mozbuild files into these data
structures.
"""
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
from mozbuild.frontend.context import (
ObjDirPath,
SourcePath,
)
from mozbuild.util import StrictOrderingOnAppendList
from mozpack.chrome.manifest import ManifestEntry
import mozpack.path as mozpath
from .context import FinalTargetValue
from collections import defaultdict
from collections import defaultdict, OrderedDict
import itertools
from ..util import (
@ -191,7 +192,6 @@ class ComputedFlags(ContextDerived):
flags[dest_var].extend(value)
return flags.items()
class XPIDLModule(ContextDerived):
"""Describes an XPIDL module to be compiled."""
@ -207,7 +207,6 @@ class XPIDLModule(ContextDerived):
self.name = name
self.idl_files = idl_files
class BaseDefines(ContextDerived):
"""Context derived container object for DEFINES/HOST_DEFINES,
which are OrderedDicts.
@ -233,15 +232,12 @@ class BaseDefines(ContextDerived):
else:
self.defines.update(more_defines)
class Defines(BaseDefines):
pass
class HostDefines(BaseDefines):
pass
class WebIDLCollection(ContextDerived):
"""Collects WebIDL info referenced during the build."""
@ -517,8 +513,7 @@ class BaseProgram(Linkable):
@property
def output_path(self):
if self.installed:
return ObjDirPath(self._context, '!/' + mozpath.join(
self.install_target, self.program))
else:
return ObjDirPath(self._context, '!' + self.program)
@ -681,7 +676,7 @@ class StaticLibrary(Library):
)
def __init__(self, context, basename, real_name=None,
link_into=None, no_expand_lib=False):
Library.__init__(self, context, basename, real_name)
self.link_into = link_into
self.no_expand_lib = no_expand_lib
@ -713,8 +708,8 @@ class RustLibrary(StaticLibrary):
# many other things in the build system depend on that.
assert self.crate_type == 'staticlib'
self.lib_name = '%s%s%s' % (context.config.rust_lib_prefix,
basename.replace('-', '_'),
context.config.rust_lib_suffix)
self.dependencies = dependencies
self.features = features
self.target_dir = target_dir
@ -897,8 +892,8 @@ class TestManifest(ContextDerived):
)
def __init__(self, context, path, manifest, flavor=None,
install_prefix=None, relpath=None, sources=(),
dupe_manifest=False):
ContextDerived.__init__(self, context)
assert flavor in all_test_flavors()
@ -1041,13 +1036,11 @@ class UnifiedSources(BaseSources):
unified_prefix = unified_prefix.replace('/', '_')
suffix = self.canonical_suffix[1:]
unified_prefix = 'Unified_%s_%s' % (suffix, unified_prefix)
self.unified_source_mapping = list(
group_unified_files(source_files,
unified_prefix=unified_prefix,
unified_suffix=suffix,
files_per_unified_file=files_per_unified_file)
)
class InstallationTarget(ContextDerived):
@ -1107,7 +1100,6 @@ class FinalTargetPreprocessedFiles(ContextDerived):
ContextDerived.__init__(self, sandbox)
self.files = files
class LocalizedFiles(FinalTargetFiles):
"""Sandbox container object for LOCALIZED_FILES, which is a
HierarchicalStringList.
@ -1195,16 +1187,12 @@ class GeneratedFile(ContextDerived):
'.inc',
'.py',
'.rs',
'node.stub', # To avoid VPATH issues with installing node files:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
# We need to compile Java to generate JNI wrappers for native code
# compilation to consume.
'android_apks',
'.profdata',
'.webidl'
)
self.required_for_compile = [
f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]
class ChromeManifestEntry(ContextDerived):

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import itertools
import logging
@ -24,6 +24,7 @@ import pytoml
from .data import (
BaseRustProgram,
BaseSources,
ChromeManifestEntry,
ComputedFlags,
ConfigFileSubstitution,
@ -92,6 +93,7 @@ from .context import (
ObjDirPath,
Path,
SubContext,
TemplateContext,
)
from mozbuild.base import ExecutionSummary
@ -183,8 +185,7 @@ class TreeMetadataEmitter(LoggingMixin):
objs = list(emitfn(out))
self._emitter_time += time.time() - start
for o in emit_objs(objs):
yield o
else:
raise Exception('Unhandled output type: %s' % type(out))
@ -195,8 +196,7 @@ class TreeMetadataEmitter(LoggingMixin):
objs = list(self._emit_libs_derived(contexts))
self._emitter_time += time.time() - start
for o in emit_objs(objs):
yield o
def _emit_libs_derived(self, contexts):
@ -234,11 +234,11 @@ class TreeMetadataEmitter(LoggingMixin):
if isinstance(collection, WebIDLCollection):
# Test webidl sources are added here as a somewhat special
# case.
idl_sources[mozpath.join(root, 'test')] = [
s for s in collection.all_test_cpp_basenames()]
yield collection
# Next do FINAL_LIBRARY linkage.
for lib in (l for libs in self._libs.values() for l in libs):
if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
@ -255,14 +255,14 @@ class TreeMetadataEmitter(LoggingMixin):
# both a static and a shared library in a directory, and having
# that as a FINAL_LIBRARY.
if len(set(type(l) for l in candidates)) == len(candidates) and \
len(set(l.objdir for l in candidates)) == 1:
for c in candidates:
c.link_library(lib)
else:
raise SandboxValidationError(
'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
'multiple places:\n %s' % (lib.link_into,
'\n '.join(l.objdir for l in candidates)),
contexts[lib.objdir])
# ...and USE_LIBS linkage.
@ -281,13 +281,13 @@ class TreeMetadataEmitter(LoggingMixin):
for lib in self._static_linking_shared:
if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
shared_libs = sorted(l.basename for l in lib.linked_libraries
if isinstance(l, SharedLibrary))
raise SandboxValidationError(
'The static "%s" library is not used in a shared library '
'or a program, but USE_LIBS contains the following shared '
'library names:\n %s\n\nMaybe you can remove the '
'static "%s" library?' % (lib.basename,
'\n '.join(shared_libs), lib.basename),
contexts[lib.objdir])
# Propagate LIBRARY_DEFINES to all child libraries recursively.
@ -305,6 +305,7 @@ class TreeMetadataEmitter(LoggingMixin):
propagate_defines(lib, lib.lib_defines)
yield lib
for lib in (l for libs in self._libs.values() for l in libs):
lib_defines = list(lib.lib_defines.get_defines())
if lib_defines:
@ -324,6 +325,7 @@ class TreeMetadataEmitter(LoggingMixin):
for obj in self._binaries.values():
yield obj
LIBRARY_NAME_VAR = {
'host': 'HOST_LIBRARY_NAME',
'target': 'LIBRARY_NAME',
@ -391,14 +393,14 @@ class TreeMetadataEmitter(LoggingMixin):
for d in self._external_paths:
if dir.startswith('%s/' % d):
candidates = [self._get_external_library(dir, name,
force_static)]
break
if not candidates:
raise SandboxValidationError(
'%s contains "%s", but there is no "%s" %s in %s.'
% (variable, path, name,
self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
if len(candidates) > 1:
# If there's more than one remaining candidate, it could be
@ -421,11 +423,11 @@ class TreeMetadataEmitter(LoggingMixin):
raise SandboxValidationError(
'%s contains "static:%s", but there is no static '
'"%s" %s in %s.' % (variable, path, name,
self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
raise SandboxValidationError(
'%s contains "static:%s", but there is no static "%s" '
'%s in the tree' % (variable, name, name,
self.LIBRARY_NAME_VAR[obj.KIND]), context)
if not candidates:
raise SandboxValidationError(
@ -435,23 +437,23 @@ class TreeMetadataEmitter(LoggingMixin):
elif len(candidates) > 1:
paths = (mozpath.join(l.relsrcdir, 'moz.build')
for l in candidates)
raise SandboxValidationError(
'%s contains "%s", which matches a %s defined in multiple '
'places:\n %s' % (variable, path,
self.LIBRARY_NAME_VAR[obj.KIND],
'\n '.join(paths)), context)
elif force_static and not isinstance(candidates[0], StaticLibrary):
raise SandboxValidationError(
'%s contains "static:%s", but there is only a shared "%s" '
'in %s. You may want to add FORCE_STATIC_LIB=True in '
'%s/moz.build, or remove "static:".' % (
variable, path, name, candidates[0].relobjdir, candidates[0].relobjdir),
'%s/moz.build, or remove "static:".' % (variable, path,
name, candidates[0].relobjdir, candidates[0].relobjdir),
context)
elif isinstance(obj, StaticLibrary) and isinstance(candidates[0],
SharedLibrary):
self._static_linking_shared.add(obj)
obj.link_library(candidates[0])
@ -483,29 +485,25 @@ class TreeMetadataEmitter(LoggingMixin):
# A simple version number.
if isinstance(values, (str, unicode)):
raise SandboxValidationError(
'%s %s of crate %s does not list a path' % (
description, dep_crate_name, crate_name),
context)
dep_path = values.get('path', None)
if not dep_path:
raise SandboxValidationError(
'%s %s of crate %s does not list a path' % (
description, dep_crate_name, crate_name),
context)
# Try to catch the case where somebody listed a
# local path for development.
if os.path.isabs(dep_path):
raise SandboxValidationError(
'%s %s of crate %s has a non-relative path' % (
description, dep_crate_name, crate_name),
context)
if not os.path.exists(mozpath.join(context.config.topsrcdir, crate_dir, dep_path)):
raise SandboxValidationError(
'%s %s of crate %s refers to a non-existent path' % (
description, dep_crate_name, crate_name),
context)
def _rust_library(self, context, libname, static_args, cls=RustLibrary):
@ -551,6 +549,7 @@ class TreeMetadataEmitter(LoggingMixin):
return cls(context, libname, cargo_file, crate_type, dependencies,
features, cargo_target_dir, **static_args)
def _handle_gn_dirs(self, context):
for target_dir in context.get('GN_DIRS', []):
context['DIRS'] += [target_dir]
@ -570,10 +569,10 @@ class TreeMetadataEmitter(LoggingMixin):
yield GnProjectData(context, target_dir, gn_dir, non_unified_sources)
def _handle_linkables(self, context, passthru, generated_files):
linkables = []
host_linkables = []
def add_program(prog, var):
if var.startswith('HOST_'):
host_linkables.append(prog)
@ -584,15 +583,15 @@ class TreeMetadataEmitter(LoggingMixin):
if program in self._binaries:
raise SandboxValidationError(
'Cannot use "%s" as %s name, '
'because it is already used in %s' % (
program, kind, self._binaries[program].relsrcdir), context)
'because it is already used in %s' % (program, kind,
self._binaries[program].relsrcdir), context)
for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
program = context.get(kind)
if program:
check_unique_binary(program, kind)
self._binaries[program] = cls(context, program)
self._linkage.append((context, self._binaries[program],
kind.replace('PROGRAM', 'USE_LIBS')))
kind.replace('PROGRAM', 'USE_LIBS')))
add_program(self._binaries[program], kind)
all_rust_programs = []
@ -606,7 +605,7 @@ class TreeMetadataEmitter(LoggingMixin):
# Verify Rust program definitions.
if all_rust_programs:
config, cargo_file = self._parse_cargo_file(context)
config, cargo_file = self._parse_cargo_file(context);
bin_section = config.get('bin', None)
if not bin_section:
raise SandboxValidationError(
@ -634,13 +633,13 @@ class TreeMetadataEmitter(LoggingMixin):
if program in self._binaries:
raise SandboxValidationError(
'Cannot use "%s" in %s, '
'because it is already used in %s' % (
program, kind, self._binaries[program].relsrcdir), context)
'because it is already used in %s' % (program, kind,
self._binaries[program].relsrcdir), context)
self._binaries[program] = cls(context, program,
is_unit_test=kind == 'CPP_UNIT_TESTS')
is_unit_test=kind == 'CPP_UNIT_TESTS')
self._linkage.append((context, self._binaries[program],
'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
else 'USE_LIBS'))
'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
else 'USE_LIBS'))
add_program(self._binaries[program], kind)
host_libname = context.get('HOST_LIBRARY_NAME')
@ -648,8 +647,8 @@ class TreeMetadataEmitter(LoggingMixin):
if host_libname:
if host_libname == libname:
raise SandboxValidationError(
'LIBRARY_NAME and HOST_LIBRARY_NAME must have a different value', context)
raise SandboxValidationError('LIBRARY_NAME and '
'HOST_LIBRARY_NAME must have a different value', context)
is_rust_library = context.get('IS_RUST_LIBRARY')
if is_rust_library:
@ -780,7 +779,7 @@ class TreeMetadataEmitter(LoggingMixin):
raise SandboxValidationError(
'Path specified in SYMBOLS_FILE does not exist: %s '
'(resolved to %s)' % (symbols_file,
symbols_file.full_path), context)
symbols_file.full_path), context)
shared_args['symbols_file'] = True
else:
if symbols_file.target_basename not in generated_files:
@ -804,8 +803,8 @@ class TreeMetadataEmitter(LoggingMixin):
if lib.defines:
defines = lib.defines.get_defines()
yield GeneratedFile(context, script,
'generate_symbols_file', lib.symbols_file,
[symbols_file], defines)
'generate_symbols_file', lib.symbols_file,
[symbols_file], defines)
if static_lib:
is_rust_library = context.get('IS_RUST_LIBRARY')
if is_rust_library:
@ -819,7 +818,7 @@ class TreeMetadataEmitter(LoggingMixin):
if lib_defines:
if not libname:
raise SandboxValidationError('LIBRARY_DEFINES needs a '
'LIBRARY_NAME to take effect', context)
'LIBRARY_NAME to take effect', context)
lib.lib_defines.update(lib_defines)
# Only emit sources if we have linkables defined in the same context.
@ -850,7 +849,7 @@ class TreeMetadataEmitter(LoggingMixin):
for f in context_srcs:
if f in seen_sources:
raise SandboxValidationError('Source file should only '
'be added to %s once: %s' % (symbol, f), context)
'be added to %s once: %s' % (symbol, f), context)
seen_sources.add(f)
full_path = f.full_path
if isinstance(f, SourcePath):
@ -859,27 +858,26 @@ class TreeMetadataEmitter(LoggingMixin):
assert isinstance(f, Path)
gen_srcs.append(full_path)
if symbol == 'SOURCES':
context_flags = context_srcs[f]
if context_flags:
all_flags[full_path] = context_flags
flags = context_srcs[f]
if flags:
all_flags[full_path] = flags
# Files for the generation phase of PGO are unusual, so
# it's not unreasonable to require them to be special.
if context_flags.pgo_generate_only:
if flags.pgo_generate_only:
if not isinstance(f, Path):
raise SandboxValidationError(
'pgo_generate_only file must not be a generated file: %s' % f,
context)
raise SandboxValidationError('pgo_generate_only file'
'must not be a generated file: %s' % f, context)
if mozpath.splitext(f)[1] != '.cpp':
raise SandboxValidationError('pgo_generate_only file'
'must be a .cpp file: %s' % f, context)
if context_flags.no_pgo:
raise SandboxValidationError(
'pgo_generate_only files cannot be marked no_pgo: %s' % f, context)
'must be a .cpp file: %s' % f, context)
if flags.no_pgo:
raise SandboxValidationError('pgo_generate_only files'
'cannot be marked no_pgo: %s' % f, context)
pgo_generate_only.add(f)
if isinstance(f, SourcePath) and not os.path.exists(full_path):
raise SandboxValidationError('File listed in %s does not '
'exist: \'%s\'' % (symbol, full_path), context)
'exist: \'%s\'' % (symbol, full_path), context)
# UNIFIED_SOURCES only take SourcePaths, so there should be no
# generated source in here
@ -893,7 +891,7 @@ class TreeMetadataEmitter(LoggingMixin):
if no_pgo:
if no_pgo_sources:
raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
'cannot be set at the same time', context)
'cannot be set at the same time', context)
passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
if no_pgo_sources:
passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources
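
For context, the two mutually exclusive spellings validated above would look like this in a hypothetical moz.build fragment:

# Disable PGO for every source in this directory...
NO_PGO = True

# ...or opt out a single file; setting both at once is rejected above.
# SOURCES['hot_loop.cpp'].no_pgo = True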
@ -1005,6 +1003,7 @@ class TreeMetadataEmitter(LoggingMixin):
l.cxx_link = True
break
def emit_from_context(self, context):
"""Convert a Context to tree metadata objects.
@ -1023,8 +1022,7 @@ class TreeMetadataEmitter(LoggingMixin):
# We always emit a directory traversal descriptor. This is needed by
# the recursive make backend.
for o in self._emit_directory_traversal_from_context(context):
yield o
for o in self._emit_directory_traversal_from_context(context): yield o
for obj in self._process_xpidl(context):
yield obj
@ -1054,7 +1052,7 @@ class TreeMetadataEmitter(LoggingMixin):
if context.config.substs.get('OS_TARGET') == 'WINNT' and \
context['DELAYLOAD_DLLS']:
context['LDFLAGS'].extend([('-DELAYLOAD:%s' % dll)
for dll in context['DELAYLOAD_DLLS']])
for dll in context['DELAYLOAD_DLLS']])
context['OS_LIBS'].append('delayimp')
for v in ['CMFLAGS', 'CMMFLAGS']:
@ -1079,7 +1077,7 @@ class TreeMetadataEmitter(LoggingMixin):
raise SandboxValidationError(
'Path specified in DEFFILE does not exist: %s '
'(resolved to %s)' % (deffile,
deffile.full_path), context)
deffile.full_path), context)
path = mozpath.relpath(deffile.full_path, context.objdir)
else:
path = deffile.target_basename
@ -1140,15 +1138,12 @@ class TreeMetadataEmitter(LoggingMixin):
for path in context['CONFIGURE_SUBST_FILES']:
sub = self._create_substitution(ConfigFileSubstitution, context,
path)
path)
generated_files.add(str(sub.relpath))
yield sub
for defines_var, cls, backend_flags in (('DEFINES', Defines,
(computed_flags, computed_as_flags)),
('HOST_DEFINES', HostDefines,
(computed_host_flags,))
):
for defines_var, cls, backend_flags in (('DEFINES', Defines, (computed_flags, computed_as_flags)),
('HOST_DEFINES', HostDefines, (computed_host_flags,))):
defines = context.get(defines_var)
if defines:
defines_obj = cls(context, defines)
@ -1189,18 +1184,16 @@ class TreeMetadataEmitter(LoggingMixin):
full_path = local_include.full_path
if not isinstance(local_include, ObjDirPath):
if not os.path.exists(full_path):
raise SandboxValidationError(
'Path specified in LOCAL_INCLUDES does not exist: %s (resolved to %s)' %
(local_include, full_path), context)
raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
'does not exist: %s (resolved to %s)' % (local_include,
full_path), context)
if not os.path.isdir(full_path):
raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
'is a filename, but a directory is required: %s '
'(resolved to %s)' % (local_include, full_path),
context)
'is a filename, but a directory is required: %s '
'(resolved to %s)' % (local_include, full_path), context)
if (full_path == context.config.topsrcdir or
full_path == context.config.topobjdir):
raise SandboxValidationError(
'Path specified in LOCAL_INCLUDES '
raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
'(%s) resolves to the topsrcdir or topobjdir (%s), which is '
'not allowed' % (local_include, full_path), context)
include_obj = LocalInclude(context, local_include)
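
As a reminder of the path conventions this validation enforces, a hypothetical moz.build fragment (the '!' and '/' prefixes are the conventions checked above; the plain entry resolves relative to the moz.build's own directory):

LOCAL_INCLUDES += [
    '!/ipc/ipdl/_ipdlheaders',  # '!' - generated, relative to the objdir
    '/ipc/glue',                # '/' - relative to the topsrcdir
    'common',                   # plain - relative to this directory
]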
@ -1214,8 +1207,7 @@ class TreeMetadataEmitter(LoggingMixin):
for obj in self._handle_linkables(context, passthru, generated_files):
yield obj
generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', ''))
for k in self._binaries.keys()])
generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', '')) for k in self._binaries.keys()])
components = []
for var, cls in (
@ -1297,13 +1289,8 @@ class TreeMetadataEmitter(LoggingMixin):
# in anything *but* LOCALIZED_FILES.
if f.target_basename in localized_generated_files:
raise SandboxValidationError(
(
'Outputs of LOCALIZED_GENERATED_FILES cannot '
'be used in %s: %s'
)
% (var, f),
context,
)
('Outputs of LOCALIZED_GENERATED_FILES cannot be used in %s: ' +
'%s') % (var, f), context)
# Addons (when XPI_NAME is defined) and Applications (when
# DIST_SUBDIR is defined) use a different preferences directory
@ -1389,6 +1376,7 @@ class TreeMetadataEmitter(LoggingMixin):
if context.objdir in self._host_compile_dirs:
yield computed_host_flags
def _create_substitution(self, cls, context, path):
sub = cls(context)
sub.input_path = '%s.in' % path.full_path
@ -1407,12 +1395,12 @@ class TreeMetadataEmitter(LoggingMixin):
if not xpidl_module:
if context['XPIDL_SOURCES']:
raise SandboxValidationError('XPIDL_MODULE must be defined if '
'XPIDL_SOURCES is defined.', context)
'XPIDL_SOURCES is defined.', context)
return
if not context['XPIDL_SOURCES']:
raise SandboxValidationError('XPIDL_MODULE cannot be defined '
'unless there are XPIDL_SOURCES', context)
'unless there are XPIDL_SOURCES', context)
if context['DIST_INSTALL'] is False:
self.log(logging.WARN, 'mozbuild_warning', dict(
@ -1422,7 +1410,7 @@ class TreeMetadataEmitter(LoggingMixin):
for idl in context['XPIDL_SOURCES']:
if not os.path.exists(idl.full_path):
raise SandboxValidationError('File %s from XPIDL_SOURCES '
'does not exist' % idl.full_path, context)
'does not exist' % idl.full_path, context)
yield XPIDLModule(context, xpidl_module, context['XPIDL_SOURCES'])
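
A hypothetical moz.build fragment satisfying the rules above - the two variables must appear together, and every listed .idl must exist on disk:

XPIDL_SOURCES += [
    'nsIExample.idl',
]
XPIDL_MODULE = 'example'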
@ -1494,7 +1482,7 @@ class TreeMetadataEmitter(LoggingMixin):
path = manifest_path.full_path
manifest_dir = mozpath.dirname(path)
manifest_reldir = mozpath.dirname(mozpath.relpath(path,
context.config.topsrcdir))
context.config.topsrcdir))
manifest_sources = [mozpath.relpath(pth, context.config.topsrcdir)
for pth in mpmanifest.source_files]
install_prefix = mozpath.join(install_root, install_subdir)
@ -1502,22 +1490,22 @@ class TreeMetadataEmitter(LoggingMixin):
try:
if not mpmanifest.tests:
raise SandboxValidationError('Empty test manifest: %s'
% path, context)
% path, context)
defaults = mpmanifest.manifest_defaults[os.path.normpath(path)]
obj = TestManifest(context, path, mpmanifest, flavor=flavor,
install_prefix=install_prefix,
relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
sources=manifest_sources,
dupe_manifest='dupe-manifest' in defaults)
install_prefix=install_prefix,
relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
sources=manifest_sources,
dupe_manifest='dupe-manifest' in defaults)
filtered = mpmanifest.tests
missing = [t['name'] for t in filtered if not os.path.exists(t['path'])]
if missing:
raise SandboxValidationError('Test manifest (%s) lists '
'test that does not exist: %s' % (
path, ', '.join(missing)), context)
'test that does not exist: %s' % (
path, ', '.join(missing)), context)
out_dir = mozpath.join(install_prefix, manifest_reldir)
if 'install-to-subdir' in defaults:
@ -1540,10 +1528,9 @@ class TreeMetadataEmitter(LoggingMixin):
not os.path.isfile(mozpath.join(context.config.topsrcdir,
install_path[2:])),
install_path not in install_info.external_installs]):
raise SandboxValidationError(
'Error processing test '
'manifest %s: entry in support-files not present '
'in the srcdir: %s' % (path, install_path), context)
raise SandboxValidationError('Error processing test '
'manifest %s: entry in support-files not present '
'in the srcdir: %s' % (path, install_path), context)
obj.deferred_installs |= install_info.deferred_installs
@ -1554,7 +1541,7 @@ class TreeMetadataEmitter(LoggingMixin):
# test package. They function as identifiers rather than files.
if package_tests:
manifest_relpath = mozpath.relpath(test['path'],
mozpath.dirname(test['manifest']))
mozpath.dirname(test['manifest']))
obj.installs[mozpath.normpath(test['path'])] = \
((mozpath.join(out_dir, manifest_relpath)), True)
@ -1580,31 +1567,29 @@ class TreeMetadataEmitter(LoggingMixin):
try:
del obj.installs[mozpath.join(manifest_dir, f)]
except KeyError:
raise SandboxValidationError(
'Error processing test '
raise SandboxValidationError('Error processing test '
'manifest %s: entry in generated-files not present '
'elsewhere in manifest: %s' % (path, f), context)
yield obj
except (AssertionError, Exception):
raise SandboxValidationError(
'Error processing test '
raise SandboxValidationError('Error processing test '
'manifest file %s: %s' % (path,
'\n'.join(traceback.format_exception(*sys.exc_info()))),
'\n'.join(traceback.format_exception(*sys.exc_info()))),
context)
def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
manifest_full_path = manifest_path.full_path
manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path,
context.config.topsrcdir))
context.config.topsrcdir))
# reftest manifests don't come from manifest parser. But they are
# similar enough that we can use the same emitted objects. Note
# that we don't perform any installs for reftests.
obj = TestManifest(context, manifest_full_path, manifest,
flavor=flavor, install_prefix='%s/' % flavor,
relpath=mozpath.join(manifest_reldir,
mozpath.basename(manifest_path)))
flavor=flavor, install_prefix='%s/' % flavor,
relpath=mozpath.join(manifest_reldir,
mozpath.basename(manifest_path)))
for test, source_manifest in sorted(manifest.tests):
obj.tests.append({
@ -1623,7 +1608,7 @@ class TreeMetadataEmitter(LoggingMixin):
jar_manifests = context.get('JAR_MANIFESTS', [])
if len(jar_manifests) > 1:
raise SandboxValidationError('While JAR_MANIFESTS is a list, '
'it is currently limited to one value.', context)
'it is currently limited to one value.', context)
for path in jar_manifests:
yield JARManifest(context, path)
@ -1635,8 +1620,8 @@ class TreeMetadataEmitter(LoggingMixin):
if os.path.exists(os.path.join(context.srcdir, 'jar.mn')):
if 'jar.mn' not in jar_manifests:
raise SandboxValidationError('A jar.mn exists but it '
'is not referenced in the moz.build file. '
'Please define JAR_MANIFESTS.', context)
'is not referenced in the moz.build file. '
'Please define JAR_MANIFESTS.', context)
def _emit_directory_traversal_from_context(self, context):
o = DirectoryTraversal(context)
@ -1645,6 +1630,6 @@ class TreeMetadataEmitter(LoggingMixin):
# Some paths have a subconfigure, yet also have a moz.build. Those
# shouldn't end up in self._external_paths.
if o.objdir:
self._external_paths -= {o.relobjdir}
self._external_paths -= { o.relobjdir }
yield o


@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import gyp
import gyp.msvs_emulation
@ -10,6 +10,7 @@ import sys
import os
import time
import types
import warnings
import mozpack.path as mozpath
from mozpack.files import FileFinder
@ -22,7 +23,10 @@ from .context import (
)
from mozbuild.util import (
expand_variables,
List,
memoize,
)
from .reader import SandboxValidationError
# Define this module as gyp.generator.mozbuild so that gyp can use it
# as a generator under the name "mozbuild".
@ -36,7 +40,7 @@ sys.modules['gyp.generator.mozbuild'] = sys.modules[__name__]
# chrome_src for the default includes, so go backwards from the pylib
# directory, which is the parent directory of gyp module.
chrome_src = mozpath.abspath(mozpath.join(mozpath.dirname(gyp.__file__),
'../../../../..'))
'../../../../..'))
script_dir = mozpath.join(chrome_src, 'build')
@ -70,11 +74,10 @@ class GypContext(TemplateContext):
relobjdir is the object directory that will be used for this context,
relative to the topobjdir defined in the ConfigEnvironment.
"""
def __init__(self, config, relobjdir):
self._relobjdir = relobjdir
TemplateContext.__init__(self, template='Gyp',
allowed_variables=VARIABLES, config=config)
allowed_variables=VARIABLES, config=config)
def handle_actions(actions, context, action_overrides):
@ -85,15 +88,10 @@ def handle_actions(actions, context, action_overrides):
raise RuntimeError('GYP action %s not listed in action_overrides' % name)
outputs = action['outputs']
if len(outputs) > 1:
raise NotImplementedError(
'GYP actions with more than one output not supported: %s' % name)
raise NotImplementedError('GYP actions with more than one output not supported: %s' % name)
output = outputs[0]
if not output.startswith(idir):
raise NotImplementedError(
'GYP actions outputting to somewhere other than '
'<(INTERMEDIATE_DIR) not supported: %s'
% output
)
raise NotImplementedError('GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
output = output[len(idir):]
context['GENERATED_FILES'] += [output]
g = context['GENERATED_FILES'][output]
@ -106,8 +104,7 @@ def handle_copies(copies, context):
for copy in copies:
dest = copy['destination']
if not dest.startswith(dist):
raise NotImplementedError(
'GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
raise NotImplementedError('GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
dest_paths = dest[len(dist):].split('/')
exports = context['EXPORTS']
while dest_paths:
@ -133,8 +130,8 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
# directory. Since several targets can be in a given build_file,
# separate them in subdirectories using the build_file basename
# and the target_name.
reldir = mozpath.relpath(mozpath.dirname(build_file),
mozpath.dirname(path))
reldir = mozpath.relpath(mozpath.dirname(build_file),
mozpath.dirname(path))
subdir = '%s_%s' % (
mozpath.splitext(mozpath.basename(build_file))[0],
target_name,
@ -158,13 +155,12 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
target_conf = spec['configurations'][c]
if 'actions' in spec:
handle_actions(spec['actions'], context, action_overrides)
handle_actions(spec['actions'], context, action_overrides)
if 'copies' in spec:
handle_copies(spec['copies'], context)
handle_copies(spec['copies'], context)
use_libs = []
libs = []
def add_deps(s):
for t in s.get('dependencies', []) + s.get('dependencies_original', []):
ty = targets[t]['type']
@ -175,7 +171,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
if ty in ('static_library', 'none'):
add_deps(targets[t])
libs.extend(spec.get('libraries', []))
# XXX: this sucks, but webrtc breaks with this right now because
#XXX: this sucks, but webrtc breaks with this right now because
# it builds a library called 'gtest' and we just get lucky
# that it isn't in USE_LIBS by that name anywhere.
if no_chromium:
@ -183,20 +179,20 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
os_libs = []
for l in libs:
if l.startswith('-'):
os_libs.append(l)
elif l.endswith('.lib'):
os_libs.append(l[:-4])
elif l:
# For library names passed in from moz.build.
use_libs.append(os.path.basename(l))
if l.startswith('-'):
os_libs.append(l)
elif l.endswith('.lib'):
os_libs.append(l[:-4])
elif l:
# For library names passed in from moz.build.
use_libs.append(os.path.basename(l))
if spec['type'] == 'none':
if not ('actions' in spec or 'copies' in spec):
continue
if not ('actions' in spec or 'copies' in spec):
continue
elif spec['type'] in ('static_library', 'shared_library', 'executable'):
# Remove leading 'lib' from the target_name if any, and use as
# library name.
# Remove leading 'lib' from the target_name if any, and use as
# library name.
name = spec['target_name']
if spec['type'] in ('static_library', 'shared_library'):
if name.startswith('lib'):
@ -207,8 +203,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
context['PROGRAM'] = name.decode('utf-8')
if spec['type'] == 'shared_library':
context['FORCE_SHARED_LIB'] = True
elif spec['type'] == 'static_library' and \
spec.get('variables', {}).get('no_expand_libs', '0') == '1':
elif spec['type'] == 'static_library' and spec.get('variables', {}).get('no_expand_libs', '0') == '1':
# PSM links a NSS static library, but our folded libnss
# doesn't actually export everything that all of the
# objects within would need, so that one library
@ -227,9 +222,9 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
ext = mozpath.splitext(f)[-1]
extensions.add(ext)
if f.startswith('$INTERMEDIATE_DIR/'):
s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
else:
s = SourcePath(context, f)
s = SourcePath(context, f)
if ext == '.h':
continue
if ext == '.def':
@ -256,8 +251,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
name, value = define.split('=', 1)
# The NSS gyp file doesn't expose a way to override this
# currently, so we do so here.
if name == 'NSS_ALLOW_SSLKEYLOGFILE' and \
config.substs.get('RELEASE_OR_BETA', False):
if name == 'NSS_ALLOW_SSLKEYLOGFILE' and config.substs.get('RELEASE_OR_BETA', False):
continue
context['DEFINES'][name] = value
else:
@ -288,8 +282,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
if include.startswith('/'):
resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
elif not include.startswith(('!', '%')):
resolved = mozpath.abspath(mozpath.join(
mozpath.dirname(build_file), include))
resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
if not include.startswith(('!', '%')) and not os.path.exists(resolved):
continue
context['LOCAL_INCLUDES'] += [include]
@ -332,17 +325,17 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
raise NotImplementedError('Unsupported gyp target type: %s' % spec['type'])
if not no_chromium:
# Add some features to all contexts. Put here in case LOCAL_INCLUDES
# order matters.
context['LOCAL_INCLUDES'] += [
'!/ipc/ipdl/_ipdlheaders',
'/ipc/chromium/src',
'/ipc/glue',
]
# These get set via VC project file settings for normal GYP builds.
if config.substs['OS_TARGET'] == 'WINNT':
context['DEFINES']['UNICODE'] = True
context['DEFINES']['_UNICODE'] = True
# Add some features to all contexts. Put here in case LOCAL_INCLUDES
# order matters.
context['LOCAL_INCLUDES'] += [
'!/ipc/ipdl/_ipdlheaders',
'/ipc/chromium/src',
'/ipc/glue',
]
# These get set via VC project file settings for normal GYP builds.
if config.substs['OS_TARGET'] == 'WINNT':
context['DEFINES']['UNICODE'] = True
context['DEFINES']['_UNICODE'] = True
context['COMPILE_FLAGS']['OS_INCLUDES'] = []
for key, value in gyp_dir_attrs.sandbox_vars.items():
@ -375,7 +368,6 @@ class GypProcessor(object):
gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
from moz.build.
"""
def __init__(self, config, gyp_dir_attrs, path, output, executor,
action_overrides, non_unified_sources):
self._path = path


@ -21,7 +21,6 @@ import mozpack.path as mozpath
TOPSRCDIR = os.path.abspath(os.path.join(__file__, '../../../../../'))
class InvalidPathException(Exception):
"""Represents an error due to an invalid path."""
@ -29,11 +28,11 @@ class InvalidPathException(Exception):
@CommandProvider
class MozbuildFileCommands(MachCommandBase):
@Command('mozbuild-reference', category='build-dev',
description='View reference documentation on mozbuild files.')
description='View reference documentation on mozbuild files.')
@CommandArgument('symbol', default=None, nargs='*',
help='Symbol to view help on. If not specified, all will be shown.')
help='Symbol to view help on. If not specified, all will be shown.')
@CommandArgument('--name-only', '-n', default=False, action='store_true',
help='Print symbol names only.')
help='Print symbol names only.')
def reference(self, symbol, name_only=False):
# mozbuild.sphinx imports some Sphinx modules, so we need to be sure
# the optional Sphinx package is installed.
@ -285,6 +284,7 @@ class MozbuildFileCommands(MachCommandBase):
print(e.message)
return 1
def _get_files_info(self, paths, rev=None):
reader = self.mozbuild_reader(config_mode='empty', vcs_revision=rev)
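
For readers unfamiliar with the decorator stack used above, a minimal sketch of a mach command (the decorators and base class are the real mach/mozbuild APIs of this era; the command itself is invented):

from mach.decorators import CommandArgument, Command, CommandProvider
from mozbuild.base import MachCommandBase

@CommandProvider
class ExampleCommands(MachCommandBase):
    @Command('mozbuild-hello', category='build-dev',
             description='Print a greeting.')
    @CommandArgument('--name', default='world',
                     help='Who to greet.')
    def hello(self, name):
        print('hello, %s' % name)
        return 0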
@ -328,6 +328,7 @@ class MozbuildFileCommands(MachCommandBase):
return reader.files_info(allpaths)
@SubCommand('file-info', 'schedules',
'Show the combined SCHEDULES for the files listed.')
@CommandArgument('paths', nargs='+',


@ -80,6 +80,7 @@ from mozbuild.base import ExecutionSummary
from concurrent.futures.process import ProcessPoolExecutor
if sys.version_info.major == 2:
text_type = unicode
type_type = types.TypeType
@ -105,7 +106,6 @@ class EmptyConfig(object):
This variation is needed because CONFIG uses .get() to access members.
Without it, None (instead of our EmptyValue types) would be returned.
"""
def get(self, key, default=None):
return self[key]
@ -182,7 +182,6 @@ class MozbuildSandbox(Sandbox):
metadata is a dict of metadata that can be used during the sandbox
evaluation.
"""
def __init__(self, context, metadata={}, finder=default_finder):
assert isinstance(context, Context)
@ -242,7 +241,7 @@ class MozbuildSandbox(Sandbox):
# protection, so it is omitted.
if not is_read_allowed(path, self._context.config):
raise SandboxLoadError(self._context.source_stack,
sys.exc_info()[2], illegal_path=path)
sys.exc_info()[2], illegal_path=path)
Sandbox.exec_file(self, path)
@ -291,14 +290,14 @@ class MozbuildSandbox(Sandbox):
if not inspect.isfunction(func):
raise Exception('`template` is a function decorator. You must '
'use it as `@template` preceding a function declaration.')
'use it as `@template` preceding a function declaration.')
name = func.func_name
if name in self.templates:
raise KeyError(
'A template named "%s" was already declared in %s.' % (name,
self.templates[name].path))
self.templates[name].path))
if name.islower() or name.isupper() or name[0].islower():
raise NameError('Template function names must be CamelCase.')
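
A hypothetical moz.build template that satisfies the rules enforced above - declared with the decorator and named in CamelCase; inside the template body, assignments target the same sandbox variables a moz.build file would set:

@template
def ExampleLibrary(name):
    LIBRARY_NAME = name
    FORCE_STATIC_LIB = True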
@ -321,7 +320,6 @@ class MozbuildSandbox(Sandbox):
The wrapper function does type coercion on the function arguments
"""
func, args_def, doc = function_def
def function(*args):
def coerce(arg, type):
if not isinstance(arg, type):
@ -418,7 +416,7 @@ class TemplateFunction(object):
# actually never calls __getitem__ and __setitem__, so we need to
# modify the AST so that accesses to globals are properly directed
# to a dict.
self._global_name = b'_data' # AST wants str for this, not unicode
self._global_name = b'_data' # AST wants str for this, not unicode
# In case '_data' is a name used for a variable in the function code,
# prepend more underscores until we find an unused name.
while (self._global_name in code.co_names or
@ -465,7 +463,6 @@ class TemplateFunction(object):
"""AST Node Transformer to rewrite variable accesses to go through
a dict.
"""
def __init__(self, sandbox, global_name):
self._sandbox = sandbox
self._global_name = global_name
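
The transformer being constructed here rewrites accesses to globals into dict subscripts. A self-contained sketch of that idea using the standard ast module (simplified: the real transformer also handles stores and scoping):

import ast

class NamesToDict(ast.NodeTransformer):
    def __init__(self, global_name='_data'):
        self._global_name = global_name

    def visit_Name(self, node):
        # Rewrite a read of a bare name into _data['name'].
        if not isinstance(node.ctx, ast.Load):
            return node
        new = ast.Subscript(
            value=ast.Name(id=self._global_name, ctx=ast.Load()),
            slice=ast.Index(value=ast.Str(s=node.id)),
            ctx=ast.Load())
        ast.copy_location(new, node)
        ast.fix_missing_locations(new)
        return new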
@ -494,7 +491,6 @@ class TemplateFunction(object):
class SandboxValidationError(Exception):
"""Represents an error encountered when validating sandbox results."""
def __init__(self, message, context):
Exception.__init__(self, message)
self.context = context
@ -536,10 +532,9 @@ class BuildReaderError(Exception):
MozbuildSandbox has over Sandbox (e.g. the concept of included files -
which affect error messages, of course).
"""
def __init__(self, file_stack, trace, sandbox_exec_error=None,
sandbox_load_error=None, validation_error=None, other_error=None,
sandbox_called_error=None):
sandbox_load_error=None, validation_error=None, other_error=None,
sandbox_called_error=None):
self.file_stack = file_stack
self.trace = trace
@ -564,7 +559,7 @@ class BuildReaderError(Exception):
return self.file_stack[-2]
if self.sandbox_error is not None and \
len(self.sandbox_error.file_stack):
len(self.sandbox_error.file_stack):
return self.sandbox_error.file_stack[-1]
return self.file_stack[-1]
@ -607,7 +602,7 @@ class BuildReaderError(Exception):
s.write('\n')
for l in traceback.format_exception(type(self.other), self.other,
self.trace):
self.trace):
s.write(unicode(l))
return s.getvalue()
@ -765,7 +760,7 @@ class BuildReaderError(Exception):
if inner.args[2] in DEPRECATION_HINTS:
s.write('%s\n' %
textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip())
textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip())
return
s.write('Please change the file to not use this variable.\n')
@ -807,7 +802,7 @@ class BuildReaderError(Exception):
s.write(' %s\n' % inner.args[4].__name__)
else:
for t in inner.args[4]:
s.write(' %s\n' % t.__name__)
s.write( ' %s\n' % t.__name__)
s.write('\n')
s.write('Change the file to write a value of the appropriate type ')
s.write('and try again.\n')
@ -1058,23 +1053,23 @@ class BuildReader(object):
except SandboxCalledError as sce:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], sandbox_called_error=sce)
sys.exc_info()[2], sandbox_called_error=sce)
except SandboxExecutionError as se:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], sandbox_exec_error=se)
sys.exc_info()[2], sandbox_exec_error=se)
except SandboxLoadError as sle:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], sandbox_load_error=sle)
sys.exc_info()[2], sandbox_load_error=sle)
except SandboxValidationError as ve:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], validation_error=ve)
sys.exc_info()[2], validation_error=ve)
except Exception as e:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], other_error=e)
sys.exc_info()[2], other_error=e)
def _read_mozbuild(self, path, config, descend, metadata):
path = mozpath.normpath(path)
@ -1132,7 +1127,7 @@ class BuildReader(object):
for v in ('input', 'variables'):
if not getattr(gyp_dir, v):
raise SandboxValidationError('Missing value for '
'GYP_DIRS["%s"].%s' % (target_dir, v), context)
'GYP_DIRS["%s"].%s' % (target_dir, v), context)
# The make backend assumes contexts for sub-directories are
# emitted after their parent, so accumulate the gyp contexts.
@ -1145,7 +1140,7 @@ class BuildReader(object):
source = SourcePath(context, s)
if not self.finder.get(source.full_path):
raise SandboxValidationError('Cannot find %s.' % source,
context)
context)
non_unified_sources.add(source)
action_overrides = {}
for action, script in gyp_dir.action_overrides.iteritems():
@ -1194,7 +1189,7 @@ class BuildReader(object):
if not is_read_allowed(child_path, context.config):
raise SandboxValidationError(
'Attempting to process file outside of allowed paths: %s' %
child_path, context)
child_path, context)
if not descend:
continue
@ -1288,7 +1283,6 @@ class BuildReader(object):
# Exporting doesn't work reliably in tree traversal mode. Override
# the function to no-op.
functions = dict(FUNCTIONS)
def export(sandbox):
return lambda varname: None
functions['export'] = tuple([export] + list(FUNCTIONS['export'][1:]))
@ -1343,7 +1337,6 @@ class BuildReader(object):
# times (once for every path in a directory that doesn't have any
# test metadata). So, we cache the function call.
defaults_cache = {}
def test_defaults_for_path(ctxs):
key = tuple(ctx.current_path or ctx.main_path for ctx in ctxs)
@ -1401,8 +1394,7 @@ class BuildReader(object):
test_manifest_contexts = set(
['%s_MANIFESTS' % key for key in TEST_MANIFESTS] +
['%s_MANIFESTS' % flavor.upper() for flavor in REFTEST_FLAVORS] +
['%s_MANIFESTS' % flavor.upper().replace('-', '_')
for flavor in WEB_PLATFORM_TESTS_FLAVORS]
['%s_MANIFESTS' % flavor.upper().replace('-', '_') for flavor in WEB_PLATFORM_TESTS_FLAVORS]
)
result_context = Files(Context())


@ -17,7 +17,7 @@ KeyError are machine parseable. This machine-friendly data is used to present
user-friendly error messages in the case of errors.
"""
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import os
import sys
@ -53,7 +53,6 @@ class SandboxExecutionError(SandboxError):
This is a simple container exception. It's purpose is to capture state
so something else can report on it.
"""
def __init__(self, file_stack, exc_type, exc_value, trace):
SandboxError.__init__(self, file_stack)
@ -70,7 +69,6 @@ class SandboxLoadError(SandboxError):
a file. If so, the file_stack will be non-empty and the file that caused
the load will be on top of the stack.
"""
def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
SandboxError.__init__(self, file_stack)
@ -155,9 +153,9 @@ class Sandbox(dict):
try:
source = self._finder.get(path).read()
except Exception:
except Exception as e:
raise SandboxLoadError(self._context.source_stack,
sys.exc_info()[2], read_error=path)
sys.exc_info()[2], read_error=path)
self.exec_source(source, path)
@ -290,7 +288,7 @@ class Sandbox(dict):
raise KeyError('global_ns', 'reassign', key)
if (key not in self._context and isinstance(value, (list, dict))
and not value):
and not value):
raise KeyError('Variable %s assigned an empty value.' % key)
self._context[key] = value
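
The two assignment rules enforced above read more easily in isolation. A sketch, not the real Sandbox class: keys may not be reassigned, and a brand-new key may not be given an empty list or dict:

class StrictNamespace(dict):
    def __setitem__(self, key, value):
        if key in self:
            raise KeyError('global_ns', 'reassign', key)
        if isinstance(value, (list, dict)) and not value:
            raise KeyError('Variable %s assigned an empty value.' % key)
        dict.__setitem__(self, key, value)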


@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import sys
from mozbuild.backend.test_manifest import TestManifestBackend


@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import hashlib
import json
import os


@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import print_function
from collections import defaultdict
from copy import deepcopy
@ -78,10 +78,7 @@ class MozbuildWriter(object):
self.write('\n')
self.write(self.indent + key)
self.write(' += [\n ' + self.indent)
self.write(
(',\n ' + self.indent).join(
alphabetical_sorted(self.mb_serialize(v) for v in value))
)
self.write((',\n ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
self.write('\n')
self.write_ln(']')
@ -115,6 +112,7 @@ class MozbuildWriter(object):
if not wrote_ln:
self.write_ln("%s[%s] = %s" % subst_vals)
def write_condition(self, values):
def mk_condition(k, v):
if not v:
@ -277,9 +275,9 @@ def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
context_attrs['LOCAL_INCLUDES'] += [include]
context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
if use_defines_in_asflags and context_attrs['DEFINES']:
context_attrs['ASFLAGS'] += ['-D' + d for d in context_attrs['DEFINES']]
flags = [_f for _f in spec.get('cflags', []) if _f in mozilla_flags]
if use_defines_in_asflags and defines:
context_attrs['ASFLAGS'] += ['-D' + d for d in defines]
flags = [f for f in spec.get('cflags', []) if f in mozilla_flags]
if flags:
suffix_map = {
'.c': 'CFLAGS',
@ -434,6 +432,7 @@ def write_mozbuild(config, srcdir, output, non_unified_sources, gn_config_files,
mb.write('\n')
mb.write(generated_header)
all_attr_sets = [attrs for _, attrs in configs]
all_args = [args for args, _ in configs]
# Start with attributes that will be a part of the mozconfig


@ -4,7 +4,7 @@
# This module contains code for running an HTTP server to view build info.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import BaseHTTPServer
import json


@ -8,7 +8,7 @@ processing jar.mn files.
See the documentation for jar.mn on MDC for further details on the format.
'''
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import sys
import os
@ -18,6 +18,7 @@ import logging
from time import localtime
from MozZipFile import ZipFile
from cStringIO import StringIO
from collections import defaultdict
from mozbuild.preprocessor import Preprocessor
from mozbuild.action.buildlist import addEntriesToListFile
@ -89,8 +90,7 @@ class JarInfo(object):
self.entries = []
class DeprecatedJarManifest(Exception):
pass
class DeprecatedJarManifest(Exception): pass
class JarManifestParser(object):
@ -107,10 +107,9 @@ class JarManifestParser(object):
relsrcline = re.compile('relativesrcdir\s+(?P<relativesrcdir>.+?):')
regline = re.compile('\%\s+(.*)$')
entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+'
entryline = re.compile(
entryre + ('(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*'
'(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$')
)
entryline = re.compile(entryre
+ '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$'
)
def __init__(self):
self._current_jar = None
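
To make the entry regex above concrete, here is what it extracts from a typical jar.mn entry line (the entry itself is a made-up example):

import re

entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+'
entryline = re.compile(entryre +
    '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*'
    '(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$')

m = entryline.match('* content/global/example.js (example.js)')
print(m.group('optPreprocess'))  # '*' - the entry should be preprocessed
print(m.group('output'))         # 'content/global/example.js'
print(m.group('source'))         # 'example.js'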
@ -198,7 +197,7 @@ class JarMaker(object):
'''
def __init__(self, outputFormat='flat', useJarfileManifest=True,
useChromeManifest=False):
useChromeManifest=False):
self.outputFormat = outputFormat
self.useJarfileManifest = useJarfileManifest
@ -225,10 +224,10 @@ class JarMaker(object):
p = self.pp.getCommandLineParser(unescapeDefines=True)
p.add_option('-f', type='choice', default='jar',
choices=('jar', 'flat', 'symlink'),
help='fileformat used for output',
metavar='[jar, flat, symlink]',
)
choices=('jar', 'flat', 'symlink'),
help='fileformat used for output',
metavar='[jar, flat, symlink]',
)
p.add_option('-v', action='store_true', dest='verbose',
help='verbose output')
p.add_option('-q', action='store_false', dest='verbose',
@ -239,14 +238,14 @@ class JarMaker(object):
p.add_option('-s', type='string', action='append', default=[],
help='source directory')
p.add_option('-t', type='string', help='top source directory')
p.add_option('-c', '--l10n-src', type='string',
action='append', help='localization directory')
p.add_option('-c', '--l10n-src', type='string', action='append'
, help='localization directory')
p.add_option('--l10n-base', type='string', action='store',
help='base directory to be used for localization (requires relativesrcdir)'
)
p.add_option('--locale-mergedir', type='string', action='store',
help='base directory to be used for l10n-merge '
'(requires l10n-base and relativesrcdir)'
p.add_option('--locale-mergedir', type='string', action='store'
,
help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
)
p.add_option('--relativesrcdir', type='string',
help='relativesrcdir to be used for localization')
@ -293,18 +292,18 @@ class JarMaker(object):
chromeDir = \
os.path.basename(os.path.dirname(os.path.normpath(chromeManifest)))
logging.info("adding '%s' entry to root chrome manifest appid=%s"
% (chromeDir, self.rootManifestAppId))
% (chromeDir, self.rootManifestAppId))
addEntriesToListFile(rootChromeManifest,
['manifest %s/chrome.manifest application=%s'
% (chromeDir,
self.rootManifestAppId)])
self.rootManifestAppId)])
def updateManifest(self, manifestPath, chromebasepath, register):
'''updateManifest replaces the % in the chrome registration entries
with the given chrome base path, and updates the given manifest file.
'''
myregister = dict.fromkeys(map(lambda s: s.replace('%',
chromebasepath), register))
chromebasepath), register))
addEntriesToListFile(manifestPath, myregister.iterkeys())
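
A sketch of the '%' substitution that updateManifest's docstring describes, with a hypothetical base path: the placeholder '%' in each chrome registration entry becomes the chrome base path.

register = ['content global %content/global/']
chromebasepath = 'jar:chrome/example.jar!/'
print([s.replace('%', chromebasepath) for s in register])
# ['content global jar:chrome/example.jar!/content/global/']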
def makeJar(self, infile, jardir):
@ -315,7 +314,7 @@ class JarMaker(object):
'''
# making paths absolute, guess srcdir if file and add to sourcedirs
def _normpath(p): return os.path.normpath(os.path.abspath(p))
_normpath = lambda p: os.path.normpath(os.path.abspath(p))
self.topsourcedir = _normpath(self.topsourcedir)
self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
if self.localedirs:
@ -349,7 +348,7 @@ class JarMaker(object):
if self.l10nmerge or not self.l10nbase:
# add en-US if we merge, or if it's not l10n
locdirs.append(os.path.join(self.topsourcedir,
relativesrcdir, 'en-US'))
relativesrcdir, 'en-US'))
return locdirs
def processJarSection(self, jarinfo, jardir):
@ -458,7 +457,7 @@ class JarMaker(object):
if jf is not None:
jf.close()
raise RuntimeError('File "{0}" not found in {1}'.format(src,
', '.join(src_base)))
', '.join(src_base)))
if out in self._seen_output:
raise RuntimeError('%s already added' % out)
@ -502,7 +501,7 @@ class JarMaker(object):
try:
info = self.jarfile.getinfo(aPath)
return info.date_time
except Exception:
except:
return 0
def getOutput(self, name):
@ -586,7 +585,7 @@ def main(args=None):
jm.l10nmerge = options.locale_mergedir
if jm.l10nmerge and not os.path.isdir(jm.l10nmerge):
logging.warning("WARNING: --locale-mergedir passed, but '%s' does not exist. "
"Ignore this message if the locale is complete." % jm.l10nmerge)
"Ignore this message if the locale is complete." % jm.l10nmerge)
elif options.locale_mergedir:
p.error('l10n-base required when using locale-mergedir')
jm.localedirs = options.l10n_src

Diff not shown because of its large size.


@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import os
import re
@ -62,7 +62,6 @@ class _SimpleOrderedSet(object):
It doesn't expose a complete API, and normalizes path separators
at insertion.
'''
def __init__(self):
self._list = []
self._set = set()
@ -96,7 +95,6 @@ class Rule(object):
command2
...
'''
def __init__(self, targets=[]):
self._targets = _SimpleOrderedSet()
self._dependencies = _SimpleOrderedSet()
@ -129,7 +127,7 @@ class Rule(object):
def dependencies(self):
'''Return an iterator on the rule dependencies.'''
return iter(d for d in self._dependencies if d not in self._targets)
return iter(d for d in self._dependencies if not d in self._targets)
def commands(self):
'''Return an iterator on the rule commands.'''
@ -177,7 +175,6 @@ def read_dep_makefile(fh):
if rule:
raise Exception('Makefile finishes with a backslash. Expected more input.')
def write_dep_makefile(fh, target, deps):
'''
Write a Makefile containing only target's dependencies to the file handle
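
A hypothetical use of the helper above to emit a single-target dependency makefile (file names invented):

from StringIO import StringIO
from mozbuild.makeutil import write_dep_makefile

out = StringIO()
write_dep_makefile(out, 'obj/example.o', ['src/example.c', 'src/example.h'])
print(out.getvalue())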


@ -306,7 +306,6 @@ def _schema_1_additional(filename, manifest, require_license_file=True):
class License(object):
"""Voluptuous validator which verifies the license(s) are valid as per our
whitelist."""
def __call__(self, values):
if isinstance(values, str):
values = [values]


@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals
import filecmp
import os
@ -11,6 +11,7 @@ import sys
import subprocess
import traceback
from collections import defaultdict
from mozpack import path as mozpath
@ -65,7 +66,7 @@ class MozconfigLoader(object):
\s* [?:]?= \s* # Assignment operator surrounded by optional
# spaces
(?P<value>.*$)''', # Everything else (likely the value)
re.VERBOSE)
re.VERBOSE)
# Default mozconfig files in the topsrcdir.
DEFAULT_TOPSRCDIR_PATHS = ('.mozconfig', 'mozconfig')
@ -144,7 +145,7 @@ class MozconfigLoader(object):
'does not exist in any of ' + ', '.join(potential_roots))
env_path = os.path.join(existing[0], env_path)
elif not os.path.exists(env_path): # non-relative path
elif not os.path.exists(env_path): # non-relative path
raise MozconfigFindException(
'MOZCONFIG environment variable refers to a path that '
'does not exist: ' + env_path)
@ -155,12 +156,12 @@ class MozconfigLoader(object):
'non-file: ' + env_path)
srcdir_paths = [os.path.join(self.topsrcdir, p) for p in
self.DEFAULT_TOPSRCDIR_PATHS]
self.DEFAULT_TOPSRCDIR_PATHS]
existing = [p for p in srcdir_paths if os.path.isfile(p)]
if env_path is None and len(existing) > 1:
raise MozconfigFindException('Multiple default mozconfig files '
'present. Remove all but one. ' + ', '.join(existing))
'present. Remove all but one. ' + ', '.join(existing))
path = None
@ -174,12 +175,12 @@ class MozconfigLoader(object):
return os.path.abspath(path)
deprecated_paths = [os.path.join(self.topsrcdir, s) for s in
self.DEPRECATED_TOPSRCDIR_PATHS]
self.DEPRECATED_TOPSRCDIR_PATHS]
home = env.get('HOME', None)
if home is not None:
deprecated_paths.extend([os.path.join(home, s) for s in
self.DEPRECATED_HOME_PATHS])
self.DEPRECATED_HOME_PATHS])
for path in deprecated_paths:
if os.path.exists(path):
@ -242,7 +243,7 @@ class MozconfigLoader(object):
# We need to capture stderr because that's where the shell sends
# errors if execution fails.
output = subprocess.check_output(command, stderr=subprocess.STDOUT,
cwd=self.topsrcdir, env=env)
cwd=self.topsrcdir, env=env)
except subprocess.CalledProcessError as e:
lines = e.output.splitlines()
@ -305,7 +306,7 @@ class MozconfigLoader(object):
# Environment variables also appear as shell variables, but that's
# uninteresting duplication of information. Filter them out.
def filt(x, y): return {k: v for k, v in x.items() if k not in y}
filt = lambda x, y: {k: v for k, v in x.items() if k not in y}
result['vars'] = diff_vars(
filt(parsed['vars_before'], parsed['env_before']),
filt(parsed['vars_after'], parsed['env_after'])
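
diff_vars itself sits outside this hunk; a rough sketch of what such a before/after comparison could look like, assuming plain dicts (not the actual implementation):

def diff_vars(before, after):
    return {
        'added': {k: after[k] for k in after if k not in before},
        'removed': {k: before[k] for k in before if k not in after},
        'modified': {k: (before[k], after[k])
                     for k in before if k in after and before[k] != after[k]},
    }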


@ -5,7 +5,7 @@
# This module produces a JSON file that provides basic build info and
# configuration metadata.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import os
import re
@ -58,7 +58,7 @@ def build_dict(config, env=os.environ):
# processor
p = substs["TARGET_CPU"]
# do some slight massaging for some values
# TODO: retain specific values in case someone wants them?
#TODO: retain specific values in case someone wants them?
if p.startswith("arm"):
p = "arm"
elif re.match("i[3-9]86", p):
@ -130,7 +130,7 @@ def build_dict(config, env=os.environ):
d['platform_guess'] = guess_platform()
d['buildtype_guess'] = guess_buildtype()
if d.get('buildapp', '') == 'mobile/android' and 'MOZ_ANDROID_MIN_SDK_VERSION' in substs:
if 'buildapp' in d and d['buildapp'] == 'mobile/android' and 'MOZ_ANDROID_MIN_SDK_VERSION' in substs:
d['android_min_sdk'] = substs['MOZ_ANDROID_MIN_SDK_VERSION']
return d
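
The CPU massaging above collapses specific processor strings into coarse families. A standalone sketch (the body of the i[3-9]86 branch is elided in this hunk; mapping it to 'x86' is an assumption based on the surrounding code):

import re

def normalize_cpu(p):
    if p.startswith('arm'):
        return 'arm'
    if re.match('i[3-9]86', p):
        return 'x86'
    return p

print(normalize_cpu('armv7'))  # arm
print(normalize_cpu('i686'))   # x86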


@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import os
import subprocess


@ -22,14 +22,12 @@ value :
| \w+ # string identifier or value;
"""
from __future__ import absolute_import, print_function
import sys
import os
import re
from optparse import OptionParser
import errno
from mozbuild.makeutil import Makefile
from makeutil import Makefile
# hack around win32 mangling our line endings
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65443
@ -200,7 +198,6 @@ class Expression:
rv = not rv
return rv
# Helper function to evaluate __get_logical_and and __get_logical_or results
def eval_logical_op(tok):
left = opmap[tok[0].type](tok[0])
right = opmap[tok[2].type](tok[2])
@ -220,13 +217,12 @@ class Expression:
'defined': lambda tok: tok.value in context,
'int': lambda tok: tok.value}
return opmap[self.e.type](self.e)
return opmap[self.e.type](self.e);
class __AST(list):
"""
Internal class implementing Abstract Syntax Tree nodes
"""
def __init__(self, type):
self.type = type
super(self.__class__, self).__init__(self)
@ -235,14 +231,11 @@ class Expression:
"""
Internal class implementing Abstract Syntax Tree leafs
"""
def __init__(self, type, value):
self.value = value
self.type = type
def __str__(self):
return self.value.__str__()
def __repr__(self):
return self.value.__repr__()
@ -252,16 +245,13 @@ class Expression:
It has two members, offset and content, which give the offset of the
error and the offending content.
"""
def __init__(self, expression):
self.offset = expression.offset
self.content = expression.content[:3]
def __str__(self):
return 'Unexpected content at offset {0}, "{1}"'.format(self.offset,
self.content)
class Context(dict):
"""
This class holds variable values by subclassing dict, and while it
@ -276,7 +266,6 @@ class Context(dict):
to reflect the ambiguity between string literals and preprocessor
variables.
"""
def __getitem__(self, key):
if key in self:
return super(self.__class__, self).__getitem__(key)
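
A self-contained sketch of the fallback described in the docstring above: an undefined variable evaluates to its own name, which is how bare words double as string literals:

class FallbackContext(dict):
    def __getitem__(self, key):
        if key in self:
            return dict.__getitem__(self, key)
        return key

c = FallbackContext(OS='linux')
print(c['OS'])       # 'linux'
print(c['UNKNOWN'])  # 'UNKNOWN' - falls back to the key itself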
@ -296,9 +285,9 @@ class Preprocessor:
def __init__(self, defines=None, marker='#'):
self.context = Context()
for k, v in {'FILE': '',
'LINE': 0,
'DIRECTORY': os.path.abspath('.')}.iteritems():
for k,v in {'FILE': '',
'LINE': 0,
'DIRECTORY': os.path.abspath('.')}.iteritems():
self.context[k] = v
try:
# Can import globally because of bootstrapping issues.
@ -352,8 +341,7 @@ class Preprocessor:
elif self.actionLevel == 1:
msg = 'no useful preprocessor directives found'
if msg:
class Fake(object):
pass
class Fake(object): pass
fake = Fake()
fake.context = {
'FILE': file,
@ -466,7 +454,7 @@ class Preprocessor:
self.actionLevel = 2
self.out.write(filteredLine)
def handleCommandLine(self, args, defaultToStdin=False):
def handleCommandLine(self, args, defaultToStdin = False):
"""
Parse a commandline into this parser.
Uses OptionParser internally, no args mean sys.argv[1:].
@ -497,6 +485,11 @@ class Preprocessor:
if not options.output:
raise Preprocessor.Error(self, "--depend doesn't work with stdout",
None)
try:
from makeutil import Makefile
except:
raise Preprocessor.Error(self, "--depend requires the "
"mozbuild.makeutil module", None)
depfile = get_output_file(options.depend)
if args:
@ -512,10 +505,9 @@ class Preprocessor:
if options.output:
out.close()
def getCommandLineParser(self, unescapeDefines=False):
def getCommandLineParser(self, unescapeDefines = False):
escapedValue = re.compile('".*"$')
numberValue = re.compile('\d+$')
def handleD(option, opt, value, parser):
vals = value.split('=', 1)
if len(vals) == 1:
@ -526,16 +518,12 @@ class Preprocessor:
elif numberValue.match(vals[1]):
vals[1] = int(vals[1])
self.context[vals[0]] = vals[1]
def handleU(option, opt, value, parser):
del self.context[value]
def handleF(option, opt, value, parser):
self.do_filter(value)
def handleMarker(option, opt, value, parser):
self.setMarker(value)
def handleSilenceDirectiveWarnings(option, opt, value, parse):
self.setSilenceDirectiveWarnings(True)
p = OptionParser()
@ -546,7 +534,7 @@ class Preprocessor:
p.add_option('-F', action='callback', callback=handleF, type="string",
metavar="FILTER", help='Enable the specified filter')
p.add_option('-o', '--output', type="string", default=None,
metavar="FILENAME", help='Output to the specified file ' +
metavar="FILENAME", help='Output to the specified file '+
'instead of stdout')
p.add_option('--depend', type="string", default=None, metavar="FILENAME",
help='Generate dependencies in the given file')
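
A hypothetical invocation of this command-line front end (the option names are the ones registered above; the file names and define are invented):

from mozbuild.preprocessor import Preprocessor

pp = Preprocessor()
# Equivalent of: preprocessor.py -DDEBUG=1 -o out.js in.js.in
pp.handleCommandLine(['-DDEBUG=1', '-o', 'out.js', 'in.js.in'])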
@ -595,10 +583,9 @@ class Preprocessor:
val = self.applyFilters(m.group('value'))
try:
val = int(val)
except Exception:
except:
pass
self.context[m.group('name')] = val
def do_undef(self, args):
m = re.match('(?P<name>\w+)$', args, re.U)
if not m:
@ -606,11 +593,9 @@ class Preprocessor:
if args in self.context:
del self.context[args]
# Logic
def ensure_not_else(self):
if len(self.ifStates) == 0 or self.ifStates[-1] == 2:
sys.stderr.write('WARNING: bad nesting of #else in %s\n' % self.context['FILE'])
def do_if(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1
@ -634,7 +619,6 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass
def do_ifdef(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1
@ -650,7 +634,6 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass
def do_ifndef(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1
@ -666,59 +649,51 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass
def do_else(self, args, ifState=2):
def do_else(self, args, ifState = 2):
self.ensure_not_else()
hadTrue = self.ifStates[-1] == 0
self.ifStates[-1] = ifState # in-else
self.ifStates[-1] = ifState # in-else
if hadTrue:
self.disableLevel = 1
return
self.disableLevel = 0
def do_elif(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_if(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])
def do_elifdef(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_ifdef(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])
def do_elifndef(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_ifndef(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])
def do_endif(self, args):
if self.disableLevel > 0:
self.disableLevel -= 1
if self.disableLevel == 0:
self.ifStates.pop()
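
The conditional directives above share a small state machine: disableLevel counts enclosing false branches, and ifStates remembers whether each open #if already took a branch. A simplified, self-contained sketch (the elided middle of do_if is reconstructed here, so treat it as an approximation):

class MiniCondState(object):
    def __init__(self):
        self.disableLevel = 0
        self.ifStates = []

    def do_if(self, val):
        if self.disableLevel:
            self.disableLevel += 1  # nested inside a suppressed branch
            return
        self.disableLevel = 0 if val else 1
        self.ifStates.append(self.disableLevel)

    def do_else(self):
        hadTrue = self.ifStates[-1] == 0
        self.ifStates[-1] = 2  # in-else
        self.disableLevel = 1 if hadTrue else 0

    def do_endif(self):
        if self.disableLevel > 0:
            self.disableLevel -= 1
        if self.disableLevel == 0:
            self.ifStates.pop()

s = MiniCondState()
s.do_if(False)         # condition false: suppress output
s.do_else()
print(s.disableLevel)  # 0 - the #else branch is emitted
s.do_endif()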
# output processing
def do_expand(self, args):
lst = re.split('__(\w+)__', args, re.U)
do_replace = False
def vsubst(v):
if v in self.context:
return str(self.context[v])
return ''
for i in range(1, len(lst), 2):
lst[i] = vsubst(lst[i])
lst.append('\n') # add back the newline
lst.append('\n') # add back the newline
self.write(reduce(lambda x, y: x+y, lst, ''))
def do_literal(self, args):
self.write(args + '\n')
def do_filter(self, args):
filters = [f for f in args.split(' ') if hasattr(self, 'filter_' + f)]
if len(filters) == 0:
@ -730,7 +705,6 @@ class Preprocessor:
filterNames.sort()
self.filters = [(fn, current[fn]) for fn in filterNames]
return
def do_unfilter(self, args):
filters = args.split(' ')
current = dict(self.filters)
@ -745,14 +719,12 @@ class Preprocessor:
#
# emptyLines
# Strips blank lines from the output.
def filter_emptyLines(self, aLine):
if aLine == '\n':
return ''
return aLine
# slashslash
# Strips everything after //
def filter_slashslash(self, aLine):
if (aLine.find('//') == -1):
return aLine
@ -762,12 +734,10 @@ class Preprocessor:
return aLine
# spaces
# Collapses sequences of spaces into a single space
def filter_spaces(self, aLine):
return re.sub(' +', ' ', aLine).strip(' ')
# substition
# helper to be used by both substition and attemptSubstitution
def filter_substitution(self, aLine, fatal=True):
def repl(matchobj):
varname = matchobj.group('VAR')
@ -777,11 +747,9 @@ class Preprocessor:
raise Preprocessor.Error(self, 'UNDEFINED_VAR', varname)
return matchobj.group(0)
return self.varsubst.sub(repl, aLine)
def filter_attemptSubstitution(self, aLine):
return self.filter_substitution(aLine, fatal=False)
# File ops
def do_include(self, args, filters=True):
"""
Preprocess a given file.
@ -801,7 +769,7 @@ class Preprocessor:
args = open(args, 'rU')
except Preprocessor.Error:
raise
except Exception:
except:
raise Preprocessor.Error(self, 'FILE_NOT_FOUND', str(args))
self.checkLineNumbers = bool(re.search('\.(js|jsm|java|webidl)(?:\.in)?$', args.name))
oldFile = self.context['FILE']
@ -838,17 +806,15 @@ class Preprocessor:
self.context['LINE'] = oldLine
self.context['DIRECTORY'] = oldDir
self.curdir = oldCurdir
def do_includesubst(self, args):
args = self.filter_substitution(args)
self.do_include(args)
def do_error(self, args):
raise Preprocessor.Error(self, 'Error: ', str(args))
def preprocess(includes=[sys.stdin], defines={},
output=sys.stdout,
output = sys.stdout,
marker='#'):
pp = Preprocessor(defines=defines,
marker=marker)


@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import os
import subprocess


@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import os
import ConfigParser
import mozpack.path as mozpath


@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import errno
import os
import tempfile
@ -11,8 +9,7 @@ import tarfile
import shutil
import mozpack.path as mozpath
from mozpack.dmg import create_dmg
from mozbuild.repackaging.application_ini import get_application_ini_value
from application_ini import get_application_ini_value
def repackage_dmg(infile, output):

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import os
import tempfile
import shutil
@ -13,8 +11,7 @@ from mozbuild.action.exe_7z_archive import archive_exe
from mozbuild.util import ensureParentDir
def repackage_installer(topsrcdir, tag, setupexe, package, output,
package_name, sfx_stub, use_upx):
def repackage_installer(topsrcdir, tag, setupexe, package, output, package_name, sfx_stub, use_upx):
if package and not zipfile.is_zipfile(package):
raise Exception("Package file %s is not a valid .zip file." % package)
if package is not None and package_name is None:
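
For orientation, a hypothetical invocation of the signature shown above; every path is invented for this sketch, and the import path is assumed from this file's location in the tree.

# Hypothetical call matching the signature above; all paths are
# invented, and the import path is an assumption.
from mozbuild.repackaging.installer import repackage_installer

repackage_installer(
    topsrcdir='/src/mozilla-central',
    tag='/src/browser/installer/windows/stub.tag',  # hypothetical
    setupexe='/tmp/setup.exe',                      # hypothetical
    package='/tmp/target.zip',                      # must be a zip
    output='/tmp/installer.exe',
    package_name='firefox',                         # hypothetical
    sfx_stub='/tmp/7zSD.sfx',                       # hypothetical
    use_upx=False)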

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import os
import sys
import tempfile
@ -12,7 +10,7 @@ import zipfile
import tarfile
import subprocess
import mozpack.path as mozpath
from mozbuild.repackaging.application_ini import get_application_ini_value
from application_ini import get_application_ini_value
from mozbuild.util import ensureParentDir

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import os
import tempfile
import shutil
@ -18,17 +16,16 @@ _MSI_ARCH = {
'x86_64': 'x64',
}
def update_wsx(wfile, pvalues):
parsed = minidom.parse(wfile)
    # construct a dictionary of the pre-processing options
    # and add each entry to the .wsx XML doc
for k, v in pvalues.items():
entry = parsed.createProcessingInstruction('define', k + ' = "' + v + '"')
root = parsed.firstChild
parsed.insertBefore(entry, root)
for k,v in pvalues.items():
entry = parsed.createProcessingInstruction('define', k + ' = "' + v + '"')
root = parsed.firstChild
parsed.insertBefore(entry, root)
# write out xml to new wfile
new_w_file = wfile + ".new"
fh = open(new_w_file, "wb")
@ -59,7 +56,7 @@ def repackage_msi(topsrcdir, wsx, version, locale, arch, setupexe, candle, light
raise Exception("%s does not exist." % light)
embeddedVersion = '0.0.0.0'
# Version string cannot contain 'a' or 'b' when embedding in msi manifest.
if 'a' not in version and 'b' not in version:
if not 'a' in version and not 'b' in version:
if version.endswith('esr'):
parts = version[:-3].split('.')
else:
@ -79,8 +76,8 @@ def repackage_msi(topsrcdir, wsx, version, locale, arch, setupexe, candle, light
try:
wsx_file = os.path.split(wsx)[1]
shutil.copy(wsx, tmpdir)
temp_wsx_file = os.path.join(tmpdir, wsx_file)
temp_wsx_file = mozpath.realpath(temp_wsx_file)
temp_wsx_file = os.path.join(tmpdir, wsx_file)
temp_wsx_file = mozpath.realpath(temp_wsx_file)
pre_values = {'Vendor': 'Mozilla',
'BrandFullName': 'Mozilla Firefox',
'Version': version,
@ -103,7 +100,7 @@ def repackage_msi(topsrcdir, wsx, version, locale, arch, setupexe, candle, light
'-out', wix_installer, wix_object_file]
subprocess.check_call(light_cmd, env=env)
os.remove(wix_object_file)
# mv file to output dir
#mv file to output dir
shutil.move(wix_installer, output)
finally:
os.chdir(old_cwd)
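
A self-contained sketch of the <?define?> insertion that update_wsx() performs above; the document and values here are invented for illustration.

# Standalone sketch of update_wsx()'s core step: prepend
# <?define NAME = "VALUE"?> processing instructions to a WiX doc.
# The document and values below are hypothetical.
from xml.dom import minidom

parsed = minidom.parseString('<Wix/>')
for k, v in {'Vendor': 'Mozilla', 'Version': '68.0'}.items():
    entry = parsed.createProcessingInstruction(
        'define', '%s = "%s"' % (k, v))
    parsed.insertBefore(entry, parsed.firstChild)
print(parsed.toxml())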

View file

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import re
@ -24,7 +22,6 @@ def _tokens2re(**tokens):
# backslash, captured in the "escape" match group.
return re.compile('(?:%s|%s)' % (nonescaped, r'(?P<escape>\\\\)'))
UNQUOTED_TOKENS_RE = _tokens2re(
whitespace=r'[\t\r\n ]+',
quote=r'[\'"]',
@ -57,7 +54,6 @@ class _ClineSplitter(object):
Parses a given command line string and creates a list of command
and arguments, with wildcard expansion.
'''
def __init__(self, cline):
self.arg = None
self.cline = cline
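
As a rough functional analogue of _ClineSplitter (not mozbuild's implementation, which does its own quoting and escape handling), shell-style splitting plus glob expansion can be sketched as:

# Rough analogue only: tokenize shell-style, then expand wildcards,
# keeping a token literal when its pattern matches nothing.
import glob
import shlex

def split_with_globs(cline):
    result = []
    for tok in shlex.split(cline):
        matches = glob.glob(tok)
        result.extend(sorted(matches) if matches else [tok])
    return result

print(split_with_globs('cc -o out "a b.c" src/*.c'))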

View file

@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from __future__ import absolute_import
import importlib
import os
@ -187,9 +187,9 @@ def setup(app):
# properly. We leverage the in-tree virtualenv for this.
topsrcdir = manager.topsrcdir
ve = VirtualenvManager(topsrcdir,
os.path.join(topsrcdir, 'dummy-objdir'),
os.path.join(app.outdir, '_venv'),
sys.stderr,
os.path.join(topsrcdir, 'build', 'virtualenv_packages.txt'))
os.path.join(topsrcdir, 'dummy-objdir'),
os.path.join(app.outdir, '_venv'),
sys.stderr,
os.path.join(topsrcdir, 'build', 'virtualenv_packages.txt'))
ve.ensure()
ve.activate()
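
The constructor call re-indented above takes the source dir, an objdir, the virtualenv location, a log handle, and a package manifest; a hedged usage sketch with invented paths follows.

# Hedged sketch of the call above; every path is hypothetical and
# the argument order is taken from the diff itself.
import sys
from mozbuild.virtualenv import VirtualenvManager

ve = VirtualenvManager(
    '/src/mozilla-central',                       # topsrcdir
    '/src/mozilla-central/dummy-objdir',          # objdir placeholder
    '/tmp/docs/_venv',                            # where the venv lives
    sys.stderr,                                   # log handle
    '/src/mozilla-central/build/virtualenv_packages.txt')
ve.ensure()    # create or refresh the virtualenv
ve.activate()  # activate it inside the current process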

Some files were not shown because too many files changed in this diff.