Mirror of https://github.com/mozilla/gecko-dev.git
Backed out 6 changesets (bug 1542963) for causing Bug 1552400. a=backout
Backed out changeset 70fbe1a158ae (bug 1542963)
Backed out changeset a1a84e0feabb (bug 1542963)
Backed out changeset 14aa1bd254a4 (bug 1542963)
Backed out changeset 11a714f491d5 (bug 1542963)
Backed out changeset 2715bac40d2f (bug 1542963)
Backed out changeset 8f69c7eeb6fd (bug 1542963)

--HG--
extra : histedit_source : 5903adc5313d6af4fdafb40ae55aaa79856f3726
This commit is contained in:
Parent 1aace747e3
Commit 65e97bc31e
.flake8 | 4

@@ -25,6 +25,7 @@ exclude =
python/devtools/migrate-l10n/migrate/main.py,
python/l10n/fluent_migrations,
python/mozbuild/dumbmake,
python/mozbuild/mozbuild,
servo/components/style,
testing/jsshell/benchmark.py,
testing/marionette/mach_commands.py,
@@ -68,7 +69,6 @@ exclude =
memory/moz.configure,
mobile/android/*.configure,
node_modules,
python/mozbuild/mozbuild/test/configure/data,
security/nss/,
testing/marionette/harness/marionette_harness/runner/mixins,
testing/marionette/harness/marionette_harness/tests,
@@ -92,8 +92,6 @@ ignore =

per-file-ignores =
ipc/ipdl/*: F403, F405
# cpp_eclipse has a lot of multi-line embedded XML which exceeds line length
python/mozbuild/mozbuild/backend/cpp_eclipse.py: E501
testing/firefox-ui/**/__init__.py: F401
testing/marionette/**/__init__.py: F401
testing/mozharness/configs/*: E124, E127, E128, E131, E231, E261, E265, E266, E501, W391

@@ -17,7 +17,6 @@ from mozbuild.util import (
lock_file,
)


def addEntriesToListFile(listFile, entries):
"""Given a file |listFile| containing one entry per line,
add each entry in |entries| to the file, unless it is already
@@ -37,7 +36,7 @@ def addEntriesToListFile(listFile, entries):
with open(listFile, 'wb') as f:
f.write("\n".join(sorted(existing))+"\n")
finally:
del lock # Explicitly release the lock_file to free it
lock = None


def main(args):

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import print_function, unicode_literals

import argparse
import os
@@ -287,7 +287,6 @@ def check_networking(binary):
print('TEST-PASS | check_networking | {}'.format(basename))
return retcode


def checks(target, binary):
# The clang-plugin is built as target but is really a host binary.
# Cheat and pretend we were passed the right argument.

@@ -4,8 +4,7 @@

# This action is used to generate the wpt manifest

from __future__ import absolute_import, print_function

import os
import sys

import buildconfig

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

# We invoke a Python program to dump our environment in order to get
# native paths printed on Windows so that these paths can be incorporated
# into Python configure's environment.

@@ -11,7 +11,6 @@ import shutil
import sys
import os


def dump_symbols(target, tracking_file, count_ctors=False):
# Our tracking file, if present, will contain path(s) to the previously generated
# symbols. Remove them in this case so we don't simply accumulate old symbols
@@ -59,8 +58,7 @@ def dump_symbols(target, tracking_file, count_ctors=False):
if objcopy:
os.environ['OBJCOPY'] = objcopy

args = ([buildconfig.substs['PYTHON'],
os.path.join(buildconfig.topsrcdir, 'toolkit',
args = ([buildconfig.substs['PYTHON'], os.path.join(buildconfig.topsrcdir, 'toolkit',
'crashreporter', 'tools', 'symbolstore.py')] +
sym_store_args +
['-s', buildconfig.topsrcdir, dump_syms_bin, os.path.join(buildconfig.topobjdir,
@@ -75,7 +73,6 @@ def dump_symbols(target, tracking_file, count_ctors=False):
fh.write(out_files)
fh.flush()


def main(argv):
parser = argparse.ArgumentParser(
usage="Usage: dumpsymbols.py <library or program> <tracking file>")

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import print_function

import os
import shutil
@@ -13,7 +13,6 @@ import mozpack.path as mozpath
import buildconfig
from mozbuild.base import BuildEnvironmentNotFoundException


def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
tmpdir = tempfile.mkdtemp(prefix='tmp')
try:
@@ -31,10 +30,7 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
except BuildEnvironmentNotFoundException:
# configure hasn't been run, just use the default
sevenz = '7z'
subprocess.check_call([
sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx',
'-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1',
'-mb0s1:2', '-mb0s2:3'])
subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx', '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])

with open(package, 'wb') as o:
for i in [final_sfx, tagfile, mozpath.join(tmpdir, 'app.7z')]:
@@ -45,7 +41,6 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
shutil.move('core', pkg_dir)
shutil.rmtree(tmpdir)


def main(args):
if len(args) != 4:
print('Usage: exe_7z_archive.py <pkg_dir> <tagfile> <sfx_package> <package> <use_upx>',
@@ -55,6 +50,5 @@ def main(args):
archive_exe(args[0], args[1], args[2], args[3], args[4])
return 0


if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

@@ -2,18 +2,16 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import print_function

import shutil
import sys
import subprocess


def extract_exe(package, target):
subprocess.check_call(['7z', 'x', package, 'core'])
shutil.move('core', target)


def main(args):
if len(args) != 2:
print('Usage: exe_7z_extract.py <package> <target>',
@@ -23,6 +21,5 @@ def main(args):
extract_exe(args[0], args[1])
return 0


if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

@@ -69,7 +69,7 @@ def main(argv):
with FileAvoidWrite(args.output_file, mode='rb') as output:
try:
ret = module.__dict__[method](output, *args.additional_arguments, **kwargs)
except Exception:
except:
# Ensure that we don't overwrite the file if the script failed.
output.avoid_writing_to_file()
raise
@@ -116,6 +116,5 @@ def main(argv):
return 1
return ret


if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import sys
import json
import copy
@@ -25,28 +23,23 @@ else:
localeSearchInfo = {}
localeSearchInfo["default"] = searchinfo["default"]


def validateDefault(key):
if key not in searchinfo["default"]:
print("Error: Missing default %s in list.json" % (key), file=sys.stderr)
if (not key in searchinfo["default"]):
print >>sys.stderr, "Error: Missing default %s in list.json" % (key)
sys.exit(1)


validateDefault("searchDefault")
validateDefault("visibleDefaultEngines")
validateDefault("searchDefault");
validateDefault("visibleDefaultEngines");

# If the selected locale doesn't have a searchDefault,
# use the global one.
if "searchDefault" not in localeSearchInfo["default"]:
if not "searchDefault" in localeSearchInfo["default"]:
localeSearchInfo["default"]["searchDefault"] = searchinfo["default"]["searchDefault"]

# If the selected locale doesn't have a searchOrder,
# use the global one if present.
# searchOrder is NOT required.
if (
"searchOrder" not in localeSearchInfo["default"]
and "searchOrder" in searchinfo["default"]
):
if not "searchOrder" in localeSearchInfo["default"] and "searchOrder" in searchinfo["default"]:
localeSearchInfo["default"]["searchOrder"] = searchinfo["default"]["searchOrder"]

# If we have region overrides, enumerate through them
@@ -64,13 +57,11 @@ if "regionOverrides" in searchinfo:
if set(visibleDefaultEngines) & enginesToOverride:
if region not in localeSearchInfo:
localeSearchInfo[region] = {}
localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(
visibleDefaultEngines)
localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(visibleDefaultEngines)
for i, engine in enumerate(localeSearchInfo[region]["visibleDefaultEngines"]):
if engine in regionOverrides[region]:
localeSearchInfo[region]["visibleDefaultEngines"][i] = \
regionOverrides[region][engine]
localeSearchInfo[region]["visibleDefaultEngines"][i] = regionOverrides[region][engine]

output.write(json.dumps(localeSearchInfo, ensure_ascii=False).encode('utf8'))

output.close()
output.close();

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import sys

@@ -38,9 +38,13 @@ import os
from mozbuild.dotproperties import (
DotProperties,
)
from mozbuild.util import (
FileAvoidWrite,
)
from mozpack.files import (
FileFinder,
)
import mozpack.path as mozpath


def merge_properties(paths):
@@ -88,8 +92,7 @@ def main(output, *args, **kwargs):
properties = merge_properties(sources)

# Keep these two in sync.
image_url_template = \
'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
drawables_template = 'drawable*/suggestedsites_{name}.*'

# Load properties corresponding to each site name and define their
@@ -99,8 +102,7 @@ def main(output, *args, **kwargs):
def add_names(names, defaults={}):
for name in names:
site = copy.deepcopy(defaults)
site.update(properties.get_dict('browser.suggestedsites.{name}'.format(
name=name), required_keys=('title', 'url', 'bgcolor')))
site.update(properties.get_dict('browser.suggestedsites.{name}'.format(name=name), required_keys=('title', 'url', 'bgcolor')))
site['imageurl'] = image_url_template.format(name=name)
sites.append(site)

@@ -118,9 +120,7 @@ def main(output, *args, **kwargs):
else:
if opts.verbose:
print("Found {len} drawables in '{resources}' for '{name}': {matches}"
.format(len=len(matches), resources=resources,
name=name, matches=matches)
)
.format(len=len(matches), resources=resources, name=name, matches=matches))

# We want the lists to be ordered for reproducibility. Each list has a
# "default" JSON list item which will be extended by the properties read.
@@ -129,14 +129,12 @@ def main(output, *args, **kwargs):
('browser.suggestedsites.restricted.list', {'restricted': True}),
]
if opts.verbose:
print('Reading {len} suggested site lists: {lists}'.format(
len=len(lists), lists=[list_name for list_name, _ in lists]))
print('Reading {len} suggested site lists: {lists}'.format(len=len(lists), lists=[list_name for list_name, _ in lists]))

for (list_name, list_item_defaults) in lists:
names = properties.get_list(list_name)
if opts.verbose:
print('Reading {len} suggested sites from {list}: {names}'.format(
len=len(names), list=list_name, names=names))
print('Reading {len} suggested sites from {list}: {names}'.format(len=len(names), list=list_name, names=names))
add_names(names, list_item_defaults)

# We must define at least one site -- that's what the fallback is for.

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import sys

@@ -8,7 +8,7 @@
# the locale directory, chrome registry entries and other information
# necessary to produce the complete manifest file for a language pack.
###
from __future__ import absolute_import, print_function
from __future__ import absolute_import

import argparse
import sys
@@ -26,6 +26,7 @@ from mozpack.chrome.manifest import (
)
from mozbuild.configure.util import Version
from mozbuild.preprocessor import Preprocessor
import buildconfig


def write_file(path, content):
@@ -307,7 +308,7 @@ def get_version_maybe_buildid(min_version):
version = str(min_version)
buildid = os.environ.get('MOZ_BUILD_DATE')
if buildid and len(buildid) != 14:
print('Ignoring invalid MOZ_BUILD_DATE: %s' % buildid, file=sys.stderr)
print >>sys.stderr, 'Ignoring invalid MOZ_BUILD_DATE: %s' % buildid
buildid = None
if buildid:
version = version + "buildid" + buildid

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import print_function

from mozpack import dmg

@@ -2,16 +2,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import print_function

import sys
import subprocess


def make_unzip(package):
subprocess.check_call(['unzip', package])


def main(args):
if len(args) != 1:
print('Usage: make_unzip.py <package>',
@@ -21,6 +19,5 @@ def main(args):
make_unzip(args[0])
return 0


if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

@@ -2,16 +2,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import print_function

import sys
import subprocess


def make_zip(source, package):
subprocess.check_call(['zip', '-r9D', package, source, '-x', '\*/.mkdir.done'])


def main(args):
if len(args) != 2:
print('Usage: make_zip.py <source> <package>',
@@ -21,6 +19,5 @@ def main(args):
make_zip(args[0], args[1])
return 0


if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import sys
import json

@@ -11,6 +11,7 @@ from __future__ import absolute_import, print_function
import argparse
import buildconfig
import os
import subprocess
import sys

from mozpack.copier import Jarrer
@@ -69,7 +70,7 @@ def package_fennec_apk(inputs=[], omni_ja=None,
if verbose:
print('Packaging %s from %s' % (path, file.path))
if not os.path.exists(abspath):
raise ValueError('File %s not found (looked for %s)' %
raise ValueError('File %s not found (looked for %s)' % \
(file.path, abspath))
if jarrer.contains(path):
jarrer.remove(path)

@@ -5,6 +5,8 @@
from __future__ import absolute_import, print_function, unicode_literals

import argparse
import json
import os.path
import sys

import buildconfig
@@ -21,15 +23,13 @@ def main(argv):
args = parser.parse_args(argv)

objdir_abspath = mozpath.abspath(buildconfig.topobjdir)

def is_valid_entry(entry):
if isinstance(entry[1], BaseFile):
entry_abspath = mozpath.abspath(entry[1].path)
else:
entry_abspath = mozpath.abspath(entry[1])
if not entry_abspath.startswith(objdir_abspath):
print("Warning: omitting generated source [%s] from archive" % entry_abspath,
file=sys.stderr)
print("Warning: omitting generated source [%s] from archive" % entry_abspath, file=sys.stderr)
return False
return True

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import sys

@@ -15,7 +15,6 @@ def generate(output, *args):
pp.handleCommandLine(list(args), True)
return set(pp.includes)


def main(args):
pp = Preprocessor()
pp.handleCommandLine(args, True)

@@ -10,6 +10,7 @@ import re
import sys
from buildconfig import topsrcdir, topobjdir
from mozbuild.backend.configenvironment import PartialConfigEnvironment
from mozbuild.util import FileAvoidWrite
import mozpack.path as mozpath


@@ -50,12 +51,11 @@ def process_define_file(output, input):
raise Exception(
'`#define ALLDEFINES` is not allowed in a '
'CONFIGURE_DEFINE_FILE')

# WebRTC files like to define WINVER and _WIN32_WINNT
# via the command line, which raises a mass of macro
# redefinition warnings. Just handle those macros
# specially here.
def define_for_name(name, val):
"""WebRTC files like to define WINVER and _WIN32_WINNT
via the command line, which raises a mass of macro
redefinition warnings. Just handle those macros
specially here."""
define = "#define {name} {val}".format(name=name, val=val)
if name in ('WINVER', '_WIN32_WINNT'):
return '#if !defined({name})\n{define}\n#endif' \

@@ -109,6 +109,5 @@ def main(argv):
rm_files=result.removed_files_count,
rm_dirs=result.removed_directories_count))


if __name__ == '__main__':
main(sys.argv[1:])

@@ -12,7 +12,6 @@ from mozpack.files import FileFinder
from mozpack.mozjar import JarWriter
import mozpack.path as mozpath


def make_archive(archive_name, base, exclude, include):
compress = ['**/*.sym']
finder = FileFinder(base, ignore=exclude)
@@ -28,13 +27,11 @@ def make_archive(archive_name, base, exclude, include):
writer.add(p.encode('utf-8'), f, mode=f.mode,
compress=should_compress, skip_duplicates=True)


def main(argv):
parser = argparse.ArgumentParser(description='Produce a symbols archive')
parser.add_argument('archive', help='Which archive to generate')
parser.add_argument('base', help='Base directory to package')
parser.add_argument('--full-archive', action='store_true',
help='Generate a full symbol archive')
parser.add_argument('--full-archive', action='store_true', help='Generate a full symbol archive')

args = parser.parse_args(argv)

@@ -50,6 +47,5 @@ def main(argv):

make_archive(args.archive, args.base, excludes, includes)


if __name__ == '__main__':
main(sys.argv[1:])

@@ -17,8 +17,6 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.

from __future__ import absolute_import, print_function

# A manifest file specifies files in that directory that are stored
# elsewhere. This file should only list files in the same directory
# in which the manifest file resides and it should be called
@@ -750,7 +748,7 @@ def _cache_checksum_matches(base_file, checksum):
log.info("Cache matches, avoiding extracting in '%s'" % base_file)
return True
return False
except IOError:
except IOError as e:
return False


@@ -1024,7 +1022,7 @@ def _authorize(req, auth_file):
try:
auth_file_content = json.loads(auth_file_content)
is_taskcluster_auth = True
except Exception:
except:
pass

if is_taskcluster_auth:
@@ -1302,6 +1300,5 @@ def main(argv, _skip_logging=False):

return 0 if process_command(options, args) else 1


if __name__ == "__main__": # pragma: no cover
sys.exit(main(sys.argv))

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import print_function

from mozpack import dmg

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import sys

@@ -9,7 +9,6 @@ import subprocess
import sys
import os


def parse_outputs(crate_output, dep_outputs, pass_l_flag):
env = {}
args = []
@@ -60,7 +59,6 @@ def parse_outputs(crate_output, dep_outputs, pass_l_flag):

return env, args


def wrap_rustc(args):
parser = argparse.ArgumentParser()
parser.add_argument('--crate-out', nargs='?')
@@ -75,6 +73,5 @@ def wrap_rustc(args):
os.environ.update(new_env)
return subprocess.Popen(args.cmd + new_args, cwd=args.cwd).wait()


if __name__ == '__main__':
sys.exit(wrap_rustc(sys.argv[1:]))

@@ -8,20 +8,18 @@ corresponding .ini file.
Usage: xpccheck.py <directory> [<directory> ...]
'''

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import sys
import os
from glob import glob
import manifestparser


def getIniTests(testdir):
mp = manifestparser.ManifestParser(strict=False)
mp.read(os.path.join(testdir, 'xpcshell.ini'))
return mp.tests


def verifyDirectory(initests, directory):
files = glob(os.path.join(os.path.abspath(directory), "test_*"))
for f in files:
@@ -42,16 +40,9 @@ def verifyDirectory(initests, directory):
break

if not found:
print(('TEST-UNEXPECTED-FAIL | xpccheck | test '
'%s is missing from test manifest %s!') % (
name,
os.path.join(directory, 'xpcshell.ini'),
),
file=sys.stderr,
)
print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (name, os.path.join(directory, 'xpcshell.ini'))
sys.exit(1)


def verifyIniFile(initests, directory):
files = glob(os.path.join(os.path.abspath(directory), "test_*"))
for test in initests:
@@ -69,22 +60,15 @@ def verifyIniFile(initests, directory):
break

if not found:
print(("TEST-UNEXPECTED-FAIL | xpccheck | found "
"%s in xpcshell.ini and not in directory '%s'") % (
name,
directory,
),
file=sys.stderr,
)
print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (name, directory)
sys.exit(1)


def main(argv):
if len(argv) < 2:
print("Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]",
file=sys.stderr)
print >>sys.stderr, "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]"
sys.exit(1)

topsrcdir = argv[0]
for d in argv[1:]:
# xpcshell-unpack is a copy of xpcshell sibling directory and in the Makefile
# we copy all files (including xpcshell.ini from the sibling directory.
@@ -95,6 +79,5 @@ def main(argv):
verifyDirectory(initests, d)
verifyIniFile(initests, d)


if __name__ == '__main__':
main(sys.argv[1:])

@@ -7,12 +7,14 @@
# input IDL file(s). It's purpose is to directly support the build
# system. The API will change to meet the needs of the build system.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import argparse
import os
import sys

from io import BytesIO

from xpidl import jsonxpt
from buildconfig import topsrcdir
from xpidl.header import print_header
@@ -112,6 +114,5 @@ def main(argv):
args.headerdir, args.xpcrsdir, args.xptdir, args.depsdir, args.module,
args.idls)


if __name__ == '__main__':
main(sys.argv[1:])

@@ -5,7 +5,7 @@
# This script creates a zip file, but will also strip any binaries
# it finds before adding them to the zip.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

from mozpack.files import FileFinder
from mozpack.copier import Jarrer

@@ -2,12 +2,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import os
import sqlite3 as lite


class Node(object):

def __init__(self, graph, node_id):
@@ -47,7 +44,6 @@ class Node(object):
else:
return None


class Graph(object):

def __init__(self, path=None, connect=None):
@@ -94,7 +90,7 @@ class Graph(object):
ret = self.query_arg('SELECT id FROM node \
WHERE dir=? AND name=?', (nodeid, part)).fetchone()
# fetchone should be ok bc dir and and name combo is unique
if ret is None:
if ret == None:
print ("\nCould not find id number for '%s'" % filepath)
return None
nodeid = ret[0]
@@ -132,3 +128,4 @@ class Graph(object):
if self.results is None:
self.populate()
return {k:v for k,v in self.results if v > 0}

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import bisect
import gzip
import json
@@ -19,26 +17,21 @@ PUSHLOG_CHUNK_SIZE = 500

URL = 'https://hg.mozilla.org/mozilla-central/json-pushes?'


def unix_epoch(date):
return (date - datetime(1970,1,1)).total_seconds()


def unix_from_date(n, today):
return unix_epoch(today - timedelta(days=n))


def get_lastpid(session):
return session.get(URL+'&version=2').json()['lastpushid']


def get_pushlog_chunk(session, start, end):
# returns pushes sorted by date
res = session.get(URL+'version=1&startID={0}&\
endID={1}&full=1'.format(start, end)).json()
return sorted(res.items(), key = lambda x: x[1]['date'])


def collect_data(session, date):
if date < 1206031764: #first push
raise Exception ("No pushes exist before March 20, 2008.")
@@ -59,13 +52,11 @@ def collect_data(session, date):
end_id = start_id + 1
start_id = start_id - PUSHLOG_CHUNK_SIZE


def get_data(epoch):
session = requests.Session()
data = collect_data(session, epoch)
return {k:v for sublist in data for (k,v) in sublist}


class Pushlog(object):

def __init__(self, days):
@@ -87,7 +78,6 @@ class Pushlog(object):
keys.sort()
return keys


class Push(object):

def __init__(self, pid, p_dict):
@@ -95,7 +85,6 @@ class Push(object):
self.date = p_dict['date']
self.files = [f for x in p_dict['changesets'] for f in x['files']]


class Report(object):

def __init__(self, days, path=None, cost_dict=None):
@@ -154,8 +143,7 @@ class Report(object):
res = self.get_sorted_report(format)
if limit is not None:
res = self.cut(limit, res)
for x in res:
data.append(x)
for x in res: data.append(x)
if format == 'pretty':
print (data)
else:
@@ -172,3 +160,4 @@ class Report(object):
with open(file_path, 'wb') as f:
f.write(content)
print ("Created report: %s" % file_path)

@@ -13,7 +13,6 @@ import time
# build ID use the v1 version scheme.
V1_CUTOFF = 20150801000000 # YYYYmmddHHMMSS


def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
base = int(str(buildid)[:10])
# None is interpreted as arm.
@@ -31,7 +30,6 @@ def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
raise ValueError("Don't know how to compute android:versionCode "
"for CPU arch %s" % cpu_arch)


def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
'''Generate a v1 android:versionCode.

@@ -136,7 +134,6 @@ def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):

return version


def android_version_code(buildid, *args, **kwargs):
base = int(str(buildid))
if base < V1_CUTOFF:

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

# The values correspond to entries at
# https://tools.taskcluster.net/index/artifacts/#gecko.v2.mozilla-central.latest/gecko.v2.mozilla-central.latest
JOB_CHOICES = {

@@ -77,8 +77,7 @@ from mozpack.mozjar import (
from mozpack.packager.unpack import UnpackFinder
import mozpack.path as mozpath

# Number of candidate pushheads to cache per parent changeset.
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 # Number of candidate pushheads to cache per parent changeset.

# Number of parent changesets to consider as possible pushheads.
# There isn't really such a thing as a reasonable default here, because we don't
@@ -324,8 +323,7 @@ class AndroidArtifactJob(ArtifactJob):
writer.add(basename.encode('utf-8'), f.open())

def process_symbols_archive(self, filename, processed_filename):
ArtifactJob.process_symbols_archive(
self, filename, processed_filename, skip_compressed=True)
ArtifactJob.process_symbols_archive(self, filename, processed_filename, skip_compressed=True)

if self._symbols_archive_suffix != 'crashreporter-symbols-full.zip':
return
@@ -338,11 +336,9 @@ class AndroidArtifactJob(ArtifactJob):
if not filename.endswith('.gz'):
continue

# Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz"
# into "libxul.so.dbg".
# Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz" into "libxul.so.dbg".
#
# After running `settings append target.debug-file-search-paths $file`,
# where file=/path/to/topobjdir/dist/crashreporter-symbols,
# After `settings append target.debug-file-search-paths /path/to/topobjdir/dist/crashreporter-symbols`,
# Android Studio's lldb (7.0.0, at least) will find the ELF debug symbol files.
#
# There are other paths that will work but none seem more desireable. See
@@ -351,8 +347,7 @@ class AndroidArtifactJob(ArtifactJob):
destpath = mozpath.join('crashreporter-symbols', basename)
self.log(logging.INFO, 'artifact',
{'destpath': destpath},
'Adding uncompressed ELF debug symbol file '
'{destpath} to processed archive')
'Adding uncompressed ELF debug symbol file {destpath} to processed archive')
writer.add(destpath.encode('utf-8'),
gzip.GzipFile(fileobj=reader[filename].uncompressed_data))

@@ -659,8 +654,7 @@ class CacheManager(object):
Provide simple logging.
'''

def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None,
log=None, skip_cache=False):
def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None, log=None, skip_cache=False):
self._skip_cache = skip_cache
self._cache = pylru.lrucache(cache_size, callback=cache_callback)
self._cache_filename = mozpath.join(cache_dir, cache_name + '-cache.pickle')
@@ -699,8 +693,7 @@ class CacheManager(object):
return

ensureParentDir(self._cache_filename)
pickle.dump(list(reversed(list(self._cache.items()))),
open(self._cache_filename, 'wb'), -1)
pickle.dump(list(reversed(list(self._cache.items()))), open(self._cache_filename, 'wb'), -1)

def clear_cache(self):
if self._skip_cache:
@@ -719,13 +712,11 @@ class CacheManager(object):
def __exit__(self, type, value, traceback):
self.dump_cache()


class PushheadCache(CacheManager):
'''Helps map tree/revision pairs to parent pushheads according to the pushlog.'''

def __init__(self, cache_dir, log=None, skip_cache=False):
CacheManager.__init__(self, cache_dir, 'pushhead_cache',
MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
CacheManager.__init__(self, cache_dir, 'pushhead_cache', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)

@cachedmethod(operator.attrgetter('_cache'))
def parent_pushhead_id(self, tree, revision):
@@ -752,13 +743,11 @@ class PushheadCache(CacheManager):
p['changesets'][-1] for p in result['pushes'].values()
]


class TaskCache(CacheManager):
'''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''

def __init__(self, cache_dir, log=None, skip_cache=False):
CacheManager.__init__(self, cache_dir, 'artifact_url',
MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
CacheManager.__init__(self, cache_dir, 'artifact_url', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)

@cachedmethod(operator.attrgetter('_cache'))
def artifacts(self, tree, job, artifact_job_class, rev):
@@ -793,8 +782,7 @@ class TaskCache(CacheManager):
except KeyError:
# Not all revisions correspond to pushes that produce the job we
# care about; and even those that do may not have completed yet.
raise ValueError(
'Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
raise ValueError('Task for {namespace} does not exist (yet)!'.format(namespace=namespace))

return taskId, list_artifacts(taskId)

@@ -837,10 +825,8 @@ class Artifacts(object):
raise KeyError("Unknown job")

self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._artifact_cache = ArtifactCache(
self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._pushhead_cache = PushheadCache(
self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)

def log(self, *args, **kwargs):
if self._log:
@@ -964,8 +950,7 @@ class Artifacts(object):
There are no public revisions.
This can happen if the repository is created from bundle file and never pulled
from remote. Please run `hg pull` and build again.
see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles\
""")
see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles""")

self.log(logging.INFO, 'artifact',
{'len': len(last_revs)},
@@ -1010,16 +995,14 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
yield candidate_pushheads[rev], rev

if not count:
raise Exception(
'Could not find any candidate pushheads in the last {num} revisions.\n'
raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n'
'Search started with {rev}, which must be known to Mozilla automation.\n\n'
'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))

def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
try:
taskId, artifacts = task_cache.artifacts(
tree, job, self._artifact_job.__class__, pushhead)
taskId, artifacts = task_cache.artifacts(tree, job, self._artifact_job.__class__, pushhead)
except ValueError:
return None

@@ -1079,8 +1062,7 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
shutil.copyfileobj(zf.open(info), fh)
file_existed, file_updated = fh.close()
self.log(logging.INFO, 'artifact',
{'updating': 'Updating' if file_updated else 'Not updating',
'filename': n},
{'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
'{updating} {filename}')
if not file_existed or file_updated:
# Libraries and binaries may need to be marked executable,
@@ -1183,8 +1165,7 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
url = get_artifact_url(taskId, artifact_name)
urls.append(url)
if not urls:
raise ValueError(
'Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
raise ValueError('Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
for url in urls:
if self.install_from_url(url, distdir):
return 1
@@ -1213,6 +1194,7 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code

return self.install_from_recent(distdir)


def clear_cache(self):
self.log(logging.INFO, 'artifact',
{},

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

backends = {
'ChromeMap': 'mozbuild.codecoverage.chrome_map',
'CompileDB': 'mozbuild.compilation.database',

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

from abc import (
ABCMeta,

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

cargo_extra_outputs = {
'bindgen': [

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import itertools
import json
@@ -33,6 +33,7 @@ from mozbuild.frontend.data import (
GnProjectData,
HostLibrary,
HostGeneratedSources,
HostRustLibrary,
IPDLCollection,
LocalizedPreprocessedFiles,
LocalizedFiles,
@@ -52,10 +53,10 @@ from mozbuild.preprocessor import Preprocessor
from mozpack.chrome.manifest import parse_manifest_line

from mozbuild.util import (
group_unified_files,
mkdir,
)


class XPIDLManager(object):
"""Helps manage XPCOM IDLs in the context of the build system."""

@@ -101,7 +102,6 @@ class XPIDLManager(object):
"""
return itertools.chain(*[m.stems() for m in self.modules.itervalues()])


class BinariesCollection(object):
"""Tracks state of binaries produced by the build."""

@@ -109,7 +109,6 @@ class BinariesCollection(object):
self.shared_libraries = []
self.programs = []


class CommonBackend(BuildBackend):
"""Holds logic common to all build backends."""

@@ -183,8 +182,7 @@ class CommonBackend(BuildBackend):
return False

elif isinstance(obj, Exports):
objdir_files = [f.full_path for path, files in obj.files.walk()
for f in files if isinstance(f, ObjDirPath)]
objdir_files = [f.full_path for path, files in obj.files.walk() for f in files if isinstance(f, ObjDirPath)]
if objdir_files:
self._handle_generated_sources(objdir_files)
return False
@@ -203,10 +201,10 @@ class CommonBackend(BuildBackend):
if len(self._idl_manager.modules):
self._write_rust_xpidl_summary(self._idl_manager)
self._handle_idl_manager(self._idl_manager)
self._handle_generated_sources(
mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
self._handle_generated_sources(mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
for stem in self._idl_manager.idl_stems())


for config in self._configs:
self.backend_input_files.add(config.source)

@@ -296,7 +294,7 @@ class CommonBackend(BuildBackend):
seen_libs.add(lib)
os_libs.append(lib)

return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs,
return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs, \
shared_libs, os_libs, static_libs)

def _make_list_file(self, kind, objdir, objs, name):
@@ -334,8 +332,7 @@ class CommonBackend(BuildBackend):
return ref

def _handle_generated_sources(self, files):
self._generated_sources.update(mozpath.relpath(
f, self.environment.topobjdir) for f in files)
self._generated_sources.update(mozpath.relpath(f, self.environment.topobjdir) for f in files)

def _handle_webidl_collection(self, webidls):

@@ -399,7 +396,7 @@ class CommonBackend(BuildBackend):
includeTemplate += (
'\n'
'#if defined(_WINDOWS_) && !defined(MOZ_WRAPPED_WINDOWS_H)\n'
'#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n' # noqa
'#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n'
'#error "%(cppfile)s included unwrapped windows.h"\n'
"#endif")
includeTemplate += (

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import os
import sys
@@ -49,7 +49,7 @@ class BuildConfig(object):

# cache the compiled code as it can be reused
# we cache it the first time, or if the file changed
if path not in code_cache or code_cache[path][0] != mtime:
if not path in code_cache or code_cache[path][0] != mtime:
# Add config.status manually to sys.modules so it gets picked up by
# iter_modules_in_path() for automatic dependencies.
mod = ModuleType('config.status')
@@ -148,35 +148,20 @@ class ConfigEnvironment(object):
self.bin_suffix = self.substs.get('BIN_SUFFIX', '')

global_defines = [name for name in self.defines
if name not in self.non_global_defines]
self.substs["ACDEFINES"] = ' '.join(
[
'-D%s=%s' % (name, shell_quote(self.defines[name]).replace('$', '$$'))
for name in sorted(global_defines)
]
)

if not name in self.non_global_defines]
self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
shell_quote(self.defines[name]).replace('$', '$$'))
for name in sorted(global_defines)])
def serialize(name, obj):
if isinstance(obj, StringTypes):
return obj
if isinstance(obj, Iterable):
return ' '.join(obj)
raise Exception('Unhandled type %s for %s', type(obj), str(name))
self.substs['ALLSUBSTS'] = '\n'.join(
sorted([
'%s = %s' % (
name,
serialize(name, self.substs[name])
)
for name in self.substs if self.substs[name]
])
)
self.substs['ALLEMPTYSUBSTS'] = '\n'.join(
sorted([
'%s =' % name
for name in self.substs if not self.substs[name]
])
)
self.substs['ALLSUBSTS'] = '\n'.join(sorted(['%s = %s' % (name,
serialize(name, self.substs[name])) for name in self.substs if self.substs[name]]))
self.substs['ALLEMPTYSUBSTS'] = '\n'.join(sorted(['%s =' % name
for name in self.substs if not self.substs[name]]))

self.substs = ReadOnlyDict(self.substs)

@@ -239,7 +224,6 @@ class PartialConfigDict(object):
similar for substs), where the value of FOO is delay-loaded until it is
needed.
"""

def __init__(self, config_statusd, typ, environ_override=False):
self._dict = {}
self._datadir = mozpath.join(config_statusd, typ)
@@ -354,7 +338,6 @@ class PartialConfigEnvironment(object):
intended to be used instead of the defines structure from config.status so
that scripts can depend directly on its value.
"""

def __init__(self, topobjdir):
config_statusd = mozpath.join(topobjdir, 'config.statusd')
self.substs = PartialConfigDict(config_statusd, 'substs', environ_override=True)

@@ -2,18 +2,22 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import errno
import glob
import random
import os
import shutil
import subprocess
import types
from xml.sax.saxutils import quoteattr
import xml.etree.ElementTree as ET
from .common import CommonBackend

from ..frontend.data import (
ComputedFlags,
Defines,
)
from mozbuild.base import ExecutionSummary

@@ -22,7 +26,6 @@ from mozbuild.base import ExecutionSummary
# Open eclipse:
# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace


class CppEclipseBackend(CommonBackend):
"""Backend that generates Cpp Eclipse project files.
"""
@@ -78,8 +81,7 @@ class CppEclipseBackend(CommonBackend):
# Note that unlike VS, Eclipse' indexer seem to crawl the headers and
# isn't picky about the local includes.
if isinstance(obj, ComputedFlags):
args = self._args_for_dirs.setdefault(
'tree/' + reldir, {'includes': [], 'defines': []})
args = self._args_for_dirs.setdefault('tree/' + reldir, {'includes': [], 'defines': []})
# use the same args for any objdirs we include:
if reldir == 'dom/bindings':
self._args_for_dirs.setdefault('generated-webidl', args)
@@ -103,8 +105,7 @@ class CppEclipseBackend(CommonBackend):
def consume_finished(self):
settings_dir = os.path.join(self._project_dir, '.settings')
launch_dir = os.path.join(self._project_dir, 'RunConfigurations')
workspace_settings_dir = os.path.join(
self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
workspace_settings_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')

for dir_name in [self._project_dir, settings_dir, launch_dir, workspace_settings_dir, self._workspace_lang_dir]:
try:
@@ -128,25 +129,22 @@ class CppEclipseBackend(CommonBackend):
workspace_language_path = os.path.join(self._workspace_lang_dir, 'language.settings.xml')
with open(workspace_language_path, 'wb') as fh:
workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
workspace_lang_settings = workspace_lang_settings.replace(
"@COMPILER_FLAGS@", self._cxx + " " + self._cppflags)
workspace_lang_settings = workspace_lang_settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
fh.write(workspace_lang_settings)

self._write_launch_files(launch_dir)

core_resources_prefs_path = os.path.join(
workspace_settings_dir, 'org.eclipse.core.resources.prefs')
core_resources_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.resources.prefs')
with open(core_resources_prefs_path, 'wb') as fh:
fh.write(STATIC_CORE_RESOURCES_PREFS)
fh.write(STATIC_CORE_RESOURCES_PREFS);

core_runtime_prefs_path = os.path.join(
workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
core_runtime_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
with open(core_runtime_prefs_path, 'wb') as fh:
fh.write(STATIC_CORE_RUNTIME_PREFS)
fh.write(STATIC_CORE_RUNTIME_PREFS);

ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.ui.prefs')
with open(ui_prefs_path, 'wb') as fh:
fh.write(STATIC_UI_PREFS)
fh.write(STATIC_UI_PREFS);

cdt_ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.ui.prefs')
cdt_ui_prefs = STATIC_CDT_UI_PREFS
@@ -157,11 +155,10 @@ class CppEclipseBackend(CommonBackend):
XML_PREF_TEMPLATE = """<setting id\="@PREF_NAME@" value\="@PREF_VAL@"/>\\n"""
for line in FORMATTER_SETTINGS.splitlines():
[pref, val] = line.split("=")
cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@",
pref).replace("@PREF_VAL@", val)
cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@", pref).replace("@PREF_VAL@", val)
cdt_ui_prefs += "</profile>\\n</profiles>\\n"
with open(cdt_ui_prefs_path, 'wb') as fh:
fh.write(cdt_ui_prefs)
fh.write(cdt_ui_prefs);

cdt_core_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.core.prefs')
with open(cdt_core_prefs_path, 'wb') as fh:
@@ -171,11 +168,11 @@ class CppEclipseBackend(CommonBackend):
# as the active formatter all its prefs are set in this prefs file,
# so we need add those now:
cdt_core_prefs += FORMATTER_SETTINGS
fh.write(cdt_core_prefs)
fh.write(cdt_core_prefs);

editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs")
editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs");
with open(editor_prefs_path, 'wb') as fh:
fh.write(EDITOR_SETTINGS)
fh.write(EDITOR_SETTINGS);

# Now import the project into the workspace
self._import_project()
@@ -191,7 +188,7 @@ class CppEclipseBackend(CommonBackend):
self._write_noindex()

try:
subprocess.check_call(
process = subprocess.check_call(
["eclipse", "-application", "-nosplash",
"org.eclipse.cdt.managedbuilder.core.headlessbuild",
"-data", self._workspace_dir, "-importAll", self._project_dir])
@@ -211,7 +208,7 @@ class CppEclipseBackend(CommonBackend):
def _write_noindex(self):
noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
with open(noindex_path, 'wb') as fh:
fh.write(NOINDEX_TEMPLATE)
fh.write(NOINDEX_TEMPLATE);

def _remove_noindex(self):
# Below we remove the config file that temporarily disabled the indexer
@@ -260,8 +257,7 @@ class CppEclipseBackend(CommonBackend):
dirsettings_template = LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER

# Add OS_COMPILE_CXXFLAGS args (same as OS_COMPILE_CFLAGS):
dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(
self.environment.topobjdir, 'dist/include/mozilla-config.h'))
dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/mozilla-config.h'))
dirsettings_template += add_define('MOZILLA_CLIENT', '1')

# Add EXTRA_INCLUDES args:
@@ -318,8 +314,7 @@ class CppEclipseBackend(CommonBackend):
dirsettings += LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER
fh.write(dirsettings)

fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace(
"@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))
fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))

def _write_launch_files(self, launch_dir):
bin_dir = os.path.join(self.environment.topobjdir, 'dist')
@@ -342,22 +337,18 @@ class CppEclipseBackend(CommonBackend):
#TODO Add more launch configs (and delegate calls to mach)

def _write_project(self, fh):
project = PROJECT_TEMPLATE
project = PROJECT_TEMPLATE;

project = project.replace('@PROJECT_NAME@', self._project_name)
project = project.replace('@PROJECT_TOPSRCDIR@', self.environment.topsrcdir)
project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(
self.environment.topobjdir, "ipc", "ipdl"))
project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(
self.environment.topobjdir, "dom", "bindings"))
project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(self.environment.topobjdir, "ipc", "ipdl"))
project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(self.environment.topobjdir, "dom", "bindings"))
fh.write(project)

def _write_cproject(self, fh):
cproject_header = CPROJECT_TEMPLATE_HEADER
cproject_header = cproject_header.replace(
'@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
cproject_header = cproject_header.replace(
'@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
cproject_header = cproject_header.replace('@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
cproject_header = cproject_header.replace('@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
fh.write(cproject_header)
fh.write(CPROJECT_TEMPLATE_FOOTER)

@@ -134,12 +134,10 @@ class FasterMakeBackend(CommonBackend, PartialBackend):

elif isinstance(obj, GeneratedFile):
if obj.outputs:
first_output = mozpath.relpath(mozpath.join(
obj.objdir, obj.outputs[0]), self.environment.topobjdir)
first_output = mozpath.relpath(mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir)
for o in obj.outputs[1:]:
fullpath = mozpath.join(obj.objdir, o)
self._generated_files_map[mozpath.relpath(
fullpath, self.environment.topobjdir)] = first_output
self._generated_files_map[mozpath.relpath(fullpath, self.environment.topobjdir)] = first_output
# We don't actually handle GeneratedFiles, we just need to know if
# we can build multiple of them from a single make invocation in the
# faster backend.

@@ -196,6 +194,7 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
mk.create_rule([target]).add_dependencies(
'$(TOPOBJDIR)/%s' % d for d in deps)

# This is not great, but it's better to have some dependencies on these Python files.
python_deps = [
'$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py',

@@ -209,14 +208,7 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
for (merge, ref_file, l10n_file) in deps:
rule = mk.create_rule([merge]).add_dependencies(
[ref_file, l10n_file] + python_deps)
rule.add_commands(
[
'$(PYTHON) -m mozbuild.action.l10n_merge '
'--output {} --ref-file {} --l10n-file {}'.format(
merge, ref_file, l10n_file
)
]
)
rule.add_commands(['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
# Add a dummy rule for the l10n file since it might not exist.
mk.create_rule([l10n_file])

@@ -228,8 +220,7 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
'install_%s' % base.replace('/', '_'))) as fh:
install_manifest.write(fileobj=fh)

# For artifact builds only, write a single unified manifest
# for consumption by |mach watch|.
# For artifact builds only, write a single unified manifest for consumption by |mach watch|.
if self.environment.is_artifact_build:
unified_manifest = InstallManifest()
for base, install_manifest in self._install_manifests.iteritems():

@@ -6,6 +6,7 @@ from __future__ import absolute_import, print_function, unicode_literals

import argparse
import os
import sys
import subprocess
import which

@@ -19,7 +20,6 @@ from mach.decorators import (
Command,
)

@CommandProvider
class MachCommands(MachCommandBase):
@Command('ide', category='devenv',

@@ -57,12 +57,10 @@ class MachCommands(MachCommandBase):

if ide == 'eclipse':
eclipse_workspace_dir = self.get_eclipse_workspace_path()
subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
process = subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
elif ide == 'visualstudio':
visual_studio_workspace_dir = self.get_visualstudio_workspace_path()
subprocess.check_call(
['explorer.exe', visual_studio_workspace_dir]
)
process = subprocess.check_call(['explorer.exe', visual_studio_workspace_dir])

def get_eclipse_workspace_path(self):
from mozbuild.backend.cpp_eclipse import CppEclipseBackend

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import logging
import os

@@ -51,6 +51,7 @@ from ..frontend.data import (
HostSources,
InstallationTarget,
JARManifest,
Library,
Linkable,
LocalInclude,
LocalizedFiles,

@@ -247,7 +248,6 @@ class RecursiveMakeTraversal(object):
"""
SubDirectoryCategories = ['dirs', 'tests']
SubDirectoriesTuple = namedtuple('SubDirectories', SubDirectoryCategories)

class SubDirectories(SubDirectoriesTuple):
def __new__(self):
return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(self, [], [])

@@ -312,7 +312,7 @@ class RecursiveMakeTraversal(object):
if start_node != '':
deps[start_node] = prev_nodes
prev_nodes = (start_node,)
if start_node not in self._traversal:
if not start_node in self._traversal:
return prev_nodes
parallel_nodes = []
for node in parallel:

@@ -338,7 +338,7 @@ class RecursiveMakeTraversal(object):
current, parallel, sequential = self.call_filter(start, filter)
if current is not None:
yield start
if start not in self._traversal:
if not start in self._traversal:
return
for node in parallel:
for n in self.traverse(node, filter):

@@ -603,15 +603,12 @@ class RecursiveMakeBackend(CommonBackend):
backend_file.write('GARBAGE += %s\n' % stub_file)
backend_file.write('EXTRA_MDDEPEND_FILES += %s\n' % dep_file)

backend_file.write((
"""{stub}: {script}{inputs}{backend}{force}
backend_file.write("""{stub}: {script}{inputs}{backend}{force}
\t$(REPORT_BUILD)
\t$(call py_action,file_generate,{locale}{script} """ # wrap for E501
"""{method} {output} $(MDDEPDIR)/{dep_file} {stub}{inputs}{flags})
\t$(call py_action,file_generate,{locale}{script} {method} {output} $(MDDEPDIR)/{dep_file} {stub}{inputs}{flags})
\t@$(TOUCH) $@

""").format(
stub=stub_file,
""".format(stub=stub_file,
output=first_output,
dep_file=dep_file,
inputs=' ' + ' '.join(inputs) if inputs else '',

@@ -624,9 +621,7 @@ class RecursiveMakeBackend(CommonBackend):
force=force,
locale='--locale=$(AB_CD) ' if obj.localized else '',
script=obj.script,
method=obj.method
)
)
method=obj.method))

elif isinstance(obj, JARManifest):
self._no_skip['libs'].add(backend_file.relobjdir)

@@ -789,7 +784,7 @@ class RecursiveMakeBackend(CommonBackend):
main, all_deps = \
self._traversal.compute_dependencies(filter)
for dir, deps in all_deps.items():
if deps is not None or (dir in self._idl_dirs
if deps is not None or (dir in self._idl_dirs \
and tier == 'export'):
rule = root_deps_mk.create_rule(['%s/%s' % (dir, tier)])
if deps:

@@ -822,10 +817,8 @@ class RecursiveMakeBackend(CommonBackend):
# Directories containing rust compilations don't generally depend
# on other directories in the tree, so putting them first here will
# start them earlier in the build.
rule.add_dependencies(
chain((r for r in roots if mozpath.dirname(r) in self._rust_dirs),
(r for r in roots if mozpath.dirname(r) not in self._rust_dirs))
)
rule.add_dependencies(chain((r for r in roots if mozpath.dirname(r) in self._rust_dirs),
(r for r in roots if mozpath.dirname(r) not in self._rust_dirs)))
for target, deps in sorted(graph.items()):
if deps:
rule = root_deps_mk.create_rule([target])

@@ -908,8 +901,7 @@ class RecursiveMakeBackend(CommonBackend):
all_sources))

if include_curdir_build_rules:
makefile.add_statement(
'\n'
makefile.add_statement('\n'
'# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
'# Help it out by explicitly specifiying dependencies.')
makefile.add_statement('all_absolute_unified_files := \\\n'

@@ -1075,10 +1067,10 @@ class RecursiveMakeBackend(CommonBackend):
return (mozpath.relpath(d.translated, base) for d in dirs)

if obj.dirs:
fh.write('DIRS := %s\n' % ' '.join(relativize(backend_file.objdir, obj.dirs)))
self._traversal.add(
backend_file.relobjdir, dirs=relativize(self.environment.topobjdir, obj.dirs)
)
fh.write('DIRS := %s\n' % ' '.join(
relativize(backend_file.objdir, obj.dirs)))
self._traversal.add(backend_file.relobjdir,
dirs=relativize(self.environment.topobjdir, obj.dirs))

# The directory needs to be registered whether subdirectories have been
# registered or not.

@@ -1102,10 +1094,7 @@ class RecursiveMakeBackend(CommonBackend):
if obj.target and not obj.is_custom():
backend_file.write('FINAL_TARGET = $(DEPTH)/%s\n' % (obj.target))
else:
backend_file.write(
'FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),'
'$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n'
)
backend_file.write('FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')

if not obj.enabled:
backend_file.write('NO_DIST_INSTALL := 1\n')

@@ -1120,7 +1109,8 @@ class RecursiveMakeBackend(CommonBackend):
self._install_manifests['dist_include'].add_optional_exists('%s.h' % stem)

for module in manager.modules:
build_files.add_optional_exists(mozpath.join('.deps', '%s.pp' % module))
build_files.add_optional_exists(mozpath.join('.deps',
'%s.pp' % module))

modules = manager.modules
xpt_modules = sorted(modules.keys())

@@ -1305,13 +1295,11 @@ class RecursiveMakeBackend(CommonBackend):

def _process_per_source_flag(self, per_source_flag, backend_file):
for flag in per_source_flag.flags:
backend_file.write('%s_FLAGS += %s\n' %
(mozpath.basename(per_source_flag.file_name), flag))
backend_file.write('%s_FLAGS += %s\n' % (mozpath.basename(per_source_flag.file_name), flag))

def _process_computed_flags(self, computed_flags, backend_file):
for var, flags in computed_flags.get_flags():
backend_file.write(
'COMPUTED_%s += %s\n' % (var,
backend_file.write('COMPUTED_%s += %s\n' % (var,
' '.join(make_quote(shell_quote(f)) for f in flags)))

def _process_non_default_target(self, libdef, target_name, backend_file):

@@ -1376,6 +1364,7 @@ class RecursiveMakeBackend(CommonBackend):
return os.path.normpath(mozpath.join(mozpath.relpath(lib.objdir, obj.objdir),
name))

topobjdir = mozpath.normsep(obj.topobjdir)
# This will create the node even if there aren't any linked libraries.
build_target = self._build_target_for_obj(obj)
self._compile_graph[build_target]

@@ -1530,8 +1519,8 @@ class RecursiveMakeBackend(CommonBackend):
if f.startswith('/') or isinstance(f, AbsolutePath):
basepath, wild = os.path.split(f.full_path)
if '*' in basepath:
raise Exception("Wildcards are only supported in the filename part"
" of srcdir-relative or absolute paths.")
raise Exception("Wildcards are only supported in the filename part of "
"srcdir-relative or absolute paths.")

install_manifest.add_pattern_link(basepath, wild, path)
else:

@@ -1699,7 +1688,7 @@ class RecursiveMakeBackend(CommonBackend):
pp.context.update(extra)
if not pp.context.get('autoconfmk', ''):
pp.context['autoconfmk'] = 'autoconf.mk'
pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n')
pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n');
pp.handleLine(b'DEPTH := @DEPTH@\n')
pp.handleLine(b'topobjdir := @topobjdir@\n')
pp.handleLine(b'topsrcdir := @top_srcdir@\n')

@@ -1753,8 +1742,7 @@ class RecursiveMakeBackend(CommonBackend):
unified_files_makefile_variable='CPPSRCS')

# Preprocessed ipdl files are generated in ipdl_dir.
mk.add_statement(
'IPDLDIRS := %s %s' % (ipdl_dir, ' '.join(sorted(set(mozpath.dirname(p)
mk.add_statement('IPDLDIRS := %s %s' % (ipdl_dir, ' '.join(sorted(set(mozpath.dirname(p)
for p in sorted_static_ipdl_sources)))))

with self._write_file(mozpath.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import cPickle as pickle
from collections import defaultdict

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import os
import gzip

@@ -12,9 +12,12 @@ import sys
import shutil

import mozpack.path as mozpath
from mozbuild import shellutil
from mozbuild.analyze.graph import Graph
from mozbuild.analyze.hg import Report
from mozbuild.base import MozbuildObject
from mozbuild.backend.base import PartialBackend, HybridBackend
from mozbuild.backend.recursivemake import RecursiveMakeBackend
from mozbuild.mozconfig import MozconfigLoader
from mozbuild.shellutil import quote as shell_quote
from mozbuild.util import OrderedDefaultDict

@@ -56,6 +59,7 @@ from ..frontend.data import (
)
from ..util import (
FileAvoidWrite,
expand_variables,
)
from ..frontend.context import (
AbsolutePath,

@@ -143,8 +147,7 @@ class BackendTupfile(object):
else:
caret_text = flags

self.write((': %(inputs)s%(extra_inputs)s |> %(display)s%(cmd)s |> '
'%(outputs)s%(output_group)s\n') % {
self.write(': %(inputs)s%(extra_inputs)s |> %(display)s%(cmd)s |> %(outputs)s%(output_group)s\n' % {
'inputs': ' '.join(inputs),
'extra_inputs': ' | ' + ' '.join(extra_inputs) if extra_inputs else '',
'display': '^%s^ ' % caret_text if caret_text else '',

@@ -269,8 +272,7 @@ class TupBackend(CommonBackend):
self._rust_cmds = set()

self._built_in_addons = set()
self._built_in_addons_file = \
'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'
self._built_in_addons_file = 'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'

def _output_group(self, label):
if label:

@@ -473,6 +475,7 @@ class TupBackend(CommonBackend):
# accurate once we start building libraries in their final locations.
inputs = objs + static_libs + shared_libs + [self._shlibs]

rust_linked = [l for l in prog.linked_libraries
if isinstance(l, RustLibrary)]

@@ -510,10 +513,12 @@ class TupBackend(CommonBackend):
display='LINK %o'
)

def _gen_host_programs(self, backend_file):
for p in backend_file.host_programs:
self._gen_host_program(backend_file, p)

def _gen_host_program(self, backend_file, prog):
_, _, _, _, extra_libs, _ = self._expand_libs(prog)
objs = prog.objs

@@ -554,6 +559,7 @@ class TupBackend(CommonBackend):
display='LINK %o'
)

def _gen_static_library(self, backend_file):
ar = [
backend_file.environment.substs['AR'],

@@ -578,6 +584,7 @@ class TupBackend(CommonBackend):
display='AR %o'
)

def consume_object(self, obj):
"""Write out build files necessary to build with tup."""

@@ -670,13 +677,11 @@ class TupBackend(CommonBackend):

for objdir, backend_file in sorted(self._backend_files.items()):
backend_file.gen_sources_rules([self._installed_files])
for var, gen_method in (
(backend_file.shared_lib, self._gen_shared_library),
for var, gen_method in ((backend_file.shared_lib, self._gen_shared_library),
(backend_file.static_lib and backend_file.static_lib.no_expand_lib,
self._gen_static_library),
(backend_file.programs, self._gen_programs),
(backend_file.host_programs, self._gen_host_programs)
):
(backend_file.host_programs, self._gen_host_programs)):
if var:
backend_file.export_shell()
backend_file.export_icecc()

@@ -689,8 +694,7 @@ class TupBackend(CommonBackend):
pass

with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
acdefines_flags = ' '.join(
['-D%s=%s' % (name, shell_quote(value))
acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value))
for (name, value) in sorted(self.environment.acdefines.iteritems())])
# TODO: AB_CD only exists in Makefiles at the moment.
acdefines_flags += ' -DAB_CD=en-US'

@@ -730,8 +734,7 @@ class TupBackend(CommonBackend):
"should contain the fewest files possible that are not "
"necessary for this build." % tup_base_dir)
tup = self.environment.substs.get('TUP', 'tup')
self._cmd.run_process(cwd=tup_base_dir, log_name='tup',
args=[tup, 'init', '--no-sync'])
self._cmd.run_process(cwd=tup_base_dir, log_name='tup', args=[tup, 'init', '--no-sync'])

def _get_cargo_flags(self, obj):

@@ -810,11 +813,13 @@ class TupBackend(CommonBackend):

# Enable link-time optimization for release builds.
cargo_library_flags = []
if not obj.config.substs.get('DEVELOPER_OPTIONS') and not obj.config.substs.get(
'MOZ_DEBUG_RUST'
):
if (not obj.config.substs.get('DEVELOPER_OPTIONS') and
not obj.config.substs.get('MOZ_DEBUG_RUST')):
cargo_library_flags += ['-C', 'lto']

rust_build_home = mozpath.join(self.environment.topobjdir,
'toolkit/library/rust')

def display_name(invocation):
output_str = ''
if invocation['outputs']:

@@ -868,9 +873,7 @@ class TupBackend(CommonBackend):

invocation['full-deps'] = set()

if os.path.basename(invocation['program']) in [
'build-script-build', 'build-script-main'
]:
if os.path.basename(invocation['program']) in ['build-script-build', 'build-script-main']:
out_dir = invocation['env']['OUT_DIR']
for output in cargo_extra_outputs.get(shortname, []):
outputs.append(os.path.join(out_dir, output))

@@ -981,15 +984,16 @@ class TupBackend(CommonBackend):
obj.name),
output_group)

for val in enumerate(invocations):
_process(*val)

def _gen_rust_rules(self, obj, backend_file):
cargo_flags = self._get_cargo_flags(obj)
cargo_env = self._get_cargo_env(obj, backend_file)

output_lines = []

def accumulate_output(line):
output_lines.append(line)

@@ -1010,6 +1014,7 @@ class TupBackend(CommonBackend):
self._gen_cargo_rules(obj, cargo_plan, cargo_env, output_group)
self.backend_input_files |= set(cargo_plan['inputs'])

def _process_generated_file(self, backend_file, obj):
if obj.script and obj.method:
backend_file.export_shell()

@@ -1120,8 +1125,7 @@ class TupBackend(CommonBackend):
if f.startswith('/') or isinstance(f, AbsolutePath):
basepath, wild = os.path.split(f.full_path)
if '*' in basepath:
raise Exception(
"Wildcards are only supported in the filename part of "
raise Exception("Wildcards are only supported in the filename part of "
"srcdir-relative or absolute paths.")

# TODO: This is only needed for Windows, so we can

@@ -1151,6 +1155,7 @@ class TupBackend(CommonBackend):

finder = FileFinder(prefix)
for p, _ in finder.find(f.full_path[len(prefix):]):
install_dir = prefix[len(obj.srcdir) + 1:]
output = p
if f.target_basename and '*' not in f.target_basename:
output = mozpath.join(f.target_basename, output)

@@ -1158,12 +1163,10 @@ class TupBackend(CommonBackend):
output=mozpath.join(output_dir, output),
output_group=output_group)
else:
backend_file.symlink_rule(
f.full_path, output=f.target_basename, output_group=output_group)
backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=output_group)
else:
if (self.environment.is_artifact_build and
any(mozpath.match(f.target_basename, p)
for p in self._compile_env_gen_files)):
any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)):
# If we have an artifact build we never would have generated this file,
# so do not attempt to install it.
continue

@@ -1172,19 +1175,18 @@ class TupBackend(CommonBackend):
f.target_basename)
gen_backend_file = self._get_backend_file(f.context.relobjdir)
if gen_backend_file.requires_delay([f]):
gen_backend_file.delayed_installed_files.append(
(f.full_path, output, output_group))
gen_backend_file.delayed_installed_files.append((f.full_path, output, output_group))
else:
gen_backend_file.symlink_rule(f.full_path, output=output,
output_group=output_group)

def _process_final_target_pp_files(self, obj, backend_file):
for i, (path, files) in enumerate(obj.files.walk()):
self._add_features(obj.install_target, path)
for f in files:
self._preprocess(backend_file, f.full_path,
destdir=mozpath.join(self.environment.topobjdir,
obj.install_target, path),
destdir=mozpath.join(self.environment.topobjdir, obj.install_target, path),
target=f.target_basename)

def _process_computed_flags(self, obj, backend_file):

@@ -1313,8 +1315,7 @@ class TupBackend(CommonBackend):
cmd.extend(['-I%s' % d for d in ipdldirs])
cmd.extend(sorted_ipdl_sources)

outputs = ['IPCMessageTypeName.cpp', mozpath.join(
outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']
outputs = ['IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']

for filename in sorted_ipdl_sources:
filepath, ext = os.path.splitext(filename)

@@ -1378,5 +1379,4 @@ class TupBackend(CommonBackend):
backend_file.sources['.cpp'].extend(sorted(global_define_files))

test_backend_file = self._get_backend_file('dom/bindings/test')
test_backend_file.sources['.cpp'].extend(
sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
test_backend_file.sources['.cpp'].extend(sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))

@@ -5,11 +5,12 @@
# This file contains a build backend for generating Visual Studio project
# files.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import errno
import os
import re
import types
import uuid

from xml.dom import getDOMImplementation

@@ -33,25 +34,21 @@ from mozbuild.base import ExecutionSummary

MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'

def get_id(name):
return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()

def visual_studio_product_to_solution_version(version):
if version == '2017':
return '12.00', '15'
else:
raise Exception('Unknown version seen: %s' % version)

def visual_studio_product_to_platform_toolset_version(version):
if version == '2017':
return 'v141'
else:
raise Exception('Unknown version seen: %s' % version)

class VisualStudioBackend(CommonBackend):
"""Generate Visual Studio project files.

@@ -103,7 +100,7 @@ class VisualStudioBackend(CommonBackend):
elif isinstance(obj, UnifiedSources):
# XXX we should be letting CommonBackend.consume_object call this
# for us instead.
self._process_unified_sources(obj)
self._process_unified_sources(obj);

elif isinstance(obj, Library):
self._libs_to_paths[obj.basename] = reldir

@@ -147,16 +144,15 @@ class VisualStudioBackend(CommonBackend):
if target != 'full':
command += ' %s' % target

project_id = self._write_vs_project(
out_proj_dir, basename, target, build_command=command,
project_id = self._write_vs_project(out_proj_dir, basename, target,
build_command=command,
clean_command='$(SolutionDir)\\mach.bat build clean')

projects[basename] = (project_id, basename, target)

# A project that can be used to regenerate the visual studio projects.
basename = 'target_vs'
project_id = self._write_vs_project(
out_proj_dir, basename, 'visual-studio',
project_id = self._write_vs_project(out_proj_dir, basename, 'visual-studio',
build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
projects[basename] = (project_id, basename, 'visual-studio')

@@ -239,8 +235,7 @@ class VisualStudioBackend(CommonBackend):

basename = '%s_%s' % (prefix, item)

project_id = self._write_vs_project(
out_dir, basename, item,
project_id = self._write_vs_project(out_dir, basename, item,
includes=includes,
forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
defines=defines,

@@ -462,8 +457,8 @@ class VisualStudioBackend(CommonBackend):
project_id = get_id(basename.encode('utf-8'))

with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
project_id, name = VisualStudioBackend.write_vs_project(
fh, self._version, project_id, name, **kwargs)
project_id, name = VisualStudioBackend.write_vs_project(fh,
self._version, project_id, name, **kwargs)

with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')

@@ -515,8 +510,7 @@ class VisualStudioBackend(CommonBackend):
rn.appendChild(doc.createTextNode('mozilla'))

pts = pg.appendChild(doc.createElement('PlatformToolset'))
pts.appendChild(doc.createTextNode(
visual_studio_product_to_platform_toolset_version(version)))
pts.appendChild(doc.createTextNode(visual_studio_product_to_platform_toolset_version(version)))

i = project.appendChild(doc.createElement('Import'))
i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')

@@ -37,6 +37,7 @@ from .mozconfig import (
)
from .pythonutil import find_python3_executable
from .util import (
ReadOnlyNamespace,
memoize,
memoized_property,
)

@@ -52,14 +53,12 @@ def ancestors(path):
break
path = newpath

def samepath(path1, path2):
if hasattr(os.path, 'samefile'):
return os.path.samefile(path1, path2)
return os.path.normcase(os.path.realpath(path1)) == \
os.path.normcase(os.path.realpath(path2))

class BadEnvironmentException(Exception):
"""Base class for errors raised when the build environment is not sane."""

@@ -70,7 +69,6 @@ class BuildEnvironmentNotFoundException(BadEnvironmentException):

class ObjdirMismatchException(BadEnvironmentException):
"""Raised when the current dir is an objdir and doesn't match the mozconfig."""

def __init__(self, objdir1, objdir2):
self.objdir1 = objdir1
self.objdir2 = objdir2

@@ -87,7 +85,6 @@ class MozbuildObject(ProcessExecutionMixin):
running processes, etc. This classes provides that functionality. Other
modules can inherit from this class to obtain this functionality easily.
"""

def __init__(self, topsrcdir, settings, log_manager, topobjdir=None,
mozconfig=MozconfigLoader.AUTODETECT):
"""Create a new Mozbuild object instance.

@@ -178,8 +175,7 @@ class MozbuildObject(ProcessExecutionMixin):
topobjdir = mozpath.normsep(os.path.normpath(topobjdir))

if topsrcdir == topobjdir:
raise BadEnvironmentException(
'The object directory appears '
raise BadEnvironmentException('The object directory appears '
'to be the same as your source directory (%s). This build '
'configuration is not supported.' % topsrcdir)

@@ -256,13 +252,10 @@ class MozbuildObject(ProcessExecutionMixin):
@property
def virtualenv_manager(self):
if self._virtualenv_manager is None:
self._virtualenv_manager = VirtualenvManager(
self.topsrcdir,
self.topobjdir,
os.path.join(self.topobjdir, '_virtualenvs', 'init'),
sys.stdout,
os.path.join(self.topsrcdir, 'build', 'virtualenv_packages.txt')
)
self._virtualenv_manager = VirtualenvManager(self.topsrcdir,
self.topobjdir, os.path.join(self.topobjdir, '_virtualenvs', 'init'),
sys.stdout, os.path.join(self.topsrcdir, 'build',
'virtualenv_packages.txt'))

return self._virtualenv_manager

@@ -497,6 +490,7 @@ class MozbuildObject(ProcessExecutionMixin):

return BuildReader(config, finder=finder)

@memoized_property
def python3(self):
"""Obtain info about a Python 3 executable.

@@ -548,7 +542,7 @@ class MozbuildObject(ProcessExecutionMixin):

if substs['OS_ARCH'] == 'Darwin':
if substs['MOZ_BUILD_APP'] == 'xulrunner':
stem = os.path.join(stem, 'XUL.framework')
stem = os.path.join(stem, 'XUL.framework');
else:
stem = os.path.join(stem, substs['MOZ_MACBUNDLE_NAME'], 'Contents',
'MacOS')

@@ -591,7 +585,6 @@ class MozbuildObject(ProcessExecutionMixin):
elif sys.platform.startswith('win'):
from ctypes import Structure, windll, POINTER, sizeof
from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT

class FLASHWINDOW(Structure):
_fields_ = [("cbSize", UINT),
("hwnd", HANDLE),

@@ -624,8 +617,8 @@ class MozbuildObject(ProcessExecutionMixin):
self.run_process([notifier, '--app-name=Mozilla Build System',
'Mozilla Build System', msg], ensure_exit_code=False)
except Exception as e:
self.log(logging.WARNING, 'notifier-failed',
{'error': e.message}, 'Notification center failed: {error}')
self.log(logging.WARNING, 'notifier-failed', {'error':
e.message}, 'Notification center failed: {error}')

def _ensure_objdir_exists(self):
if os.path.isdir(self.statedir):

@@ -833,6 +826,7 @@ class MozbuildObject(ProcessExecutionMixin):
self.virtualenv_manager.ensure()
self.virtualenv_manager.activate()

def _set_log_level(self, verbose):
self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)

@@ -841,8 +835,7 @@ class MozbuildObject(ProcessExecutionMixin):
pipenv = os.path.join(self.virtualenv_manager.bin_path, 'pipenv')
if not os.path.exists(pipenv):
for package in ['certifi', 'pipenv', 'six', 'virtualenv', 'virtualenv-clone']:
path = os.path.normpath(os.path.join(
self.topsrcdir, 'third_party/python', package))
path = os.path.normpath(os.path.join(self.topsrcdir, 'third_party/python', package))
self.virtualenv_manager.install_pip_package(path, vendored=True)
return pipenv

@@ -870,8 +863,7 @@ class MachCommandBase(MozbuildObject):
detect_virtualenv_mozinfo = getattr(context,
'detect_virtualenv_mozinfo')
try:
dummy = MozbuildObject.from_environment(
cwd=context.cwd,
dummy = MozbuildObject.from_environment(cwd=context.cwd,
detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
topsrcdir = dummy.topsrcdir
topobjdir = dummy._topobjdir

@@ -7,8 +7,6 @@
# of version 1.2. Its license (MPL2) is contained in repo root LICENSE file.
# Please make modifications there where possible.

from __future__ import absolute_import, print_function

from itertools import islice

@@ -56,3 +54,4 @@ def chunkify(things, this_chunk, chunks):
return things[start:end]
except TypeError:
return islice(things, start, end)

@@ -2,11 +2,11 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

from collections import defaultdict
import json
import os
import re
import urlparse

from mach.config import ConfigSettings
from mach.logging import LoggingManager

@@ -22,12 +22,10 @@ from mozpack.files import PreprocessedFile
from mozpack.manifests import InstallManifest
import mozpack.path as mozpath

from .manifest_handler import ChromeManifestHandler
from manifest_handler import ChromeManifestHandler

_line_comment_re = re.compile('^//@line (\d+) "(.+)"$')

def generate_pp_info(path, topsrcdir):
with open(path) as fh:
# (start, end) -> (included_source, start)

@@ -59,8 +57,6 @@ def generate_pp_info(path, topsrcdir):

# This build backend is assuming the build to have happened already, as it is parsing
# built preprocessed files to generate data to map them to the original sources.

class ChromeMapBackend(CommonBackend):
def _init(self):
CommonBackend._init(self)

@@ -120,11 +116,9 @@ class ChromeMapBackend(CommonBackend):
# A map from url prefixes to objdir directories:
# { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... }
# A map of overrides.
# A map from objdir paths to sourcedir paths, and an object storing mapping
# information for preprocessed files:
# A map from objdir paths to sourcedir paths, and an object storing mapping information for preprocessed files:
# { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js":
# [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", {} ],
# ... }
# [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", {} ], ... }
# An object containing build configuration information.
outputfile = os.path.join(self.environment.topobjdir, 'chrome-map.json')
with self._write_file(outputfile) as fh:

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

from argparse import ArgumentParser
import json
import os

@@ -17,8 +15,7 @@ from six import viewitems

from mozpack.chrome.manifest import parse_manifest
import mozpack.path as mozpath
from .manifest_handler import ChromeManifestHandler

from manifest_handler import ChromeManifestHandler

class LcovRecord(object):
__slots__ = ("test_name",

@@ -33,7 +30,6 @@ class LcovRecord(object):
"lines",
"line_count",
"covered_line_count")

def __init__(self):
self.functions = {}
self.function_exec_counts = {}

@@ -68,8 +64,7 @@ class LcovRecord(object):
# Re-calculate summaries after generating or splitting a record.
self.function_count = len(self.functions.keys())
# Function records may have moved between files, so filter here.
self.function_exec_counts = {
fn_name: count for fn_name, count in viewitems(self.function_exec_counts)
self.function_exec_counts = {fn_name: count for fn_name, count in viewitems(self.function_exec_counts)
if fn_name in self.functions.values()}
self.covered_function_count = len([c for c in self.function_exec_counts.values() if c])
self.line_count = len(self.lines)

@@ -77,7 +72,6 @@ class LcovRecord(object):
self.branch_count = len(self.branches)
self.covered_branch_count = len([c for c in self.branches.values() if c])

class RecordRewriter(object):
# Helper class for rewriting/spliting individual lcov records according
# to what the preprocessor did.

@@ -170,8 +164,7 @@ class RecordRewriter(object):
def rewrite_record(self, record, pp_info):
# Rewrite the lines in the given record according to preprocessor info
# and split to additional records when pp_info has included file info.
self._current_pp_info = dict(
[(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
self._current_pp_info = dict([(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
self._ranges = sorted(self._current_pp_info.keys())
self._additions = {}
self._rewrite_lines(record)

@@ -185,7 +178,6 @@ class RecordRewriter(object):
r.resummarize()
return generated_records

class LcovFile(object):
# Simple parser/pretty-printer for lcov format.
# lcov parsing based on http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php

@@ -412,7 +404,6 @@ class LcovFile(object):
class UrlFinderError(Exception):
pass

class UrlFinder(object):
# Given a "chrome://" or "resource://" url, uses data from the UrlMapBackend
# and install manifests to find a path to the source file and the corresponding

@@ -578,10 +569,10 @@ class UrlFinder(object):

if app_name in url:
if omnijar_name in url:
# e.g. file:///home/worker/workspace/build/application/firefox/omni.ja!/components/MainProcessSingleton.js # noqa
# e.g. file:///home/worker/workspace/build/application/firefox/omni.ja!/components/MainProcessSingleton.js
parts = url_obj.path.split(omnijar_name + '!', 1)
elif '.xpi!' in url:
# e.g. file:///home/worker/workspace/build/application/firefox/browser/features/e10srollout@mozilla.org.xpi!/bootstrap.js # noqa
# e.g. file:///home/worker/workspace/build/application/firefox/browser/features/e10srollout@mozilla.org.xpi!/bootstrap.js
parts = url_obj.path.split('.xpi!', 1)
else:
# We don't know how to handle this jar: path, so return it to the

@@ -589,10 +580,7 @@ class UrlFinder(object):
return url_obj.path, None

dir_parts = parts[0].rsplit(app_name + '/', 1)
url = mozpath.normpath(
mozpath.join(self.topobjdir, 'dist',
'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/'))
)
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist', 'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/')))
elif '.xpi!' in url:
# This matching mechanism is quite brittle and based on examples seen in the wild.
# There's no rule to match the XPI name to the path in dist/xpi-stage.

@@ -602,8 +590,7 @@ class UrlFinder(object):
addon_name = addon_name[:-len('-test@mozilla.org')]
elif addon_name.endswith('@mozilla.org'):
addon_name = addon_name[:-len('@mozilla.org')]
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
'xpi-stage', addon_name, parts[1].lstrip('/')))
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist', 'xpi-stage', addon_name, parts[1].lstrip('/')))
elif url_obj.scheme == 'file' and os.path.isabs(url_obj.path):
path = url_obj.path
if not os.path.isfile(path):

@@ -620,12 +607,10 @@ class UrlFinder(object):
self._final_mapping[url] = result
return result

class LcovFileRewriter(object):
# Class for partial parses of LCOV format and rewriting to resolve urls
# and preprocessed file lines.
def __init__(self, chrome_map_path, appdir='dist/bin/browser/',
gredir='dist/bin/', extra_chrome_manifests=[]):
def __init__(self, chrome_map_path, appdir='dist/bin/browser/', gredir='dist/bin/', extra_chrome_manifests=[]):
self.url_finder = UrlFinder(chrome_map_path, appdir, gredir, extra_chrome_manifests)
self.pp_rewriter = RecordRewriter()

@@ -646,11 +631,9 @@ class LcovFileRewriter(object):
return None

source_file, pp_info = res
# We can't assert that the file exists here, because we don't have the source
# checkout available on test machines. We can bring back this assertion when
# bug 1432287 is fixed.
# assert os.path.isfile(source_file), "Couldn't find mapped source file %s at %s!" % (
# url, source_file)
# We can't assert that the file exists here, because we don't have the source checkout available
# on test machines. We can bring back this assertion when bug 1432287 is fixed.
# assert os.path.isfile(source_file), "Couldn't find mapped source file %s at %s!" % (url, source_file)

found_valid[0] = True

@@ -674,44 +657,28 @@ class LcovFileRewriter(object):

def main():
parser = ArgumentParser(
description="Given a set of gcov .info files produced "
parser = ArgumentParser(description="Given a set of gcov .info files produced "
"by spidermonkey's code coverage, re-maps file urls "
"back to source files and lines in preprocessed files "
"back to their original locations."
)
parser.add_argument(
"--chrome-map-path", default="chrome-map.json", help="Path to the chrome-map.json file."
)
parser.add_argument(
"--app-dir",
default="dist/bin/browser/",
"back to their original locations.")
parser.add_argument("--chrome-map-path", default="chrome-map.json",
help="Path to the chrome-map.json file.")
parser.add_argument("--app-dir", default="dist/bin/browser/",
help="Prefix of the appdir in use. This is used to map "
"urls starting with resource:///. It may differ by "
"app, but defaults to the valid value for firefox.",
)
parser.add_argument(
"--gre-dir",
default="dist/bin/",
"app, but defaults to the valid value for firefox.")
parser.add_argument("--gre-dir", default="dist/bin/",
help="Prefix of the gre dir in use. This is used to map "
"urls starting with resource://gre. It may differ by "
"app, but defaults to the valid value for firefox.",
)
parser.add_argument(
"--output-suffix", default=".out", help="The suffix to append to output files."
)
parser.add_argument(
"--extra-chrome-manifests",
nargs='+',
help="Paths to files containing extra chrome registration.",
)
parser.add_argument(
"--output-file",
default="",
help="The output file where the results are merged. Leave empty to make the rewriter not "
"merge files.",
)
parser.add_argument("files", nargs='+', help="The set of files to process.")
"app, but defaults to the valid value for firefox.")
parser.add_argument("--output-suffix", default=".out",
help="The suffix to append to output files.")
parser.add_argument("--extra-chrome-manifests", nargs='+',
help="Paths to files containing extra chrome registration.")
parser.add_argument("--output-file", default="",
help="The output file where the results are merged. Leave empty to make the rewriter not merge files.")
parser.add_argument("files", nargs='+',
help="The set of files to process.")

args = parser.parse_args()

@@ -727,6 +694,5 @@ def main():

rewriter.rewrite_files(files, args.output_file, args.output_suffix)

if __name__ == '__main__':
main()

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

from collections import defaultdict

try:

@@ -5,7 +5,6 @@
from __future__ import absolute_import, print_function

import argparse
import errno
import sys
import json
import buildconfig

@@ -18,7 +17,6 @@ from mozpack.manifests import (
)
import mozpack.path as mozpath

def describe_install_manifest(manifest, dest_dir):
try:
manifest = InstallManifest(manifest)

@@ -77,6 +75,5 @@ def cli(args=sys.argv[1:]):

return package_coverage_data(args.root, args.output_file)

if __name__ == '__main__':
sys.exit(cli())

@@ -4,7 +4,9 @@

# This modules provides functionality for dealing with code completion.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import os

from mach.decorators import (
CommandArgument,

@@ -4,8 +4,6 @@

# This modules provides functionality for dealing with code completion.

from __future__ import absolute_import, print_function

import os
import types

@@ -15,8 +13,11 @@ from mozbuild.frontend.data import (
Sources,
GeneratedSources,
DirectoryTraversal,
Linkable,
LocalInclude,
PerSourceFlag,
VariablePassthru,
SimpleProgram,
)
from mozbuild.shellutil import (
quote as shell_quote,

@@ -2,10 +2,8 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import os

from mozbuild import shellutil

def check_top_objdir(topobjdir):
top_make = os.path.join(topobjdir, 'Makefile')

@@ -15,7 +13,6 @@ def check_top_objdir(topobjdir):
return False
return True

def get_build_vars(directory, cmd):
build_vars = {}

@@ -37,7 +34,6 @@ def get_build_vars(directory, cmd):

return build_vars

def sanitize_cflags(flags):
# We filter out -Xclang arguments as clang based tools typically choke on
# passing these flags down to the clang driver. -Xclang tells the clang

@@ -4,7 +4,7 @@

# This modules provides functionality for dealing with compiler warnings.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import errno
import json

@@ -132,7 +132,6 @@ class WarningsDatabase(object):
Callers should periodically prune old, invalid warnings from the database
by calling prune(). A good time to do this is at the end of a build.
"""

def __init__(self):
"""Create an empty database."""
self._files = {}

@@ -305,7 +304,6 @@ class WarningsCollector(object):
output from the compiler. Therefore, it can maintain state to parse
multi-line warning messages.
"""

def __init__(self, cb, objdir=None):
"""Initialize a new collector.

@@ -10,6 +10,7 @@ from __future__ import absolute_import, print_function

import logging
import os
import subprocess
import sys
import time

@@ -49,7 +49,6 @@ class ConfigureError(Exception):

class SandboxDependsFunction(object):
'''Sandbox-visible representation of @depends functions.'''

def __init__(self, unsandboxed):
self._or = unsandboxed.__or__
self._and = unsandboxed.__and__

@@ -234,7 +233,6 @@ class CombinedDependsFunction(DependsFunction):
def __ne__(self, other):
return not self == other

class SandboxedGlobal(dict):
'''Identifiable dict type for use as function global'''

@@ -359,12 +357,10 @@ class ConfigureSandbox(dict):
# that can't be converted to ascii. Make our log methods robust to this
# by detecting the encoding that a producer is likely to have used.
encoding = getpreferredencoding()

def wrapped_log_method(logger, key):
method = getattr(logger, key)
if not encoding:
return method

def wrapped(*args, **kwargs):
out_args = [
arg.decode(encoding) if isinstance(arg, str) else arg

@@ -6,13 +6,12 @@
# to a given compilation unit. This is used as a helper to find a bug in some
# versions of GNU ld.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import subprocess
import sys
import re

def get_range_for(compilation_unit, debug_info):
'''Returns the range offset for a given compilation unit
in a given debug_info.'''

@@ -33,7 +32,6 @@ def get_range_for(compilation_unit, debug_info):
ranges = nfo.rsplit(None, 1)[1]
return None

def get_range_length(range, debug_ranges):
'''Returns the number of items in the range starting at the
given offset.'''

@@ -44,7 +42,6 @@ def get_range_length(range, debug_ranges):
length += 1
return length

def main(bin, compilation_unit):
p = subprocess.Popen(['objdump', '-W', bin], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
(out, err) = p.communicate()

@@ -13,7 +13,7 @@
# will be used from shell, we just print the two assignments and evaluate
# them from shell.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import os
import subprocess

@@ -21,14 +21,12 @@ import re

re_for_ld = re.compile('.*\((.*)\).*')

def parse_readelf_line(x):
"""Return the version from a readelf line that looks like:
0x00ec: Rev: 1 Flags: none Index: 8 Cnt: 2 Name: GLIBCXX_3.4.6
"""
return x.split(':')[-1].split('_')[-1].strip()

def parse_ld_line(x):
"""Parse a line from the output of ld -t. The output of gold is just
the full path, gnu ld prints "-lstdc++ (path)".

@@ -38,13 +36,11 @@ def parse_ld_line(x):
return t.groups()[0].strip()
return x.strip()

def split_ver(v):
"""Covert the string '1.2.3' into the list [1,2,3]
"""
return [int(x) for x in v.split('.')]

def cmp_ver(a, b):
"""Compare versions in the form 'a.b.c'
"""

@@ -53,14 +49,12 @@ def cmp_ver(a, b):
return i - j
return 0

def encode_ver(v):
"""Encode the version as a single number.
"""
t = split_ver(v)
return t[0] << 16 | t[1] << 8 | t[2]

def find_version(args):
"""Given a base command line for a compiler, find the version of the
libstdc++ it uses.

@@ -86,7 +80,6 @@ candidates:
last_version = sorted(versions, cmp = cmp_ver)[-1]
return (last_version, encode_ver(last_version))

if __name__ == '__main__':
"""Given the value of environment variable CXX or HOST_CXX, find the
version of the libstdc++ it uses.

@@ -237,6 +237,7 @@ class LintSandbox(ConfigureSandbox):
name, default))
self._raise_from(e, frame.f_back if frame else None)

def _check_help_for_option_with_func_default(self, option, *args, **kwargs):
default = kwargs['default']

@@ -284,7 +285,6 @@ class LintSandbox(ConfigureSandbox):

def imports_impl(self, _import, _from=None, _as=None):
wrapper = super(LintSandbox, self).imports_impl(_import, _from=_from, _as=_as)

def decorator(func):
self._has_imports.add(func)
return wrapper(func)

@@ -46,7 +46,7 @@ def disassemble_as_iter(co):
c = code[i]
op = ord(c)
opname = dis.opname[op]
i += 1
i += 1;
if op >= dis.HAVE_ARGUMENT:
arg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg
extended_arg = 0

@@ -106,7 +106,6 @@ class PositiveOptionValue(OptionValue):
in the form of a tuple for when values are given to the option (in the form
--option=value[,value2...].
'''

def __nonzero__(self):
return True

@@ -425,7 +424,6 @@ class CommandLineHelper(object):
Extra options can be added afterwards through API calls. For those,
conflicting values will raise an exception.
'''

def __init__(self, environ=os.environ, argv=sys.argv):
self._environ = dict(environ)
self._args = OrderedDict()

@@ -14,7 +14,6 @@ from collections import deque
from contextlib import contextmanager
from distutils.version import LooseVersion


def getpreferredencoding():
    # locale._parse_localename makes locale.getpreferredencoding
    # return None when LC_ALL is C, instead of e.g. 'US-ASCII' or

@@ -30,7 +29,6 @@ def getpreferredencoding():
            encoding = 'utf-8'
    return encoding


class Version(LooseVersion):
    '''A simple subclass of distutils.version.LooseVersion.
    Adds attributes for `major`, `minor`, `patch` for the first three

@@ -42,7 +40,6 @@ class Version(LooseVersion):
        v.minor == 2
        v.patch == 0
    '''

    def __init__(self, version):
        # Can't use super, LooseVersion's base class is not a new-style class.
        LooseVersion.__init__(self, version)
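# Illustrative sketch (not part of the original diff): per the docstring
# above, the first three version components are exposed as attributes on
# top of LooseVersion's usual comparison behavior.
#
#   v = Version('1.2.0')
#   assert (v.major, v.minor, v.patch) == (1, 2, 0)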

@@ -74,7 +71,6 @@ class ConfigureOutputHandler(logging.Handler):
    printed out. This feature is only enabled under the `queue_debug` context
    manager.
    '''

    def __init__(self, stdout=sys.stdout, stderr=sys.stderr, maxlen=20):
        super(ConfigureOutputHandler, self).__init__()

@@ -155,7 +151,7 @@ class ConfigureOutputHandler(logging.Handler):
                stream.flush()
            except (KeyboardInterrupt, SystemExit, IOError):
                raise
            except Exception:
            except:
                self.handleError(record)

    @contextmanager

@@ -197,7 +193,6 @@ class LineIO(object):
    '''File-like class that sends each line of the written data to a callback
    (without carriage returns).
    '''

    def __init__(self, callback, errors='strict'):
        self._callback = callback
        self._buf = ''
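# Illustrative sketch (not part of the original diff): LineIO buffers
# partial writes and invokes the callback once per complete line, roughly:
#
#   lines = []
#   io = LineIO(lines.append)
#   io.write('two\nhal')
#   io.write('ves\n')      # callback sees 'two', then 'halves'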
@@ -2,10 +2,11 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import errno
import getpass
import glob
import io
import json
import logging

@@ -304,7 +305,7 @@ class BuildMonitor(MozbuildObject):
        try:
            warning = self._warnings_collector.process_line(line)
            message = line
        except Exception:
        except:
            pass

        return BuildOutputResult(warning, False, message)

@@ -477,6 +478,7 @@ class BuildMonitor(MozbuildObject):

            o['resources'].append(entry)


        # If the imports for this file ran before the in-tree virtualenv
        # was bootstrapped (for instance, for a clobber build in automation),
        # psutil might not be available.

@@ -546,7 +548,6 @@ class TerminalLoggingHandler(logging.Handler):
    This class should probably live elsewhere, like the mach core. Consider
    this a proving ground for its usefulness.
    """

    def __init__(self):
        logging.Handler.__init__(self)

@@ -682,6 +683,7 @@ class BuildOutputManager(OutputManager):
        # collection child process hasn't been told to stop.
        self.monitor.stop_resource_recording()


    def on_line(self, line):
        warning, state_changed, message = self.monitor.on_line(line)

@@ -901,10 +903,7 @@ class CCacheStats(object):
        return int(numeric * unit)

    def hit_rate_message(self):
        return ('ccache (direct) hit rate: {:.1%}; (preprocessed) hit rate: {:.1%};'
                ' miss rate: {:.1%}'.format(
                    *self.hit_rates()
                ))
        return 'ccache (direct) hit rate: {:.1%}; (preprocessed) hit rate: {:.1%}; miss rate: {:.1%}'.format(*self.hit_rates())
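# Illustrative sketch (not part of the original diff): hit_rates() feeds
# three fractions into the message template, so values like
# (0.6, 0.2, 0.2) would render as:
#
#   ccache (direct) hit rate: 60.0%; (preprocessed) hit rate: 20.0%; miss rate: 20.0%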

    def hit_rates(self):
        direct = self._values['cache_hit_direct']

@@ -1146,12 +1145,10 @@ class BuildDriver(MozbuildObject):
            # could potentially be fixed if the build monitor were more
            # intelligent about encountering undefined state.
            no_build_status = b'1' if make_dir is not None else b''
            status = self._run_make(
                directory=make_dir, target=make_target,
            status = self._run_make(directory=make_dir, target=make_target,
                line_handler=output.on_line, log=False, print_directory=False,
                ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
                append_env={
                    b'NO_BUILDSTATUS_MESSAGES': no_build_status},
                append_env={b'NO_BUILDSTATUS_MESSAGES': no_build_status},
                keep_going=keep_going)

            if status != 0:

@@ -1287,8 +1284,7 @@ class BuildDriver(MozbuildObject):
            long_build = monitor.elapsed > 600

            if long_build:
                output.on_line(
                    'We know it took a while, but your build finally finished successfully!')
                output.on_line('We know it took a while, but your build finally finished successfully!')
            else:
                output.on_line('Your build was successful!')

@@ -1302,10 +1298,8 @@ class BuildDriver(MozbuildObject):
                    print('To take your build for a test drive, run: |mach run|')
                    app = self.substs['MOZ_BUILD_APP']
                    if app in ('browser', 'mobile/android'):
                        print(
                            'For more information on what to do now, see '
                            'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox' # noqa
                        )
                        print('For more information on what to do now, see '
                              'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox')
            except Exception:
                # Ignore Exceptions in case we can't find config.status (such
                # as when doing OSX Universal builds)

@@ -1338,7 +1332,7 @@ class BuildDriver(MozbuildObject):

        if not status:
            print('Configure complete!')
            print('Be sure to run |mach build| to pick up any changes')
            print('Be sure to run |mach build| to pick up any changes');

        return status

@@ -39,7 +39,6 @@ Well, are ya? -- you can ignore this clobber requirement by running:
 $ touch {clobber_file}
'''.splitlines()])


class Clobberer(object):
    def __init__(self, topsrcdir, topobjdir):
        """Create a new object to manage clobbering the tree.

@@ -95,7 +94,7 @@ class Clobberer(object):
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT)
            return p.wait() == 1 and p.stdout.read().startswith('winrm')
        except Exception:
        except:
            return False

    def remove_objdir(self, full=True):

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, # You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import os
import subprocess

@@ -34,7 +34,6 @@ hour. Backup programs that rely on this feature may be affected.
https://technet.microsoft.com/en-us/library/cc785435.aspx
'''


class Doctor(object):
    def __init__(self, srcdir, objdir, fix):
        self.srcdir = mozpath.normpath(srcdir)

@@ -70,7 +69,6 @@ class Doctor(object):
                denied = True
        if denied:
            print('run "mach doctor --fix" AS ADMIN to re-attempt fixing your system')
        elif False and fixable: # elif fixable: # 'and fixable' avoids flake8 error
        elif False: # elif fixable:
            print('run "mach doctor --fix" as admin to attempt fixing your system')
        return int(not good)

@@ -5,7 +5,7 @@
# This file contains utility functions for reading .properties files, like
# region.properties.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import codecs
import re

@@ -16,7 +16,6 @@ if sys.version_info[0] == 3:
else:
    str_type = basestring


class DotProperties:
    r'''A thin representation of a key=value .properties file.'''

@@ -78,7 +77,7 @@ class DotProperties:
                 if k.startswith(prefix) and '.' not in k[len(prefix):])

        for required_key in required_keys:
            if required_key not in D:
            if not required_key in D:
                raise ValueError('Required key %s not present' % required_key)

        return D
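# Illustrative sketch (not part of the original diff): the prefix-stripping
# dict comprehension above turns properties such as
#
#   browser.search.order.1=Google
#   browser.search.order.2=Bing
#
# into D == {'1': 'Google', '2': 'Bing'}, and the loop then raises
# ValueError if any key named in required_keys is missing.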
@@ -1,6 +1,6 @@
#!/usr/bin/env python

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import print_function, unicode_literals

"""
This script converts the build system telemetry schema from voluptuous format to json-schema.

@@ -204,7 +204,7 @@ class Daemon(object):

        while True:
            try:
                self.client.receive()
                _watch_result = self.client.receive()

                changed = self.changed_files()
                if not changed:

@@ -228,7 +228,7 @@ class Daemon(object):

            except pywatchman.SocketTimeout:
                # Let's check to see if we're still functional.
                self.client.query('version')
                _version = self.client.query('version')

            except pywatchman.CommandError as e:
                # Abstract away pywatchman errors.

@@ -14,7 +14,7 @@ If you are looking for the absolute authority on what moz.build files can
contain, you've come to the right place.
"""

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import os

@@ -89,7 +89,6 @@ class Context(KeyedDefaultDict):

    config is the ConfigEnvironment for this context.
    """

    def __init__(self, allowed_variables={}, config=None, finder=None):
        self._allowed_variables = allowed_variables
        self.main_path = None

@@ -270,7 +269,6 @@ class SubContext(Context, ContextDerivedValue):
    Sub-contexts inherit paths and other relevant state from the parent
    context.
    """

    def __init__(self, parent):
        assert isinstance(parent, Context)

@@ -572,7 +570,6 @@ class PathMeta(type):
            cls = SourcePath
        return super(PathMeta, cls).__call__(context, value)


class Path(ContextDerivedValue, unicode):
    """Stores and resolves a source path relative to a given context

@@ -639,7 +636,6 @@ class Path(ContextDerivedValue, unicode):

class SourcePath(Path):
    """Like Path, but limited to paths in the source directory."""

    def __init__(self, context, value):
        if value.startswith('!'):
            raise ValueError('Object directory paths are not allowed')

@@ -680,7 +676,6 @@ class RenamedSourcePath(SourcePath):
    This class is not meant to be exposed to moz.build sandboxes as of now,
    and is not supported by the RecursiveMake backend.
    """

    def __init__(self, context, value):
        assert isinstance(value, tuple)
        source, self._target_basename = value

@@ -693,7 +688,6 @@ class RenamedSourcePath(SourcePath):

class ObjDirPath(Path):
    """Like Path, but limited to paths in the object directory."""

    def __init__(self, context, value=None):
        if not value.startswith('!'):
            raise ValueError('Object directory paths must start with ! prefix')

@@ -708,7 +702,6 @@ class ObjDirPath(Path):

class AbsolutePath(Path):
    """Like Path, but allows arbitrary paths outside the source and object directories."""

    def __init__(self, context, value=None):
        if not value.startswith('%'):
            raise ValueError('Absolute paths must start with % prefix')

@@ -724,7 +717,6 @@ def ContextDerivedTypedList(klass, base_class=List):
    """Specialized TypedList for use with ContextDerivedValue types.
    """
    assert issubclass(klass, ContextDerivedValue)

    class _TypedList(ContextDerivedValue, TypedList(klass, base_class)):
        def __init__(self, context, iterable=[], **kwargs):
            self.context = context

@@ -737,7 +729,6 @@ def ContextDerivedTypedList(klass, base_class=List):

    return _TypedList
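# Illustrative sketch (not part of the original diff): this factory builds
# a list type whose elements are constructed with the current moz.build
# context, roughly:
#
#   SourcePathList = ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList)
#   paths = SourcePathList(context, ['foo.cpp', 'bar.cpp'])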


@memoize
def ContextDerivedTypedListWithItems(type, base_class=List):
    """Specialized TypedList for use with ContextDerivedValue types.

@@ -871,7 +862,6 @@ def ContextDerivedTypedHierarchicalStringList(type):

    return _TypedListWithItems


def OrderedPathListWithAction(action):
    """Returns a class which behaves as a StrictOrderingOnAppendList, but
    invokes the given callable with each input and a context as it is

@@ -889,7 +879,6 @@ def OrderedPathListWithAction(action):

    return _OrderedListWithAction


def TypedListWithAction(typ, action):
    """Returns a class which behaves as a TypedList with the provided type, but
    invokes the given callable with each input and a context as it is

@@ -905,7 +894,6 @@ def TypedListWithAction(typ, action):
            super(_TypedListWithAction, self).__init__(action=_action, *args)
    return _TypedListWithAction


ManifestparserManifestList = OrderedPathListWithAction(read_manifestparser_manifest)
ReftestManifestList = OrderedPathListWithAction(read_reftest_manifest)

@@ -1201,105 +1189,66 @@ SUBCONTEXTS = {cls.__name__: cls for cls in SUBCONTEXTS}
# (storage_type, input_types, docs)

VARIABLES = {
    'SOURCES': (
        ContextDerivedTypedListWithItems(
            Path,
            StrictOrderingOnAppendListWithFlagsFactory(
                {'no_pgo': bool,
                 'flags': List,
                 'pgo_generate_only': bool
                 }
            )
        ),
        list,
    'SOURCES': (ContextDerivedTypedListWithItems(Path, StrictOrderingOnAppendListWithFlagsFactory({'no_pgo': bool, 'flags': List, 'pgo_generate_only': bool})), list,
        """Source code files.

        This variable contains a list of source code files to compile.
        Accepts assembler, C, C++, Objective C/C++.
        """
    ),
        """),
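# Illustrative sketch (not part of the original diff): in a moz.build file,
# SOURCES is appended to like a list, and the per-file flags declared above
# (no_pgo, flags, pgo_generate_only) hang off each entry, e.g.
#
#   SOURCES += ['foo.cpp', 'bar.cpp']
#   SOURCES['foo.cpp'].no_pgo = True
#   SOURCES['bar.cpp'].flags += ['-Wno-shadow']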

    'FILES_PER_UNIFIED_FILE': (
        int,
        int,
    'FILES_PER_UNIFIED_FILE': (int, int,
        """The number of source files to compile into each unified source file.

        """
    ),
        """),

    'IS_RUST_LIBRARY': (
        bool,
        bool,
    'IS_RUST_LIBRARY': (bool, bool,
        """Whether the current library defined by this moz.build is built by Rust.

        The library defined by this moz.build should have a build definition in
        a Cargo.toml file that exists in this moz.build's directory.
        """
    ),
        """),

    'RUST_LIBRARY_FEATURES': (
        List,
        list,
    'RUST_LIBRARY_FEATURES': (List, list,
        """Cargo features to activate for this library.

        This variable should not be used directly; you should be using the
        RustLibrary template instead.
        """
    ),
        """),

    'RUST_LIBRARY_TARGET_DIR': (
        unicode,
        unicode,
    'RUST_LIBRARY_TARGET_DIR': (unicode, unicode,
        """Where CARGO_TARGET_DIR should point when compiling this library. If
        not set, it defaults to the current objdir. It should be a relative path
        to the current objdir; absolute paths should not be used.

        This variable should not be used directly; you should be using the
        RustLibrary template instead.
        """
    ),
        """),

    'HOST_RUST_LIBRARY_FEATURES': (
        List,
        list,
    'HOST_RUST_LIBRARY_FEATURES': (List, list,
        """Cargo features to activate for this host library.

        This variable should not be used directly; you should be using the
        HostRustLibrary template instead.
        """
    ),
        """),

    'RUST_TESTS': (
        TypedList(unicode),
        list,
    'RUST_TESTS': (TypedList(unicode), list,
        """Names of Rust tests to build and run via `cargo test`.
        """),

    'RUST_TEST_FEATURES': (
        TypedList(unicode),
        list,
    'RUST_TEST_FEATURES': (TypedList(unicode), list,
        """Cargo features to activate for RUST_TESTS.
        """
    ),
        """),

    'UNIFIED_SOURCES': (
        ContextDerivedTypedList(
            SourcePath,
            StrictOrderingOnAppendList
        ),
        list,
    'UNIFIED_SOURCES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
        """Source code files that can be compiled together.

        This variable contains a list of source code files to compile,
        that can be concatenated all together and built as a single source
        file. This can help make the build faster and reduce the debug info
        size.
        """
    ),
        """),

    'GENERATED_FILES': (
        GeneratedFilesList,
        list,
    'GENERATED_FILES': (GeneratedFilesList, list,
        """Generic generated files.

        This variable contains a list of files for the build system to

@@ -1348,12 +1297,9 @@ VARIABLES = {
        build, regardless of whether it is stale. This is special to the
        RecursiveMake backend and intended for special situations only (e.g.,
        localization). Please consult a build peer before using ``force``.
        """
    ),
        """),

    'DEFINES': (
        InitializedDefines,
        dict,
    'DEFINES': (InitializedDefines, dict,
        """Dictionary of compiler defines to declare.

        These are passed in to the compiler as ``-Dkey='value'`` for string

@@ -1379,22 +1325,16 @@ VARIABLES = {
               'MOZ_EXTENSIONS_DB_SCHEMA': 15,
               'DLL_SUFFIX': '".so"',
           })
        """
    ),
        """),

    'DELAYLOAD_DLLS': (
        List,
        list,
    'DELAYLOAD_DLLS': (List, list,
        """Delay-loaded DLLs.

        This variable contains a list of DLL files which the module being linked
        should load lazily. This only has an effect when building with MSVC.
        """
    ),
        """),

    'DIRS': (
        ContextDerivedTypedList(SourcePath),
        list,
    'DIRS': (ContextDerivedTypedList(SourcePath), list,
        """Child directories to descend into looking for build frontend files.

        This works similarly to the ``DIRS`` variable in make files. Each str

@@ -1406,12 +1346,9 @@ VARIABLES = {
        Values are relative paths. They can be multiple directory levels
        above or below. Use ``..`` for parent directories and ``/`` for path
        delimiters.
        """
    ),
        """),

    'HAS_MISC_RULE': (
        bool,
        bool,
    'HAS_MISC_RULE': (bool, bool,
        """Whether this directory should be traversed in the ``misc`` tier.

        Many ``libs`` rules still exist in Makefile.in files. We highly prefer

@@ -1425,8 +1362,7 @@ VARIABLES = {
        Please note that converting ``libs`` rules to the ``misc`` tier must
        be done with care, as there are many implicit dependencies that can
        break the build in subtle ways.
        """
    ),
        """),

    'FINAL_TARGET_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
        """List of files to be installed into the application directory.

@@ -1733,12 +1669,7 @@ VARIABLES = {
        the Cargo.toml in the same directory.
        """),

    'CONFIGURE_SUBST_FILES': (
        ContextDerivedTypedList(
            SourcePath,
            StrictOrderingOnAppendList
        ),
        list,
    'CONFIGURE_SUBST_FILES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
        """Output files that will be generated using configure-like substitution.

        This is a substitute for ``AC_OUTPUT`` in autoconf. For each path in this

@@ -1746,22 +1677,15 @@ VARIABLES = {
        ``{path}.in``. The contents of this file will be read and variable
        patterns like ``@foo@`` will be substituted with the values of the
        ``AC_SUBST`` variables declared during configure.
        """
    ),
        """),

    'CONFIGURE_DEFINE_FILES': (
        ContextDerivedTypedList(
            SourcePath,
            StrictOrderingOnAppendList
        ),
        list,
    'CONFIGURE_DEFINE_FILES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
        """Output files generated from configure/config.status.

        This is a substitute for ``AC_CONFIG_HEADER`` in autoconf. This is very
        similar to ``CONFIGURE_SUBST_FILES`` except the generation logic takes
        into account the values of ``AC_DEFINE`` instead of ``AC_SUBST``.
        """
    ),
        """),

    'EXPORTS': (ContextDerivedTypedHierarchicalStringList(Path), list,
        """List of files to be exported, and in which subdirectories.

@@ -2488,31 +2412,23 @@ SPECIAL_VARIABLES = {
        ``$(FINAL_TARGET)/components/``.
        """),

    'EXTRA_PP_COMPONENTS': (
        lambda context: context['FINAL_TARGET_PP_FILES'].components._strings,
        list,
    'EXTRA_PP_COMPONENTS': (lambda context: context['FINAL_TARGET_PP_FILES'].components._strings, list,
        """Javascript XPCOM files.

        This variable contains a list of files to preprocess. Generated
        files will be installed in the ``/components`` directory of the distribution.
        """
    ),
        """),

    'JS_PREFERENCE_FILES': (
        lambda context: context['FINAL_TARGET_FILES'].defaults.pref._strings,
        list,
    'JS_PREFERENCE_FILES': (lambda context: context['FINAL_TARGET_FILES'].defaults.pref._strings, list,
        """Exported JavaScript files.

        A list of files copied into the dist directory for packaging and installation.
        Path will be defined for gre or application prefs dir based on what is building.
        """),

    'JS_PREFERENCE_PP_FILES': (
        lambda context: context['FINAL_TARGET_PP_FILES'].defaults.pref._strings,
        list,
    'JS_PREFERENCE_PP_FILES': (lambda context: context['FINAL_TARGET_PP_FILES'].defaults.pref._strings, list,
        """Like JS_PREFERENCE_FILES, preprocessed.
        """
    ),
        """),

    'RESOURCE_FILES': (lambda context: context['FINAL_TARGET_FILES'].res, list,
        """List of resources to be exported, and in which subdirectories.

@@ -2529,16 +2445,13 @@ SPECIAL_VARIABLES = {
           RESOURCE_FILES.fonts += ['bar.res']
        """),

    'CONTENT_ACCESSIBLE_FILES': (
        lambda context: context['FINAL_TARGET_FILES'].contentaccessible,
        list,
    'CONTENT_ACCESSIBLE_FILES': (lambda context: context['FINAL_TARGET_FILES'].contentaccessible, list,
        """List of files which can be accessed by web content through resource:// URIs.

        ``CONTENT_ACCESSIBLE_FILES`` is used to list the files to be exported
        to ``dist/bin/contentaccessible``. Files can also be appended to a
        field to indicate which subdirectory they should be exported to.
        """
    ),
        """),

    'EXTRA_JS_MODULES': (lambda context: context['FINAL_TARGET_FILES'].modules, list,
        """Additional JavaScript files to distribute.

@@ -15,18 +15,19 @@ contains the code for converting executed mozbuild files into these data
structures.
"""

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

from mozbuild.frontend.context import (
    ObjDirPath,
    SourcePath,
)
from mozbuild.util import StrictOrderingOnAppendList
from mozpack.chrome.manifest import ManifestEntry

import mozpack.path as mozpath
from .context import FinalTargetValue

from collections import defaultdict
from collections import defaultdict, OrderedDict
import itertools

from ..util import (

@@ -191,7 +192,6 @@ class ComputedFlags(ContextDerived):
                    flags[dest_var].extend(value)
        return flags.items()


class XPIDLModule(ContextDerived):
    """Describes an XPIDL module to be compiled."""

@@ -207,7 +207,6 @@ class XPIDLModule(ContextDerived):
        self.name = name
        self.idl_files = idl_files


class BaseDefines(ContextDerived):
    """Context derived container object for DEFINES/HOST_DEFINES,
    which are OrderedDicts.

@@ -233,15 +232,12 @@ class BaseDefines(ContextDerived):
        else:
            self.defines.update(more_defines)


class Defines(BaseDefines):
    pass


class HostDefines(BaseDefines):
    pass


class WebIDLCollection(ContextDerived):
    """Collects WebIDL info referenced during the build."""

@@ -517,8 +513,7 @@ class BaseProgram(Linkable):
    @property
    def output_path(self):
        if self.installed:
            return ObjDirPath(self._context, '!/' + mozpath.join(
                self.install_target, self.program))
            return ObjDirPath(self._context, '!/' + mozpath.join(self.install_target, self.program))
        else:
            return ObjDirPath(self._context, '!' + self.program)
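# Illustrative sketch (not part of the original diff): ObjDirPath values use
# the '!' prefix convention from context.py, so an installed program would
# resolve to something like '!/dist/bin/firefox' (install_target joined with
# the program name), while an uninstalled one stays at '!firefox'.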

@@ -1042,12 +1037,10 @@ class UnifiedSources(BaseSources):

            suffix = self.canonical_suffix[1:]
            unified_prefix='Unified_%s_%s' % (suffix, unified_prefix)
            self.unified_source_mapping = list(
                group_unified_files(source_files,
            self.unified_source_mapping = list(group_unified_files(source_files,
                                                unified_prefix=unified_prefix,
                                                unified_suffix=suffix,
                                                files_per_unified_file=files_per_unified_file)
            )
                                                files_per_unified_file=files_per_unified_file))


class InstallationTarget(ContextDerived):

@@ -1107,7 +1100,6 @@ class FinalTargetPreprocessedFiles(ContextDerived):
        ContextDerived.__init__(self, sandbox)
        self.files = files


class LocalizedFiles(FinalTargetFiles):
    """Sandbox container object for LOCALIZED_FILES, which is a
    HierarchicalStringList.

@@ -1195,16 +1187,12 @@ class GeneratedFile(ContextDerived):
            '.inc',
            '.py',
            '.rs',
            'node.stub', # To avoid VPATH issues with installing node files:
                         # https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
            # We need to compile Java to generate JNI wrappers for native code
            # compilation to consume.
            'android_apks',
            'node.stub', # To avoid VPATH issues with installing node files: https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
            'android_apks', # We need to compile Java to generate JNI wrappers for native code compilation to consume.
            '.profdata',
            '.webidl'
        )
        self.required_for_compile = [
            f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]
        self.required_for_compile = [f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]


class ChromeManifestEntry(ContextDerived):

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import itertools
import logging

@@ -24,6 +24,7 @@ import pytoml

from .data import (
    BaseRustProgram,
    BaseSources,
    ChromeManifestEntry,
    ComputedFlags,
    ConfigFileSubstitution,

@@ -92,6 +93,7 @@ from .context import (
    ObjDirPath,
    Path,
    SubContext,
    TemplateContext,
)

from mozbuild.base import ExecutionSummary

@@ -183,8 +185,7 @@ class TreeMetadataEmitter(LoggingMixin):
                objs = list(emitfn(out))
                self._emitter_time += time.time() - start

                for o in emit_objs(objs):
                    yield o
                for o in emit_objs(objs): yield o

            else:
                raise Exception('Unhandled output type: %s' % type(out))

@@ -195,8 +196,7 @@ class TreeMetadataEmitter(LoggingMixin):
            objs = list(self._emit_libs_derived(contexts))
            self._emitter_time += time.time() - start

            for o in emit_objs(objs):
                yield o
            for o in emit_objs(objs): yield o

    def _emit_libs_derived(self, contexts):

@@ -234,11 +234,11 @@ class TreeMetadataEmitter(LoggingMixin):
                if isinstance(collection, WebIDLCollection):
                    # Test webidl sources are added here as a somewhat special
                    # case.
                    idl_sources[mozpath.join(root, 'test')] = [
                        s for s in collection.all_test_cpp_basenames()]
                    idl_sources[mozpath.join(root, 'test')] = [s for s in collection.all_test_cpp_basenames()]

                yield collection

        # Next do FINAL_LIBRARY linkage.
        for lib in (l for libs in self._libs.values() for l in libs):
            if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:

@@ -305,6 +305,7 @@ class TreeMetadataEmitter(LoggingMixin):
            propagate_defines(lib, lib.lib_defines)
            yield lib


        for lib in (l for libs in self._libs.values() for l in libs):
            lib_defines = list(lib.lib_defines.get_defines())
            if lib_defines:

@@ -324,6 +325,7 @@ class TreeMetadataEmitter(LoggingMixin):
        for obj in self._binaries.values():
            yield obj


    LIBRARY_NAME_VAR = {
        'host': 'HOST_LIBRARY_NAME',
        'target': 'LIBRARY_NAME',

@@ -446,8 +448,8 @@ class TreeMetadataEmitter(LoggingMixin):
                    raise SandboxValidationError(
                        '%s contains "static:%s", but there is only a shared "%s" '
                        'in %s. You may want to add FORCE_STATIC_LIB=True in '
                        '%s/moz.build, or remove "static:".' % (
                            variable, path, name, candidates[0].relobjdir, candidates[0].relobjdir),
                        '%s/moz.build, or remove "static:".' % (variable, path,
                        name, candidates[0].relobjdir, candidates[0].relobjdir),
                        context)

                elif isinstance(obj, StaticLibrary) and isinstance(candidates[0],

@@ -483,29 +485,25 @@ class TreeMetadataEmitter(LoggingMixin):
                # A simple version number.
                if isinstance(values, (str, unicode)):
                    raise SandboxValidationError(
                        '%s %s of crate %s does not list a path' % (
                            description, dep_crate_name, crate_name),
                        '%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
                        context)

                dep_path = values.get('path', None)
                if not dep_path:
                    raise SandboxValidationError(
                        '%s %s of crate %s does not list a path' % (
                            description, dep_crate_name, crate_name),
                        '%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
                        context)

                # Try to catch the case where somebody listed a
                # local path for development.
                if os.path.isabs(dep_path):
                    raise SandboxValidationError(
                        '%s %s of crate %s has a non-relative path' % (
                            description, dep_crate_name, crate_name),
                        '%s %s of crate %s has a non-relative path' % (description, dep_crate_name, crate_name),
                        context)

                if not os.path.exists(mozpath.join(context.config.topsrcdir, crate_dir, dep_path)):
                    raise SandboxValidationError(
                        '%s %s of crate %s refers to a non-existent path' % (
                            description, dep_crate_name, crate_name),
                        '%s %s of crate %s refers to a non-existent path' % (description, dep_crate_name, crate_name),
                        context)

    def _rust_library(self, context, libname, static_args, cls=RustLibrary):

@@ -551,6 +549,7 @@ class TreeMetadataEmitter(LoggingMixin):
        return cls(context, libname, cargo_file, crate_type, dependencies,
                   features, cargo_target_dir, **static_args)


    def _handle_gn_dirs(self, context):
        for target_dir in context.get('GN_DIRS', []):
            context['DIRS'] += [target_dir]

@@ -570,10 +569,10 @@ class TreeMetadataEmitter(LoggingMixin):

            yield GnProjectData(context, target_dir, gn_dir, non_unified_sources)


    def _handle_linkables(self, context, passthru, generated_files):
        linkables = []
        host_linkables = []

        def add_program(prog, var):
            if var.startswith('HOST_'):
                host_linkables.append(prog)

@@ -584,8 +583,8 @@ class TreeMetadataEmitter(LoggingMixin):
            if program in self._binaries:
                raise SandboxValidationError(
                    'Cannot use "%s" as %s name, '
                    'because it is already used in %s' % (
                        program, kind, self._binaries[program].relsrcdir), context)
                    'because it is already used in %s' % (program, kind,
                    self._binaries[program].relsrcdir), context)
        for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
            program = context.get(kind)
            if program:

@@ -606,7 +605,7 @@ class TreeMetadataEmitter(LoggingMixin):

        # Verify Rust program definitions.
        if all_rust_programs:
            config, cargo_file = self._parse_cargo_file(context)
            config, cargo_file = self._parse_cargo_file(context);
            bin_section = config.get('bin', None)
            if not bin_section:
                raise SandboxValidationError(

@@ -634,8 +633,8 @@ class TreeMetadataEmitter(LoggingMixin):
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" in %s, '
                        'because it is already used in %s' % (
                            program, kind, self._binaries[program].relsrcdir), context)
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relsrcdir), context)
                self._binaries[program] = cls(context, program,
                                              is_unit_test=kind == 'CPP_UNIT_TESTS')
                self._linkage.append((context, self._binaries[program],

@@ -648,8 +647,8 @@ class TreeMetadataEmitter(LoggingMixin):

        if host_libname:
            if host_libname == libname:
                raise SandboxValidationError(
                    'LIBRARY_NAME and HOST_LIBRARY_NAME must have a different value', context)
                raise SandboxValidationError('LIBRARY_NAME and '
                                             'HOST_LIBRARY_NAME must have a different value', context)

            is_rust_library = context.get('IS_RUST_LIBRARY')
            if is_rust_library:

@@ -859,22 +858,21 @@ class TreeMetadataEmitter(LoggingMixin):
                    assert isinstance(f, Path)
                    gen_srcs.append(full_path)
                if symbol == 'SOURCES':
                    context_flags = context_srcs[f]
                    if context_flags:
                        all_flags[full_path] = context_flags
                    flags = context_srcs[f]
                    if flags:
                        all_flags[full_path] = flags
                    # Files for the generation phase of PGO are unusual, so
                    # it's not unreasonable to require them to be special.
                    if context_flags.pgo_generate_only:
                    if flags.pgo_generate_only:
                        if not isinstance(f, Path):
                            raise SandboxValidationError(
                                'pgo_generate_only file must not be a generated file: %s' % f,
                                context)
                            raise SandboxValidationError('pgo_generate_only file'
                                                         'must not be a generated file: %s' % f, context)
                        if mozpath.splitext(f)[1] != '.cpp':
                            raise SandboxValidationError('pgo_generate_only file'
                                                         'must be a .cpp file: %s' % f, context)
                        if context_flags.no_pgo:
                            raise SandboxValidationError(
                                'pgo_generate_only files cannot be marked no_pgo: %s' % f, context)
                        if flags.no_pgo:
                            raise SandboxValidationError('pgo_generate_only files'
                                                         'cannot be marked no_pgo: %s' % f, context)
                        pgo_generate_only.add(f)

                if isinstance(f, SourcePath) and not os.path.exists(full_path):

@@ -1005,6 +1003,7 @@ class TreeMetadataEmitter(LoggingMixin):
                    l.cxx_link = True
                    break


    def emit_from_context(self, context):
        """Convert a Context to tree metadata objects.

@@ -1023,8 +1022,7 @@ class TreeMetadataEmitter(LoggingMixin):

        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_context(context):
            yield o
        for o in self._emit_directory_traversal_from_context(context): yield o

        for obj in self._process_xpidl(context):
            yield obj

@@ -1144,11 +1142,8 @@ class TreeMetadataEmitter(LoggingMixin):
                generated_files.add(str(sub.relpath))
                yield sub

        for defines_var, cls, backend_flags in (('DEFINES', Defines,
                                                 (computed_flags, computed_as_flags)),
                                                ('HOST_DEFINES', HostDefines,
                                                 (computed_host_flags,))
                                                ):
        for defines_var, cls, backend_flags in (('DEFINES', Defines, (computed_flags, computed_as_flags)),
                                                ('HOST_DEFINES', HostDefines, (computed_host_flags,))):
            defines = context.get(defines_var)
            if defines:
                defines_obj = cls(context, defines)

@@ -1189,18 +1184,16 @@ class TreeMetadataEmitter(LoggingMixin):
            full_path = local_include.full_path
            if not isinstance(local_include, ObjDirPath):
                if not os.path.exists(full_path):
                    raise SandboxValidationError(
                        'Path specified in LOCAL_INCLUDES does not exist: %s (resolved to %s)' %
                        (local_include, full_path), context)
                    raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
                                                 'does not exist: %s (resolved to %s)' % (local_include,
                                                 full_path), context)
                if not os.path.isdir(full_path):
                    raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
                                                 'is a filename, but a directory is required: %s '
                                                 '(resolved to %s)' % (local_include, full_path),
                                                 context)
                                                 '(resolved to %s)' % (local_include, full_path), context)
            if (full_path == context.config.topsrcdir or
                    full_path == context.config.topobjdir):
                raise SandboxValidationError(
                    'Path specified in LOCAL_INCLUDES '
                raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
                    '(%s) resolves to the topsrcdir or topobjdir (%s), which is '
                    'not allowed' % (local_include, full_path), context)
            include_obj = LocalInclude(context, local_include)

@@ -1214,8 +1207,7 @@ class TreeMetadataEmitter(LoggingMixin):
        for obj in self._handle_linkables(context, passthru, generated_files):
            yield obj

        generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', ''))
                                for k in self._binaries.keys()])
        generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', '')) for k in self._binaries.keys()])

        components = []
        for var, cls in (

@@ -1297,13 +1289,8 @@ class TreeMetadataEmitter(LoggingMixin):
            # in anything *but* LOCALIZED_FILES.
            if f.target_basename in localized_generated_files:
                raise SandboxValidationError(
                    (
                        'Outputs of LOCALIZED_GENERATED_FILES cannot '
                        'be used in %s: %s'
                    )
                    % (var, f),
                    context,
                )
                    ('Outputs of LOCALIZED_GENERATED_FILES cannot be used in %s: ' +
                     '%s') % (var, f), context)

        # Addons (when XPI_NAME is defined) and Applications (when
        # DIST_SUBDIR is defined) use a different preferences directory

@@ -1389,6 +1376,7 @@ class TreeMetadataEmitter(LoggingMixin):
        if context.objdir in self._host_compile_dirs:
            yield computed_host_flags


    def _create_substitution(self, cls, context, path):
        sub = cls(context)
        sub.input_path = '%s.in' % path.full_path

@@ -1540,8 +1528,7 @@ class TreeMetadataEmitter(LoggingMixin):
                        not os.path.isfile(mozpath.join(context.config.topsrcdir,
                                                        install_path[2:])),
                        install_path not in install_info.external_installs]):
                    raise SandboxValidationError(
                        'Error processing test '
                    raise SandboxValidationError('Error processing test '
                        'manifest %s: entry in support-files not present '
                        'in the srcdir: %s' % (path, install_path), context)

@@ -1580,15 +1567,13 @@ class TreeMetadataEmitter(LoggingMixin):
                try:
                    del obj.installs[mozpath.join(manifest_dir, f)]
                except KeyError:
                    raise SandboxValidationError(
                        'Error processing test '
                    raise SandboxValidationError('Error processing test '
                        'manifest %s: entry in generated-files not present '
                        'elsewhere in manifest: %s' % (path, f), context)

            yield obj
        except (AssertionError, Exception):
            raise SandboxValidationError(
                'Error processing test '
            raise SandboxValidationError('Error processing test '
                'manifest file %s: %s' % (path,
                '\n'.join(traceback.format_exception(*sys.exc_info()))),
                context)

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import gyp
import gyp.msvs_emulation

@@ -10,6 +10,7 @@ import sys
import os
import time
import types
import warnings

import mozpack.path as mozpath
from mozpack.files import FileFinder

@@ -22,7 +23,10 @@ from .context import (
)
from mozbuild.util import (
    expand_variables,
    List,
    memoize,
)
from .reader import SandboxValidationError

# Define this module as gyp.generator.mozbuild so that gyp can use it
# as a generator under the name "mozbuild".

@@ -70,7 +74,6 @@ class GypContext(TemplateContext):
    relobjdir is the object directory that will be used for this context,
    relative to the topobjdir defined in the ConfigEnvironment.
    """

    def __init__(self, config, relobjdir):
        self._relobjdir = relobjdir
        TemplateContext.__init__(self, template='Gyp',

@@ -85,15 +88,10 @@ def handle_actions(actions, context, action_overrides):
            raise RuntimeError('GYP action %s not listed in action_overrides' % name)
        outputs = action['outputs']
        if len(outputs) > 1:
            raise NotImplementedError(
                'GYP actions with more than one output not supported: %s' % name)
            raise NotImplementedError('GYP actions with more than one output not supported: %s' % name)
        output = outputs[0]
        if not output.startswith(idir):
            raise NotImplementedError(
                'GYP actions outputting to somewhere other than '
                '<(INTERMEDIATE_DIR) not supported: %s'
                % output
            )
            raise NotImplementedError('GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
        output = output[len(idir):]
        context['GENERATED_FILES'] += [output]
        g = context['GENERATED_FILES'][output]

@@ -106,8 +104,7 @@ def handle_copies(copies, context):
    for copy in copies:
        dest = copy['destination']
        if not dest.startswith(dist):
            raise NotImplementedError(
                'GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
            raise NotImplementedError('GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
        dest_paths = dest[len(dist):].split('/')
        exports = context['EXPORTS']
        while dest_paths:

@@ -164,7 +161,6 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,

    use_libs = []
    libs = []

    def add_deps(s):
        for t in s.get('dependencies', []) + s.get('dependencies_original', []):
            ty = targets[t]['type']

@@ -207,8 +203,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
            context['PROGRAM'] = name.decode('utf-8')
        if spec['type'] == 'shared_library':
            context['FORCE_SHARED_LIB'] = True
        elif spec['type'] == 'static_library' and \
                spec.get('variables', {}).get('no_expand_libs', '0') == '1':
        elif spec['type'] == 'static_library' and spec.get('variables', {}).get('no_expand_libs', '0') == '1':
            # PSM links a NSS static library, but our folded libnss
            # doesn't actually export everything that all of the
            # objects within would need, so that one library

@@ -256,8 +251,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                    name, value = define.split('=', 1)
                    # The NSS gyp file doesn't expose a way to override this
                    # currently, so we do so here.
                    if name == 'NSS_ALLOW_SSLKEYLOGFILE' and \
                            config.substs.get('RELEASE_OR_BETA', False):
                    if name == 'NSS_ALLOW_SSLKEYLOGFILE' and config.substs.get('RELEASE_OR_BETA', False):
                        continue
                    context['DEFINES'][name] = value
                else:

@@ -288,8 +282,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                if include.startswith('/'):
                    resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
                elif not include.startswith(('!', '%')):
                    resolved = mozpath.abspath(mozpath.join(
                        mozpath.dirname(build_file), include))
                    resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
                if not include.startswith(('!', '%')) and not os.path.exists(resolved):
                    continue
                context['LOCAL_INCLUDES'] += [include]

@@ -375,7 +368,6 @@ class GypProcessor(object):
    gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
    from moz.build.
    """

    def __init__(self, config, gyp_dir_attrs, path, output, executor,
                 action_overrides, non_unified_sources):
        self._path = path

@@ -21,7 +21,6 @@ import mozpack.path as mozpath

TOPSRCDIR = os.path.abspath(os.path.join(__file__, '../../../../../'))


class InvalidPathException(Exception):
    """Represents an error due to an invalid path."""

@@ -285,6 +284,7 @@ class MozbuildFileCommands(MachCommandBase):
            print(e.message)
            return 1


    def _get_files_info(self, paths, rev=None):
        reader = self.mozbuild_reader(config_mode='empty', vcs_revision=rev)

@@ -328,6 +328,7 @@ class MozbuildFileCommands(MachCommandBase):

        return reader.files_info(allpaths)


    @SubCommand('file-info', 'schedules',
                'Show the combined SCHEDULES for the files listed.')
    @CommandArgument('paths', nargs='+',

@@ -80,6 +80,7 @@ from mozbuild.base import ExecutionSummary
from concurrent.futures.process import ProcessPoolExecutor



if sys.version_info.major == 2:
    text_type = unicode
    type_type = types.TypeType

@@ -105,7 +106,6 @@ class EmptyConfig(object):
    This variation is needed because CONFIG uses .get() to access members.
    Without it, None (instead of our EmptyValue types) would be returned.
    """

    def get(self, key, default=None):
        return self[key]

@@ -182,7 +182,6 @@ class MozbuildSandbox(Sandbox):
    metadata is a dict of metadata that can be used during the sandbox
    evaluation.
    """

    def __init__(self, context, metadata={}, finder=default_finder):
        assert isinstance(context, Context)

@@ -321,7 +320,6 @@ class MozbuildSandbox(Sandbox):
        The wrapper function does type coercion on the function arguments
        """
        func, args_def, doc = function_def

        def function(*args):
            def coerce(arg, type):
                if not isinstance(arg, type):

@@ -465,7 +463,6 @@ class TemplateFunction(object):
    """AST Node Transformer to rewrite variable accesses to go through
    a dict.
    """

    def __init__(self, sandbox, global_name):
        self._sandbox = sandbox
        self._global_name = global_name

@@ -494,7 +491,6 @@ class TemplateFunction(object):

class SandboxValidationError(Exception):
    """Represents an error encountered when validating sandbox results."""

    def __init__(self, message, context):
        Exception.__init__(self, message)
        self.context = context

@@ -536,7 +532,6 @@ class BuildReaderError(Exception):
    MozbuildSandbox has over Sandbox (e.g. the concept of included files -
    which affect error messages, of course).
    """

    def __init__(self, file_stack, trace, sandbox_exec_error=None,
                 sandbox_load_error=None, validation_error=None, other_error=None,
                 sandbox_called_error=None):

@@ -1288,7 +1283,6 @@ class BuildReader(object):
        # Exporting doesn't work reliably in tree traversal mode. Override
        # the function to no-op.
        functions = dict(FUNCTIONS)

        def export(sandbox):
            return lambda varname: None
        functions['export'] = tuple([export] + list(FUNCTIONS['export'][1:]))

@@ -1343,7 +1337,6 @@ class BuildReader(object):
        # times (once for every path in a directory that doesn't have any
        # test metadata). So, we cache the function call.
        defaults_cache = {}

        def test_defaults_for_path(ctxs):
            key = tuple(ctx.current_path or ctx.main_path for ctx in ctxs)

@@ -1401,8 +1394,7 @@ class BuildReader(object):
        test_manifest_contexts = set(
            ['%s_MANIFESTS' % key for key in TEST_MANIFESTS] +
            ['%s_MANIFESTS' % flavor.upper() for flavor in REFTEST_FLAVORS] +
            ['%s_MANIFESTS' % flavor.upper().replace('-', '_')
             for flavor in WEB_PLATFORM_TESTS_FLAVORS]
            ['%s_MANIFESTS' % flavor.upper().replace('-', '_') for flavor in WEB_PLATFORM_TESTS_FLAVORS]
        )

        result_context = Files(Context())

@@ -17,7 +17,7 @@ KeyError are machine parseable. This machine-friendly data is used to present
user-friendly error messages in the case of errors.
"""

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import os
import sys

@@ -53,7 +53,6 @@ class SandboxExecutionError(SandboxError):
    This is a simple container exception. Its purpose is to capture state
    so something else can report on it.
    """

    def __init__(self, file_stack, exc_type, exc_value, trace):
        SandboxError.__init__(self, file_stack)

@@ -70,7 +69,6 @@ class SandboxLoadError(SandboxError):
    a file. If so, the file_stack will be non-empty and the file that caused
    the load will be on top of the stack.
    """

    def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
        SandboxError.__init__(self, file_stack)

@@ -155,7 +153,7 @@ class Sandbox(dict):

        try:
            source = self._finder.get(path).read()
        except Exception:
        except Exception as e:
            raise SandboxLoadError(self._context.source_stack,
                                   sys.exc_info()[2], read_error=path)

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import sys

from mozbuild.backend.test_manifest import TestManifestBackend

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import hashlib
import json
import os

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import print_function

from collections import defaultdict
from copy import deepcopy

@@ -78,10 +78,7 @@ class MozbuildWriter(object):
            self.write('\n')
            self.write(self.indent + key)
            self.write(' += [\n ' + self.indent)
            self.write(
                (',\n ' + self.indent).join(
                    alphabetical_sorted(self.mb_serialize(v) for v in value))
            )
            self.write((',\n ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
            self.write('\n')
            self.write_ln(']')

@@ -115,6 +112,7 @@ class MozbuildWriter(object):
        if not wrote_ln:
            self.write_ln("%s[%s] = %s" % subst_vals)


    def write_condition(self, values):
        def mk_condition(k, v):
            if not v:

@@ -277,9 +275,9 @@ def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
            context_attrs['LOCAL_INCLUDES'] += [include]

    context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
    if use_defines_in_asflags and context_attrs['DEFINES']:
        context_attrs['ASFLAGS'] += ['-D' + d for d in context_attrs['DEFINES']]
    flags = [_f for _f in spec.get('cflags', []) if _f in mozilla_flags]
    if use_defines_in_asflags and defines:
        context_attrs['ASFLAGS'] += ['-D' + d for d in defines]
    flags = [f for f in spec.get('cflags', []) if f in mozilla_flags]
    if flags:
        suffix_map = {
            '.c': 'CFLAGS',

@@ -434,6 +432,7 @@ def write_mozbuild(config, srcdir, output, non_unified_sources, gn_config_files,
    mb.write('\n')
    mb.write(generated_header)

    all_attr_sets = [attrs for _, attrs in configs]
    all_args = [args for args, _ in configs]

    # Start with attributes that will be a part of the mozconfig

@@ -4,7 +4,7 @@

# This module contains code for running an HTTP server to view build info.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import BaseHTTPServer
import json

@@ -8,7 +8,7 @@ processing jar.mn files.
See the documentation for jar.mn on MDC for further details on the format.
'''

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import sys
import os

@@ -18,6 +18,7 @@ import logging
from time import localtime
from MozZipFile import ZipFile
from cStringIO import StringIO
from collections import defaultdict

from mozbuild.preprocessor import Preprocessor
from mozbuild.action.buildlist import addEntriesToListFile

@@ -89,8 +90,7 @@ class JarInfo(object):
        self.entries = []


class DeprecatedJarManifest(Exception):
    pass
class DeprecatedJarManifest(Exception): pass


class JarManifestParser(object):

@@ -107,9 +107,8 @@ class JarManifestParser(object):
    relsrcline = re.compile('relativesrcdir\s+(?P<relativesrcdir>.+?):')
    regline = re.compile('\%\s+(.*)$')
    entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+'
    entryline = re.compile(
        entryre + ('(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*'
                   '(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$')
    entryline = re.compile(entryre
        + '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$'
    )
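# Illustrative sketch (not part of the original diff): entryline matches
# jar.mn entries such as
#
#   * content/browser/foo.xul (foo.xul)
#
# where the leading '*' sets optPreprocess, the first path is the output,
# and the parenthesized path (optionally prefixed with %) is the source.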
|
||||
def __init__(self):
|
||||
|
@ -239,14 +238,14 @@ class JarMaker(object):
|
|||
p.add_option('-s', type='string', action='append', default=[],
|
||||
help='source directory')
|
||||
p.add_option('-t', type='string', help='top source directory')
|
||||
p.add_option('-c', '--l10n-src', type='string',
|
||||
action='append', help='localization directory')
|
||||
p.add_option('-c', '--l10n-src', type='string', action='append'
|
||||
, help='localization directory')
|
||||
p.add_option('--l10n-base', type='string', action='store',
|
||||
help='base directory to be used for localization (requires relativesrcdir)'
|
||||
)
|
||||
p.add_option('--locale-mergedir', type='string', action='store',
|
||||
help='base directory to be used for l10n-merge '
|
||||
'(requires l10n-base and relativesrcdir)'
|
||||
p.add_option('--locale-mergedir', type='string', action='store'
|
||||
,
|
||||
help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
|
||||
)
|
||||
p.add_option('--relativesrcdir', type='string',
|
||||
help='relativesrcdir to be used for localization')
|
||||
|
@ -315,7 +314,7 @@ class JarMaker(object):
|
|||
'''
|
||||
|
||||
# making paths absolute, guess srcdir if file and add to sourcedirs
|
||||
def _normpath(p): return os.path.normpath(os.path.abspath(p))
|
||||
_normpath = lambda p: os.path.normpath(os.path.abspath(p))
|
||||
self.topsourcedir = _normpath(self.topsourcedir)
|
||||
self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
|
||||
if self.localedirs:
|
||||
|
@ -502,7 +501,7 @@ class JarMaker(object):
|
|||
try:
|
||||
info = self.jarfile.getinfo(aPath)
|
||||
return info.date_time
|
||||
except Exception:
|
||||
except:
|
||||
return 0
|
||||
|
||||
def getOutput(self, name):
|
||||

@@ -108,12 +108,12 @@ class Watch(MachCommandBase):

if not conditions.is_artifact_build(self):
print('mach watch requires an artifact build. See '
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Simple_Firefox_build') # noqa
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Simple_Firefox_build')
return 1

if not self.substs.get('WATCHMAN', None):
print('mach watch requires watchman to be installed. See '
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching') # noqa
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching')
return 1

self._activate_virtualenv()

@@ -121,7 +121,7 @@ class Watch(MachCommandBase):
self.virtualenv_manager.install_pip_package('pywatchman==1.3.0')
except Exception:
print('Could not install pywatchman from pip. See '
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching') # noqa
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching')
return 1

from mozbuild.faster_daemon import Daemon

@@ -310,7 +310,7 @@ class CargoProvider(MachCommandBase):

if all_crates:
crates = crates_and_roots.keys()
elif crates is None or crates == []:
elif crates == None or crates == []:
crates = ['gkrust']

for crate in crates:

@@ -335,7 +335,6 @@ class CargoProvider(MachCommandBase):

return 0


@CommandProvider
class Doctor(MachCommandBase):
"""Provide commands for diagnosing common build environment problems"""

@@ -349,12 +348,10 @@ class Doctor(MachCommandBase):
doctor = Doctor(self.topsrcdir, self.topobjdir, fix)
return doctor.check_all()


@CommandProvider
class Clobber(MachCommandBase):
NO_AUTO_LOG = True
CLOBBER_CHOICES = ['objdir', 'python']

@Command('clobber', category='build',
description='Clobber the tree (delete the object directory).')
@CommandArgument('what', default=['objdir'], nargs='*',

@@ -424,7 +421,6 @@ class Clobber(MachCommandBase):
except BuildEnvironmentNotFoundException:
return {}


@CommandProvider
class Logs(MachCommandBase):
"""Provide commands to read mach logs."""

@@ -572,9 +568,8 @@ class Warnings(MachCommandBase):
continue

if warning['column'] is not None:
print('%s:%d:%d [%s] %s' % (
filename, warning['line'], warning['column'],
warning['flag'], warning['message']))
print('%s:%d:%d [%s] %s' % (filename, warning['line'],
warning['column'], warning['flag'], warning['message']))
else:
print('%s:%d [%s] %s' % (filename, warning['line'],
warning['flag'], warning['message']))

@@ -588,21 +583,20 @@ class Warnings(MachCommandBase):
print('Specified directory not found.')
return None


@CommandProvider
class GTestCommands(MachCommandBase):
@Command('gtest', category='testing',
description='Run GTest unit tests (C++ tests).')
@CommandArgument('gtest_filter', default=b"*", nargs='?', metavar='gtest_filter',
help="test_filter is a ':'-separated list of wildcard patterns "
"(called the positive patterns), optionally followed by a '-' "
"and another ':'-separated pattern list (called the negative patterns).")
help="test_filter is a ':'-separated list of wildcard patterns (called the positive patterns),"
"optionally followed by a '-' and another ':'-separated pattern list (called the negative patterns).")
@CommandArgument('--jobs', '-j', default='1', nargs='?', metavar='jobs', type=int,
help='Run the tests in parallel using multiple processes.')
@CommandArgument('--tbpl-parser', '-t', action='store_true',
help='Output test results in a format that can be parsed by TBPL.')
@CommandArgument('--shuffle', '-s', action='store_true',
help='Randomize the execution order of tests.')

@CommandArgument('--package',
default='org.mozilla.geckoview.test',
help='(Android only) Package name of test app.')

@@ -621,16 +615,16 @@ class GTestCommands(MachCommandBase):
@CommandArgument('--libxul',
dest='libxul_path',
help='(Android only) Path to gtest libxul.so.')

@CommandArgumentGroup('debugging')
@CommandArgument('--debug', action='store_true', group='debugging',
help='Enable the debugger. Not specifying a --debugger option will result in '
'the default debugger being used.')
help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
@CommandArgument('--debugger', default=None, type=str, group='debugging',
help='Name of debugger to use.')
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
group='debugging',
help='Command-line arguments to pass to the debugger itself; '
'split as the Bourne shell would.')
help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')

def gtest(self, shuffle, jobs, gtest_filter, tbpl_parser,
package, adb_path, device_serial, remote_test_root, libxul_path,
debug, debugger, debugger_args):

@@ -669,14 +663,13 @@ class GTestCommands(MachCommandBase):
if debug or debugger or debugger_args:
print("--debug options are not supported on Android and will be ignored")
return self.android_gtest(cwd, shuffle, gtest_filter,
package, adb_path, device_serial,
remote_test_root, libxul_path)
package, adb_path, device_serial, remote_test_root, libxul_path)

if package or adb_path or device_serial or remote_test_root or libxul_path:
print("One or more Android-only options will be ignored")

app_path = self.get_binary_path('app')
args = [app_path, '-unittest', '--gtest_death_test_style=threadsafe']
args = [app_path, '-unittest', '--gtest_death_test_style=threadsafe'];

if sys.platform.startswith('win') and \
'MOZ_LAUNCHER_PROCESS' in self.defines:

@@ -717,7 +710,6 @@ class GTestCommands(MachCommandBase):

from mozprocess import ProcessHandlerMixin
import functools

def handle_line(job_id, line):
# Prepend the jobId
line = '[%d] %s' % (job_id + 1, line.strip())

@@ -730,8 +722,7 @@ class GTestCommands(MachCommandBase):
processes[i] = ProcessHandlerMixin([app_path, "-unittest"],
cwd=cwd,
env=gtest_env,
processOutputLine=[
functools.partial(handle_line, i)],
processOutputLine=[functools.partial(handle_line, i)],
universal_newlines=True)
processes[i].run()

@@ -754,7 +745,7 @@ class GTestCommands(MachCommandBase):
from mozlog.commandline import setup_logging
format_args = {'level': self._mach_context.settings['test']['level']}
default_format = self._mach_context.settings['test']['format']
setup_logging('mach-gtest', {}, {default_format: sys.stdout}, format_args)
log = setup_logging('mach-gtest', {}, {default_format: sys.stdout}, format_args)

# ensure that a device is available and test app is installed
from mozrunner.devices.android_device import (verify_android_device, get_adb_path)

@@ -817,7 +808,6 @@ class GTestCommands(MachCommandBase):
args = [debuggerInfo.path] + debuggerInfo.args + args
return args


@CommandProvider
class ClangCommands(MachCommandBase):
@Command('clang-complete', category='devenv',

@@ -886,7 +876,6 @@ class Package(MachCommandBase):
self.notify('Packaging complete')
return ret


@CommandProvider
class Install(MachCommandBase):
"""Install a package."""

@@ -904,7 +893,6 @@ class Install(MachCommandBase):
self.notify('Install complete')
return ret


@SettingsProvider
class RunSettings():
config_settings = [

@@ -915,7 +903,6 @@ single quoted to force them to be strings.
""".strip()),
]


@CommandProvider
class RunProgram(MachCommandBase):
"""Run the compiled program."""

@@ -925,9 +912,7 @@ class RunProgram(MachCommandBase):
@Command('run', category='post-build',
description='Run the compiled program, possibly under a debugger or DMD.')
@CommandArgument('params', nargs='...', group=prog_group,
help='Command-line arguments to be passed through to the program. Not '
'specifying a --profile or -P option will result in a temporary profile '
'being used.')
help='Command-line arguments to be passed through to the program. Not specifying a --profile or -P option will result in a temporary profile being used.')
@CommandArgumentGroup(prog_group)
@CommandArgument('--remote', '-r', action='store_true', group=prog_group,
help='Do not pass the --no-remote argument by default.')

@@ -940,29 +925,24 @@ class RunProgram(MachCommandBase):
@CommandArgument('--enable-crash-reporter', action='store_true', group=prog_group,
help='Run the program with the crash reporter enabled.')
@CommandArgument('--setpref', action='append', default=[], group=prog_group,
help='Set the specified pref before starting the program. Can be set '
'multiple times. Prefs can also be set in ~/.mozbuild/machrc in the '
'[runprefs] section - see `./mach settings` for more information.')
help='Set the specified pref before starting the program. Can be set multiple times. Prefs can also be set in ~/.mozbuild/machrc in the [runprefs] section - see `./mach settings` for more information.')
@CommandArgument('--temp-profile', action='store_true', group=prog_group,
help='Run the program using a new temporary profile created inside '
'the objdir.')
help='Run the program using a new temporary profile created inside the objdir.')
@CommandArgument('--macos-open', action='store_true', group=prog_group,
help="On macOS, run the program using the open(1) command. Per open(1), "
"the browser is launched \"just as if you had double-clicked the file's "
"icon\". The browser can not be launched under a debugger with this option.")
help="On macOS, run the program using the open(1) command. Per open(1), the browser is launched \"just as if you had double-clicked the file's icon\". The browser can not be launched under a debugger with this option.")

@CommandArgumentGroup('debugging')
@CommandArgument('--debug', action='store_true', group='debugging',
help='Enable the debugger. Not specifying a --debugger option will result '
'in the default debugger being used.')
help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
@CommandArgument('--debugger', default=None, type=str, group='debugging',
help='Name of debugger to use.')
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
group='debugging',
help='Command-line arguments to pass to the debugger itself; '
'split as the Bourne shell would.')
help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')
@CommandArgument('--debugparams', action=StoreDebugParamsAndWarnAction,
default=None, type=str, dest='debugger_args', group='debugging',
help=argparse.SUPPRESS)

@CommandArgumentGroup('DMD')
@CommandArgument('--dmd', action='store_true', group='DMD',
help='Enable DMD. The following arguments have no effect without this.')

@@ -981,10 +961,7 @@ class RunProgram(MachCommandBase):
if dmd:
print("DMD is not supported for Firefox for Android")
return 1
from mozrunner.devices.android_device import (
verify_android_device,
run_firefox_for_android
)
from mozrunner.devices.android_device import verify_android_device, run_firefox_for_android
if not (debug or debugger or debugger_args):
verify_android_device(self, install=True)
return run_firefox_for_android(self, params)

@@ -1135,7 +1112,6 @@ class RunProgram(MachCommandBase):
return self.run_process(args=args, ensure_exit_code=False,
pass_thru=True, append_env=extra_env)


@CommandProvider
class Buildsymbols(MachCommandBase):
"""Produce a package of debug symbols suitable for use with Breakpad."""

@@ -1145,7 +1121,6 @@ class Buildsymbols(MachCommandBase):
def buildsymbols(self):
return self._run_make(directory=".", target='buildsymbols', ensure_exit_code=False)


@CommandProvider
class Makefiles(MachCommandBase):
@Command('empty-makefiles', category='build-dev',

@@ -1202,7 +1177,6 @@ class Makefiles(MachCommandBase):
if f == 'Makefile.in':
yield os.path.join(root, f)


@CommandProvider
class MachDebug(MachCommandBase):
@Command('environment', category='build-dev',

@@ -1278,7 +1252,6 @@ class MachDebug(MachCommandBase):

def _environment_json(self, out, verbose):
import json

class EnvironmentEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, MozbuildObject):

@@ -1694,7 +1667,6 @@ class PackageFrontend(MachCommandBase):

return 0


class StaticAnalysisSubCommand(SubCommand):
def __call__(self, func):
after = SubCommand.__call__(self, func)

@@ -1754,7 +1726,7 @@ class StaticAnalysisMonitor(object):

try:
warning = self._warnings_collector.process_line(line)
except Exception:
except:
pass

if line.find('clang-tidy') != -1:

@@ -1835,8 +1807,8 @@ class StaticAnalysis(MachCommandBase):
help='Output format to write in a file')
@CommandArgument('--outgoing', default=False, action='store_true',
help='Run static analysis checks on outgoing files from mercurial repository')
def check(self, source=None, jobs=2, strip=1, verbose=False, checks='-*',
fix=False, header_filter='', output=None, format='text', outgoing=False):
def check(self, source=None, jobs=2, strip=1, verbose=False,
checks='-*', fix=False, header_filter='', output=None, format='text', outgoing=False):
from mozbuild.controller.building import (
StaticAnalysisFooter,
StaticAnalysisOutputManager,

@@ -1850,8 +1822,7 @@ class StaticAnalysis(MachCommandBase):
return rc

if self._is_version_eligible() is False:
self.log(logging.ERROR, 'static-analysis', {},
"You're using an old version of clang-format binary."
self.log(logging.ERROR, 'static-analysis', {}, "You're using an old version of clang-format binary."
" Please update to a more recent one by running: './mach bootstrap'")
return 1

@@ -1891,13 +1862,11 @@ class StaticAnalysis(MachCommandBase):
args = self._get_clang_tidy_command(
checks=checks, header_filter=header_filter, sources=source, jobs=jobs, fix=fix)

monitor = StaticAnalysisMonitor(
self.topsrcdir, self.topobjdir, self._clang_tidy_config, total)
monitor = StaticAnalysisMonitor(self.topsrcdir, self.topobjdir, self._clang_tidy_config, total)

footer = StaticAnalysisFooter(self.log_manager.terminal, monitor)
with StaticAnalysisOutputManager(self.log_manager, monitor, footer) as output_manager:
rc = self.run_process(args=args, ensure_exit_code=False,
line_handler=output_manager.on_line, cwd=cwd)
rc = self.run_process(args=args, ensure_exit_code=False, line_handler=output_manager.on_line, cwd=cwd)

self.log(logging.WARNING, 'warning_summary',
{'count': len(monitor.warnings_db)},

@@ -1926,18 +1895,16 @@ class StaticAnalysis(MachCommandBase):
help='Write coverity output translated to json output in a file')
@CommandArgument('--coverity_output_path', '-co', default=None,
help='Path where to write coverity results as cov-results.json. '
'If no path is specified the default path from the coverity working '
'directory, ~./mozbuild/coverity is used.')
'If no path is specified the default path from the coverity working directory, '
'~./mozbuild/coverity is used.')
@CommandArgument('--outgoing', default=False, action='store_true',
help='Run coverity on outgoing files from mercurial or git repository')
def check_coverity(self, source=[], output=None, coverity_output_path=None,
outgoing=False, verbose=False):
def check_coverity(self, source=[], output=None, coverity_output_path=None, outgoing=False, verbose=False):
self._set_log_level(verbose)
self.log_manager.enable_all_structured_loggers()

if 'MOZ_AUTOMATION' not in os.environ:
self.log(logging.INFO, 'static-analysis', {},
'Coverity based static-analysis cannot be ran outside automation.')
self.log(logging.INFO, 'static-analysis', {}, 'Coverity based static-analysis cannot be ran outside automation.')
return

# Use outgoing files instead of source files

@@ -1947,8 +1914,7 @@ class StaticAnalysis(MachCommandBase):
source = map(os.path.abspath, files)

if len(source) == 0:
self.log(logging.ERROR, 'static-analysis', {},
'There are no files that coverity can use to scan.')
self.log(logging.ERROR, 'static-analysis', {}, 'There are no files that coverity can use to scan.')
return 0

rc = self._build_compile_db(verbose=verbose)

@@ -1959,8 +1925,7 @@ class StaticAnalysis(MachCommandBase):

commands_list = self.get_files_with_commands(source)
if len(commands_list) == 0:
self.log(logging.INFO, 'static-analysis', {},
'There are no files that need to be analyzed.')
self.log(logging.INFO, 'static-analysis', {}, 'There are no files that need to be analyzed.')
return 0

# Load the configuration file for coverity static-analysis

@@ -1974,37 +1939,31 @@ class StaticAnalysis(MachCommandBase):

# First run cov-run-desktop --setup in order to setup the analysis env
cmd = [self.cov_run_desktop, '--setup']
self.log(logging.INFO, 'static-analysis', {},
'Running {} --setup'.format(self.cov_run_desktop))
self.log(logging.INFO, 'static-analysis', {}, 'Running {} --setup'.format(self.cov_run_desktop))

rc = self.run_process(args=cmd, cwd=self.cov_path, pass_thru=True)

if rc != 0:
self.log(logging.ERROR, 'static-analysis', {},
'Running {} --setup failed!'.format(self.cov_run_desktop))
self.log(logging.ERROR, 'static-analysis', {}, 'Running {} --setup failed!'.format(self.cov_run_desktop))
return rc

# Run cov-configure for clang
cmd = [self.cov_configure, '--clang']
self.log(logging.INFO, 'static-analysis', {},
'Running {} --clang'.format(self.cov_configure))
self.log(logging.INFO, 'static-analysis', {}, 'Running {} --clang'.format(self.cov_configure))

rc = self.run_process(args=cmd, cwd=self.cov_path, pass_thru=True)

if rc != 0:
self.log(logging.ERROR, 'static-analysis', {},
'Running {} --clang failed!'.format(self.cov_configure))
self.log(logging.ERROR, 'static-analysis', {}, 'Running {} --clang failed!'.format(self.cov_configure))
return rc

# For each element in commands_list run `cov-translate`
for element in commands_list:
cmd = [self.cov_translate, '--dir', self.cov_idir_path] + element['command'].split(' ')
self.log(logging.INFO, 'static-analysis', {},
'Running Coverity Tranlate for {}'.format(cmd))
self.log(logging.INFO, 'static-analysis', {}, 'Running Coverity Tranlate for {}'.format(cmd))
rc = self.run_process(args=cmd, cwd=element['directory'], pass_thru=True)
if rc != 0:
self.log(logging.ERROR, 'static-analysis', {},
'Running Coverity Tranlate failed for {}'.format(cmd))
self.log(logging.ERROR, 'static-analysis', {}, 'Running Coverity Tranlate failed for {}'.format(cmd))
return cmd

if coverity_output_path is None:

@@ -2014,8 +1973,7 @@ class StaticAnalysis(MachCommandBase):

# Once the capture is performed we need to do the actual Coverity Desktop analysis
cmd = [self.cov_run_desktop, '--json-output-v6', cov_result, '--analyze-captured-source']
self.log(logging.INFO, 'static-analysis', {},
'Running Coverity Analysis for {}'.format(cmd))
self.log(logging.INFO, 'static-analysis', {}, 'Running Coverity Analysis for {}'.format(cmd))
rc = self.run_process(cmd, cwd=self.cov_state_path, pass_thru=True)
if rc != 0:
self.log(logging.ERROR, 'static-analysis', {}, 'Coverity Analysis failed!')

@@ -2026,16 +1984,14 @@ class StaticAnalysis(MachCommandBase):
def get_reliability_index_for_cov_checker(self, checker_name):
if self._cov_config is None:
self.log(logging.INFO, 'static-analysis', {}, 'Coverity config file not found, '
'using default-value \'reliablity\' = medium. for checker {}'.format(
checker_name))
'using default-value \'reliablity\' = medium. for checker {}'.format(checker_name))
return 'medium'

checkers = self._cov_config['coverity_checkers']
if checker_name not in checkers:
self.log(logging.INFO, 'static-analysis', {},
'Coverity checker {} not found to determine reliability index. '
'For the moment we shall use the default \'reliablity\' = medium.'.format(
checker_name))
'For the moment we shall use the default \'reliablity\' = medium.'.format(checker_name))
return 'medium'

if 'reliability' not in checkers[checker_name]:

@@ -2043,8 +1999,7 @@ class StaticAnalysis(MachCommandBase):
self.log(logging.INFO, 'static-analysis', {},
'Coverity checker {} doesn\'t have a reliability index set, '
'field \'reliability is missing\', please cosinder adding it. '
'For the moment we shall use the default \'reliablity\' = medium.'.format(
checker_name))
'For the moment we shall use the default \'reliablity\' = medium.'.format(checker_name))
return 'medium'

return checkers[checker_name]['reliability']

@@ -2061,16 +2016,13 @@ class StaticAnalysis(MachCommandBase):

def build_element(issue):
# We look only for main event
event_path = next(
(event for event in issue['events'] if event['main'] is True), None)
event_path = next((event for event in issue['events'] if event['main'] is True), None)

dict_issue = {
'line': issue['mainEventLineNumber'],
'flag': issue['checkerName'],
'message': event_path['eventDescription'],
'reliability': self.get_reliability_index_for_cov_checker(
issue['checkerName']
),
'reliability': self.get_reliability_index_for_cov_checker(issue['checkerName']),
'extra': {
'category': issue['checkerProperties']['category'],
'stateOnServer': issue['stateOnServer'],

@@ -2080,8 +2032,7 @@ class StaticAnalysis(MachCommandBase):

# Embed all events into extra message
for event in issue['events']:
dict_issue['extra']['stack'].append(
{'file_path': event['strippedFilePathname'],
dict_issue['extra']['stack'].append({'file_path': event['strippedFilePathname'],
'line_number': event['lineNumber'],
'path_type': event['eventTag'],
'description': event['eventDescription']})

@@ -2092,12 +2043,8 @@ class StaticAnalysis(MachCommandBase):
path = self.cov_is_file_in_source(issue['strippedMainEventFilePathname'], source)
if path is None:
# Since we skip a result we should log it
self.log(logging.INFO, 'static-analysis', {},
'Skipping CID: {0} from file: {1} since it\'s not related '
'with the current patch.'.format(
issue['stateOnServer']['cid'],
issue['strippedMainEventFilePathname'])
)
self.log(logging.INFO, 'static-analysis', {}, 'Skipping CID: {0} from file: {1} since it\'s not related with the current patch.'.format(
issue['stateOnServer']['cid'], issue['strippedMainEventFilePathname']))
continue
if path in files_list:
files_list[path]['warnings'].append(build_element(issue))

@@ -2113,8 +2060,7 @@ class StaticAnalysis(MachCommandBase):
secret_name = 'project/relman/coverity'
secrets_url = '{}/secrets/v1/secret/{}'.format(get_root_url(True), secret_name)

self.log(logging.INFO, 'static-analysis', {},
'Using symbol upload token from the secrets service: "{}"'.format(secrets_url))
self.log(logging.INFO, 'static-analysis', {}, 'Using symbol upload token from the secrets service: "{}"'.format(secrets_url))

import requests
res = requests.get(secrets_url)

@@ -2123,8 +2069,7 @@ class StaticAnalysis(MachCommandBase):
cov_config = secret['secret'] if 'secret' in secret else None

if cov_config is None:
self.log(logging.ERROR, 'static-analysis', {},
'Ill formatted secret for Coverity. Aborting analysis.')
self.log(logging.ERROR, 'static-analysis', {}, 'Ill formatted secret for Coverity. Aborting analysis.')
return 1

self.cov_analysis_url = cov_config.get('package_url')

@@ -2211,8 +2156,7 @@ class StaticAnalysis(MachCommandBase):
self.cov_idir_path = mozpath.join(self.cov_work_path, self.cov_package_ver, 'idir')

if not os.path.exists(self.cov_path):
self.log(logging.ERROR, 'static-analysis', {},
'Missing Coverity in {}'.format(self.cov_path))
self.log(logging.ERROR, 'static-analysis', {}, 'Missing Coverity in {}'.format(self.cov_path))
return 1

return 0

@@ -2382,7 +2326,7 @@ class StaticAnalysis(MachCommandBase):
if item['publish']:
checkers.append(item['name'])
tp_path = mozpath.join(self.topsrcdir, config['third_party'])
except Exception:
except Exception as e:
print('Looks like config.yaml is not valid, so we are unable '
'to determine default checkers, and which folder to '
'exclude, using defaults provided by infer')

@@ -2432,8 +2376,7 @@ class StaticAnalysis(MachCommandBase):
if 'package_version' in self._clang_tidy_config:
version = self._clang_tidy_config['package_version']
else:
self.log(logging.ERROR, 'static-analysis', {},
"Unable to find 'package_version' in the config.yml")
self.log(logging.ERROR, 'static-analysis', {}, "Unable to find 'package_version' in the config.yml")
return False

# Because the fact that we ship together clang-tidy and clang-format

@@ -2448,8 +2391,7 @@ class StaticAnalysis(MachCommandBase):
return True
except subprocess.CalledProcessError as e:
self.log(logging.ERROR, 'static-analysis', {},
"Error determining the version clang-tidy/format binary, please see the "
"attached exception: \n{}".format(e.output))
"Error determining the version clang-tidy/format binary, please see the attached exception: \n{}".format(e.output))

return False

@@ -2593,11 +2535,8 @@ class StaticAnalysis(MachCommandBase):
platform, _ = self.platform

if platform not in self._clang_tidy_config['platforms']:
self.log(
logging.ERROR, 'static-analysis', {},
"RUNNING: clang-tidy autotest for platform {} not supported.".format(
platform)
)
self.log(logging.ERROR, 'static-analysis', {},
"RUNNING: clang-tidy autotest for platform {} not supported.".format(platform))
return self.TOOLS_UNSUPORTED_PLATFORM

import concurrent.futures

@@ -2627,14 +2566,12 @@ class StaticAnalysis(MachCommandBase):
# 1. Checker attribute 'publish' is False.
not_published = not bool(item.get('publish', True))
# 2. Checker has restricted-platforms and current platform is not of them.
ignored_platform = ('restricted-platforms' in item and
platform not in item['restricted-platforms'])
ignored_platform = 'restricted-platforms' in item and platform not in item['restricted-platforms']
# 3. Checker name is mozilla-* or -*.
ignored_checker = item['name'] in ['mozilla-*', '-*']
# 4. List checker_names is passed and the current checker is not part of the
# list or 'publish' is False
checker_not_in_list = checker_names and (
item['name'] not in checker_names or not_published)
checker_not_in_list = checker_names and (item['name'] not in checker_names or not_published)
if not_published or \
ignored_platform or \
ignored_checker or \

@@ -2655,8 +2592,7 @@ class StaticAnalysis(MachCommandBase):

if error_code != self.TOOLS_SUCCESS:

self.log(logging.INFO, 'static-analysis', {},
"FAIL: the following clang-tidy check(s) failed:")
self.log(logging.INFO, 'static-analysis', {}, "FAIL: the following clang-tidy check(s) failed:")
for failure in checkers_results:
checker_error = failure['checker-error']
checker_name = failure['checker-name']

@@ -2666,29 +2602,18 @@ class StaticAnalysis(MachCommandBase):

message_to_log = ''
if checker_error == self.TOOLS_CHECKER_NOT_FOUND:
message_to_log = \
"\tChecker {} not present in this clang-tidy version.".format(
message_to_log = "\tChecker {} not present in this clang-tidy version.".format(
checker_name)
elif checker_error == self.TOOLS_CHECKER_NO_TEST_FILE:
message_to_log = \
"\tChecker {0} does not have a test file - {0}.cpp".format(
checker_name)
message_to_log = "\tChecker {0} does not have a test file - {0}.cpp".format(checker_name)
elif checker_error == self.TOOLS_CHECKER_RETURNED_NO_ISSUES:
message_to_log = (
"\tChecker {0} did not find any issues in its test file, "
"clang-tidy output for the run is:\n{1}"
).format(checker_name, info1)
message_to_log = "\tChecker {0} did not find any issues in its test file, clang-tidy output for the run is:\n{1}".format(
checker_name, info1)
elif checker_error == self.TOOLS_CHECKER_RESULT_FILE_NOT_FOUND:
message_to_log = \
"\tChecker {0} does not have a result file - {0}.json".format(
checker_name)
message_to_log = "\tChecker {0} does not have a result file - {0}.json".format(checker_name)
elif checker_error == self.TOOLS_CHECKER_DIFF_FAILED:
message_to_log = (
"\tChecker {0}\nExpected: {1}\n"
"Got: {2}\n"
"clang-tidy output for the run is:\n"
"{3}"
).format(checker_name, info1, info2, info3)
message_to_log = "\tChecker {0}\nExpected: {1}\nGot: {2}\nclang-tidy output for the run is:\n{3}".format(
checker_name, info1, info2, info3)

print('\n'+message_to_log)

@@ -2722,19 +2647,14 @@ class StaticAnalysis(MachCommandBase):
return self._parse_issues(clang_output), clang_output

def _run_analysis_batch(self, items):
self.log(logging.INFO, 'static-analysis', {},
"RUNNING: clang-tidy checker batch analysis.")
self.log(logging.INFO, 'static-analysis', {},"RUNNING: clang-tidy checker batch analysis.")
if not len(items):
self.log(logging.ERROR, 'static-analysis', {},
"ERROR: clang-tidy checker list is empty!")
self.log(logging.ERROR, 'static-analysis', {}, "ERROR: clang-tidy checker list is empty!")
return self.TOOLS_CHECKER_LIST_EMPTY

issues, clang_output = self._run_analysis(
checks='-*,' + ",".join(items),
header_filter='',
sources=[mozpath.join(self._clang_tidy_base_path, "test", checker) + '.cpp'
for checker in items],
print_out=True)
checks='-*,' + ",".join(items), header_filter='',
sources=[mozpath.join(self._clang_tidy_base_path, "test", checker) + '.cpp' for checker in items], print_out=True)

if issues is None:
return self.TOOLS_CHECKER_FAILED_FILE

@@ -2742,8 +2662,7 @@ class StaticAnalysis(MachCommandBase):
failed_checks = []
failed_checks_baseline = []
for checker in items:
test_file_path_json = mozpath.join(
self._clang_tidy_base_path, "test", checker) + '.json'
test_file_path_json = mozpath.join(self._clang_tidy_base_path, "test", checker) + '.json'
# Read the pre-determined issues
baseline_issues = self._get_autotest_stored_issues(test_file_path_json)

@@ -2757,15 +2676,12 @@ class StaticAnalysis(MachCommandBase):
failed_checks_baseline.append(baseline_issues)

if len(failed_checks) > 0:
self.log(logging.ERROR, 'static-analysis', {},
'The following check(s) failed for bulk analysis: ' + ' '.join(failed_checks))
self.log(logging.ERROR, 'static-analysis', {}, 'The following check(s) failed for bulk analysis: ' + ' '.join(failed_checks))

for failed_check, baseline_issue in zip(failed_checks, failed_checks_baseline):
print('\tChecker {0} expect following results: \n\t\t{1}'.format(
failed_check, baseline_issue))
print('\tChecker {0} expect following results: \n\t\t{1}'.format(failed_check, baseline_issue))

print('This is the output generated by clang-tidy for the bulk build:\n{}'.format(
clang_output))
print('This is the output generated by clang-tidy for the bulk build:\n{}'.format(clang_output))
return self.TOOLS_CHECKER_DIFF_FAILED

return self.TOOLS_SUCCESS

@@ -2926,11 +2842,9 @@ class StaticAnalysis(MachCommandBase):
@CommandArgument('--force', action='store_true',
help='Force re-install even though the tool exists in mozbuild.',
default=False)
@CommandArgument('--minimal-install', action='store_true',
help='Download only clang based tool.',
@CommandArgument('--minimal-install', action='store_true', help='Download only clang based tool.',
default=False)
def install(self, source=None, skip_cache=False, force=False, minimal_install=False,
verbose=False):
def install(self, source=None, skip_cache=False, force=False, minimal_install=False, verbose=False):
self._set_log_level(verbose)
rc = self._get_clang_tools(force=force, skip_cache=skip_cache,
source=source, verbose=verbose)

@@ -2994,14 +2908,12 @@ class StaticAnalysis(MachCommandBase):
@CommandArgument('--output', '-o', default=None, dest='output_path',
help='Specify a file handle to write clang-format raw output instead of '
'applying changes. This can be stdout or a file path.')
@CommandArgument('--format', '-f', choices=('diff', 'json'), default='diff',
dest='output_format',
@CommandArgument('--format', '-f', choices=('diff', 'json'), default='diff', dest='output_format',
help='Specify the output format used: diff is the raw patch provided by '
'clang-format, json is a list of atomic changes to process.')
@CommandArgument('--outgoing', default=False, action='store_true',
help='Run clang-format on outgoing files from mercurial repository')
def clang_format(self, assume_filename, path, commit, output_path=None, output_format='diff',
verbose=False, outgoing=False):
def clang_format(self, assume_filename, path, commit, output_path=None, output_format='diff', verbose=False, outgoing=False):
# Run clang-format or clang-format-diff on the local changes
# or files/directories
if path is None and outgoing:

@@ -3010,7 +2922,7 @@ class StaticAnalysis(MachCommandBase):

if path:
# Create the full path list
def path_maker(f_name): return os.path.join(self.topsrcdir, f_name)
path_maker = lambda f_name: os.path.join(self.topsrcdir, f_name)
path = map(path_maker, path)

os.chdir(self.topsrcdir)

@@ -3042,8 +2954,7 @@ class StaticAnalysis(MachCommandBase):
return rc

if self._is_version_eligible() is False:
self.log(logging.ERROR, 'static-analysis', {},
"You're using an old version of clang-format binary."
self.log(logging.ERROR, 'static-analysis', {}, "You're using an old version of clang-format binary."
" Please update to a more recent one by running: './mach bootstrap'")
return 1

@@ -3052,8 +2963,7 @@ class StaticAnalysis(MachCommandBase):
self._clang_format_path, commit, output)

if assume_filename:
return self._run_clang_format_in_console(self._clang_format_path,
path, assume_filename)
return self._run_clang_format_in_console(self._clang_format_path, path, assume_filename)

return self._run_clang_format_path(self._clang_format_path, path, output, output_format)

@@ -3076,7 +2986,7 @@ class StaticAnalysis(MachCommandBase):
}

# Verify if this checker actually exists
if check not in self._clang_tidy_checks:
if not check in self._clang_tidy_checks:
checker_error['checker-error'] = self.TOOLS_CHECKER_NOT_FOUND
checkers_results.append(checker_error)
return self.TOOLS_CHECKER_NOT_FOUND

@@ -3220,7 +3130,7 @@ class StaticAnalysis(MachCommandBase):
ran_configure = True
try:
config = self.config_environment
except Exception:
except Exception as e:
pass

return (0, config, ran_configure)

@@ -3282,10 +3192,10 @@ class StaticAnalysis(MachCommandBase):
self._clang_apply_replacements = mozpath.join(
self._clang_tools_path, "clang-tidy", "bin",
"clang-apply-replacements" + config.substs.get('BIN_SUFFIX', ''))
self._run_clang_tidy_path = mozpath.join(self._clang_tools_path, "clang-tidy",
"share", "clang", "run-clang-tidy.py")
self._clang_format_diff = mozpath.join(self._clang_tools_path, "clang-tidy",
"share", "clang", "clang-format-diff.py")
self._run_clang_tidy_path = mozpath.join(self._clang_tools_path, "clang-tidy", "share", "clang",
"run-clang-tidy.py")
self._clang_format_diff = mozpath.join(self._clang_tools_path, "clang-tidy", "share", "clang",
"clang-format-diff.py")
return 0

def _do_clang_tools_exist(self):

@@ -3596,8 +3506,7 @@ class StaticAnalysis(MachCommandBase):
# here, we expect changes. if we are here, this means that
# there is a diff to show
if e.output:
# Replace the temp path by the path relative to the repository to
# display a valid patch
# Replace the temp path by the path relative to the repository to display a valid patch
relative_path = os.path.relpath(original_path, self.topsrcdir)
patch = e.output.replace(target_file, relative_path)
patch = patch.replace(original_path, relative_path)

@@ -3710,13 +3619,11 @@ class Vendor(MachCommandBase):
vendor_command.vendor(**kwargs)

@SubCommand('vendor', 'aom',
description='Vendor av1 video codec reference implementation into the '
'source repository.')
description='Vendor av1 video codec reference implementation into the source repository.')
@CommandArgument('-r', '--revision',
help='Repository tag or commit to update to.')
@CommandArgument('--repo',
help='Repository url to pull a snapshot from. '
'Supports github and googlesource.')
help='Repository url to pull a snapshot from. Supports github and googlesource.')
@CommandArgument('--ignore-modified', action='store_true',
help='Ignore modified files in current checkout',
default=False)

@@ -3724,7 +3631,6 @@ class Vendor(MachCommandBase):
from mozbuild.vendor_aom import VendorAOM
vendor_command = self._spawn(VendorAOM)
vendor_command.vendor(**kwargs)

@SubCommand('vendor', 'dav1d',
description='Vendor dav1d implementation of AV1 into the source repository.')
@CommandArgument('-r', '--revision',

@@ -3744,11 +3650,7 @@ class Vendor(MachCommandBase):
@CommandArgument('--with-windows-wheel', action='store_true',
help='Vendor a wheel for Windows along with the source package',
default=False)
@CommandArgument('packages', default=None, nargs='*',
help='Packages to vendor. If omitted, packages and their dependencies '
'defined in Pipfile.lock will be vendored. If Pipfile has been modified, '
'then Pipfile.lock will be regenerated. Note that transient dependencies '
'may be updated when running this command.')
@CommandArgument('packages', default=None, nargs='*', help='Packages to vendor. If omitted, packages and their dependencies defined in Pipfile.lock will be vendored. If Pipfile has been modified, then Pipfile.lock will be regenerated. Note that transient dependencies may be updated when running this command.')
def vendor_python(self, **kwargs):
from mozbuild.vendor_python import VendorPython
vendor_command = self._spawn(VendorPython)

@@ -3766,25 +3668,21 @@ class Vendor(MachCommandBase):
from mozbuild.vendor_manifest import verify_manifests
verify_manifests(files)


@CommandProvider
class WebRTCGTestCommands(GTestCommands):
@Command('webrtc-gtest', category='testing',
description='Run WebRTC.org GTest unit tests.')
@CommandArgument('gtest_filter', default=b"*", nargs='?', metavar='gtest_filter',
help="test_filter is a ':'-separated list of wildcard patterns "
"(called the positive patterns), optionally followed by a '-' and "
"another ':'-separated pattern list (called the negative patterns).")
help="test_filter is a ':'-separated list of wildcard patterns (called the positive patterns),"
"optionally followed by a '-' and another ':'-separated pattern list (called the negative patterns).")
@CommandArgumentGroup('debugging')
@CommandArgument('--debug', action='store_true', group='debugging',
help='Enable the debugger. Not specifying a --debugger option will '
'result in the default debugger being used.')
help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
@CommandArgument('--debugger', default=None, type=str, group='debugging',
help='Name of debugger to use.')
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
group='debugging',
help='Command-line arguments to pass to the debugger itself; '
'split as the Bourne shell would.')
help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')
def gtest(self, gtest_filter, debug, debugger,
debugger_args):
app_path = self.get_binary_path('webrtc-gtest')

@@ -3816,7 +3714,6 @@ class WebRTCGTestCommands(GTestCommands):
ensure_exit_code=False,
pass_thru=True)


@CommandProvider
class Repackage(MachCommandBase):
'''Repackages artifacts into different formats.

@@ -3927,7 +3824,6 @@ class Repackage(MachCommandBase):
from mozbuild.repackaging.mar import repackage_mar
repackage_mar(self.topsrcdir, input, mar, output, format, arch=arch)


@CommandProvider
class Analyze(MachCommandBase):
""" Get information about a file in the build graph """

@@ -3955,8 +3851,7 @@ class Analyze(MachCommandBase):
return 1

@SubCommand('analyze', 'all',
description='Get a report of files changed within the last n days and '
'their corresponding build cost.')
description='Get a report of files changed within the last n days and their corresponding build cost.')
@CommandArgument('--days', '-d', type=int, default=14,
help='Number of days to include in the report.')
@CommandArgument('--format', default='pretty',

@@ -4074,8 +3969,7 @@ class L10NCommands(MachCommandBase):
self.log(logging.INFO, 'package-multi-locale', {},
'Invoking `mach android archive-geckoview`')
self.run_process(
[mozpath.join(self.topsrcdir, 'mach'), 'android',
'archive-geckoview'.format(locale)],
[mozpath.join(self.topsrcdir, 'mach'), 'android', 'archive-geckoview'.format(locale)],
append_env=append_env,
pass_thru=True,
ensure_exit_code=True,

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import os
import re

@@ -62,7 +62,6 @@ class _SimpleOrderedSet(object):
It doesn't expose a complete API, and normalizes path separators
at insertion.
'''

def __init__(self):
self._list = []
self._set = set()

@@ -96,7 +95,6 @@ class Rule(object):
command2
...
'''

def __init__(self, targets=[]):
self._targets = _SimpleOrderedSet()
self._dependencies = _SimpleOrderedSet()

@@ -129,7 +127,7 @@ class Rule(object):

def dependencies(self):
'''Return an iterator on the rule dependencies.'''
return iter(d for d in self._dependencies if d not in self._targets)
return iter(d for d in self._dependencies if not d in self._targets)

def commands(self):
'''Return an iterator on the rule commands.'''

@@ -177,7 +175,6 @@ def read_dep_makefile(fh):
if rule:
raise Exception('Makefile finishes with a backslash. Expected more input.')


def write_dep_makefile(fh, target, deps):
'''
Write a Makefile containing only target's dependencies to the file handle

@@ -306,7 +306,6 @@ def _schema_1_additional(filename, manifest, require_license_file=True):
class License(object):
"""Voluptuous validator which verifies the license(s) are valid as per our
whitelist."""

def __call__(self, values):
if isinstance(values, str):
values = [values]

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, unicode_literals

import filecmp
import os

@@ -11,6 +11,7 @@ import sys
import subprocess
import traceback

from collections import defaultdict
from mozpack import path as mozpath


@@ -305,7 +306,7 @@ class MozconfigLoader(object):

# Environment variables also appear as shell variables, but that's
# uninteresting duplication of information. Filter them out.
def filt(x, y): return {k: v for k, v in x.items() if k not in y}
filt = lambda x, y: {k: v for k, v in x.items() if k not in y}
result['vars'] = diff_vars(
filt(parsed['vars_before'], parsed['env_before']),
filt(parsed['vars_after'], parsed['env_after'])

@@ -5,7 +5,7 @@
# This module produces a JSON file that provides basic build info and
# configuration metadata.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import os
import re

@@ -130,7 +130,7 @@ def build_dict(config, env=os.environ):
d['platform_guess'] = guess_platform()
d['buildtype_guess'] = guess_buildtype()

if d.get('buildapp', '') == 'mobile/android' and 'MOZ_ANDROID_MIN_SDK_VERSION' in substs:
if 'buildapp' in d and d['buildapp'] == 'mobile/android' and 'MOZ_ANDROID_MIN_SDK_VERSION' in substs:
d['android_min_sdk'] = substs['MOZ_ANDROID_MIN_SDK_VERSION']

return d

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import os
import subprocess
@ -22,14 +22,12 @@ value :
|
|||
| \w+ # string identifier or value;
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
from optparse import OptionParser
|
||||
import errno
|
||||
from mozbuild.makeutil import Makefile
|
||||
from makeutil import Makefile
|
||||
|
||||
# hack around win32 mangling our line endings
|
||||
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65443
|
||||
|
@ -200,7 +198,6 @@ class Expression:
|
|||
rv = not rv
|
||||
return rv
|
||||
# Helper function to evaluate __get_logical_and and __get_logical_or results
|
||||
|
||||
def eval_logical_op(tok):
|
||||
left = opmap[tok[0].type](tok[0])
|
||||
right = opmap[tok[2].type](tok[2])
|
||||
|
@ -220,13 +217,12 @@ class Expression:
|
|||
'defined': lambda tok: tok.value in context,
|
||||
'int': lambda tok: tok.value}
|
||||
|
||||
return opmap[self.e.type](self.e)
|
||||
return opmap[self.e.type](self.e);
|
||||
|
||||
class __AST(list):
|
||||
"""
|
||||
Internal class implementing Abstract Syntax Tree nodes
|
||||
"""
|
||||
|
||||
def __init__(self, type):
|
||||
self.type = type
|
||||
super(self.__class__, self).__init__(self)
|
||||
|
@ -235,14 +231,11 @@ class Expression:
|
|||
"""
|
||||
Internal class implementing Abstract Syntax Tree leafs
|
||||
"""
|
||||
|
||||
def __init__(self, type, value):
|
||||
self.value = value
|
||||
self.type = type
|
||||
|
||||
def __str__(self):
|
||||
return self.value.__str__()
|
||||
|
||||
def __repr__(self):
|
||||
return self.value.__repr__()
|
||||
|
||||
|
@ -252,16 +245,13 @@ class Expression:
|
|||
It has two members, offset and content, which give the offset of the
|
||||
error and the offending content.
|
||||
"""
|
||||
|
||||
def __init__(self, expression):
|
||||
self.offset = expression.offset
|
||||
self.content = expression.content[:3]
|
||||
|
||||
def __str__(self):
|
||||
return 'Unexpected content at offset {0}, "{1}"'.format(self.offset,
|
||||
self.content)
|
||||
|
||||
|
||||
class Context(dict):
|
||||
"""
|
||||
This class holds variable values by subclassing dict, and while it
|
||||
|
@ -276,7 +266,6 @@ class Context(dict):
|
|||
to reflect the ambiguity between string literals and preprocessor
|
||||
variables.
|
||||
"""
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key in self:
|
||||
return super(self.__class__, self).__getitem__(key)
|
||||
|
@ -352,8 +341,7 @@ class Preprocessor:
|
|||
elif self.actionLevel == 1:
|
||||
msg = 'no useful preprocessor directives found'
|
||||
if msg:
|
||||
class Fake(object):
|
||||
pass
|
||||
class Fake(object): pass
|
||||
fake = Fake()
|
||||
fake.context = {
|
||||
'FILE': file,
|
||||
|
@ -497,6 +485,11 @@ class Preprocessor:
|
|||
if not options.output:
|
||||
raise Preprocessor.Error(self, "--depend doesn't work with stdout",
|
||||
None)
|
||||
try:
|
||||
from makeutil import Makefile
|
||||
except:
|
||||
raise Preprocessor.Error(self, "--depend requires the "
|
||||
"mozbuild.makeutil module", None)
|
||||
depfile = get_output_file(options.depend)
|
||||
|
||||
if args:
|
||||
|
@ -515,7 +508,6 @@ class Preprocessor:
|
|||
def getCommandLineParser(self, unescapeDefines = False):
|
||||
escapedValue = re.compile('".*"$')
|
||||
numberValue = re.compile('\d+$')
|
||||
|
||||
def handleD(option, opt, value, parser):
|
||||
vals = value.split('=', 1)
|
||||
if len(vals) == 1:
|
||||
|
@ -526,16 +518,12 @@ class Preprocessor:
|
|||
elif numberValue.match(vals[1]):
|
||||
vals[1] = int(vals[1])
|
||||
self.context[vals[0]] = vals[1]
|
||||
|
||||
def handleU(option, opt, value, parser):
|
||||
del self.context[value]
|
||||
|
||||
def handleF(option, opt, value, parser):
|
||||
self.do_filter(value)
|
||||
|
||||
def handleMarker(option, opt, value, parser):
|
||||
self.setMarker(value)
|
||||
|
||||
def handleSilenceDirectiveWarnings(option, opt, value, parse):
|
||||
self.setSilenceDirectiveWarnings(True)
|
||||
p = OptionParser()
|
||||
|
@ -595,10 +583,9 @@ class Preprocessor:
|
|||
val = self.applyFilters(m.group('value'))
|
||||
try:
|
||||
val = int(val)
|
||||
except Exception:
|
||||
except:
|
||||
pass
|
||||
self.context[m.group('name')] = val
|
||||
|
||||
def do_undef(self, args):
|
||||
m = re.match('(?P<name>\w+)$', args, re.U)
|
||||
if not m:
|
||||
|
@ -606,11 +593,9 @@ class Preprocessor:
|
|||
if args in self.context:
|
||||
del self.context[args]
|
||||
# Logic
|
||||
|
||||
def ensure_not_else(self):
|
||||
if len(self.ifStates) == 0 or self.ifStates[-1] == 2:
|
||||
sys.stderr.write('WARNING: bad nesting of #else in %s\n' % self.context['FILE'])
|
||||
|
||||
def do_if(self, args, replace=False):
|
||||
if self.disableLevel and not replace:
|
||||
self.disableLevel += 1
|
||||
|
@ -634,7 +619,6 @@ class Preprocessor:
|
|||
else:
|
||||
self.ifStates.append(self.disableLevel)
|
||||
pass
|
||||
|
||||
def do_ifdef(self, args, replace=False):
|
||||
if self.disableLevel and not replace:
|
||||
self.disableLevel += 1
|
||||
|
@ -650,7 +634,6 @@ class Preprocessor:
|
|||
else:
|
||||
self.ifStates.append(self.disableLevel)
|
||||
pass
|
||||
|
||||
def do_ifndef(self, args, replace=False):
|
||||
if self.disableLevel and not replace:
|
||||
self.disableLevel += 1
|
||||
|
@ -666,7 +649,6 @@ class Preprocessor:
|
|||
else:
|
||||
self.ifStates.append(self.disableLevel)
|
||||
pass
|
||||
|
||||
def do_else(self, args, ifState = 2):
|
||||
self.ensure_not_else()
|
||||
hadTrue = self.ifStates[-1] == 0
|
||||
|
@@ -675,38 +657,33 @@ class Preprocessor:
            self.disableLevel = 1
            return
        self.disableLevel = 0

    def do_elif(self, args):
        if self.disableLevel == 1:
            if self.ifStates[-1] == 1:
                self.do_if(args, replace=True)
        else:
            self.do_else(None, self.ifStates[-1])

    def do_elifdef(self, args):
        if self.disableLevel == 1:
            if self.ifStates[-1] == 1:
                self.do_ifdef(args, replace=True)
        else:
            self.do_else(None, self.ifStates[-1])

    def do_elifndef(self, args):
        if self.disableLevel == 1:
            if self.ifStates[-1] == 1:
                self.do_ifndef(args, replace=True)
        else:
            self.do_else(None, self.ifStates[-1])

    def do_endif(self, args):
        if self.disableLevel > 0:
            self.disableLevel -= 1
        if self.disableLevel == 0:
            self.ifStates.pop()
    # output processing

    def do_expand(self, args):
        lst = re.split('__(\w+)__', args, re.U)

        do_replace = False
        def vsubst(v):
            if v in self.context:
                return str(self.context[v])
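The do_if/do_ifdef/do_elifdef/do_else/do_endif family above implements the directive language the preprocessor consumes from .in files. A small illustrative input (directive marker '#', define names invented):

    #ifdef MOZ_DEBUG
    dump("debug-only diagnostics");
    #elifdef MOZ_NIGHTLY
    dump("nightly-only diagnostics");
    #else
    // neither define set: emit nothing special
    #endif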
@@ -715,10 +692,8 @@ class Preprocessor:
            lst[i] = vsubst(lst[i])
        lst.append('\n')  # add back the newline
        self.write(reduce(lambda x, y: x+y, lst, ''))

    def do_literal(self, args):
        self.write(args + '\n')

    def do_filter(self, args):
        filters = [f for f in args.split(' ') if hasattr(self, 'filter_' + f)]
        if len(filters) == 0:
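For context on do_expand above: it splits its argument on __NAME__ tokens and, judging from vsubst, replaces defined names with their context value (undefined names appear to expand to nothing). An illustrative input line, with the define name invented:

    #expand const CHANNEL = "__MOZ_UPDATE_CHANNEL__";

With MOZ_UPDATE_CHANNEL defined as nightly, this emits const CHANNEL = "nightly";.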
@@ -730,7 +705,6 @@ class Preprocessor:
        filterNames.sort()
        self.filters = [(fn, current[fn]) for fn in filterNames]
        return

    def do_unfilter(self, args):
        filters = args.split(' ')
        current = dict(self.filters)
@@ -745,14 +719,12 @@ class Preprocessor:
    #
    # emptyLines
    # Strips blank lines from the output.

    def filter_emptyLines(self, aLine):
        if aLine == '\n':
            return ''
        return aLine
    # slashslash
    # Strips everything after //

    def filter_slashslash(self, aLine):
        if (aLine.find('//') == -1):
            return aLine
@@ -762,12 +734,10 @@ class Preprocessor:
        return aLine
    # spaces
    # Collapses sequences of spaces into a single space

    def filter_spaces(self, aLine):
        return re.sub(' +', ' ', aLine).strip(' ')
    # substitution
    # helper to be used by both substitution and attemptSubstitution

    def filter_substitution(self, aLine, fatal=True):
        def repl(matchobj):
            varname = matchobj.group('VAR')
@@ -777,11 +747,9 @@ class Preprocessor:
                raise Preprocessor.Error(self, 'UNDEFINED_VAR', varname)
            return matchobj.group(0)
        return self.varsubst.sub(repl, aLine)

    def filter_attemptSubstitution(self, aLine):
        return self.filter_substitution(aLine, fatal=False)
    # File ops

    def do_include(self, args, filters=True):
        """
        Preprocess a given file.
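The two filters above differ only in error handling: filter_substitution expands @VAR@ references from the context and raises UNDEFINED_VAR for unknown names, while filter_attemptSubstitution passes unknown references through. A sketch of the observable behavior (values invented):

    from mozbuild.preprocessor import Preprocessor

    pp = Preprocessor()
    pp.context['MOZ_APP_NAME'] = 'firefox'
    assert pp.filter_substitution('name=@MOZ_APP_NAME@\n') == 'name=firefox\n'
    # fatal=False: an unknown @VAR@ survives instead of raising
    assert pp.filter_attemptSubstitution('x=@UNKNOWN@\n') == 'x=@UNKNOWN@\n'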
@@ -801,7 +769,7 @@ class Preprocessor:
                args = open(args, 'rU')
            except Preprocessor.Error:
                raise
            except Exception:
            except:
                raise Preprocessor.Error(self, 'FILE_NOT_FOUND', str(args))
            self.checkLineNumbers = bool(re.search('\.(js|jsm|java|webidl)(?:\.in)?$', args.name))
            oldFile = self.context['FILE']
@@ -838,11 +806,9 @@ class Preprocessor:
            self.context['LINE'] = oldLine
            self.context['DIRECTORY'] = oldDir
            self.curdir = oldCurdir

    def do_includesubst(self, args):
        args = self.filter_substitution(args)
        self.do_include(args)

    def do_error(self, args):
        raise Preprocessor.Error(self, 'Error: ', str(args))

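Putting the class together, a hedged end-to-end usage sketch (paths invented; the defines constructor argument and the out attribute are assumed from this era of the class):

    from mozbuild.preprocessor import Preprocessor

    pp = Preprocessor(defines={'MOZ_APP_NAME': 'firefox'})
    with open('browser.js', 'w') as out:
        pp.out = out
        pp.do_include('browser.js.in')  # run directives and filters over the file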
@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import os
import subprocess

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import os
import ConfigParser
import mozpack.path as mozpath

@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import errno
import os
import tempfile
@@ -11,8 +9,7 @@ import tarfile
import shutil
import mozpack.path as mozpath
from mozpack.dmg import create_dmg
from mozbuild.repackaging.application_ini import get_application_ini_value

from application_ini import get_application_ini_value

def repackage_dmg(infile, output):

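A note on the import pair in the hunk above: with from __future__ import absolute_import in force (part of the change being backed out), only the fully qualified form resolves on Python 2, which is why the import spelling travels together with the __future__ line:

    # Under absolute_import semantics, the full package path is required:
    from mozbuild.repackaging.application_ini import get_application_ini_value

    # Without absolute_import, Python 2 also accepts the implicit relative
    # form when application_ini.py sits next to the importing module:
    # from application_ini import get_application_ini_value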
@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import os
import tempfile
import shutil
@@ -13,8 +11,7 @@ from mozbuild.action.exe_7z_archive import archive_exe
from mozbuild.util import ensureParentDir


def repackage_installer(topsrcdir, tag, setupexe, package, output,
                        package_name, sfx_stub, use_upx):
def repackage_installer(topsrcdir, tag, setupexe, package, output, package_name, sfx_stub, use_upx):
    if package and not zipfile.is_zipfile(package):
        raise Exception("Package file %s is not a valid .zip file." % package)
    if package is not None and package_name is None:

|
@ -2,8 +2,6 @@
|
|||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
|
@ -12,7 +10,7 @@ import zipfile
|
|||
import tarfile
|
||||
import subprocess
|
||||
import mozpack.path as mozpath
|
||||
from mozbuild.repackaging.application_ini import get_application_ini_value
|
||||
from application_ini import get_application_ini_value
|
||||
from mozbuild.util import ensureParentDir
|
||||
|
||||
|
||||
|
|
|
@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import os
import tempfile
import shutil
@@ -18,7 +16,6 @@ _MSI_ARCH = {
    'x86_64': 'x64',
}


def update_wsx(wfile, pvalues):

    parsed = minidom.parse(wfile)
@@ -59,7 +56,7 @@ def repackage_msi(topsrcdir, wsx, version, locale, arch, setupexe, candle, light
        raise Exception("%s does not exist." % light)
    embeddedVersion = '0.0.0.0'
    # Version string cannot contain 'a' or 'b' when embedding in msi manifest.
    if 'a' not in version and 'b' not in version:
    if not 'a' in version and not 'b' in version:
        if version.endswith('esr'):
            parts = version[:-3].split('.')
        else:

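The last hunk reverts 'a' not in version to the older not 'a' in version; the two are semantically identical. To make the surrounding intent concrete, a hypothetical helper (the padding details are an assumption, not lifted from this patch): alpha/beta version strings keep the 0.0.0.0 placeholder, while release and esr versions are reduced to four numeric fields for the MSI manifest:

    def embeddable_version(version):
        # "69.0a1" / "69.0b4": letters are not allowed in an MSI version
        if 'a' in version or 'b' in version:
            return '0.0.0.0'
        if version.endswith('esr'):
            version = version[:-3]
        parts = version.split('.')
        while len(parts) < 4:      # MSI expects four numeric fields
            parts.append('0')
        return '.'.join(parts[:4])

    assert embeddable_version('69.0a1') == '0.0.0.0'
    assert embeddable_version('60.7.0esr') == '60.7.0.0'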
@@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function

import re


@@ -24,7 +22,6 @@ def _tokens2re(**tokens):
    # backslash, captured in the "escape" match group.
    return re.compile('(?:%s|%s)' % (nonescaped, r'(?P<escape>\\\\)'))


UNQUOTED_TOKENS_RE = _tokens2re(
    whitespace=r'[\t\r\n ]+',
    quote=r'[\'"]',
@@ -57,7 +54,6 @@ class _ClineSplitter(object):
    Parses a given command line string and creates a list of command
    and arguments, with wildcard expansion.
    '''

    def __init__(self, cline):
        self.arg = None
        self.cline = cline

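These hunks touch mozbuild's shell-like command-line splitter. Assuming the module's public split() helper wraps _ClineSplitter (true for this era of the tree), the behavior looks like:

    from mozbuild.shellutil import split

    # Runs of whitespace separate arguments; quotes group them.
    assert split('foo  "bar baz"\tqux') == ['foo', 'bar baz', 'qux']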
@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function
from __future__ import absolute_import

import importlib
import os

@@ -238,7 +238,6 @@ def get_build_attrs(attrs):
        res['cpu_percent'] = int(round(usage['cpu_percent']))
    return res


def filter_args(command, argv, paths):
    '''
    Given the full list of command-line arguments, remove anything up to and including `command`,