Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1542963 - Fix most flake8 issues in python/mozbuild/mozbuild/* r=firefox-build-system-reviewers,chmanchester

Leaving one fix for an explicit review. Lint python/mozbuild/{mozbuild,mozpack}. r=#build

Differential Revision: https://phabricator.services.mozilla.com/D26641

--HG--
extra : moz-landing-system : lando

This commit is contained in:
Parent: 4a12eb86b4
Commit: 05822ea753

.flake8 | 2
@@ -92,6 +92,8 @@ ignore =

 per-file-ignores =
     ipc/ipdl/*: F403, F405
+    # cpp_eclipse has a lot of multi-line embedded XML which exceeds line length
+    python/mozbuild/mozbuild/backend/cpp_eclipse.py: E501
     testing/firefox-ui/**/__init__.py: F401
     testing/marionette/**/__init__.py: F401
     testing/mozharness/configs/*: E124, E127, E128, E131, E231, E261, E265, E266, E501, W391
@@ -37,7 +37,7 @@ def addEntriesToListFile(listFile, entries):
        with open(listFile, 'wb') as f:
            f.write("\n".join(sorted(existing))+"\n")
    finally:
-       lock = None
+       del lock  # Explicitly release the lock_file to free it


 def main(args):
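For context, the `del lock` rewrite above is the usual fix for flake8's unused-assignment warning (likely F841): `lock = None` leaves a dead store that the linter flags, while `del lock` drops the last reference (releasing the lock_file) without one. A minimal sketch of the pattern, using a hypothetical `acquire_lock` helper in place of the real locking code:

    def update_file(path, acquire_lock):
        lock = acquire_lock(path)  # hypothetical helper returning a lock object
        try:
            pass  # ... mutate the file while the lock is held ...
        finally:
            # `lock = None` would be flagged as an assignment that is never
            # read; `del lock` frees the lock and unbinds the name in one step.
            del lock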
@@ -4,7 +4,6 @@

 # This action is used to generate the wpt manifest

-import os
 import sys

 import buildconfig
@@ -59,8 +59,9 @@ def dump_symbols(target, tracking_file, count_ctors=False):
    if objcopy:
        os.environ['OBJCOPY'] = objcopy

-    args = ([buildconfig.substs['PYTHON'], os.path.join(buildconfig.topsrcdir, 'toolkit',
-                                                        'crashreporter', 'tools', 'symbolstore.py')] +
+    args = ([buildconfig.substs['PYTHON'],
+             os.path.join(buildconfig.topsrcdir, 'toolkit',
+                          'crashreporter', 'tools', 'symbolstore.py')] +
            sym_store_args +
            ['-s', buildconfig.topsrcdir, dump_syms_bin, os.path.join(buildconfig.topobjdir,
                                                                      'dist',
@@ -31,8 +31,10 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
    except BuildEnvironmentNotFoundException:
        # configure hasn't been run, just use the default
        sevenz = '7z'
-    subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx',
-                           '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])
+    subprocess.check_call([
+        sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx',
+        '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1',
+        '-mb0s1:2', '-mb0s2:3'])

    with open(package, 'wb') as o:
        for i in [final_sfx, tagfile, mozpath.join(tmpdir, 'app.7z')]:
@@ -69,7 +69,7 @@ def main(argv):
    with FileAvoidWrite(args.output_file, mode='rb') as output:
        try:
            ret = module.__dict__[method](output, *args.additional_arguments, **kwargs)
-        except:
+        except Exception:
            # Ensure that we don't overwrite the file if the script failed.
            output.avoid_writing_to_file()
            raise
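The bare-except fix above corresponds to flake8's E722: a bare `except:` also traps `SystemExit` and `KeyboardInterrupt`, so a failing generator script could swallow Ctrl-C or `sys.exit()`. A minimal sketch of the safer pattern (illustrative, not code from this commit):

    import sys

    def run(task):
        try:
            return task()
        except Exception:
            # Only genuine errors land here; SystemExit and KeyboardInterrupt
            # still propagate, unlike with a bare `except:`.
            print("task failed", file=sys.stderr)
            raise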
@@ -25,7 +25,7 @@ else:


 def validateDefault(key):
-    if (not key in searchinfo["default"]):
+    if key not in searchinfo["default"]:
        print >>sys.stderr, "Error: Missing default %s in list.json" % (key)
        sys.exit(1)

@@ -35,13 +35,16 @@ validateDefault("visibleDefaultEngines")

 # If the selected locale doesn't have a searchDefault,
 # use the global one.
-if not "searchDefault" in localeSearchInfo["default"]:
+if "searchDefault" not in localeSearchInfo["default"]:
    localeSearchInfo["default"]["searchDefault"] = searchinfo["default"]["searchDefault"]

 # If the selected locale doesn't have a searchOrder,
 # use the global one if present.
 # searchOrder is NOT required.
-if not "searchOrder" in localeSearchInfo["default"] and "searchOrder" in searchinfo["default"]:
+if (
+    "searchOrder" not in localeSearchInfo["default"]
+    and "searchOrder" in searchinfo["default"]
+):
    localeSearchInfo["default"]["searchOrder"] = searchinfo["default"]["searchOrder"]

 # If we have region overrides, enumerate through them
@@ -63,7 +66,8 @@ if "regionOverrides" in searchinfo:
                                 visibleDefaultEngines)
        for i, engine in enumerate(localeSearchInfo[region]["visibleDefaultEngines"]):
            if engine in regionOverrides[region]:
-                localeSearchInfo[region]["visibleDefaultEngines"][i] = regionOverrides[region][engine]
+                localeSearchInfo[region]["visibleDefaultEngines"][i] = \
+                    regionOverrides[region][engine]

 output.write(json.dumps(localeSearchInfo, ensure_ascii=False).encode('utf8'))
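The `not ... in` rewrites above implement flake8's E713 (test for membership should be `x not in y`): the two spellings are equivalent, but `not in` reads as a single operator, whereas `not x in y` looks as though `not x` might bind first. A small self-contained illustration:

    defaults = {"searchDefault": "Example"}

    # E713-clean spelling:
    if "searchOrder" not in defaults:
        defaults["searchOrder"] = []

    # Both forms always agree; only readability differs.
    assert ("searchOrder" not in defaults) == (not "searchOrder" in defaults)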
@@ -38,13 +38,9 @@ import os
 from mozbuild.dotproperties import (
    DotProperties,
 )
-from mozbuild.util import (
-    FileAvoidWrite,
-)
 from mozpack.files import (
    FileFinder,
 )
-import mozpack.path as mozpath


 def merge_properties(paths):
@@ -92,7 +88,8 @@ def main(output, *args, **kwargs):
    properties = merge_properties(sources)

    # Keep these two in sync.
-    image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
+    image_url_template = \
+        'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
    drawables_template = 'drawable*/suggestedsites_{name}.*'

    # Load properties corresponding to each site name and define their
@@ -121,7 +118,9 @@ def main(output, *args, **kwargs):
        else:
            if opts.verbose:
                print("Found {len} drawables in '{resources}' for '{name}': {matches}"
-                      .format(len=len(matches), resources=resources, name=name, matches=matches))
+                      .format(len=len(matches), resources=resources,
+                              name=name, matches=matches)
+                      )

    # We want the lists to be ordered for reproducibility. Each list has a
    # "default" JSON list item which will be extended by the properties read.
@@ -26,7 +26,6 @@ from mozpack.chrome.manifest import (
 )
 from mozbuild.configure.util import Version
 from mozbuild.preprocessor import Preprocessor
-import buildconfig


 def write_file(path, content):
@@ -11,7 +11,6 @@ from __future__ import absolute_import, print_function
 import argparse
 import buildconfig
 import os
-import subprocess
 import sys

 from mozpack.copier import Jarrer
@@ -5,8 +5,6 @@
 from __future__ import absolute_import, print_function, unicode_literals

 import argparse
-import json
-import os.path
 import sys

 import buildconfig
@@ -30,7 +28,8 @@ def main(argv):
        else:
            entry_abspath = mozpath.abspath(entry[1])
        if not entry_abspath.startswith(objdir_abspath):
-            print("Warning: omitting generated source [%s] from archive" % entry_abspath, file=sys.stderr)
+            print("Warning: omitting generated source [%s] from archive" % entry_abspath,
+                  file=sys.stderr)
            return False
        return True
@@ -10,7 +10,6 @@ import re
 import sys
 from buildconfig import topsrcdir, topobjdir
 from mozbuild.backend.configenvironment import PartialConfigEnvironment
-from mozbuild.util import FileAvoidWrite
 import mozpack.path as mozpath


@@ -51,12 +50,12 @@ def process_define_file(output, input):
                    raise Exception(
                        '`#define ALLDEFINES` is not allowed in a '
                        'CONFIGURE_DEFINE_FILE')
-                # WebRTC files like to define WINVER and _WIN32_WINNT
-                # via the command line, which raises a mass of macro
-                # redefinition warnings. Just handle those macros
-                # specially here.

                def define_for_name(name, val):
+                    """WebRTC files like to define WINVER and _WIN32_WINNT
+                    via the command line, which raises a mass of macro
+                    redefinition warnings. Just handle those macros
+                    specially here."""
                    define = "#define {name} {val}".format(name=name, val=val)
                    if name in ('WINVER', '_WIN32_WINNT'):
                        return '#if !defined({name})\n{define}\n#endif' \
@@ -979,7 +979,7 @@ def _authorize(req, auth_file):
        try:
            auth_file_content = json.loads(auth_file_content)
            is_taskcluster_auth = True
-        except:
+        except Exception:
            pass

    if is_taskcluster_auth:
@@ -42,8 +42,11 @@ def verifyDirectory(initests, directory):
                break

        if not found:
-            print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (
-                name, os.path.join(directory, 'xpcshell.ini'))
+            print >>sys.stderr, ("TEST-UNEXPECTED-FAIL | xpccheck | test "
+                                 "%s is missing from test manifest %s!") % (
+                name,
+                os.path.join(directory, 'xpcshell.ini'),
+            )
            sys.exit(1)


@@ -64,8 +67,9 @@ def verifyIniFile(initests, directory):
                break

        if not found:
-            print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (
-                name, directory)
+            print >>sys.stderr, ("TEST-UNEXPECTED-FAIL | xpccheck | found "
+                                 "%s in xpcshell.ini and not in directory '%s'"
+                                 ) % (name, directory)
            sys.exit(1)


@@ -74,7 +78,6 @@ def main(argv):
        print >>sys.stderr, "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]"
        sys.exit(1)

-    topsrcdir = argv[0]
    for d in argv[1:]:
        # xpcshell-unpack is a copy of xpcshell sibling directory and in the Makefile
        # we copy all files (including xpcshell.ini from the sibling directory.
@@ -13,8 +13,6 @@ import argparse
 import os
 import sys

-from io import BytesIO
-
 from xpidl import jsonxpt
 from buildconfig import topsrcdir
 from xpidl.header import print_header
@@ -92,7 +92,7 @@ class Graph(object):
            ret = self.query_arg('SELECT id FROM node \
                WHERE dir=? AND name=?', (nodeid, part)).fetchone()
            # fetchone should be ok bc dir and and name combo is unique
-            if ret == None:
+            if ret is None:
                print ("\nCould not find id number for '%s'" % filepath)
                return None
            nodeid = ret[0]
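The `ret == None` to `ret is None` change above is flake8's E711: `None` is a singleton, and `==` can be hijacked by a custom `__eq__`, while `is` checks identity. A short sketch of where the two diverge (illustrative only):

    class Row(object):
        # a deliberately permissive __eq__, as ORM row/query wrappers often have
        def __eq__(self, other):
            return True

    row = Row()
    print(row == None)   # True, even though row is a real object; this is why E711 exists
    print(row is None)   # False: identity cannot be overridden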
@@ -338,9 +338,11 @@ class AndroidArtifactJob(ArtifactJob):
            if not filename.endswith('.gz'):
                continue

-            # Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz" into "libxul.so.dbg".
+            # Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz"
+            # into "libxul.so.dbg".
            #
-            # After `settings append target.debug-file-search-paths /path/to/topobjdir/dist/crashreporter-symbols`,
+            # After running `settings append target.debug-file-search-paths $file`,
+            # where file=/path/to/topobjdir/dist/crashreporter-symbols,
            # Android Studio's lldb (7.0.0, at least) will find the ELF debug symbol files.
            #
            # There are other paths that will work but none seem more desireable. See
@@ -349,7 +351,8 @@ class AndroidArtifactJob(ArtifactJob):
            destpath = mozpath.join('crashreporter-symbols', basename)
            self.log(logging.INFO, 'artifact',
                     {'destpath': destpath},
-                     'Adding uncompressed ELF debug symbol file {destpath} to processed archive')
+                     'Adding uncompressed ELF debug symbol file '
+                     '{destpath} to processed archive')
            writer.add(destpath.encode('utf-8'),
                       gzip.GzipFile(fileobj=reader[filename].uncompressed_data))

@@ -656,7 +659,8 @@ class CacheManager(object):
    Provide simple logging.
    '''

-    def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None, log=None, skip_cache=False):
+    def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None,
+                 log=None, skip_cache=False):
        self._skip_cache = skip_cache
        self._cache = pylru.lrucache(cache_size, callback=cache_callback)
        self._cache_filename = mozpath.join(cache_dir, cache_name + '-cache.pickle')
@@ -960,7 +964,8 @@ class Artifacts(object):
 There are no public revisions.
 This can happen if the repository is created from bundle file and never pulled
 from remote. Please run `hg pull` and build again.
-see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles""")
+see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles\
+""")

            self.log(logging.INFO, 'artifact',
                     {'len': len(last_revs)},
@@ -1005,10 +1010,11 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
                yield candidate_pushheads[rev], rev

        if not count:
-            raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n'
-                            'Search started with {rev}, which must be known to Mozilla automation.\n\n'
-                            'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
-                                rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))
+            raise Exception(
+                'Could not find any candidate pushheads in the last {num} revisions.\n'
+                'Search started with {rev}, which must be known to Mozilla automation.\n\n'
+                'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
+                    rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))

    def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
        try:
@@ -1073,7 +1079,8 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
                    shutil.copyfileobj(zf.open(info), fh)
                file_existed, file_updated = fh.close()
                self.log(logging.INFO, 'artifact',
-                         {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
+                         {'updating': 'Updating' if file_updated else 'Not updating',
+                          'filename': n},
                         '{updating} {filename}')
                if not file_existed or file_updated:
                    # Libraries and binaries may need to be marked executable,
@@ -33,7 +33,6 @@ from mozbuild.frontend.data import (
    GnProjectData,
    HostLibrary,
    HostGeneratedSources,
-    HostRustLibrary,
    IPDLCollection,
    LocalizedPreprocessedFiles,
    LocalizedFiles,
@@ -53,7 +52,6 @@ from mozbuild.preprocessor import Preprocessor
 from mozpack.chrome.manifest import parse_manifest_line

 from mozbuild.util import (
-    group_unified_files,
    mkdir,
 )

@@ -205,8 +203,9 @@ class CommonBackend(BuildBackend):
        if len(self._idl_manager.modules):
            self._write_rust_xpidl_summary(self._idl_manager)
            self._handle_idl_manager(self._idl_manager)
-            self._handle_generated_sources(mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
-                                           for stem in self._idl_manager.idl_stems())
+            self._handle_generated_sources(
+                mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
+                for stem in self._idl_manager.idl_stems())

        for config in self._configs:
            self.backend_input_files.add(config.source)
@@ -392,7 +391,7 @@ class CommonBackend(BuildBackend):
        includeTemplate += (
            '\n'
            '#if defined(_WINDOWS_) && !defined(MOZ_WRAPPED_WINDOWS_H)\n'
-            '#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n'
+            '#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n'  # noqa
            '#error "%(cppfile)s included unwrapped windows.h"\n'
            "#endif")
        includeTemplate += (
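The trailing `# noqa` above is flake8's standard per-line suppression: this string is emitted verbatim into generated C++ code, so wrapping it to satisfy the E501 line-length limit would change the generated output, and silencing the single line is the lesser evil. The general shape of the technique:

    # A line that must stay long (e.g. it is emitted verbatim into another
    # file) can carry a targeted suppression instead of a file-wide ignore:
    TEMPLATE = '#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n'  # noqa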
@@ -49,7 +49,7 @@ class BuildConfig(object):

        # cache the compiled code as it can be reused
        # we cache it the first time, or if the file changed
-        if not path in code_cache or code_cache[path][0] != mtime:
+        if path not in code_cache or code_cache[path][0] != mtime:
            # Add config.status manually to sys.modules so it gets picked up by
            # iter_modules_in_path() for automatic dependencies.
            mod = ModuleType('config.status')
@@ -148,10 +148,13 @@ class ConfigEnvironment(object):
        self.bin_suffix = self.substs.get('BIN_SUFFIX', '')

        global_defines = [name for name in self.defines
-                          if not name in self.non_global_defines]
-        self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
-                                             shell_quote(self.defines[name]).replace('$', '$$'))
-                                             for name in sorted(global_defines)])
+                          if name not in self.non_global_defines]
+        self.substs["ACDEFINES"] = ' '.join(
+            [
+                '-D%s=%s' % (name, shell_quote(self.defines[name]).replace('$', '$$'))
+                for name in sorted(global_defines)
+            ]
+        )

        def serialize(name, obj):
            if isinstance(obj, StringTypes):
@@ -159,10 +162,21 @@ class ConfigEnvironment(object):
            if isinstance(obj, Iterable):
                return ' '.join(obj)
            raise Exception('Unhandled type %s for %s', type(obj), str(name))
-        self.substs['ALLSUBSTS'] = '\n'.join(sorted(['%s = %s' % (name,
-                                             serialize(name, self.substs[name])) for name in self.substs if self.substs[name]]))
-        self.substs['ALLEMPTYSUBSTS'] = '\n'.join(sorted(['%s =' % name
-                                                  for name in self.substs if not self.substs[name]]))
+        self.substs['ALLSUBSTS'] = '\n'.join(
+            sorted([
+                '%s = %s' % (
+                    name,
+                    serialize(name, self.substs[name])
+                )
+                for name in self.substs if self.substs[name]
+            ])
+        )
+        self.substs['ALLEMPTYSUBSTS'] = '\n'.join(
+            sorted([
+                '%s =' % name
+                for name in self.substs if not self.substs[name]
+            ])
+        )

        self.substs = ReadOnlyDict(self.substs)

@@ -6,18 +6,14 @@ from __future__ import absolute_import

 import errno
 import glob
-import random
 import os
 import shutil
 import subprocess
-import types
 from xml.sax.saxutils import quoteattr
-import xml.etree.ElementTree as ET
 from .common import CommonBackend

 from ..frontend.data import (
    ComputedFlags,
-    Defines,
 )
 from mozbuild.base import ExecutionSummary

@@ -195,7 +191,7 @@ class CppEclipseBackend(CommonBackend):
        self._write_noindex()

        try:
-            process = subprocess.check_call(
+            subprocess.check_call(
                ["eclipse", "-application", "-nosplash",
                 "org.eclipse.cdt.managedbuilder.core.headlessbuild",
                 "-data", self._workspace_dir, "-importAll", self._project_dir])
@@ -382,8 +378,8 @@ PROJECT_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
 <buildCommand>
 <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
 <triggers></triggers>
 <arguments>
 </arguments>
 </buildCommand>
 </buildSpec>
 <natures>
@@ -481,7 +477,7 @@ CPROJECT_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
 <folderInfo id="0.1674256904." name="/" resourcePath="">
 <toolChain id="cdt.managedbuild.toolchain.gnu.cross.exe.debug.1276586933" name="Cross GCC" superClass="cdt.managedbuild.toolchain.gnu.cross.exe.debug">
 <targetPlatform archList="all" binaryParser="" id="cdt.managedbuild.targetPlatform.gnu.cross.710759961" isAbstract="false" osList="all" superClass="cdt.managedbuild.targetPlatform.gnu.cross"/>
 <builder arguments="--log-no-times build" buildPath="@PROJECT_TOPSRCDIR@" command="@MACH_COMMAND@" enableCleanBuild="false" incrementalBuildTarget="binaries" id="org.eclipse.cdt.build.core.settings.default.builder.1437267827" keepEnvironmentInBuildfile="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
 </toolChain>
 </folderInfo>
 """
@@ -495,9 +491,9 @@ CPROJECT_TEMPLATE_FILEINFO = """ <fileInf
 </fileInfo>
 """
 CPROJECT_TEMPLATE_FOOTER = """
 <sourceEntries>
 <entry excluding="**/lib*|**/third_party/|tree/*.xcodeproj/|tree/.cargo/|tree/.vscode/|tree/build/|tree/extensions/|tree/gfx/angle/|tree/gfx/cairo/|tree/gfx/skia/skia/|tree/intl/icu/|tree/js/|tree/media/|tree/modules/freetype2|tree/modules/pdfium/|tree/netwerk/|tree/netwerk/sctp|tree/netwerk/srtp|tree/nsprpub/lib|tree/nsprpub/pr/src|tree/other-licenses/|tree/parser/|tree/python/|tree/security/nss/|tree/tools/" flags="VALUE_WORKSPACE_PATH" kind="sourcePath" name=""/>
 </sourceEntries>
 </configuration>
 </storageModule>
 <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
@@ -539,38 +535,38 @@ WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>

 LANGUAGE_SETTINGS_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
 <project>
 <configuration id="0.1674256904" name="Default">
 <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
 <provider class="org.eclipse.cdt.core.language.settings.providers.LanguageSettingsGenericProvider" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider" name="CDT User Setting Entries" prefer-non-shared="true" store-entries-with-project="true">
 <language id="org.eclipse.cdt.core.g++">
 """

 LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER = """ <resource project-relative-path="@RELATIVE_PATH@">
 <entry kind="includeFile" name="@PREINCLUDE_FILE_PATH@">
 <flag value="LOCAL"/>
 </entry>
 """

 LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE = """ <entry kind="includePath" name="@INCLUDE_PATH@">
 <flag value="LOCAL"/>
 </entry>
 """

 LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE = """ <entry kind="macro" name="@NAME@" value=@VALUE@/>
 """

 LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER = """ </resource>
 """

 LANGUAGE_SETTINGS_TEMPLATE_FOOTER = """ </language>
 </provider>
 <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-859273372804152468" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD "${INPUTS}" -std=c++11" prefer-non-shared="true" store-entries-with-project="true">
 <language-scope id="org.eclipse.cdt.core.gcc"/>
 <language-scope id="org.eclipse.cdt.core.g++"/>
 </provider>
 <provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
 </extension>
 </configuration>
 </project>
 """
@@ -210,7 +210,13 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
                rule = mk.create_rule([merge]).add_dependencies(
                    [ref_file, l10n_file] + python_deps)
                rule.add_commands(
-                    ['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
+                    [
+                        '$(PYTHON) -m mozbuild.action.l10n_merge '
+                        '--output {} --ref-file {} --l10n-file {}'.format(
+                            merge, ref_file, l10n_file
+                        )
+                    ]
+                )
                # Add a dummy rule for the l10n file since it might not exist.
                mk.create_rule([l10n_file])

@@ -222,7 +228,8 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
                                      'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

-        # For artifact builds only, write a single unified manifest for consumption by |mach watch|.
+        # For artifact builds only, write a single unified manifest
+        # for consumption by |mach watch|.
        if self.environment.is_artifact_build:
            unified_manifest = InstallManifest()
            for base, install_manifest in self._install_manifests.iteritems():
@@ -6,7 +6,6 @@ from __future__ import absolute_import, print_function, unicode_literals

 import argparse
 import os
-import sys
 import subprocess
 import which

@@ -58,10 +57,12 @@ class MachCommands(MachCommandBase):

        if ide == 'eclipse':
            eclipse_workspace_dir = self.get_eclipse_workspace_path()
-            process = subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
+            subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
        elif ide == 'visualstudio':
            visual_studio_workspace_dir = self.get_visualstudio_workspace_path()
-            process = subprocess.check_call(['explorer.exe', visual_studio_workspace_dir])
+            subprocess.check_call(
+                ['explorer.exe', visual_studio_workspace_dir]
+            )

    def get_eclipse_workspace_path(self):
        from mozbuild.backend.cpp_eclipse import CppEclipseBackend
@@ -51,7 +51,6 @@ from ..frontend.data import (
    HostSources,
    InstallationTarget,
    JARManifest,
-    Library,
    Linkable,
    LocalInclude,
    LocalizedFiles,
@@ -59,7 +58,6 @@ from ..frontend.data import (
    ObjdirFiles,
    ObjdirPreprocessedFiles,
    PerSourceFlag,
-    PgoGenerateOnlySources,
    Program,
    RustLibrary,
    HostSharedLibrary,
@@ -313,7 +311,7 @@ class RecursiveMakeTraversal(object):
        if start_node != '':
            deps[start_node] = prev_nodes
            prev_nodes = (start_node,)
-        if not start_node in self._traversal:
+        if start_node not in self._traversal:
            return prev_nodes
        parallel_nodes = []
        for node in parallel:
@@ -339,7 +337,7 @@ class RecursiveMakeTraversal(object):
        current, parallel, sequential = self.call_filter(start, filter)
        if current is not None:
            yield start
-        if not start in self._traversal:
+        if start not in self._traversal:
            return
        for node in parallel:
            for n in self.traverse(node, filter):
@@ -599,12 +597,15 @@ class RecursiveMakeBackend(CommonBackend):
            backend_file.write('GARBAGE += %s\n' % stub_file)
            backend_file.write('EXTRA_MDDEPEND_FILES += %s\n' % dep_file)

-            backend_file.write("""{stub}: {script}{inputs}{backend}{force}
+            backend_file.write((
+                """{stub}: {script}{inputs}{backend}{force}
 \t$(REPORT_BUILD)
-\t$(call py_action,file_generate,{locale}{script} {method} {output} $(MDDEPDIR)/{dep_file} {stub}{inputs}{flags})
+\t$(call py_action,file_generate,{locale}{script} """  # wrap for E501
+                """{method} {output} $(MDDEPDIR)/{dep_file} {stub}{inputs}{flags})
 \t@$(TOUCH) $@

-""".format(stub=stub_file,
+""").format(
+                stub=stub_file,
                output=first_output,
                dep_file=dep_file,
                inputs=' ' + ' '.join(inputs) if inputs else '',
@@ -617,7 +618,9 @@ class RecursiveMakeBackend(CommonBackend):
                force=force,
                locale='--locale=$(AB_CD) ' if obj.localized else '',
                script=obj.script,
-                method=obj.method))
+                method=obj.method
+            )
+            )

        elif isinstance(obj, JARManifest):
            self._no_skip['libs'].add(backend_file.relobjdir)
@@ -813,8 +816,10 @@ class RecursiveMakeBackend(CommonBackend):
        # Directories containing rust compilations don't generally depend
        # on other directories in the tree, so putting them first here will
        # start them earlier in the build.
-        rule.add_dependencies(chain((r for r in roots if mozpath.dirname(r) in self._rust_dirs),
-                                    (r for r in roots if mozpath.dirname(r) not in self._rust_dirs)))
+        rule.add_dependencies(
+            chain((r for r in roots if mozpath.dirname(r) in self._rust_dirs),
+                  (r for r in roots if mozpath.dirname(r) not in self._rust_dirs))
+        )
        for target, deps in sorted(graph.items()):
            if deps:
                rule = root_deps_mk.create_rule([target])
@@ -897,9 +902,10 @@ class RecursiveMakeBackend(CommonBackend):
                                            all_sources))

        if include_curdir_build_rules:
-            makefile.add_statement('\n'
-                                   '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
-                                   '# Help it out by explicitly specifiying dependencies.')
+            makefile.add_statement(
+                '\n'
+                '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
+                '# Help it out by explicitly specifiying dependencies.')
            makefile.add_statement('all_absolute_unified_files := \\\n'
                                   '  $(addprefix $(CURDIR)/,$(%s))'
                                   % unified_files_makefile_variable)
@@ -1063,10 +1069,10 @@ class RecursiveMakeBackend(CommonBackend):
            return (mozpath.relpath(d.translated, base) for d in dirs)

        if obj.dirs:
-            fh.write('DIRS := %s\n' % ' '.join(
-                relativize(backend_file.objdir, obj.dirs)))
-            self._traversal.add(backend_file.relobjdir,
-                                dirs=relativize(self.environment.topobjdir, obj.dirs))
+            fh.write('DIRS := %s\n' % ' '.join(relativize(backend_file.objdir, obj.dirs)))
+            self._traversal.add(
+                backend_file.relobjdir, dirs=relativize(self.environment.topobjdir, obj.dirs)
+            )

        # The directory needs to be registered whether subdirectories have been
        # registered or not.
@@ -1091,7 +1097,9 @@ class RecursiveMakeBackend(CommonBackend):
            backend_file.write('FINAL_TARGET = $(DEPTH)/%s\n' % (obj.target))
        else:
            backend_file.write(
-                'FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')
+                'FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),'
+                '$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n'
+            )

        if not obj.enabled:
            backend_file.write('NO_DIST_INSTALL := 1\n')
@@ -1106,8 +1114,7 @@ class RecursiveMakeBackend(CommonBackend):
            self._install_manifests['dist_include'].add_optional_exists('%s.h' % stem)

        for module in manager.modules:
-            build_files.add_optional_exists(mozpath.join('.deps',
-                                                         '%s.pp' % module))
+            build_files.add_optional_exists(mozpath.join('.deps', '%s.pp' % module))

        modules = manager.modules
        xpt_modules = sorted(modules.keys())
@@ -1297,8 +1304,9 @@ class RecursiveMakeBackend(CommonBackend):

    def _process_computed_flags(self, computed_flags, backend_file):
        for var, flags in computed_flags.get_flags():
-            backend_file.write('COMPUTED_%s += %s\n' % (var,
-                               ' '.join(make_quote(shell_quote(f)) for f in flags)))
+            backend_file.write(
+                'COMPUTED_%s += %s\n' % (var,
+                                         ' '.join(make_quote(shell_quote(f)) for f in flags)))

    def _process_non_default_target(self, libdef, target_name, backend_file):
        backend_file.write("%s:: %s\n" % (libdef.output_category, target_name))
@@ -1364,7 +1372,6 @@ class RecursiveMakeBackend(CommonBackend):
            return os.path.normpath(mozpath.join(mozpath.relpath(lib.objdir, obj.objdir),
                                                 name))

-        topobjdir = mozpath.normsep(obj.topobjdir)
        # This will create the node even if there aren't any linked libraries.
        build_target = self._build_target_for_obj(obj)
        self._compile_graph[build_target]
@@ -1517,8 +1524,8 @@ class RecursiveMakeBackend(CommonBackend):
            if f.startswith('/') or isinstance(f, AbsolutePath):
                basepath, wild = os.path.split(f.full_path)
                if '*' in basepath:
-                    raise Exception("Wildcards are only supported in the filename part of "
-                                    "srcdir-relative or absolute paths.")
+                    raise Exception("Wildcards are only supported in the filename part"
+                                    " of srcdir-relative or absolute paths.")

                install_manifest.add_pattern_link(basepath, wild, path)
            else:
@@ -1740,8 +1747,9 @@ class RecursiveMakeBackend(CommonBackend):
                                  unified_files_makefile_variable='CPPSRCS')

        # Preprocessed ipdl files are generated in ipdl_dir.
-        mk.add_statement('IPDLDIRS := %s %s' % (ipdl_dir, ' '.join(sorted(set(mozpath.dirname(p)
-                         for p in sorted_static_ipdl_sources)))))
+        mk.add_statement(
+            'IPDLDIRS := %s %s' % (ipdl_dir, ' '.join(sorted(set(mozpath.dirname(p)
+                                   for p in sorted_static_ipdl_sources)))))

        with self._write_file(mozpath.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:
            mk.dump(ipdls, removal_guard=False)
@@ -12,12 +12,9 @@ import sys
 import shutil

 import mozpack.path as mozpath
-from mozbuild import shellutil
 from mozbuild.analyze.graph import Graph
 from mozbuild.analyze.hg import Report
 from mozbuild.base import MozbuildObject
-from mozbuild.backend.base import PartialBackend, HybridBackend
-from mozbuild.backend.recursivemake import RecursiveMakeBackend
 from mozbuild.mozconfig import MozconfigLoader
 from mozbuild.shellutil import quote as shell_quote
 from mozbuild.util import OrderedDefaultDict
@@ -59,7 +56,6 @@ from ..frontend.data import (
 )
 from ..util import (
    FileAvoidWrite,
-    expand_variables,
 )
 from ..frontend.context import (
    AbsolutePath,
@@ -147,7 +143,8 @@ class BackendTupfile(object):
        else:
            caret_text = flags

-        self.write(': %(inputs)s%(extra_inputs)s |> %(display)s%(cmd)s |> %(outputs)s%(output_group)s\n' % {
+        self.write((': %(inputs)s%(extra_inputs)s |> %(display)s%(cmd)s |> '
+                    '%(outputs)s%(output_group)s\n') % {
            'inputs': ' '.join(inputs),
            'extra_inputs': ' | ' + ' '.join(extra_inputs) if extra_inputs else '',
            'display': '^%s^ ' % caret_text if caret_text else '',
@@ -272,7 +269,8 @@ class TupBackend(CommonBackend):
        self._rust_cmds = set()

        self._built_in_addons = set()
-        self._built_in_addons_file = 'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'
+        self._built_in_addons_file = \
+            'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'

    def _output_group(self, label):
        if label:
@@ -671,11 +669,13 @@ class TupBackend(CommonBackend):

        for objdir, backend_file in sorted(self._backend_files.items()):
            backend_file.gen_sources_rules([self._installed_files])
-            for var, gen_method in ((backend_file.shared_lib, self._gen_shared_library),
-                                    (backend_file.static_lib and backend_file.static_lib.no_expand_lib,
-                                     self._gen_static_library),
-                                    (backend_file.programs, self._gen_programs),
-                                    (backend_file.host_programs, self._gen_host_programs)):
+            for var, gen_method in (
+                    (backend_file.shared_lib, self._gen_shared_library),
+                    (backend_file.static_lib and backend_file.static_lib.no_expand_lib,
+                     self._gen_static_library),
+                    (backend_file.programs, self._gen_programs),
+                    (backend_file.host_programs, self._gen_host_programs)
+            ):
                if var:
                    backend_file.export_shell()
                    backend_file.export_icecc()
@@ -688,8 +688,9 @@ class TupBackend(CommonBackend):
            pass

        with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
-            acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value))
-                                        for (name, value) in sorted(self.environment.acdefines.iteritems())])
+            acdefines_flags = ' '.join(
+                ['-D%s=%s' % (name, shell_quote(value))
+                 for (name, value) in sorted(self.environment.acdefines.iteritems())])
            # TODO: AB_CD only exists in Makefiles at the moment.
            acdefines_flags += ' -DAB_CD=en-US'

@@ -808,13 +809,11 @@ class TupBackend(CommonBackend):

        # Enable link-time optimization for release builds.
        cargo_library_flags = []
-        if (not obj.config.substs.get('DEVELOPER_OPTIONS') and
-            not obj.config.substs.get('MOZ_DEBUG_RUST')):
+        if not obj.config.substs.get('DEVELOPER_OPTIONS') and not obj.config.substs.get(
+            'MOZ_DEBUG_RUST'
+        ):
            cargo_library_flags += ['-C', 'lto']

-        rust_build_home = mozpath.join(self.environment.topobjdir,
-                                       'toolkit/library/rust')
-
        def display_name(invocation):
            output_str = ''
            if invocation['outputs']:
@@ -868,7 +867,9 @@ class TupBackend(CommonBackend):

        invocation['full-deps'] = set()

-        if os.path.basename(invocation['program']) in ['build-script-build', 'build-script-main']:
+        if os.path.basename(invocation['program']) in [
+                'build-script-build', 'build-script-main'
+        ]:
            out_dir = invocation['env']['OUT_DIR']
            for output in cargo_extra_outputs.get(shortname, []):
                outputs.append(os.path.join(out_dir, output))
@@ -1118,8 +1119,9 @@ class TupBackend(CommonBackend):
            if f.startswith('/') or isinstance(f, AbsolutePath):
                basepath, wild = os.path.split(f.full_path)
                if '*' in basepath:
-                    raise Exception("Wildcards are only supported in the filename part of "
-                                    "srcdir-relative or absolute paths.")
+                    raise Exception(
+                        "Wildcards are only supported in the filename part of "
+                        "srcdir-relative or absolute paths.")

                # TODO: This is only needed for Windows, so we can
                # skip this for now.
@@ -1148,7 +1150,6 @@ class TupBackend(CommonBackend):

                finder = FileFinder(prefix)
                for p, _ in finder.find(f.full_path[len(prefix):]):
-                    install_dir = prefix[len(obj.srcdir) + 1:]
                    output = p
                    if f.target_basename and '*' not in f.target_basename:
                        output = mozpath.join(f.target_basename, output)
@@ -1160,7 +1161,8 @@ class TupBackend(CommonBackend):
                        f.full_path, output=f.target_basename, output_group=output_group)
                else:
                    if (self.environment.is_artifact_build and
-                        any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)):
+                        any(mozpath.match(f.target_basename, p)
+                            for p in self._compile_env_gen_files)):
                        # If we have an artifact build we never would have generated this file,
                        # so do not attempt to install it.
                        continue
@@ -10,7 +10,6 @@ from __future__ import absolute_import, unicode_literals
 import errno
 import os
 import re
-import types
 import uuid

 from xml.dom import getDOMImplementation
@@ -148,16 +147,17 @@ class VisualStudioBackend(CommonBackend):
            if target != 'full':
                command += ' %s' % target

-            project_id = self._write_vs_project(out_proj_dir, basename, target,
-                                                build_command=command,
-                                                clean_command='$(SolutionDir)\\mach.bat build clean')
+            project_id = self._write_vs_project(
+                out_proj_dir, basename, target, build_command=command,
+                clean_command='$(SolutionDir)\\mach.bat build clean')

            projects[basename] = (project_id, basename, target)

        # A project that can be used to regenerate the visual studio projects.
        basename = 'target_vs'
-        project_id = self._write_vs_project(out_proj_dir, basename, 'visual-studio',
-                                            build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
+        project_id = self._write_vs_project(
+            out_proj_dir, basename, 'visual-studio',
+            build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
        projects[basename] = (project_id, basename, 'visual-studio')

        # Write out a shared property file with common variables.
@ -239,14 +239,14 @@ class VisualStudioBackend(CommonBackend):
|
||||||
|
|
||||||
basename = '%s_%s' % (prefix, item)
|
basename = '%s_%s' % (prefix, item)
|
||||||
|
|
||||||
project_id = self._write_vs_project(out_dir, basename, item,
|
project_id = self._write_vs_project(
|
||||||
includes=includes,
|
out_dir, basename, item,
|
||||||
forced_includes=[
|
includes=includes,
|
||||||
'$(TopObjDir)\\dist\\include\\mozilla-config.h'],
|
forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
|
||||||
defines=defines,
|
defines=defines,
|
||||||
headers=headers,
|
headers=headers,
|
||||||
sources=sources,
|
sources=sources,
|
||||||
debugger=debugger)
|
debugger=debugger)
|
||||||
|
|
||||||
projects[basename] = (project_id, basename, item)
|
projects[basename] = (project_id, basename, item)
|
||||||
|
|
||||||
|
@ -462,8 +462,8 @@ class VisualStudioBackend(CommonBackend):
|
||||||
project_id = get_id(basename.encode('utf-8'))
|
project_id = get_id(basename.encode('utf-8'))
|
||||||
|
|
||||||
with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
|
with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
|
||||||
project_id, name = VisualStudioBackend.write_vs_project(fh,
|
project_id, name = VisualStudioBackend.write_vs_project(
|
||||||
self._version, project_id, name, **kwargs)
|
fh, self._version, project_id, name, **kwargs)
|
||||||
|
|
||||||
with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
|
with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
|
||||||
fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
|
fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
|
||||||
|
|
|
@@ -37,7 +37,6 @@ from .mozconfig import (
 )
 from .pythonutil import find_python3_executable
 from .util import (
-    ReadOnlyNamespace,
     memoize,
     memoized_property,
 )
@@ -179,9 +178,10 @@ class MozbuildObject(ProcessExecutionMixin):
         topobjdir = mozpath.normsep(os.path.normpath(topobjdir))

         if topsrcdir == topobjdir:
-            raise BadEnvironmentException('The object directory appears '
-                'to be the same as your source directory (%s). This build '
-                'configuration is not supported.' % topsrcdir)
+            raise BadEnvironmentException(
+                'The object directory appears '
+                'to be the same as your source directory (%s). This build '
+                'configuration is not supported.' % topsrcdir)

         # If we can't resolve topobjdir, oh well. We'll figure out when we need
         # one.
@@ -256,11 +256,13 @@ class MozbuildObject(ProcessExecutionMixin):
     @property
     def virtualenv_manager(self):
         if self._virtualenv_manager is None:
-            self._virtualenv_manager = VirtualenvManager(self.topsrcdir,
-                self.topobjdir, os.path.join(
-                    self.topobjdir, '_virtualenvs', 'init'),
-                sys.stdout, os.path.join(self.topsrcdir, 'build',
-                                         'virtualenv_packages.txt'))
+            self._virtualenv_manager = VirtualenvManager(
+                self.topsrcdir,
+                self.topobjdir,
+                os.path.join(self.topobjdir, '_virtualenvs', 'init'),
+                sys.stdout,
+                os.path.join(self.topsrcdir, 'build', 'virtualenv_packages.txt')
+            )

         return self._virtualenv_manager

@@ -622,8 +624,8 @@ class MozbuildObject(ProcessExecutionMixin):
             self.run_process([notifier, '--app-name=Mozilla Build System',
                               'Mozilla Build System', msg], ensure_exit_code=False)
         except Exception as e:
-            self.log(logging.WARNING, 'notifier-failed', {'error':
-                     e.message}, 'Notification center failed: {error}')
+            self.log(logging.WARNING, 'notifier-failed',
+                     {'error': e.message}, 'Notification center failed: {error}')

     def _ensure_objdir_exists(self):
         if os.path.isdir(self.statedir):
@@ -868,8 +870,9 @@ class MachCommandBase(MozbuildObject):
         detect_virtualenv_mozinfo = getattr(context,
                                             'detect_virtualenv_mozinfo')
         try:
-            dummy = MozbuildObject.from_environment(cwd=context.cwd,
-                detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
+            dummy = MozbuildObject.from_environment(
+                cwd=context.cwd,
+                detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
             topsrcdir = dummy.topsrcdir
             topobjdir = dummy._topobjdir
             if topobjdir:
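The call rewraps in this file all trade flake8's visual-indent style for a hanging indent: break immediately after the opening parenthesis and indent every argument by four spaces. That keeps continuation lines legal (E128) even when the callee's name changes length. A schematic sketch with placeholder names (not the real constructor):

    def create_manager(srcdir, objdir, log_stream, packages_file):
        # Hypothetical stand-in for a constructor taking several arguments.
        return (srcdir, objdir, log_stream, packages_file)

    manager = create_manager(
        '/src',
        '/obj',
        None,
        '/src/build/virtualenv_packages.txt',
    )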
@@ -2,11 +2,9 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

-from collections import defaultdict
 import json
 import os
 import re
-import urlparse

 from mach.config import ConfigSettings
 from mach.logging import LoggingManager
@@ -120,9 +118,11 @@ class ChromeMapBackend(CommonBackend):
         # A map from url prefixes to objdir directories:
         # { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... }
         # A map of overrides.
-        # A map from objdir paths to sourcedir paths, and an object storing mapping information for preprocessed files:
+        # A map from objdir paths to sourcedir paths, and an object storing mapping
+        # information for preprocessed files:
         # { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js":
-        #   [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", {} ], ... }
+        #   [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", {} ],
+        #   ... }
         # An object containing build configuration information.
         outputfile = os.path.join(self.environment.topobjdir, 'chrome-map.json')
         with self._write_file(outputfile) as fh:
@@ -66,8 +66,9 @@ class LcovRecord(object):
         # Re-calculate summaries after generating or splitting a record.
         self.function_count = len(self.functions.keys())
         # Function records may have moved between files, so filter here.
-        self.function_exec_counts = {fn_name: count for fn_name, count in viewitems(self.function_exec_counts)
-                                     if fn_name in self.functions.values()}
+        self.function_exec_counts = {
+            fn_name: count for fn_name, count in viewitems(self.function_exec_counts)
+            if fn_name in self.functions.values()}
         self.covered_function_count = len([c for c in self.function_exec_counts.values() if c])
         self.line_count = len(self.lines)
         self.covered_line_count = len([c for c, _ in self.lines.values() if c])
@@ -575,10 +576,10 @@ class UrlFinder(object):

         if app_name in url:
             if omnijar_name in url:
-                # e.g. file:///home/worker/workspace/build/application/firefox/omni.ja!/components/MainProcessSingleton.js
+                # e.g. file:///home/worker/workspace/build/application/firefox/omni.ja!/components/MainProcessSingleton.js  # noqa
                 parts = url_obj.path.split(omnijar_name + '!', 1)
             elif '.xpi!' in url:
-                # e.g. file:///home/worker/workspace/build/application/firefox/browser/features/e10srollout@mozilla.org.xpi!/bootstrap.js
+                # e.g. file:///home/worker/workspace/build/application/firefox/browser/features/e10srollout@mozilla.org.xpi!/bootstrap.js  # noqa
                 parts = url_obj.path.split('.xpi!', 1)
             else:
                 # We don't know how to handle this jar: path, so return it to the
@@ -586,8 +587,10 @@ class UrlFinder(object):
                 return url_obj.path, None

             dir_parts = parts[0].rsplit(app_name + '/', 1)
-            url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
-                'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/')))
+            url = mozpath.normpath(
+                mozpath.join(self.topobjdir, 'dist',
+                             'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/'))
+            )
         elif '.xpi!' in url:
             # This matching mechanism is quite brittle and based on examples seen in the wild.
             # There's no rule to match the XPI name to the path in dist/xpi-stage.
@@ -619,7 +622,8 @@ class UrlFinder(object):
 class LcovFileRewriter(object):
     # Class for partial parses of LCOV format and rewriting to resolve urls
     # and preprocessed file lines.
-    def __init__(self, chrome_map_path, appdir='dist/bin/browser/', gredir='dist/bin/', extra_chrome_manifests=[]):
+    def __init__(self, chrome_map_path, appdir='dist/bin/browser/',
+                 gredir='dist/bin/', extra_chrome_manifests=[]):
         self.url_finder = UrlFinder(chrome_map_path, appdir, gredir, extra_chrome_manifests)
         self.pp_rewriter = RecordRewriter()

@@ -640,9 +644,11 @@ class LcovFileRewriter(object):
                 return None

             source_file, pp_info = res
-            # We can't assert that the file exists here, because we don't have the source checkout available
-            # on test machines. We can bring back this assertion when bug 1432287 is fixed.
-            # assert os.path.isfile(source_file), "Couldn't find mapped source file %s at %s!" % (url, source_file)
+            # We can't assert that the file exists here, because we don't have the source
+            # checkout available on test machines. We can bring back this assertion when
+            # bug 1432287 is fixed.
+            # assert os.path.isfile(source_file), "Couldn't find mapped source file %s at %s!" % (
+            #     url, source_file)

             found_valid[0] = True

@@ -666,28 +672,44 @@ class LcovFileRewriter(object):


 def main():
-    parser = ArgumentParser(description="Given a set of gcov .info files produced "
-                            "by spidermonkey's code coverage, re-maps file urls "
-                            "back to source files and lines in preprocessed files "
-                            "back to their original locations.")
-    parser.add_argument("--chrome-map-path", default="chrome-map.json",
-                        help="Path to the chrome-map.json file.")
-    parser.add_argument("--app-dir", default="dist/bin/browser/",
-                        help="Prefix of the appdir in use. This is used to map "
-                        "urls starting with resource:///. It may differ by "
-                        "app, but defaults to the valid value for firefox.")
-    parser.add_argument("--gre-dir", default="dist/bin/",
-                        help="Prefix of the gre dir in use. This is used to map "
-                        "urls starting with resource://gre. It may differ by "
-                        "app, but defaults to the valid value for firefox.")
-    parser.add_argument("--output-suffix", default=".out",
-                        help="The suffix to append to output files.")
-    parser.add_argument("--extra-chrome-manifests", nargs='+',
-                        help="Paths to files containing extra chrome registration.")
-    parser.add_argument("--output-file", default="",
-                        help="The output file where the results are merged. Leave empty to make the rewriter not merge files.")
-    parser.add_argument("files", nargs='+',
-                        help="The set of files to process.")
+    parser = ArgumentParser(
+        description="Given a set of gcov .info files produced "
+        "by spidermonkey's code coverage, re-maps file urls "
+        "back to source files and lines in preprocessed files "
+        "back to their original locations."
+    )
+    parser.add_argument(
+        "--chrome-map-path", default="chrome-map.json", help="Path to the chrome-map.json file."
+    )
+    parser.add_argument(
+        "--app-dir",
+        default="dist/bin/browser/",
+        help="Prefix of the appdir in use. This is used to map "
+        "urls starting with resource:///. It may differ by "
+        "app, but defaults to the valid value for firefox.",
+    )
+    parser.add_argument(
+        "--gre-dir",
+        default="dist/bin/",
+        help="Prefix of the gre dir in use. This is used to map "
+        "urls starting with resource://gre. It may differ by "
+        "app, but defaults to the valid value for firefox.",
+    )
+    parser.add_argument(
+        "--output-suffix", default=".out", help="The suffix to append to output files."
+    )
+    parser.add_argument(
+        "--extra-chrome-manifests",
+        nargs='+',
+        help="Paths to files containing extra chrome registration.",
+    )
+    parser.add_argument(
+        "--output-file",
+        default="",
+        help="The output file where the results are merged. Leave empty to make the rewriter not "
+        "merge files.",
+    )
+    parser.add_argument("files", nargs='+', help="The set of files to process.")

     args = parser.parse_args()
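The `# noqa` markers added to the two example URLs above are the escape hatch for lines that cannot be wrapped. A bare marker skips every check on that line; the scoped form `# noqa: E501`, also supported by flake8, silences only the line-length rule and is usually safer. For example:

    URL = "file:///home/worker/workspace/build/application/firefox/omni.ja!/components/MainProcessSingleton.js"  # noqa: E501

    print(URL)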
@@ -5,6 +5,7 @@
 from __future__ import absolute_import, print_function

 import argparse
+import errno
 import sys
 import json
 import buildconfig
@@ -6,8 +6,6 @@

 from __future__ import absolute_import

-import os
-
 from mach.decorators import (
     CommandArgument,
     CommandProvider,
@@ -13,11 +13,8 @@ from mozbuild.frontend.data import (
     Sources,
     GeneratedSources,
     DirectoryTraversal,
-    Linkable,
-    LocalInclude,
     PerSourceFlag,
     VariablePassthru,
-    SimpleProgram,
 )
 from mozbuild.shellutil import (
     quote as shell_quote,
@@ -3,7 +3,6 @@
 # You can obtain one at http://mozilla.org/MPL/2.0/.

 import os
-from mozbuild import shellutil


 def check_top_objdir(topobjdir):
@@ -10,7 +10,6 @@ from __future__ import absolute_import, print_function

 import logging
 import os
-import subprocess
 import sys
 import time

@@ -155,7 +155,7 @@ class ConfigureOutputHandler(logging.Handler):
             stream.flush()
         except (KeyboardInterrupt, SystemExit, IOError):
             raise
-        except:
+        except Exception:
             self.handleError(record)

     @contextmanager
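The `except:` to `except Exception:` change here, repeated in several files below, addresses flake8's E722. A bare `except` also catches `KeyboardInterrupt` and `SystemExit`, which derive from `BaseException`, so a Ctrl-C inside the guarded block would be silently swallowed; catching `Exception` lets those escape. A self-contained illustration:

    def parse_int(text):
        try:
            return int(text)
        except Exception:  # unlike a bare except, Ctrl-C still propagates
            return None

    assert parse_int("42") == 42
    assert parse_int("not a number") is None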
@@ -6,7 +6,6 @@ from __future__ import absolute_import, unicode_literals

 import errno
 import getpass
-import glob
 import io
 import json
 import logging
@@ -305,7 +304,7 @@ class BuildMonitor(MozbuildObject):
         try:
             warning = self._warnings_collector.process_line(line)
             message = line
-        except:
+        except Exception:
             pass

         return BuildOutputResult(warning, False, message)
@@ -902,7 +901,10 @@ class CCacheStats(object):
         return int(numeric * unit)

     def hit_rate_message(self):
-        return 'ccache (direct) hit rate: {:.1%}; (preprocessed) hit rate: {:.1%}; miss rate: {:.1%}'.format(*self.hit_rates())
+        return ('ccache (direct) hit rate: {:.1%}; (preprocessed) hit rate: {:.1%};'
+                ' miss rate: {:.1%}'.format(
+                    *self.hit_rates()
+                ))

     def hit_rates(self):
         direct = self._values['cache_hit_direct']
@@ -1144,12 +1146,13 @@ class BuildDriver(MozbuildObject):
                 # could potentially be fixed if the build monitor were more
                 # intelligent about encountering undefined state.
                 no_build_status = b'1' if make_dir is not None else b''
-                status = self._run_make(directory=make_dir, target=make_target,
-                    line_handler=output.on_line, log=False, print_directory=False,
-                    ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
-                    append_env={
-                        b'NO_BUILDSTATUS_MESSAGES': no_build_status},
-                    keep_going=keep_going)
+                status = self._run_make(
+                    directory=make_dir, target=make_target,
+                    line_handler=output.on_line, log=False, print_directory=False,
+                    ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
+                    append_env={
+                        b'NO_BUILDSTATUS_MESSAGES': no_build_status},
+                    keep_going=keep_going)

                 if status != 0:
                     break
@@ -1299,8 +1302,10 @@ class BuildDriver(MozbuildObject):
                 print('To take your build for a test drive, run: |mach run|')
                 app = self.substs['MOZ_BUILD_APP']
                 if app in ('browser', 'mobile/android'):
-                    print('For more information on what to do now, see '
-                          'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox')
+                    print(
+                        'For more information on what to do now, see '
+                        'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox'  # noqa
+                    )
             except Exception:
                 # Ignore Exceptions in case we can't find config.status (such
                 # as when doing OSX Universal builds)
@@ -95,7 +95,7 @@ class Clobberer(object):
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
             return p.wait() == 1 and p.stdout.read().startswith('winrm')
-        except:
+        except Exception:
             return False

     def remove_objdir(self, full=True):
@@ -70,7 +70,7 @@ class Doctor(object):
                     denied = True
         if denied:
             print('run "mach doctor --fix" AS ADMIN to re-attempt fixing your system')
-        elif False:  # elif fixable:
+        elif False and fixable:  # elif fixable:  # 'and fixable' avoids flake8 error
             print('run "mach doctor --fix" as admin to attempt fixing your system')
         return int(not good)

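The odd-looking `elif False and fixable:` is most likely a workaround for F841 (local variable assigned but never used): the branch is intentionally disabled, but mentioning `fixable` in the dead condition counts as a use, and `False and fixable` still short-circuits to `False`, so behavior is unchanged. Distilled into a runnable sketch:

    def report(good, fixable):
        if not good:
            print('problems found')
        elif False and fixable:  # disabled branch; still a 'use' of fixable
            print('problems are fixable')
        return int(not good)

    report(False, True)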
@@ -78,7 +78,7 @@ class DotProperties:
                  if k.startswith(prefix) and '.' not in k[len(prefix):])

        for required_key in required_keys:
-            if not required_key in D:
+            if required_key not in D:
                 raise ValueError('Required key %s not present' % required_key)

         return D
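E713 prefers the dedicated membership operator over negating the whole expression: `x not in d` and `not x in d` are equivalent, but the former reads as a single operator and avoids a precedence double-take. For instance:

    D = {'app.name': 'firefox'}

    # Flagged by flake8 (E713): if not 'app.version' in D:
    if 'app.version' not in D:
        print('Required key app.version not present')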
@@ -204,7 +204,7 @@ class Daemon(object):

         while True:
             try:
-                _watch_result = self.client.receive()
+                self.client.receive()

                 changed = self.changed_files()
                 if not changed:
@@ -228,7 +228,7 @@ class Daemon(object):

             except pywatchman.SocketTimeout:
                 # Let's check to see if we're still functional.
-                _version = self.client.query('version')
+                self.client.query('version')

             except pywatchman.CommandError as e:
                 # Abstract away pywatchman errors.
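Dropping the `_watch_result =` and `_version =` bindings fixes F841: both calls are made purely for their side effects, and pyflakes reports a local that is assigned but never read even when the name is underscore-prefixed. A sketch with a hypothetical client object standing in for the pywatchman client:

    class FakeClient(object):
        # Hypothetical stand-in for the watchman client used above.
        def receive(self):
            return {'subscription': 'build'}

    client = FakeClient()
    client.receive()  # call for the side effect; do not bind the result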
@@ -1200,67 +1200,103 @@ SUBCONTEXTS = {cls.__name__: cls for cls in SUBCONTEXTS}
 # (storage_type, input_types, docs)

 VARIABLES = {
-    'SOURCES': (ContextDerivedTypedListWithItems(Path, StrictOrderingOnAppendListWithFlagsFactory({'no_pgo': bool, 'flags': List})), list,
-        """Source code files.
+    'SOURCES': (
+        ContextDerivedTypedListWithItems(
+            Path,
+            StrictOrderingOnAppendListWithFlagsFactory(
+                {'no_pgo': bool, 'flags': List}
+            )
+        ),
+        list,
+        """Source code files.

         This variable contains a list of source code files to compile.
         Accepts assembler, C, C++, Objective C/C++.
-        """),
+        """
+    ),

-    'FILES_PER_UNIFIED_FILE': (int, int,
-        """The number of source files to compile into each unified source file.
+    'FILES_PER_UNIFIED_FILE': (
+        int,
+        int,
+        """The number of source files to compile into each unified source file.

-        """),
+        """
+    ),

-    'IS_RUST_LIBRARY': (bool, bool,
-        """Whether the current library defined by this moz.build is built by Rust.
+    'IS_RUST_LIBRARY': (
+        bool,
+        bool,
+        """Whether the current library defined by this moz.build is built by Rust.

         The library defined by this moz.build should have a build definition in
         a Cargo.toml file that exists in this moz.build's directory.
-        """),
+        """
+    ),

-    'RUST_LIBRARY_FEATURES': (List, list,
-        """Cargo features to activate for this library.
+    'RUST_LIBRARY_FEATURES': (
+        List,
+        list,
+        """Cargo features to activate for this library.

         This variable should not be used directly; you should be using the
         RustLibrary template instead.
-        """),
+        """
+    ),

-    'RUST_LIBRARY_TARGET_DIR': (unicode, unicode,
-        """Where CARGO_TARGET_DIR should point when compiling this library. If
+    'RUST_LIBRARY_TARGET_DIR': (
+        unicode,
+        unicode,
+        """Where CARGO_TARGET_DIR should point when compiling this library. If
         not set, it defaults to the current objdir. It should be a relative path
         to the current objdir; absolute paths should not be used.

         This variable should not be used directly; you should be using the
         RustLibrary template instead.
-        """),
+        """
+    ),

-    'HOST_RUST_LIBRARY_FEATURES': (List, list,
-        """Cargo features to activate for this host library.
+    'HOST_RUST_LIBRARY_FEATURES': (
+        List,
+        list,
+        """Cargo features to activate for this host library.

         This variable should not be used directly; you should be using the
         HostRustLibrary template instead.
-        """),
+        """
+    ),

-    'RUST_TESTS': (TypedList(unicode), list,
-        """Names of Rust tests to build and run via `cargo test`.
-        """),
+    'RUST_TESTS': (
+        TypedList(unicode),
+        list,
+        """Names of Rust tests to build and run via `cargo test`.
+        """),

-    'RUST_TEST_FEATURES': (TypedList(unicode), list,
-        """Cargo features to activate for RUST_TESTS.
-        """),
+    'RUST_TEST_FEATURES': (
+        TypedList(unicode),
+        list,
+        """Cargo features to activate for RUST_TESTS.
+        """
+    ),

-    'UNIFIED_SOURCES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """Source code files that can be compiled together.
+    'UNIFIED_SOURCES': (
+        ContextDerivedTypedList(
+            SourcePath,
+            StrictOrderingOnAppendList
+        ),
+        list,
+        """Source code files that can be compiled together.

         This variable contains a list of source code files to compile,
         that can be concatenated all together and built as a single source
         file. This can help make the build faster and reduce the debug info
         size.
-        """),
+        """
+    ),

-    'GENERATED_FILES': (GeneratedFilesList, list,
-        """Generic generated files.
+    'GENERATED_FILES': (
+        GeneratedFilesList,
+        list,
+        """Generic generated files.

         This variable contains a list of files for the build system to
         generate at export time. The generation method may be declared
@@ -1308,10 +1344,13 @@ VARIABLES = {
         build, regardless of whether it is stale. This is special to the
         RecursiveMake backend and intended for special situations only (e.g.,
         localization). Please consult a build peer before using ``force``.
-        """),
+        """
+    ),

-    'DEFINES': (InitializedDefines, dict,
-        """Dictionary of compiler defines to declare.
+    'DEFINES': (
+        InitializedDefines,
+        dict,
+        """Dictionary of compiler defines to declare.

         These are passed in to the compiler as ``-Dkey='value'`` for string
         values, ``-Dkey=value`` for numeric values, or ``-Dkey`` if the
@@ -1336,17 +1375,23 @@ VARIABLES = {
            'MOZ_EXTENSIONS_DB_SCHEMA': 15,
            'DLL_SUFFIX': '".so"',
         })
-        """),
+        """
+    ),

-    'DELAYLOAD_DLLS': (List, list,
-        """Delay-loaded DLLs.
+    'DELAYLOAD_DLLS': (
+        List,
+        list,
+        """Delay-loaded DLLs.

         This variable contains a list of DLL files which the module being linked
         should load lazily. This only has an effect when building with MSVC.
-        """),
+        """
+    ),

-    'DIRS': (ContextDerivedTypedList(SourcePath), list,
-        """Child directories to descend into looking for build frontend files.
+    'DIRS': (
+        ContextDerivedTypedList(SourcePath),
+        list,
+        """Child directories to descend into looking for build frontend files.

         This works similarly to the ``DIRS`` variable in make files. Each str
         value in the list is the name of a child directory. When this file is
@@ -1357,10 +1402,13 @@ VARIABLES = {
         Values are relative paths. They can be multiple directory levels
         above or below. Use ``..`` for parent directories and ``/`` for path
         delimiters.
-        """),
+        """
+    ),

-    'HAS_MISC_RULE': (bool, bool,
-        """Whether this directory should be traversed in the ``misc`` tier.
+    'HAS_MISC_RULE': (
+        bool,
+        bool,
+        """Whether this directory should be traversed in the ``misc`` tier.

         Many ``libs`` rules still exist in Makefile.in files. We highly prefer
         that these rules exist in the ``misc`` tier/target so that they can be
@@ -1373,7 +1421,8 @@ VARIABLES = {
         Please note that converting ``libs`` rules to the ``misc`` tier must
         be done with care, as there are many implicit dependencies that can
         break the build in subtle ways.
-        """),
+        """
+    ),

     'FINAL_TARGET_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
         """List of files to be installed into the application directory.
@@ -1680,23 +1729,35 @@ VARIABLES = {
         the Cargo.toml in the same directory.
         """),

-    'CONFIGURE_SUBST_FILES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """Output files that will be generated using configure-like substitution.
+    'CONFIGURE_SUBST_FILES': (
+        ContextDerivedTypedList(
+            SourcePath,
+            StrictOrderingOnAppendList
+        ),
+        list,
+        """Output files that will be generated using configure-like substitution.

         This is a substitute for ``AC_OUTPUT`` in autoconf. For each path in this
         list, we will search for a file in the srcdir having the name
         ``{path}.in``. The contents of this file will be read and variable
         patterns like ``@foo@`` will be substituted with the values of the
         ``AC_SUBST`` variables declared during configure.
-        """),
+        """
+    ),

-    'CONFIGURE_DEFINE_FILES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
-        """Output files generated from configure/config.status.
+    'CONFIGURE_DEFINE_FILES': (
+        ContextDerivedTypedList(
+            SourcePath,
+            StrictOrderingOnAppendList
+        ),
+        list,
+        """Output files generated from configure/config.status.

         This is a substitute for ``AC_CONFIG_HEADER`` in autoconf. This is very
         similar to ``CONFIGURE_SUBST_FILES`` except the generation logic takes
         into account the values of ``AC_DEFINE`` instead of ``AC_SUBST``.
-        """),
+        """
+    ),

     'EXPORTS': (ContextDerivedTypedHierarchicalStringList(Path), list,
         """List of files to be exported, and in which subdirectories.
@@ -1920,7 +1981,7 @@ VARIABLES = {
         """),

-    'TELEMETRY_TESTS_CLIENT_MANIFESTS': (ManifestparserManifestList, list,
-                                         """List of manifest files defining telemetry client tests.
-                                         """),
+    'TELEMETRY_TESTS_CLIENT_MANIFESTS': (ManifestparserManifestList, list,
+        """List of manifest files defining telemetry client tests.
+        """),

     # The following variables are used to control the target of installed files.
@@ -2426,23 +2487,31 @@ SPECIAL_VARIABLES = {
         ``$(FINAL_TARGET)/components/``.
         """),

-    'EXTRA_PP_COMPONENTS': (lambda context: context['FINAL_TARGET_PP_FILES'].components._strings, list,
-        """Javascript XPCOM files.
+    'EXTRA_PP_COMPONENTS': (
+        lambda context: context['FINAL_TARGET_PP_FILES'].components._strings,
+        list,
+        """Javascript XPCOM files.

         This variable contains a list of files to preprocess. Generated
         files will be installed in the ``/components`` directory of the distribution.
-        """),
+        """
+    ),

-    'JS_PREFERENCE_FILES': (lambda context: context['FINAL_TARGET_FILES'].defaults.pref._strings, list,
-        """Exported JavaScript files.
+    'JS_PREFERENCE_FILES': (
+        lambda context: context['FINAL_TARGET_FILES'].defaults.pref._strings,
+        list,
+        """Exported JavaScript files.

         A list of files copied into the dist directory for packaging and installation.
         Path will be defined for gre or application prefs dir based on what is building.
         """),

-    'JS_PREFERENCE_PP_FILES': (lambda context: context['FINAL_TARGET_PP_FILES'].defaults.pref._strings, list,
-        """Like JS_PREFERENCE_FILES, preprocessed..
-        """),
+    'JS_PREFERENCE_PP_FILES': (
+        lambda context: context['FINAL_TARGET_PP_FILES'].defaults.pref._strings,
+        list,
+        """Like JS_PREFERENCE_FILES, preprocessed..
+        """
+    ),

     'RESOURCE_FILES': (lambda context: context['FINAL_TARGET_FILES'].res, list,
         """List of resources to be exported, and in which subdirectories.
@@ -2459,13 +2528,16 @@ SPECIAL_VARIABLES = {
             RESOURCE_FILES.fonts += ['bar.res']
         """),

-    'CONTENT_ACCESSIBLE_FILES': (lambda context: context['FINAL_TARGET_FILES'].contentaccessible, list,
-        """List of files which can be accessed by web content through resource:// URIs.
+    'CONTENT_ACCESSIBLE_FILES': (
+        lambda context: context['FINAL_TARGET_FILES'].contentaccessible,
+        list,
+        """List of files which can be accessed by web content through resource:// URIs.

         ``CONTENT_ACCESSIBLE_FILES`` is used to list the files to be exported
         to ``dist/bin/contentaccessible``. Files can also be appended to a
         field to indicate which subdirectory they should be exported to.
-        """),
+        """
+    ),

     'EXTRA_JS_MODULES': (lambda context: context['FINAL_TARGET_FILES'].modules, list,
         """Additional JavaScript files to distribute.
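The long VARIABLES and SPECIAL_VARIABLES hunks above are mechanical: each `(storage_type, input_type, docstring)` tuple is re-laid out with one element per line so every entry fits the line limit. The resulting shape of one entry, reduced to plain built-in types so the sketch runs standalone:

    VARIABLES = {
        'FILES_PER_UNIFIED_FILE': (
            int,
            int,
            """The number of source files to compile into each unified source file.
            """
        ),
    }

    print(sorted(VARIABLES))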
@@ -21,13 +21,12 @@ from mozbuild.frontend.context import (
     ObjDirPath,
     SourcePath,
 )
-from mozbuild.util import StrictOrderingOnAppendList
 from mozpack.chrome.manifest import ManifestEntry

 import mozpack.path as mozpath
 from .context import FinalTargetValue

-from collections import defaultdict, OrderedDict
+from collections import defaultdict
 import itertools

 from ..util import (
@@ -512,7 +511,8 @@ class BaseProgram(Linkable):
     @property
     def output_path(self):
         if self.installed:
-            return ObjDirPath(self._context, '!/' + mozpath.join(self.install_target, self.program))
+            return ObjDirPath(self._context, '!/' + mozpath.join(
+                self.install_target, self.program))
         else:
             return ObjDirPath(self._context, '!' + self.program)

@@ -1049,10 +1049,12 @@ class UnifiedSources(BaseSources):

             suffix = self.canonical_suffix[1:]
             unified_prefix = 'Unified_%s_%s' % (suffix, unified_prefix)
-            self.unified_source_mapping = list(group_unified_files(source_files,
-                                               unified_prefix=unified_prefix,
-                                               unified_suffix=suffix,
-                                               files_per_unified_file=files_per_unified_file))
+            self.unified_source_mapping = list(
+                group_unified_files(source_files,
+                                    unified_prefix=unified_prefix,
+                                    unified_suffix=suffix,
+                                    files_per_unified_file=files_per_unified_file)
+            )


 class InstallationTarget(ContextDerived):
@@ -1200,8 +1202,10 @@ class GeneratedFile(ContextDerived):
             '.inc',
             '.py',
             '.rs',
-            'node.stub',  # To avoid VPATH issues with installing node files: https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
-            # We need to compile Java to generate JNI wrappers for native code compilation to consume.
+            'node.stub',  # To avoid VPATH issues with installing node files:
+            # https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
+            # We need to compile Java to generate JNI wrappers for native code
+            # compilation to consume.
             'android_apks',
             '.profdata',
             '.webidl'
@@ -24,7 +24,6 @@ import pytoml

 from .data import (
     BaseRustProgram,
-    BaseSources,
     ChromeManifestEntry,
     ComputedFlags,
     ConfigFileSubstitution,
@@ -57,7 +56,6 @@ from .data import (
     ObjdirFiles,
     ObjdirPreprocessedFiles,
     PerSourceFlag,
-    PgoGenerateOnlySources,
     WebIDLCollection,
     Program,
     RustLibrary,
@@ -93,7 +91,6 @@ from .context import (
     ObjDirPath,
     Path,
     SubContext,
-    TemplateContext,
 )

 from mozbuild.base import ExecutionSummary
@@ -448,8 +445,8 @@ class TreeMetadataEmitter(LoggingMixin):
                     raise SandboxValidationError(
                         '%s contains "static:%s", but there is only a shared "%s" '
                         'in %s. You may want to add FORCE_STATIC_LIB=True in '
-                        '%s/moz.build, or remove "static:".' % (variable, path,
-                        name, candidates[0].relobjdir, candidates[0].relobjdir),
+                        '%s/moz.build, or remove "static:".' % (
+                            variable, path, name, candidates[0].relobjdir, candidates[0].relobjdir),
                         context)

                 elif isinstance(obj, StaticLibrary) and isinstance(candidates[0],
@@ -586,8 +583,8 @@ class TreeMetadataEmitter(LoggingMixin):
             if program in self._binaries:
                 raise SandboxValidationError(
                     'Cannot use "%s" as %s name, '
-                    'because it is already used in %s' % (program, kind,
-                    self._binaries[program].relsrcdir), context)
+                    'because it is already used in %s' % (
+                        program, kind, self._binaries[program].relsrcdir), context)
         for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
             program = context.get(kind)
             if program:
@@ -636,8 +633,8 @@ class TreeMetadataEmitter(LoggingMixin):
             if program in self._binaries:
                 raise SandboxValidationError(
                     'Cannot use "%s" in %s, '
-                    'because it is already used in %s' % (program, kind,
-                    self._binaries[program].relsrcdir), context)
+                    'because it is already used in %s' % (
+                        program, kind, self._binaries[program].relsrcdir), context)
             self._binaries[program] = cls(context, program,
                                           is_unit_test=kind == 'CPP_UNIT_TESTS')
             self._linkage.append((context, self._binaries[program],
@@ -650,8 +647,8 @@ class TreeMetadataEmitter(LoggingMixin):

         if host_libname:
             if host_libname == libname:
-                raise SandboxValidationError('LIBRARY_NAME and '
-                    'HOST_LIBRARY_NAME must have a different value', context)
+                raise SandboxValidationError(
+                    'LIBRARY_NAME and HOST_LIBRARY_NAME must have a different value', context)

         is_rust_library = context.get('IS_RUST_LIBRARY')
         if is_rust_library:
@@ -860,9 +857,9 @@ class TreeMetadataEmitter(LoggingMixin):
                 assert isinstance(f, Path)
                 gen_srcs.append(full_path)
             if symbol == 'SOURCES':
-                flags = context_srcs[f]
-                if flags:
-                    all_flags[full_path] = flags
+                context_flags = context_srcs[f]
+                if context_flags:
+                    all_flags[full_path] = context_flags

             if isinstance(f, SourcePath) and not os.path.exists(full_path):
                 raise SandboxValidationError('File listed in %s does not '
@@ -1110,8 +1107,11 @@ class TreeMetadataEmitter(LoggingMixin):
                 generated_files.add(str(sub.relpath))
                 yield sub

-        for defines_var, cls, backend_flags in (('DEFINES', Defines, (computed_flags, computed_as_flags)),
-                                                ('HOST_DEFINES', HostDefines, (computed_host_flags,))):
+        for defines_var, cls, backend_flags in (('DEFINES', Defines,
+                                                 (computed_flags, computed_as_flags)),
+                                                ('HOST_DEFINES', HostDefines,
+                                                 (computed_host_flags,))
+                                                ):
             defines = context.get(defines_var)
             if defines:
                 defines_obj = cls(context, defines)
@@ -1152,18 +1152,20 @@ class TreeMetadataEmitter(LoggingMixin):
             full_path = local_include.full_path
             if not isinstance(local_include, ObjDirPath):
                 if not os.path.exists(full_path):
-                    raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
-                        'does not exist: %s (resolved to %s)' % (local_include,
-                        full_path), context)
+                    raise SandboxValidationError(
+                        'Path specified in LOCAL_INCLUDES does not exist: %s (resolved to %s)' %
+                        (local_include, full_path), context)
                 if not os.path.isdir(full_path):
                     raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
-                        'is a filename, but a directory is required: %s '
-                        '(resolved to %s)' % (local_include, full_path), context)
+                                                 'is a filename, but a directory is required: %s '
+                                                 '(resolved to %s)' % (local_include, full_path),
+                                                 context)
             if (full_path == context.config.topsrcdir or
                     full_path == context.config.topobjdir):
-                raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
-                    '(%s) resolves to the topsrcdir or topobjdir (%s), which is '
-                    'not allowed' % (local_include, full_path), context)
+                raise SandboxValidationError(
+                    'Path specified in LOCAL_INCLUDES '
+                    '(%s) resolves to the topsrcdir or topobjdir (%s), which is '
+                    'not allowed' % (local_include, full_path), context)
             include_obj = LocalInclude(context, local_include)
             local_includes.append(include_obj.path.full_path)
             yield include_obj
@@ -1258,8 +1260,13 @@ class TreeMetadataEmitter(LoggingMixin):
                 # in anything *but* LOCALIZED_FILES.
                 if f.target_basename in localized_generated_files:
                     raise SandboxValidationError(
-                        ('Outputs of LOCALIZED_GENERATED_FILES cannot be used in %s: ' +
-                         '%s') % (var, f), context)
+                        (
+                            'Outputs of LOCALIZED_GENERATED_FILES cannot '
+                            'be used in %s: %s'
+                        )
+                        % (var, f),
+                        context,
+                    )

         # Addons (when XPI_NAME is defined) and Applications (when
         # DIST_SUBDIR is defined) use a different preferences directory
@@ -1496,9 +1503,10 @@ class TreeMetadataEmitter(LoggingMixin):
                     not os.path.isfile(mozpath.join(context.config.topsrcdir,
                                                     install_path[2:])),
                     install_path not in install_info.external_installs]):
-                raise SandboxValidationError('Error processing test '
-                    'manifest %s: entry in support-files not present '
-                    'in the srcdir: %s' % (path, install_path), context)
+                raise SandboxValidationError(
+                    'Error processing test '
+                    'manifest %s: entry in support-files not present '
+                    'in the srcdir: %s' % (path, install_path), context)

         obj.deferred_installs |= install_info.deferred_installs

@@ -1535,16 +1543,18 @@ class TreeMetadataEmitter(LoggingMixin):
                 try:
                     del obj.installs[mozpath.join(manifest_dir, f)]
                 except KeyError:
-                    raise SandboxValidationError('Error processing test '
-                        'manifest %s: entry in generated-files not present '
-                        'elsewhere in manifest: %s' % (path, f), context)
+                    raise SandboxValidationError(
+                        'Error processing test '
+                        'manifest %s: entry in generated-files not present '
+                        'elsewhere in manifest: %s' % (path, f), context)

             yield obj
         except (AssertionError, Exception):
-            raise SandboxValidationError('Error processing test '
-                'manifest file %s: %s' % (path,
-                '\n'.join(traceback.format_exception(*sys.exc_info()))),
-                context)
+            raise SandboxValidationError(
+                'Error processing test '
+                'manifest file %s: %s' % (path,
+                                          '\n'.join(traceback.format_exception(*sys.exc_info()))),
+                context)

     def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
         manifest_full_path = manifest_path.full_path
@@ -10,7 +10,6 @@ import sys
 import os
 import time
 import types
-import warnings

 import mozpack.path as mozpath
 from mozpack.files import FileFinder
@@ -23,10 +22,7 @@ from .context import (
 )
 from mozbuild.util import (
     expand_variables,
-    List,
-    memoize,
 )
-from .reader import SandboxValidationError

 # Define this module as gyp.generator.mozbuild so that gyp can use it
 # as a generator under the name "mozbuild".
@@ -94,7 +90,10 @@ def handle_actions(actions, context, action_overrides):
         output = outputs[0]
         if not output.startswith(idir):
             raise NotImplementedError(
-                'GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
+                'GYP actions outputting to somewhere other than '
+                '<(INTERMEDIATE_DIR) not supported: %s'
+                % output
+            )
         output = output[len(idir):]
         context['GENERATED_FILES'] += [output]
         g = context['GENERATED_FILES'][output]
@@ -208,7 +207,8 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
             context['PROGRAM'] = name.decode('utf-8')
         if spec['type'] == 'shared_library':
             context['FORCE_SHARED_LIB'] = True
-        elif spec['type'] == 'static_library' and spec.get('variables', {}).get('no_expand_libs', '0') == '1':
+        elif spec['type'] == 'static_library' and \
+                spec.get('variables', {}).get('no_expand_libs', '0') == '1':
             # PSM links a NSS static library, but our folded libnss
             # doesn't actually export everything that all of the
             # objects within would need, so that one library
@@ -256,7 +256,8 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                     name, value = define.split('=', 1)
                     # The NSS gyp file doesn't expose a way to override this
                     # currently, so we do so here.
-                    if name == 'NSS_ALLOW_SSLKEYLOGFILE' and config.substs.get('RELEASE_OR_BETA', False):
+                    if name == 'NSS_ALLOW_SSLKEYLOGFILE' and \
+                            config.substs.get('RELEASE_OR_BETA', False):
                         continue
                     context['DEFINES'][name] = value
                 else:
@ -155,7 +155,7 @@ class Sandbox(dict):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
source = self._finder.get(path).read()
|
source = self._finder.get(path).read()
|
||||||
except Exception as e:
|
except Exception:
|
||||||
raise SandboxLoadError(self._context.source_stack,
|
raise SandboxLoadError(self._context.source_stack,
|
||||||
sys.exc_info()[2], read_error=path)
|
sys.exc_info()[2], read_error=path)
|
||||||
|
|
||||||
|
|
|
@ -79,7 +79,9 @@ class MozbuildWriter(object):
|
||||||
self.write(self.indent + key)
|
self.write(self.indent + key)
|
||||||
self.write(' += [\n ' + self.indent)
|
self.write(' += [\n ' + self.indent)
|
||||||
self.write(
|
self.write(
|
||||||
(',\n ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
|
(',\n ' + self.indent).join(
|
||||||
|
alphabetical_sorted(self.mb_serialize(v) for v in value))
|
||||||
|
)
|
||||||
self.write('\n')
|
self.write('\n')
|
||||||
self.write_ln(']')
|
self.write_ln(']')
|
||||||
|
|
||||||
|
@ -277,7 +279,7 @@ def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
|
||||||
context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
|
context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
|
||||||
if use_defines_in_asflags and defines:
|
if use_defines_in_asflags and defines:
|
||||||
context_attrs['ASFLAGS'] += ['-D' + d for d in defines]
|
context_attrs['ASFLAGS'] += ['-D' + d for d in defines]
|
||||||
flags = [f for f in spec.get('cflags', []) if f in mozilla_flags]
|
flags = [_f for _f in spec.get('cflags', []) if _f in mozilla_flags]
|
||||||
if flags:
|
if flags:
|
||||||
suffix_map = {
|
suffix_map = {
|
||||||
'.c': 'CFLAGS',
|
'.c': 'CFLAGS',
|
||||||
|
@ -432,7 +434,6 @@ def write_mozbuild(config, srcdir, output, non_unified_sources, gn_config_files,
|
||||||
mb.write('\n')
|
mb.write('\n')
|
||||||
mb.write(generated_header)
|
mb.write(generated_header)
|
||||||
|
|
||||||
all_attr_sets = [attrs for _, attrs in configs]
|
|
||||||
all_args = [args for args, _ in configs]
|
all_args = [args for args, _ in configs]
|
||||||
|
|
||||||
# Start with attributes that will be a part of the mozconfig
|
# Start with attributes that will be a part of the mozconfig
|
||||||
|
|
|
@ -18,7 +18,6 @@ import logging
|
||||||
from time import localtime
|
from time import localtime
|
||||||
from MozZipFile import ZipFile
|
from MozZipFile import ZipFile
|
||||||
from cStringIO import StringIO
|
from cStringIO import StringIO
|
||||||
from collections import defaultdict
|
|
||||||
|
|
||||||
from mozbuild.preprocessor import Preprocessor
|
from mozbuild.preprocessor import Preprocessor
|
||||||
from mozbuild.action.buildlist import addEntriesToListFile
|
from mozbuild.action.buildlist import addEntriesToListFile
|
||||||
|
@ -108,9 +107,10 @@ class JarManifestParser(object):
|
||||||
relsrcline = re.compile('relativesrcdir\s+(?P<relativesrcdir>.+?):')
|
relsrcline = re.compile('relativesrcdir\s+(?P<relativesrcdir>.+?):')
|
||||||
regline = re.compile('\%\s+(.*)$')
|
regline = re.compile('\%\s+(.*)$')
|
||||||
entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+'
|
entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+'
|
||||||
entryline = re.compile(entryre
|
entryline = re.compile(
|
||||||
+ '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$'
|
entryre + ('(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*'
|
||||||
)
|
'(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$')
|
||||||
|
)
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self._current_jar = None
|
self._current_jar = None
|
||||||
|
@ -245,7 +245,8 @@ class JarMaker(object):
|
||||||
help='base directory to be used for localization (requires relativesrcdir)'
|
help='base directory to be used for localization (requires relativesrcdir)'
|
||||||
)
|
)
|
||||||
p.add_option('--locale-mergedir', type='string', action='store',
|
p.add_option('--locale-mergedir', type='string', action='store',
|
||||||
help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
|
help='base directory to be used for l10n-merge '
|
||||||
|
'(requires l10n-base and relativesrcdir)'
|
||||||
)
|
)
|
||||||
p.add_option('--relativesrcdir', type='string',
|
p.add_option('--relativesrcdir', type='string',
|
||||||
help='relativesrcdir to be used for localization')
|
help='relativesrcdir to be used for localization')
|
||||||
|
@ -501,7 +502,7 @@ class JarMaker(object):
|
||||||
try:
|
try:
|
||||||
info = self.jarfile.getinfo(aPath)
|
info = self.jarfile.getinfo(aPath)
|
||||||
return info.date_time
|
return info.date_time
|
||||||
except:
|
except Exception:
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
def getOutput(self, name):
|
def getOutput(self, name):
|
||||||
|
|
|
@ -108,12 +108,12 @@ class Watch(MachCommandBase):
|
||||||
|
|
||||||
if not conditions.is_artifact_build(self):
|
if not conditions.is_artifact_build(self):
|
||||||
print('mach watch requires an artifact build. See '
|
print('mach watch requires an artifact build. See '
|
||||||
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Simple_Firefox_build')
|
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Simple_Firefox_build') # noqa
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
if not self.substs.get('WATCHMAN', None):
|
if not self.substs.get('WATCHMAN', None):
|
||||||
print('mach watch requires watchman to be installed. See '
|
print('mach watch requires watchman to be installed. See '
|
||||||
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching')
|
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching') # noqa
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
self._activate_virtualenv()
|
self._activate_virtualenv()
|
||||||
|
@ -121,7 +121,7 @@ class Watch(MachCommandBase):
|
||||||
self.virtualenv_manager.install_pip_package('pywatchman==1.3.0')
|
self.virtualenv_manager.install_pip_package('pywatchman==1.3.0')
|
||||||
except Exception:
|
except Exception:
|
||||||
print('Could not install pywatchman from pip. See '
|
print('Could not install pywatchman from pip. See '
|
||||||
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching')
|
'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching') # noqa
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
from mozbuild.faster_daemon import Daemon
|
from mozbuild.faster_daemon import Daemon
|
||||||
|
@ -310,7 +310,7 @@ class CargoProvider(MachCommandBase):
|
||||||
|
|
||||||
if all_crates:
|
if all_crates:
|
||||||
crates = crates_and_roots.keys()
|
crates = crates_and_roots.keys()
|
||||||
elif crates == None or crates == []:
|
elif crates is None or crates == []:
|
||||||
crates = ['gkrust']
|
crates = ['gkrust']
|
||||||
|
|
||||||
for crate in crates:
|
for crate in crates:
|
||||||
|
@ -354,6 +354,7 @@ class Doctor(MachCommandBase):
|
||||||
class Clobber(MachCommandBase):
|
class Clobber(MachCommandBase):
|
||||||
NO_AUTO_LOG = True
|
NO_AUTO_LOG = True
|
||||||
CLOBBER_CHOICES = ['objdir', 'python']
|
CLOBBER_CHOICES = ['objdir', 'python']
|
||||||
|
|
||||||
@Command('clobber', category='build',
|
@Command('clobber', category='build',
|
||||||
description='Clobber the tree (delete the object directory).')
|
description='Clobber the tree (delete the object directory).')
|
||||||
@CommandArgument('what', default=['objdir'], nargs='*',
|
@CommandArgument('what', default=['objdir'], nargs='*',
|
||||||
|
@ -571,8 +572,9 @@ class Warnings(MachCommandBase):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if warning['column'] is not None:
|
if warning['column'] is not None:
|
||||||
print('%s:%d:%d [%s] %s' % (filename, warning['line'],
|
print('%s:%d:%d [%s] %s' % (
|
||||||
warning['column'], warning['flag'], warning['message']))
|
filename, warning['line'], warning['column'],
|
||||||
|
warning['flag'], warning['message']))
|
||||||
else:
|
else:
|
||||||
print('%s:%d [%s] %s' % (filename, warning['line'],
|
print('%s:%d [%s] %s' % (filename, warning['line'],
|
||||||
warning['flag'], warning['message']))
|
warning['flag'], warning['message']))
|
||||||
|
@ -592,8 +594,9 @@ class GTestCommands(MachCommandBase):
|
||||||
@Command('gtest', category='testing',
|
@Command('gtest', category='testing',
|
||||||
description='Run GTest unit tests (C++ tests).')
|
description='Run GTest unit tests (C++ tests).')
|
||||||
@CommandArgument('gtest_filter', default=b"*", nargs='?', metavar='gtest_filter',
|
@CommandArgument('gtest_filter', default=b"*", nargs='?', metavar='gtest_filter',
|
||||||
help="test_filter is a ':'-separated list of wildcard patterns (called the positive patterns),"
|
help="test_filter is a ':'-separated list of wildcard patterns "
|
||||||
"optionally followed by a '-' and another ':'-separated pattern list (called the negative patterns).")
|
"(called the positive patterns), optionally followed by a '-' "
|
||||||
|
"and another ':'-separated pattern list (called the negative patterns).")
|
||||||
@CommandArgument('--jobs', '-j', default='1', nargs='?', metavar='jobs', type=int,
|
@CommandArgument('--jobs', '-j', default='1', nargs='?', metavar='jobs', type=int,
|
||||||
help='Run the tests in parallel using multiple processes.')
|
help='Run the tests in parallel using multiple processes.')
|
||||||
@CommandArgument('--tbpl-parser', '-t', action='store_true',
|
@CommandArgument('--tbpl-parser', '-t', action='store_true',
|
||||||
|
@ -620,12 +623,14 @@ class GTestCommands(MachCommandBase):
|
||||||
help='(Android only) Path to gtest libxul.so.')
|
help='(Android only) Path to gtest libxul.so.')
|
||||||
@CommandArgumentGroup('debugging')
|
@CommandArgumentGroup('debugging')
|
||||||
@CommandArgument('--debug', action='store_true', group='debugging',
|
@CommandArgument('--debug', action='store_true', group='debugging',
|
||||||
help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
|
help='Enable the debugger. Not specifying a --debugger option will result in '
|
||||||
|
'the default debugger being used.')
|
||||||
@CommandArgument('--debugger', default=None, type=str, group='debugging',
|
@CommandArgument('--debugger', default=None, type=str, group='debugging',
|
||||||
help='Name of debugger to use.')
|
help='Name of debugger to use.')
|
||||||
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
|
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
|
||||||
group='debugging',
|
group='debugging',
|
||||||
help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')
|
help='Command-line arguments to pass to the debugger itself; '
|
||||||
|
'split as the Bourne shell would.')
|
||||||
def gtest(self, shuffle, jobs, gtest_filter, tbpl_parser,
|
def gtest(self, shuffle, jobs, gtest_filter, tbpl_parser,
|
||||||
package, adb_path, device_serial, remote_test_root, libxul_path,
|
package, adb_path, device_serial, remote_test_root, libxul_path,
|
||||||
debug, debugger, debugger_args):
|
debug, debugger, debugger_args):
|
||||||
|
@ -664,7 +669,8 @@ class GTestCommands(MachCommandBase):
|
||||||
if debug or debugger or debugger_args:
|
if debug or debugger or debugger_args:
|
||||||
print("--debug options are not supported on Android and will be ignored")
|
print("--debug options are not supported on Android and will be ignored")
|
||||||
return self.android_gtest(cwd, shuffle, gtest_filter,
|
return self.android_gtest(cwd, shuffle, gtest_filter,
|
||||||
package, adb_path, device_serial, remote_test_root, libxul_path)
|
package, adb_path, device_serial,
|
||||||
|
remote_test_root, libxul_path)
|
||||||
|
|
||||||
if package or adb_path or device_serial or remote_test_root or libxul_path:
|
if package or adb_path or device_serial or remote_test_root or libxul_path:
|
||||||
print("One or more Android-only options will be ignored")
|
print("One or more Android-only options will be ignored")
|
||||||
|
@ -748,7 +754,7 @@ class GTestCommands(MachCommandBase):
|
||||||
from mozlog.commandline import setup_logging
|
from mozlog.commandline import setup_logging
|
||||||
format_args = {'level': self._mach_context.settings['test']['level']}
|
format_args = {'level': self._mach_context.settings['test']['level']}
|
||||||
default_format = self._mach_context.settings['test']['format']
|
default_format = self._mach_context.settings['test']['format']
|
||||||
log = setup_logging('mach-gtest', {}, {default_format: sys.stdout}, format_args)
|
setup_logging('mach-gtest', {}, {default_format: sys.stdout}, format_args)
|
||||||
|
|
||||||
# ensure that a device is available and test app is installed
|
# ensure that a device is available and test app is installed
|
||||||
from mozrunner.devices.android_device import (verify_android_device, get_adb_path)
|
from mozrunner.devices.android_device import (verify_android_device, get_adb_path)
|
||||||
|
@ -916,11 +922,13 @@ class RunProgram(MachCommandBase):
|
||||||
prog_group = 'the compiled program'
|
prog_group = 'the compiled program'
|
||||||
|
|
||||||
@Command('run-desktop', category='post-build',
|
@Command('run-desktop', category='post-build',
|
||||||
conditional_name='run',
|
conditional_name='run',
|
||||||
conditions=[conditions.is_not_android],
|
conditions=[conditions.is_not_android],
|
||||||
description='Run the compiled program, possibly under a debugger or DMD.')
|
description='Run the compiled program, possibly under a debugger or DMD.')
|
||||||
@CommandArgument('params', nargs='...', group=prog_group,
|
@CommandArgument('params', nargs='...', group=prog_group,
|
||||||
help='Command-line arguments to be passed through to the program. Not specifying a --profile or -P option will result in a temporary profile being used.')
|
help='Command-line arguments to be passed through to the program. Not '
|
||||||
|
'specifying a --profile or -P option will result in a temporary profile '
|
||||||
|
'being used.')
|
||||||
@CommandArgumentGroup(prog_group)
|
@CommandArgumentGroup(prog_group)
|
||||||
@CommandArgument('--remote', '-r', action='store_true', group=prog_group,
|
@CommandArgument('--remote', '-r', action='store_true', group=prog_group,
|
||||||
help='Do not pass the --no-remote argument by default.')
|
help='Do not pass the --no-remote argument by default.')
|
||||||
|
@ -933,19 +941,26 @@ class RunProgram(MachCommandBase):
|
||||||
@CommandArgument('--enable-crash-reporter', action='store_true', group=prog_group,
|
@CommandArgument('--enable-crash-reporter', action='store_true', group=prog_group,
|
||||||
help='Run the program with the crash reporter enabled.')
|
help='Run the program with the crash reporter enabled.')
|
||||||
@CommandArgument('--setpref', action='append', default=[], group=prog_group,
|
@CommandArgument('--setpref', action='append', default=[], group=prog_group,
|
||||||
help='Set the specified pref before starting the program. Can be set multiple times. Prefs can also be set in ~/.mozbuild/machrc in the [runprefs] section - see `./mach settings` for more information.')
|
help='Set the specified pref before starting the program. Can be set '
|
||||||
|
'multiple times. Prefs can also be set in ~/.mozbuild/machrc in the '
|
||||||
|
'[runprefs] section - see `./mach settings` for more information.')
|
||||||
@CommandArgument('--temp-profile', action='store_true', group=prog_group,
|
@CommandArgument('--temp-profile', action='store_true', group=prog_group,
|
||||||
help='Run the program using a new temporary profile created inside the objdir.')
|
help='Run the program using a new temporary profile created inside '
|
||||||
|
'the objdir.')
|
||||||
@CommandArgument('--macos-open', action='store_true', group=prog_group,
|
@CommandArgument('--macos-open', action='store_true', group=prog_group,
|
||||||
help="On macOS, run the program using the open(1) command. Per open(1), the browser is launched \"just as if you had double-clicked the file's icon\". The browser can not be launched under a debugger with this option.")
|
help="On macOS, run the program using the open(1) command. Per open(1), "
|
||||||
|
"the browser is launched \"just as if you had double-clicked the file's "
|
||||||
|
"icon\". The browser can not be launched under a debugger with this option.")
|
||||||
@CommandArgumentGroup('debugging')
|
@CommandArgumentGroup('debugging')
|
||||||
@CommandArgument('--debug', action='store_true', group='debugging',
|
@CommandArgument('--debug', action='store_true', group='debugging',
|
||||||
help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
|
help='Enable the debugger. Not specifying a --debugger option will result '
|
||||||
|
'in the default debugger being used.')
|
||||||
@CommandArgument('--debugger', default=None, type=str, group='debugging',
|
@CommandArgument('--debugger', default=None, type=str, group='debugging',
|
||||||
help='Name of debugger to use.')
|
help='Name of debugger to use.')
|
||||||
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
|
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
|
||||||
group='debugging',
|
group='debugging',
|
||||||
help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')
|
help='Command-line arguments to pass to the debugger itself; '
|
||||||
|
'split as the Bourne shell would.')
|
||||||
@CommandArgument('--debugparams', action=StoreDebugParamsAndWarnAction,
|
@CommandArgument('--debugparams', action=StoreDebugParamsAndWarnAction,
|
||||||
default=None, type=str, dest='debugger_args', group='debugging',
|
default=None, type=str, dest='debugger_args', group='debugging',
|
||||||
help=argparse.SUPPRESS)
|
help=argparse.SUPPRESS)
|
||||||
|
@ -1724,7 +1739,7 @@ class StaticAnalysisMonitor(object):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
warning = self._warnings_collector.process_line(line)
|
warning = self._warnings_collector.process_line(line)
|
||||||
except:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if line.find('clang-tidy') != -1:
|
if line.find('clang-tidy') != -1:
|
||||||
|
@ -1805,8 +1820,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
help='Output format to write in a file')
|
help='Output format to write in a file')
|
||||||
@CommandArgument('--outgoing', default=False, action='store_true',
|
@CommandArgument('--outgoing', default=False, action='store_true',
|
||||||
help='Run static analysis checks on outgoing files from mercurial repository')
|
help='Run static analysis checks on outgoing files from mercurial repository')
|
||||||
def check(self, source=None, jobs=2, strip=1, verbose=False,
|
def check(self, source=None, jobs=2, strip=1, verbose=False, checks='-*',
|
||||||
checks='-*', fix=False, header_filter='', output=None, format='text', outgoing=False):
|
fix=False, header_filter='', output=None, format='text', outgoing=False):
|
||||||
from mozbuild.controller.building import (
|
from mozbuild.controller.building import (
|
||||||
StaticAnalysisFooter,
|
StaticAnalysisFooter,
|
||||||
StaticAnalysisOutputManager,
|
StaticAnalysisOutputManager,
|
||||||
|
@ -1820,7 +1835,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
return rc
|
return rc
|
||||||
|
|
||||||
if self._is_version_eligible() is False:
|
if self._is_version_eligible() is False:
|
||||||
self.log(logging.ERROR, 'static-analysis', {}, "You're using an old version of clang-format binary."
|
self.log(logging.ERROR, 'static-analysis', {},
|
||||||
|
"You're using an old version of clang-format binary."
|
||||||
" Please update to a more recent one by running: './mach bootstrap'")
|
" Please update to a more recent one by running: './mach bootstrap'")
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
|
@ -1895,11 +1911,12 @@ class StaticAnalysis(MachCommandBase):
|
||||||
help='Write coverity output translated to json output in a file')
|
help='Write coverity output translated to json output in a file')
|
||||||
@CommandArgument('--coverity_output_path', '-co', default=None,
|
@CommandArgument('--coverity_output_path', '-co', default=None,
|
||||||
help='Path where to write coverity results as cov-results.json. '
|
help='Path where to write coverity results as cov-results.json. '
|
||||||
'If no path is specified the default path from the coverity working directory, '
|
'If no path is specified the default path from the coverity working '
|
||||||
'~./mozbuild/coverity is used.')
|
'directory, ~./mozbuild/coverity is used.')
|
||||||
@CommandArgument('--outgoing', default=False, action='store_true',
|
@CommandArgument('--outgoing', default=False, action='store_true',
|
||||||
help='Run coverity on outgoing files from mercurial or git repository')
|
help='Run coverity on outgoing files from mercurial or git repository')
|
||||||
def check_coverity(self, source=[], output=None, coverity_output_path=None, outgoing=False, verbose=False):
|
def check_coverity(self, source=[], output=None, coverity_output_path=None,
|
||||||
|
outgoing=False, verbose=False):
|
||||||
self._set_log_level(verbose)
|
self._set_log_level(verbose)
|
||||||
self.log_manager.enable_all_structured_loggers()
|
self.log_manager.enable_all_structured_loggers()
|
||||||
|
|
||||||
|
@ -1994,14 +2011,16 @@ class StaticAnalysis(MachCommandBase):
|
||||||
def get_reliability_index_for_cov_checker(self, checker_name):
|
def get_reliability_index_for_cov_checker(self, checker_name):
|
||||||
if self._cov_config is None:
|
if self._cov_config is None:
|
||||||
self.log(logging.INFO, 'static-analysis', {}, 'Coverity config file not found, '
|
self.log(logging.INFO, 'static-analysis', {}, 'Coverity config file not found, '
|
||||||
'using default-value \'reliablity\' = medium. for checker {}'.format(checker_name))
|
'using default-value \'reliablity\' = medium. for checker {}'.format(
|
||||||
|
checker_name))
|
||||||
return 'medium'
|
return 'medium'
|
||||||
|
|
||||||
checkers = self._cov_config['coverity_checkers']
|
checkers = self._cov_config['coverity_checkers']
|
||||||
if checker_name not in checkers:
|
if checker_name not in checkers:
|
||||||
self.log(logging.INFO, 'static-analysis', {},
|
self.log(logging.INFO, 'static-analysis', {},
|
||||||
'Coverity checker {} not found to determine reliability index. '
|
'Coverity checker {} not found to determine reliability index. '
|
||||||
'For the moment we shall use the default \'reliablity\' = medium.'.format(checker_name))
|
'For the moment we shall use the default \'reliablity\' = medium.'.format(
|
||||||
|
checker_name))
|
||||||
return 'medium'
|
return 'medium'
|
||||||
|
|
||||||
if 'reliability' not in checkers[checker_name]:
|
if 'reliability' not in checkers[checker_name]:
|
||||||
|
@ -2009,7 +2028,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
self.log(logging.INFO, 'static-analysis', {},
|
self.log(logging.INFO, 'static-analysis', {},
|
||||||
'Coverity checker {} doesn\'t have a reliability index set, '
|
'Coverity checker {} doesn\'t have a reliability index set, '
|
||||||
'field \'reliability is missing\', please cosinder adding it. '
|
'field \'reliability is missing\', please cosinder adding it. '
|
||||||
'For the moment we shall use the default \'reliablity\' = medium.'.format(checker_name))
|
'For the moment we shall use the default \'reliablity\' = medium.'.format(
|
||||||
|
checker_name))
|
||||||
return 'medium'
|
return 'medium'
|
||||||
|
|
||||||
return checkers[checker_name]['reliability']
|
return checkers[checker_name]['reliability']
|
||||||
|
@ -2033,7 +2053,9 @@ class StaticAnalysis(MachCommandBase):
|
||||||
'line': issue['mainEventLineNumber'],
|
'line': issue['mainEventLineNumber'],
|
||||||
'flag': issue['checkerName'],
|
'flag': issue['checkerName'],
|
||||||
'message': event_path['eventDescription'],
|
'message': event_path['eventDescription'],
|
||||||
'reliability': self.get_reliability_index_for_cov_checker(issue['checkerName']),
|
'reliability': self.get_reliability_index_for_cov_checker(
|
||||||
|
issue['checkerName']
|
||||||
|
),
|
||||||
'extra': {
|
'extra': {
|
||||||
'category': issue['checkerProperties']['category'],
|
'category': issue['checkerProperties']['category'],
|
||||||
'stateOnServer': issue['stateOnServer'],
|
'stateOnServer': issue['stateOnServer'],
|
||||||
|
@ -2043,10 +2065,11 @@ class StaticAnalysis(MachCommandBase):
|
||||||
|
|
||||||
# Embed all events into extra message
|
# Embed all events into extra message
|
||||||
for event in issue['events']:
|
for event in issue['events']:
|
||||||
dict_issue['extra']['stack'].append({'file_path': event['strippedFilePathname'],
|
dict_issue['extra']['stack'].append(
|
||||||
'line_number': event['lineNumber'],
|
{'file_path': event['strippedFilePathname'],
|
||||||
'path_type': event['eventTag'],
|
'line_number': event['lineNumber'],
|
||||||
'description': event['eventDescription']})
|
'path_type': event['eventTag'],
|
||||||
|
'description': event['eventDescription']})
|
||||||
|
|
||||||
return dict_issue
|
return dict_issue
|
||||||
|
|
||||||
|
@ -2054,8 +2077,12 @@ class StaticAnalysis(MachCommandBase):
|
||||||
path = self.cov_is_file_in_source(issue['strippedMainEventFilePathname'], source)
|
path = self.cov_is_file_in_source(issue['strippedMainEventFilePathname'], source)
|
||||||
if path is None:
|
if path is None:
|
||||||
# Since we skip a result we should log it
|
# Since we skip a result we should log it
|
||||||
self.log(logging.INFO, 'static-analysis', {}, 'Skipping CID: {0} from file: {1} since it\'s not related with the current patch.'.format(
|
self.log(logging.INFO, 'static-analysis', {},
|
||||||
issue['stateOnServer']['cid'], issue['strippedMainEventFilePathname']))
|
'Skipping CID: {0} from file: {1} since it\'s not related '
|
||||||
|
'with the current patch.'.format(
|
||||||
|
issue['stateOnServer']['cid'],
|
||||||
|
issue['strippedMainEventFilePathname'])
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
if path in files_list:
|
if path in files_list:
|
||||||
files_list[path]['warnings'].append(build_element(issue))
|
files_list[path]['warnings'].append(build_element(issue))
|
||||||
|
@ -2340,7 +2367,7 @@ class StaticAnalysis(MachCommandBase):
|
||||||
if item['publish']:
|
if item['publish']:
|
||||||
checkers.append(item['name'])
|
checkers.append(item['name'])
|
||||||
tp_path = mozpath.join(self.topsrcdir, config['third_party'])
|
tp_path = mozpath.join(self.topsrcdir, config['third_party'])
|
||||||
except Exception as e:
|
except Exception:
|
||||||
print('Looks like config.yaml is not valid, so we are unable '
|
print('Looks like config.yaml is not valid, so we are unable '
|
||||||
'to determine default checkers, and which folder to '
|
'to determine default checkers, and which folder to '
|
||||||
'exclude, using defaults provided by infer')
|
'exclude, using defaults provided by infer')
|
||||||
|
@ -2406,7 +2433,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
return True
|
return True
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
self.log(logging.ERROR, 'static-analysis', {},
|
self.log(logging.ERROR, 'static-analysis', {},
|
||||||
"Error determining the version clang-tidy/format binary, please see the attached exception: \n{}".format(e.output))
|
"Error determining the version clang-tidy/format binary, please see the "
|
||||||
|
"attached exception: \n{}".format(e.output))
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -2550,8 +2578,11 @@ class StaticAnalysis(MachCommandBase):
|
||||||
platform, _ = self.platform
|
platform, _ = self.platform
|
||||||
|
|
||||||
if platform not in self._clang_tidy_config['platforms']:
|
if platform not in self._clang_tidy_config['platforms']:
|
||||||
self.log(logging.ERROR, 'static-analysis', {},
|
self.log(
|
||||||
"RUNNING: clang-tidy autotest for platform {} not supported.".format(platform))
|
logging.ERROR, 'static-analysis', {},
|
||||||
|
"RUNNING: clang-tidy autotest for platform {} not supported.".format(
|
||||||
|
platform)
|
||||||
|
)
|
||||||
return self.TOOLS_UNSUPORTED_PLATFORM
|
return self.TOOLS_UNSUPORTED_PLATFORM
|
||||||
|
|
||||||
import concurrent.futures
|
import concurrent.futures
|
||||||
|
@ -2581,7 +2612,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
# 1. Checker attribute 'publish' is False.
|
# 1. Checker attribute 'publish' is False.
|
||||||
not_published = not bool(item.get('publish', True))
|
not_published = not bool(item.get('publish', True))
|
||||||
# 2. Checker has restricted-platforms and current platform is not of them.
|
# 2. Checker has restricted-platforms and current platform is not of them.
|
||||||
ignored_platform = 'restricted-platforms' in item and platform not in item['restricted-platforms']
|
ignored_platform = ('restricted-platforms' in item and
|
||||||
|
platform not in item['restricted-platforms'])
|
||||||
# 3. Checker name is mozilla-* or -*.
|
# 3. Checker name is mozilla-* or -*.
|
||||||
ignored_checker = item['name'] in ['mozilla-*', '-*']
|
ignored_checker = item['name'] in ['mozilla-*', '-*']
|
||||||
# 4. List checker_names is passed and the current checker is not part of the
|
# 4. List checker_names is passed and the current checker is not part of the
|
||||||
|
@ -2619,20 +2651,29 @@ class StaticAnalysis(MachCommandBase):
|
||||||
|
|
||||||
message_to_log = ''
|
message_to_log = ''
|
||||||
if checker_error == self.TOOLS_CHECKER_NOT_FOUND:
|
if checker_error == self.TOOLS_CHECKER_NOT_FOUND:
|
||||||
message_to_log = "\tChecker {} not present in this clang-tidy version.".format(
|
message_to_log = \
|
||||||
checker_name)
|
"\tChecker {} not present in this clang-tidy version.".format(
|
||||||
|
checker_name)
|
||||||
elif checker_error == self.TOOLS_CHECKER_NO_TEST_FILE:
|
elif checker_error == self.TOOLS_CHECKER_NO_TEST_FILE:
|
||||||
message_to_log = "\tChecker {0} does not have a test file - {0}.cpp".format(
|
message_to_log = \
|
||||||
checker_name)
|
"\tChecker {0} does not have a test file - {0}.cpp".format(
|
||||||
|
checker_name)
|
||||||
elif checker_error == self.TOOLS_CHECKER_RETURNED_NO_ISSUES:
|
elif checker_error == self.TOOLS_CHECKER_RETURNED_NO_ISSUES:
|
||||||
message_to_log = "\tChecker {0} did not find any issues in its test file, clang-tidy output for the run is:\n{1}".format(
|
message_to_log = (
|
||||||
checker_name, info1)
|
"\tChecker {0} did not find any issues in its test file, "
|
||||||
|
"clang-tidy output for the run is:\n{1}"
|
||||||
|
).format(checker_name, info1)
|
||||||
elif checker_error == self.TOOLS_CHECKER_RESULT_FILE_NOT_FOUND:
|
elif checker_error == self.TOOLS_CHECKER_RESULT_FILE_NOT_FOUND:
|
||||||
message_to_log = "\tChecker {0} does not have a result file - {0}.json".format(
|
message_to_log = \
|
||||||
checker_name)
|
"\tChecker {0} does not have a result file - {0}.json".format(
|
||||||
|
checker_name)
|
||||||
elif checker_error == self.TOOLS_CHECKER_DIFF_FAILED:
|
elif checker_error == self.TOOLS_CHECKER_DIFF_FAILED:
|
||||||
message_to_log = "\tChecker {0}\nExpected: {1}\nGot: {2}\nclang-tidy output for the run is:\n{3}".format(
|
message_to_log = (
|
||||||
checker_name, info1, info2, info3)
|
"\tChecker {0}\nExpected: {1}\n"
|
||||||
|
"Got: {2}\n"
|
||||||
|
"clang-tidy output for the run is:\n"
|
||||||
|
"{3}"
|
||||||
|
).format(checker_name, info1, info2, info3)
|
||||||
|
|
||||||
print('\n'+message_to_log)
|
print('\n'+message_to_log)
|
||||||
|
|
||||||
|
@ -2674,8 +2715,11 @@ class StaticAnalysis(MachCommandBase):
|
||||||
return self.TOOLS_CHECKER_LIST_EMPTY
|
return self.TOOLS_CHECKER_LIST_EMPTY
|
||||||
|
|
||||||
issues, clang_output = self._run_analysis(
|
issues, clang_output = self._run_analysis(
|
||||||
checks='-*,' + ",".join(items), header_filter='',
|
checks='-*,' + ",".join(items),
|
||||||
sources=[mozpath.join(self._clang_tidy_base_path, "test", checker) + '.cpp' for checker in items], print_out=True)
|
header_filter='',
|
||||||
|
sources=[mozpath.join(self._clang_tidy_base_path, "test", checker) + '.cpp'
|
||||||
|
for checker in items],
|
||||||
|
print_out=True)
|
||||||
|
|
||||||
if issues is None:
|
if issues is None:
|
||||||
return self.TOOLS_CHECKER_FAILED_FILE
|
return self.TOOLS_CHECKER_FAILED_FILE
|
||||||
|
@ -2705,7 +2749,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
print('\tChecker {0} expect following results: \n\t\t{1}'.format(
|
print('\tChecker {0} expect following results: \n\t\t{1}'.format(
|
||||||
failed_check, baseline_issue))
|
failed_check, baseline_issue))
|
||||||
|
|
||||||
print('This is the output generated by clang-tidy for the bulk build:\n{}'.format(clang_output))
|
print('This is the output generated by clang-tidy for the bulk build:\n{}'.format(
|
||||||
|
clang_output))
|
||||||
return self.TOOLS_CHECKER_DIFF_FAILED
|
return self.TOOLS_CHECKER_DIFF_FAILED
|
||||||
|
|
||||||
return self.TOOLS_SUCCESS
|
return self.TOOLS_SUCCESS
|
||||||
|
@ -2866,9 +2911,11 @@ class StaticAnalysis(MachCommandBase):
|
||||||
@CommandArgument('--force', action='store_true',
|
@CommandArgument('--force', action='store_true',
|
||||||
help='Force re-install even though the tool exists in mozbuild.',
|
help='Force re-install even though the tool exists in mozbuild.',
|
||||||
default=False)
|
default=False)
|
||||||
@CommandArgument('--minimal-install', action='store_true', help='Download only clang based tool.',
|
@CommandArgument('--minimal-install', action='store_true',
|
||||||
|
help='Download only clang based tool.',
|
||||||
default=False)
|
default=False)
|
||||||
def install(self, source=None, skip_cache=False, force=False, minimal_install=False, verbose=False):
|
def install(self, source=None, skip_cache=False, force=False, minimal_install=False,
|
||||||
|
verbose=False):
|
||||||
self._set_log_level(verbose)
|
self._set_log_level(verbose)
|
||||||
rc = self._get_clang_tools(force=force, skip_cache=skip_cache,
|
rc = self._get_clang_tools(force=force, skip_cache=skip_cache,
|
||||||
source=source, verbose=verbose)
|
source=source, verbose=verbose)
|
||||||
|
@ -2932,12 +2979,14 @@ class StaticAnalysis(MachCommandBase):
|
||||||
@CommandArgument('--output', '-o', default=None, dest='output_path',
|
@CommandArgument('--output', '-o', default=None, dest='output_path',
|
||||||
help='Specify a file handle to write clang-format raw output instead of '
|
help='Specify a file handle to write clang-format raw output instead of '
|
||||||
'applying changes. This can be stdout or a file path.')
|
'applying changes. This can be stdout or a file path.')
|
||||||
@CommandArgument('--format', '-f', choices=('diff', 'json'), default='diff', dest='output_format',
|
@CommandArgument('--format', '-f', choices=('diff', 'json'), default='diff',
|
||||||
|
dest='output_format',
|
||||||
help='Specify the output format used: diff is the raw patch provided by '
|
help='Specify the output format used: diff is the raw patch provided by '
|
||||||
'clang-format, json is a list of atomic changes to process.')
|
'clang-format, json is a list of atomic changes to process.')
|
||||||
@CommandArgument('--outgoing', default=False, action='store_true',
|
@CommandArgument('--outgoing', default=False, action='store_true',
|
||||||
help='Run clang-format on outgoing files from mercurial repository')
|
help='Run clang-format on outgoing files from mercurial repository')
|
||||||
def clang_format(self, assume_filename, path, commit, output_path=None, output_format='diff', verbose=False, outgoing=False):
|
def clang_format(self, assume_filename, path, commit, output_path=None, output_format='diff',
|
||||||
|
verbose=False, outgoing=False):
|
||||||
# Run clang-format or clang-format-diff on the local changes
|
# Run clang-format or clang-format-diff on the local changes
|
||||||
# or files/directories
|
# or files/directories
|
||||||
if path is None and outgoing:
|
if path is None and outgoing:
|
||||||
|
@ -2978,7 +3027,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
return rc
|
return rc
|
||||||
|
|
||||||
if self._is_version_eligible() is False:
|
if self._is_version_eligible() is False:
|
||||||
self.log(logging.ERROR, 'static-analysis', {}, "You're using an old version of clang-format binary."
|
self.log(logging.ERROR, 'static-analysis', {},
|
||||||
|
"You're using an old version of clang-format binary."
|
||||||
" Please update to a more recent one by running: './mach bootstrap'")
|
" Please update to a more recent one by running: './mach bootstrap'")
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
|
@ -2987,7 +3037,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
self._clang_format_path, commit, output)
|
self._clang_format_path, commit, output)
|
||||||
|
|
||||||
if assume_filename:
|
if assume_filename:
|
||||||
return self._run_clang_format_in_console(self._clang_format_path, path, assume_filename)
|
return self._run_clang_format_in_console(self._clang_format_path,
|
||||||
|
path, assume_filename)
|
||||||
|
|
||||||
return self._run_clang_format_path(self._clang_format_path, path, output, output_format)
|
return self._run_clang_format_path(self._clang_format_path, path, output, output_format)
|
||||||
|
|
||||||
|
@ -3010,7 +3061,7 @@ class StaticAnalysis(MachCommandBase):
|
||||||
}
|
}
|
||||||
|
|
||||||
# Verify if this checker actually exists
|
# Verify if this checker actually exists
|
||||||
if not check in self._clang_tidy_checks:
|
if check not in self._clang_tidy_checks:
|
||||||
checker_error['checker-error'] = self.TOOLS_CHECKER_NOT_FOUND
|
checker_error['checker-error'] = self.TOOLS_CHECKER_NOT_FOUND
|
||||||
checkers_results.append(checker_error)
|
checkers_results.append(checker_error)
|
||||||
return self.TOOLS_CHECKER_NOT_FOUND
|
return self.TOOLS_CHECKER_NOT_FOUND
|
||||||
|
@ -3154,7 +3205,7 @@ class StaticAnalysis(MachCommandBase):
|
||||||
ran_configure = True
|
ran_configure = True
|
||||||
try:
|
try:
|
||||||
config = self.config_environment
|
config = self.config_environment
|
||||||
except Exception as e:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
return (0, config, ran_configure)
|
return (0, config, ran_configure)
|
||||||
|
@ -3216,10 +3267,10 @@ class StaticAnalysis(MachCommandBase):
|
||||||
self._clang_apply_replacements = mozpath.join(
|
self._clang_apply_replacements = mozpath.join(
|
||||||
self._clang_tools_path, "clang-tidy", "bin",
|
self._clang_tools_path, "clang-tidy", "bin",
|
||||||
"clang-apply-replacements" + config.substs.get('BIN_SUFFIX', ''))
|
"clang-apply-replacements" + config.substs.get('BIN_SUFFIX', ''))
|
||||||
self._run_clang_tidy_path = mozpath.join(self._clang_tools_path, "clang-tidy", "share", "clang",
|
self._run_clang_tidy_path = mozpath.join(self._clang_tools_path, "clang-tidy",
|
||||||
"run-clang-tidy.py")
|
"share", "clang", "run-clang-tidy.py")
|
||||||
self._clang_format_diff = mozpath.join(self._clang_tools_path, "clang-tidy", "share", "clang",
|
self._clang_format_diff = mozpath.join(self._clang_tools_path, "clang-tidy",
|
||||||
"clang-format-diff.py")
|
"share", "clang", "clang-format-diff.py")
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
def _do_clang_tools_exist(self):
|
def _do_clang_tools_exist(self):
|
||||||
|
@ -3530,7 +3581,8 @@ class StaticAnalysis(MachCommandBase):
|
||||||
# here, we expect changes. if we are here, this means that
|
# here, we expect changes. if we are here, this means that
|
||||||
# there is a diff to show
|
# there is a diff to show
|
||||||
if e.output:
|
if e.output:
|
||||||
# Replace the temp path by the path relative to the repository to display a valid patch
|
# Replace the temp path by the path relative to the repository to
|
||||||
|
# display a valid patch
|
||||||
relative_path = os.path.relpath(original_path, self.topsrcdir)
|
relative_path = os.path.relpath(original_path, self.topsrcdir)
|
||||||
patch = e.output.replace(target_file, relative_path)
|
patch = e.output.replace(target_file, relative_path)
|
||||||
patch = patch.replace(original_path, relative_path)
|
patch = patch.replace(original_path, relative_path)
|
||||||
|
@ -3643,11 +3695,13 @@ class Vendor(MachCommandBase):
|
||||||
vendor_command.vendor(**kwargs)
|
vendor_command.vendor(**kwargs)
|
||||||
|
|
||||||
@SubCommand('vendor', 'aom',
|
@SubCommand('vendor', 'aom',
|
||||||
description='Vendor av1 video codec reference implementation into the source repository.')
|
description='Vendor av1 video codec reference implementation into the '
|
||||||
|
'source repository.')
|
||||||
@CommandArgument('-r', '--revision',
|
@CommandArgument('-r', '--revision',
|
||||||
help='Repository tag or commit to update to.')
|
help='Repository tag or commit to update to.')
|
||||||
@CommandArgument('--repo',
|
@CommandArgument('--repo',
|
||||||
help='Repository url to pull a snapshot from. Supports github and googlesource.')
|
help='Repository url to pull a snapshot from. '
|
||||||
|
'Supports github and googlesource.')
|
||||||
@CommandArgument('--ignore-modified', action='store_true',
|
@CommandArgument('--ignore-modified', action='store_true',
|
||||||
help='Ignore modified files in current checkout',
|
help='Ignore modified files in current checkout',
|
||||||
default=False)
|
default=False)
|
||||||
|
@ -3655,6 +3709,7 @@ class Vendor(MachCommandBase):
|
||||||
from mozbuild.vendor_aom import VendorAOM
|
from mozbuild.vendor_aom import VendorAOM
|
||||||
vendor_command = self._spawn(VendorAOM)
|
vendor_command = self._spawn(VendorAOM)
|
||||||
vendor_command.vendor(**kwargs)
|
vendor_command.vendor(**kwargs)
|
||||||
|
|
||||||
@SubCommand('vendor', 'dav1d',
|
@SubCommand('vendor', 'dav1d',
|
||||||
description='Vendor dav1d implementation of AV1 into the source repository.')
|
description='Vendor dav1d implementation of AV1 into the source repository.')
|
||||||
@CommandArgument('-r', '--revision',
|
@CommandArgument('-r', '--revision',
|
||||||
|
@ -3674,7 +3729,11 @@ class Vendor(MachCommandBase):
|
||||||
@CommandArgument('--with-windows-wheel', action='store_true',
|
@CommandArgument('--with-windows-wheel', action='store_true',
|
||||||
help='Vendor a wheel for Windows along with the source package',
|
help='Vendor a wheel for Windows along with the source package',
|
||||||
default=False)
|
default=False)
|
||||||
@CommandArgument('packages', default=None, nargs='*', help='Packages to vendor. If omitted, packages and their dependencies defined in Pipfile.lock will be vendored. If Pipfile has been modified, then Pipfile.lock will be regenerated. Note that transient dependencies may be updated when running this command.')
|
@CommandArgument('packages', default=None, nargs='*',
|
||||||
|
help='Packages to vendor. If omitted, packages and their dependencies '
|
||||||
|
'defined in Pipfile.lock will be vendored. If Pipfile has been modified, '
|
||||||
|
'then Pipfile.lock will be regenerated. Note that transient dependencies '
|
||||||
|
'may be updated when running this command.')
|
||||||
def vendor_python(self, **kwargs):
|
def vendor_python(self, **kwargs):
|
||||||
from mozbuild.vendor_python import VendorPython
|
from mozbuild.vendor_python import VendorPython
|
||||||
vendor_command = self._spawn(VendorPython)
|
vendor_command = self._spawn(VendorPython)
|
||||||
|
@ -3698,16 +3757,19 @@ class WebRTCGTestCommands(GTestCommands):
|
||||||
@Command('webrtc-gtest', category='testing',
|
@Command('webrtc-gtest', category='testing',
|
||||||
description='Run WebRTC.org GTest unit tests.')
|
description='Run WebRTC.org GTest unit tests.')
|
||||||
@CommandArgument('gtest_filter', default=b"*", nargs='?', metavar='gtest_filter',
|
@CommandArgument('gtest_filter', default=b"*", nargs='?', metavar='gtest_filter',
|
||||||
help="test_filter is a ':'-separated list of wildcard patterns (called the positive patterns),"
|
help="test_filter is a ':'-separated list of wildcard patterns "
|
||||||
"optionally followed by a '-' and another ':'-separated pattern list (called the negative patterns).")
|
"(called the positive patterns), optionally followed by a '-' and "
|
||||||
|
"another ':'-separated pattern list (called the negative patterns).")
|
||||||
@CommandArgumentGroup('debugging')
|
@CommandArgumentGroup('debugging')
|
||||||
@CommandArgument('--debug', action='store_true', group='debugging',
|
@CommandArgument('--debug', action='store_true', group='debugging',
|
||||||
help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
|
help='Enable the debugger. Not specifying a --debugger option will '
|
||||||
|
'result in the default debugger being used.')
|
||||||
@CommandArgument('--debugger', default=None, type=str, group='debugging',
|
@CommandArgument('--debugger', default=None, type=str, group='debugging',
|
||||||
help='Name of debugger to use.')
|
help='Name of debugger to use.')
|
||||||
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
|
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
|
||||||
group='debugging',
|
group='debugging',
|
||||||
help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')
|
help='Command-line arguments to pass to the debugger itself; '
|
||||||
|
'split as the Bourne shell would.')
|
||||||
def gtest(self, gtest_filter, debug, debugger,
|
def gtest(self, gtest_filter, debug, debugger,
|
||||||
debugger_args):
|
debugger_args):
|
||||||
app_path = self.get_binary_path('webrtc-gtest')
|
app_path = self.get_binary_path('webrtc-gtest')
|
||||||
|
@ -3878,7 +3940,8 @@ class Analyze(MachCommandBase):
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
@SubCommand('analyze', 'all',
|
@SubCommand('analyze', 'all',
|
||||||
description='Get a report of files changed within the last n days and their corresponding build cost.')
|
description='Get a report of files changed within the last n days and '
|
||||||
|
'their corresponding build cost.')
|
||||||
@CommandArgument('--days', '-d', type=int, default=14,
|
@CommandArgument('--days', '-d', type=int, default=14,
|
||||||
help='Number of days to include in the report.')
|
help='Number of days to include in the report.')
|
||||||
@CommandArgument('--format', default='pretty',
|
@CommandArgument('--format', default='pretty',
|
||||||
|
|
|
@ -129,7 +129,7 @@ class Rule(object):
|
||||||
|
|
||||||
def dependencies(self):
|
def dependencies(self):
|
||||||
'''Return an iterator on the rule dependencies.'''
|
'''Return an iterator on the rule dependencies.'''
|
||||||
return iter(d for d in self._dependencies if not d in self._targets)
|
return iter(d for d in self._dependencies if d not in self._targets)
|
||||||
|
|
||||||
def commands(self):
|
def commands(self):
|
||||||
'''Return an iterator on the rule commands.'''
|
'''Return an iterator on the rule commands.'''
|
||||||
|
|
|
@ -11,7 +11,6 @@ import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from collections import defaultdict
|
|
||||||
from mozpack import path as mozpath
|
from mozpack import path as mozpath
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -130,7 +130,7 @@ def build_dict(config, env=os.environ):
|
||||||
d['platform_guess'] = guess_platform()
|
d['platform_guess'] = guess_platform()
|
||||||
d['buildtype_guess'] = guess_buildtype()
|
d['buildtype_guess'] = guess_buildtype()
|
||||||
|
|
||||||
if 'buildapp' in d and d['buildapp'] == 'mobile/android' and 'MOZ_ANDROID_MIN_SDK_VERSION' in substs:
|
if d.get('buildapp', '') == 'mobile/android' and 'MOZ_ANDROID_MIN_SDK_VERSION' in substs:
|
||||||
d['android_min_sdk'] = substs['MOZ_ANDROID_MIN_SDK_VERSION']
|
d['android_min_sdk'] = substs['MOZ_ANDROID_MIN_SDK_VERSION']
|
||||||
|
|
||||||
return d
|
return d
|
||||||
|
|
|
@ -497,7 +497,7 @@ class Preprocessor:
|
||||||
None)
|
None)
|
||||||
try:
|
try:
|
||||||
from makeutil import Makefile
|
from makeutil import Makefile
|
||||||
except:
|
except Exception:
|
||||||
raise Preprocessor.Error(self, "--depend requires the "
|
raise Preprocessor.Error(self, "--depend requires the "
|
||||||
"mozbuild.makeutil module", None)
|
"mozbuild.makeutil module", None)
|
||||||
depfile = get_output_file(options.depend)
|
depfile = get_output_file(options.depend)
|
||||||
|
@ -598,7 +598,7 @@ class Preprocessor:
|
||||||
val = self.applyFilters(m.group('value'))
|
val = self.applyFilters(m.group('value'))
|
||||||
try:
|
try:
|
||||||
val = int(val)
|
val = int(val)
|
||||||
except:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
self.context[m.group('name')] = val
|
self.context[m.group('name')] = val
|
||||||
|
|
||||||
|
@ -709,7 +709,6 @@ class Preprocessor:
|
||||||
|
|
||||||
def do_expand(self, args):
|
def do_expand(self, args):
|
||||||
lst = re.split('__(\w+)__', args, re.U)
|
lst = re.split('__(\w+)__', args, re.U)
|
||||||
do_replace = False
|
|
||||||
|
|
||||||
def vsubst(v):
|
def vsubst(v):
|
||||||
if v in self.context:
|
if v in self.context:
|
||||||
|
@ -805,7 +804,7 @@ class Preprocessor:
|
||||||
args = open(args, 'rU')
|
args = open(args, 'rU')
|
||||||
except Preprocessor.Error:
|
except Preprocessor.Error:
|
||||||
raise
|
raise
|
||||||
except:
|
except Exception:
|
||||||
raise Preprocessor.Error(self, 'FILE_NOT_FOUND', str(args))
|
raise Preprocessor.Error(self, 'FILE_NOT_FOUND', str(args))
|
||||||
self.checkLineNumbers = bool(re.search('\.(js|jsm|java|webidl)(?:\.in)?$', args.name))
|
self.checkLineNumbers = bool(re.search('\.(js|jsm|java|webidl)(?:\.in)?$', args.name))
|
||||||
oldFile = self.context['FILE']
|
oldFile = self.context['FILE']
|
||||||
|
|
|
@ -11,7 +11,8 @@ from mozbuild.action.exe_7z_archive import archive_exe
|
||||||
from mozbuild.util import ensureParentDir
|
from mozbuild.util import ensureParentDir
|
||||||
|
|
||||||
|
|
||||||
def repackage_installer(topsrcdir, tag, setupexe, package, output, package_name, sfx_stub, use_upx):
|
def repackage_installer(topsrcdir, tag, setupexe, package, output,
|
||||||
|
package_name, sfx_stub, use_upx):
|
||||||
if package and not zipfile.is_zipfile(package):
|
if package and not zipfile.is_zipfile(package):
|
||||||
raise Exception("Package file %s is not a valid .zip file." % package)
|
raise Exception("Package file %s is not a valid .zip file." % package)
|
||||||
if package is not None and package_name is None:
|
if package is not None and package_name is None:
|
||||||
|
|
|
@ -57,7 +57,7 @@ def repackage_msi(topsrcdir, wsx, version, locale, arch, setupexe, candle, light
|
||||||
raise Exception("%s does not exist." % light)
|
raise Exception("%s does not exist." % light)
|
||||||
embeddedVersion = '0.0.0.0'
|
embeddedVersion = '0.0.0.0'
|
||||||
# Version string cannot contain 'a' or 'b' when embedding in msi manifest.
|
# Version string cannot contain 'a' or 'b' when embedding in msi manifest.
|
||||||
if not 'a' in version and not 'b' in version:
|
if 'a' not in version and 'b' not in version:
|
||||||
if version.endswith('esr'):
|
if version.endswith('esr'):
|
||||||
parts = version[:-3].split('.')
|
parts = version[:-3].split('.')
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -5,9 +5,7 @@
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
|
||||||
import os.path
|
import os.path
|
||||||
import time
|
|
||||||
from tempfile import mkdtemp
|
from tempfile import mkdtemp
|
||||||
from shutil import rmtree
|
from shutil import rmtree
|
||||||
import mozunit
|
import mozunit
|
||||||
|
|
|
@ -37,7 +37,7 @@ class TestNode(unittest.TestCase):
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
try:
|
try:
|
||||||
SCRIPT_ALLOWLIST.remove(TEST_SCRIPT)
|
SCRIPT_ALLOWLIST.remove(TEST_SCRIPT)
|
||||||
except:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def test_generate_no_returned_deps(self):
|
def test_generate_no_returned_deps(self):
|
||||||
|
|
|
@ -7,15 +7,8 @@ import os
|
||||||
|
|
||||||
import mozunit
|
import mozunit
|
||||||
|
|
||||||
from unittest import expectedFailure
|
|
||||||
|
|
||||||
from mozpack.copier import (
|
|
||||||
FileCopier,
|
|
||||||
FileRegistry,
|
|
||||||
)
|
|
||||||
from mozpack.manifests import (
|
from mozpack.manifests import (
|
||||||
InstallManifest,
|
InstallManifest,
|
||||||
UnreadableInstallManifest,
|
|
||||||
)
|
)
|
||||||
from mozpack.test.test_files import TestWithTmpDir
|
from mozpack.test.test_files import TestWithTmpDir
|
||||||
|
|
||||||
|
|
|
@ -58,7 +58,7 @@ class TestBuild(unittest.TestCase):
|
||||||
backend(config).consume(definitions)
|
backend(config).consume(definitions)
|
||||||
|
|
||||||
yield config
|
yield config
|
||||||
except:
|
except Exception:
|
||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
if not os.environ.get('MOZ_NO_CLEANUP'):
|
if not os.environ.get('MOZ_NO_CLEANUP'):
|
||||||
|
@ -73,7 +73,7 @@ class TestBuild(unittest.TestCase):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
yield handle_make_line
|
yield handle_make_line
|
||||||
except:
|
except Exception:
|
||||||
print('\n'.join(lines))
|
print('\n'.join(lines))
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
|
@ -3,10 +3,8 @@
|
||||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import posixpath
|
|
||||||
from StringIO import StringIO
|
|
||||||
import unittest
|
import unittest
|
||||||
from mozunit import main, MockedOpen
|
from mozunit import main
|
||||||
|
|
||||||
import mozbuild.backend.configenvironment as ConfigStatus
|
import mozbuild.backend.configenvironment as ConfigStatus
|
||||||
|
|
||||||
|
@ -19,7 +17,7 @@ class ConfigEnvironment(ConfigStatus.ConfigEnvironment):
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
ConfigStatus.ConfigEnvironment.__init__(self, *args, **kwargs)
|
ConfigStatus.ConfigEnvironment.__init__(self, *args, **kwargs)
|
||||||
# Be helpful to unit tests
|
# Be helpful to unit tests
|
||||||
if not 'top_srcdir' in self.substs:
|
if 'top_srcdir' not in self.substs:
|
||||||
if os.path.isabs(self.topsrcdir):
|
if os.path.isabs(self.topsrcdir):
|
||||||
top_srcdir = self.topsrcdir.replace(os.sep, '/')
|
top_srcdir = self.topsrcdir.replace(os.sep, '/')
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -4,10 +4,7 @@
|
||||||
|
|
||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import cPickle as pickle
|
|
||||||
import json
|
|
||||||
import os
|
import os
|
||||||
import unittest
|
|
||||||
|
|
||||||
from mozpack.copier import FileRegistry
|
from mozpack.copier import FileRegistry
|
||||||
from mozpack.manifests import InstallManifest
|
from mozpack.manifests import InstallManifest
|
||||||
|
|
|
@ -21,7 +21,6 @@ from mozbuild.gn_processor import (
|
||||||
GnMozbuildWriterBackend,
|
GnMozbuildWriterBackend,
|
||||||
find_common_attrs,
|
find_common_attrs,
|
||||||
)
|
)
|
||||||
from mozbuild.backend.recursivemake import RecursiveMakeBackend
|
|
||||||
|
|
||||||
from mozbuild.frontend.data import (
|
from mozbuild.frontend.data import (
|
||||||
ComputedFlags,
|
ComputedFlags,
|
||||||
|
|
|
@@ -61,7 +61,7 @@ class TestPartial(unittest.TestCase):
         myconfig = config.copy()
         env.write_vars(myconfig)
         with self.assertRaises(KeyError):
-            x = env.substs['MYSUBST']
+            _ = env.substs['MYSUBST']
         self.assertFalse(os.path.exists(path))

         myconfig['substs']['MYSUBST'] = 'new'
@@ -73,7 +73,7 @@ class TestPartial(unittest.TestCase):
         del myconfig['substs']['MYSUBST']
         env.write_vars(myconfig)
         with self.assertRaises(KeyError):
-            x = env.substs['MYSUBST']
+            _ = env.substs['MYSUBST']
         # Now that the subst is gone, the file still needs to be present so that
         # make can update dependencies correctly. Overwriting the file with
         # 'None' is the same as deleting it as far as the
@@ -107,7 +107,7 @@ class TestPartial(unittest.TestCase):
         self._assert_deps(env, ['defines/MOZ_FOO', 'defines/MOZ_BAR', 'substs/MOZ_SUBST_1'])

         with self.assertRaises(KeyError):
-            x = env.substs['NON_EXISTENT']
+            _ = env.substs['NON_EXISTENT']
         self._assert_deps(env, ['defines/MOZ_FOO', 'defines/MOZ_BAR',
                                 'substs/MOZ_SUBST_1', 'substs/NON_EXISTENT'])
         self.assertEqual(env.substs.get('NON_EXISTENT'), None)
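The three hunks above rename the throwaway variable `x` to `_`: the subscript is evaluated only so `assertRaises` can observe the `KeyError`, and flake8's F841 check flags a local that is assigned but never read. The `_` name is the conventional signal for a deliberately ignored value. A standalone sketch of the same pattern (hypothetical dict, not the test's real config environment):

    import unittest

    class MissingKeyExample(unittest.TestCase):
        def test_missing_key(self):
            substs = {}  # illustrative stand-in for env.substs
            with self.assertRaises(KeyError):
                _ = substs['MYSUBST']  # fetched only to trigger the KeyError

    if __name__ == '__main__':
        unittest.main()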
@@ -5,7 +5,6 @@
 from __future__ import unicode_literals

 import cPickle as pickle
-import json
 import os
 import unittest

@@ -408,7 +407,7 @@ class TestRecursiveMakeBackend(BackendTester):
             'EXTRA_MDDEPEND_FILES += bar.c.pp',
             '$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py' % env.topsrcdir,
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir,
+            '$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir,  # noqa
             '@$(TOUCH) $@',
             '',
             'export:: $(MDDEPDIR)/foo.c.stub',
@@ -418,7 +417,7 @@ class TestRecursiveMakeBackend(BackendTester):
             'EXTRA_MDDEPEND_FILES += foo.c.pp',
             '$(MDDEPDIR)/foo.c.stub: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir),
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir),
+            '$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir),  # noqa
             '@$(TOUCH) $@',
             '',
         ]
@@ -441,7 +440,7 @@ class TestRecursiveMakeBackend(BackendTester):
             'EXTRA_MDDEPEND_FILES += bar.c.pp',
             '$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py FORCE' % env.topsrcdir,
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir,
+            '$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir,  # noqa
             '@$(TOUCH) $@',
             '',
             'export:: $(MDDEPDIR)/foo.c.stub',
@@ -451,7 +450,7 @@ class TestRecursiveMakeBackend(BackendTester):
             'EXTRA_MDDEPEND_FILES += foo.c.pp',
             '$(MDDEPDIR)/foo.c.stub: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir),
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir),
+            '$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir),  # noqa
             '@$(TOUCH) $@',
             '',
         ]
@@ -472,9 +471,9 @@ class TestRecursiveMakeBackend(BackendTester):
             'GARBAGE += foo.xyz',
             'GARBAGE += $(MDDEPDIR)/foo.xyz.stub',
             'EXTRA_MDDEPEND_FILES += foo.xyz.pp',
-            '$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,
+            '$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,  # noqa
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir,
+            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir,  # noqa
             '@$(TOUCH) $@',
             '',
             'LOCALIZED_FILES_0_FILES += foo.xyz',
@@ -499,9 +498,9 @@ class TestRecursiveMakeBackend(BackendTester):
             'GARBAGE += foo.xyz',
             'GARBAGE += $(MDDEPDIR)/foo.xyz.stub',
             'EXTRA_MDDEPEND_FILES += foo.xyz.pp',
-            '$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,
+            '$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,  # noqa
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir,
+            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir,  # noqa
             '@$(TOUCH) $@',
             '',
             'libs:: $(MDDEPDIR)/abc.xyz.stub',
@@ -509,9 +508,9 @@ class TestRecursiveMakeBackend(BackendTester):
             'GARBAGE += abc.xyz',
             'GARBAGE += $(MDDEPDIR)/abc.xyz.stub',
             'EXTRA_MDDEPEND_FILES += abc.xyz.pp',
-            '$(MDDEPDIR)/abc.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input FORCE' % env.topsrcdir,
+            '$(MDDEPDIR)/abc.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input FORCE' % env.topsrcdir,  # noqa
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir,
+            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir,  # noqa
             '@$(TOUCH) $@',
             '',
         ]
@@ -533,9 +532,9 @@ class TestRecursiveMakeBackend(BackendTester):
             'GARBAGE += foo$(AB_CD).xyz',
             'GARBAGE += $(MDDEPDIR)/foo$(AB_CD).xyz.stub',
             'EXTRA_MDDEPEND_FILES += foo$(AB_CD).xyz.pp',
-            '$(MDDEPDIR)/foo$(AB_CD).xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,
+            '$(MDDEPDIR)/foo$(AB_CD).xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,  # noqa
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir,
+            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir,  # noqa
             '@$(TOUCH) $@',
             '',
             'include $(topsrcdir)/config/AB_rCD.mk',
@@ -543,18 +542,18 @@ class TestRecursiveMakeBackend(BackendTester):
             'GARBAGE += bar$(AB_rCD).xyz',
             'GARBAGE += $(MDDEPDIR)/bar$(AB_rCD).xyz.stub',
             'EXTRA_MDDEPEND_FILES += bar$(AB_rCD).xyz.pp',
-            '$(MDDEPDIR)/bar$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,
+            '$(MDDEPDIR)/bar$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,  # noqa
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)' % env.topsrcdir,
+            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)' % env.topsrcdir,  # noqa
             '@$(TOUCH) $@',
             '',
             'zot$(AB_rCD).xyz: $(MDDEPDIR)/zot$(AB_rCD).xyz.stub ;',
             'GARBAGE += zot$(AB_rCD).xyz',
             'GARBAGE += $(MDDEPDIR)/zot$(AB_rCD).xyz.stub',
             'EXTRA_MDDEPEND_FILES += zot$(AB_rCD).xyz.pp',
-            '$(MDDEPDIR)/zot$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,
+            '$(MDDEPDIR)/zot$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir,  # noqa
             '$(REPORT_BUILD)',
-            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)' % env.topsrcdir,
+            '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)' % env.topsrcdir,  # noqa
             '@$(TOUCH) $@',
             '',
         ]
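Throughout the hunks above, the long expected-Makefile strings get a trailing `# noqa` instead of being wrapped: splitting the string literals would change the exact lines the test compares against, so the patch suppresses the line-length check (E501) line by line. A minimal sketch of the mechanism (the URL is an arbitrary example):

    # '# noqa' silences every flake8 check on that one line;
    # '# noqa: E501' restricts the suppression to the line-length check.
    DOWNLOAD_URL = 'https://example.com/some/extremely/long/path/segment/that/would/otherwise/exceed/the/configured/line/length'  # noqa: E501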
@@ -757,7 +756,8 @@ class TestRecursiveMakeBackend(BackendTester):
         self.assertEqual(m, m2)

     def test_ipdl_sources(self):
-        """Test that PREPROCESSED_IPDL_SOURCES and IPDL_SOURCES are written to ipdlsrcs.mk correctly."""
+        """Test that PREPROCESSED_IPDL_SOURCES and IPDL_SOURCES are written to
+        ipdlsrcs.mk correctly."""
         env = self._get_environment('ipdl_sources')

         # Make substs writable so we can set the value of IPDL_ROOT to reflect
@@ -774,8 +774,7 @@ class TestRecursiveMakeBackend(BackendTester):
         topsrcdir = env.topsrcdir.replace(os.sep, '/')

         expected = [
-            "ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" % tuple([
-                topsrcdir] * 4),
+            "ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" % tuple([topsrcdir] * 4),  # noqa
             "CPPSRCS := UnifiedProtocols0.cpp",
             "IPDLDIRS := %s %s/bar %s/foo" % (env.topobjdir, topsrcdir, topsrcdir),
         ]
@@ -820,8 +819,6 @@ class TestRecursiveMakeBackend(BackendTester):
         backend_path = mozpath.join(env.topobjdir, 'backend.mk')
         lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]

-        topobjdir = env.topobjdir.replace('\\', '/')
-
         expected = [
             'LOCAL_INCLUDES += -I$(CURDIR)/bar/baz',
             'LOCAL_INCLUDES += -I$(CURDIR)/foo',
@@ -840,7 +837,7 @@ class TestRecursiveMakeBackend(BackendTester):
                  if not l.startswith('COMPUTED_')]

         expected = [
-            'RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libtest_library.a' % env.topobjdir,
+            'RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libtest_library.a' % env.topobjdir,  # noqa
             'CARGO_FILE := $(srcdir)/Cargo.toml',
             'CARGO_TARGET_DIR := %s' % env.topobjdir,
         ]
@@ -857,7 +854,7 @@ class TestRecursiveMakeBackend(BackendTester):
                  if not l.startswith('COMPUTED_')]

         expected = [
-            'HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a' % env.topobjdir,
+            'HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a' % env.topobjdir,  # noqa
             'CARGO_FILE := $(srcdir)/Cargo.toml',
             'CARGO_TARGET_DIR := %s' % env.topobjdir,
         ]
@@ -874,7 +871,7 @@ class TestRecursiveMakeBackend(BackendTester):
                  if not l.startswith('COMPUTED_')]

         expected = [
-            'HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a' % env.topobjdir,
+            'HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a' % env.topobjdir,  # noqa
             'CARGO_FILE := $(srcdir)/Cargo.toml',
             'CARGO_TARGET_DIR := %s' % env.topobjdir,
             'HOST_RUST_LIBRARY_FEATURES := musthave cantlivewithout',
@@ -892,7 +889,7 @@ class TestRecursiveMakeBackend(BackendTester):
                  if not l.startswith('COMPUTED_')]

         expected = [
-            'RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libfeature_library.a' % env.topobjdir,
+            'RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libfeature_library.a' % env.topobjdir,  # noqa
             'CARGO_FILE := $(srcdir)/Cargo.toml',
             'CARGO_TARGET_DIR := %s' % env.topobjdir,
             'RUST_LIBRARY_FEATURES := musthave cantlivewithout',
@@ -929,7 +926,7 @@ class TestRecursiveMakeBackend(BackendTester):
         """Test that FINAL_TARGET is written to backend.mk correctly."""
         env = self._consume('final_target', RecursiveMakeBackend)

-        final_target_rule = "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)"
+        final_target_rule = "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)"  # noqa
         expected = dict()
         expected[env.topobjdir] = []
         expected[mozpath.join(env.topobjdir, 'both')] = [
@@ -3,7 +3,6 @@
 # You can obtain one at http://mozilla.org/MPL/2.0/.

 import os
-import subprocess
 import unittest
 import shutil
 from StringIO import StringIO
@@ -287,7 +286,8 @@ class TestUrlFinder(unittest.TestCase):
                 ],
             },
             {
-                'chrome://global/content/netError.xhtml': 'chrome://browser/content/aboutNetError.xhtml',
+                'chrome://global/content/netError.xhtml':
+                    'chrome://browser/content/aboutNetError.xhtml',
             },
             {
                 'dist/bin/components/MainProcessSingleton.js': [
@@ -345,9 +345,6 @@ class TestUrlFinder(unittest.TestCase):
             self.assertEqual(url_finder.rewrite_url(path)[0], expected)

     def test_wrong_scheme_paths(self):
-        app_name = buildconfig.substs.get('MOZ_APP_NAME')
-        omnijar_name = buildconfig.substs.get('OMNIJAR_NAME')
-
         paths = [
             'http://www.mozilla.org/aFile.js',
             'https://www.mozilla.org/aFile.js',
@@ -376,7 +373,7 @@ class TestUrlFinder(unittest.TestCase):
             # Path which ends with > Function
             ('resource://gre/modules/osfile/osfile_async_worker.js line 3 > Function', None),
             # Path which contains "->"
-            ('resource://gre/modules/addons/XPIProvider.jsm -> resource://gre/modules/osfile/osfile_async_worker.js',
+            ('resource://gre/modules/addons/XPIProvider.jsm -> resource://gre/modules/osfile/osfile_async_worker.js',  # noqa
              ('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
             # Path with pp_info
             ('resource://gre/modules/AppConstants.jsm', ('toolkit/modules/AppConstants.jsm', {
@@ -6,7 +6,6 @@ from __future__ import absolute_import, print_function, unicode_literals

 import os
 import unittest
-from StringIO import StringIO
 from mozunit import main
 from buildconfig import (
     topobjdir,
@@ -474,7 +474,8 @@ class TestChecksConfigure(unittest.TestCase):
         self.assertEqual(status, 1)
         self.assertEqual(config, {})
         self.assertEqual(out, textwrap.dedent('''\
-            checking for a... 
+            checking for a... '''  # noqa  # trailing whitespace...
+            '''
             DEBUG: a: Trying known-a
             ERROR: Paths provided to find_program must be a list of strings, not %r
         ''' % mozpath.dirname(self.OTHER_A)))
@@ -620,8 +621,11 @@ class TestChecksConfigure(unittest.TestCase):
         self.assertEqual(out, textwrap.dedent('''\
             checking for java... %s
             checking for jarsigner... not found
-            ERROR: The program jarsigner was not found. Set $JAVA_HOME to your Java SDK directory or use '--with-java-bin-path={java-bin-dir}'
-        ''' % (java)))
+            ERROR: The program jarsigner was not found. Set $JAVA_HOME to your \
+Java SDK directory or use '--with-java-bin-path={java-bin-dir}'
+        ''' % (java)
+        ),
+        )

     def test_pkg_check_modules(self):
         mock_pkg_config_version = '0.10.0'
@@ -664,7 +668,6 @@ class TestChecksConfigure(unittest.TestCase):
         extra_paths = {
             mock_pkg_config_path: mock_pkg_config,
         }
-        includes = ('util.configure', 'checks.configure', 'pkg.configure')

         config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')")
         self.assertEqual(status, 1)
@@ -639,18 +639,20 @@ class TestConfigure(unittest.TestCase):
             mozpath.join(test_data_path, 'imply_option', 'imm.configure'))

         with self.assertRaisesRegexp(InvalidOptionError,
-                                     "--enable-foo' implied by 'imply_option at %s:7' conflicts with "
-                                     "'--disable-foo' from the command-line" % config_path):
+                                     "--enable-foo' implied by 'imply_option at %s:7' conflicts "
+                                     "with '--disable-foo' from the command-line" % config_path):
             get_config(['--disable-foo'])

         with self.assertRaisesRegexp(InvalidOptionError,
-                                     "--enable-bar=foo,bar' implied by 'imply_option at %s:16' conflicts"
-                                     " with '--enable-bar=a,b,c' from the command-line" % config_path):
+                                     "--enable-bar=foo,bar' implied by 'imply_option at %s:16' "
+                                     "conflicts with '--enable-bar=a,b,c' from the command-line"
+                                     % config_path):
             get_config(['--enable-bar=a,b,c'])

         with self.assertRaisesRegexp(InvalidOptionError,
-                                     "--enable-baz=BAZ' implied by 'imply_option at %s:25' conflicts"
-                                     " with '--enable-baz=QUUX' from the command-line" % config_path):
+                                     "--enable-baz=BAZ' implied by 'imply_option at %s:25' "
+                                     "conflicts with '--enable-baz=QUUX' from the command-line"
+                                     % config_path):
             get_config(['--enable-baz=QUUX'])

     def test_imply_option_failures(self):
@@ -810,17 +812,17 @@ class TestConfigure(unittest.TestCase):
         # imply_options resolve to None, which disables the imply_option.

         with self.assertRaises(ConfigureError) as e:
-            config = self.get_config()
+            self.get_config()

         self.assertEquals(e.exception.message, message)

         with self.assertRaises(ConfigureError) as e:
-            config = self.get_config(['--with-qux'])
+            self.get_config(['--with-qux'])

         self.assertEquals(e.exception.message, message)

         with self.assertRaises(ConfigureError) as e:
-            config = self.get_config(['--without-foo', '--with-qux'])
+            self.get_config(['--without-foo', '--with-qux'])

         self.assertEquals(e.exception.message, message)

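The rewrapped regex arguments above rely on implicit string-literal concatenation: adjacent literals are joined at compile time, so a long message can be split at any word boundary without changing its value or adding `+`. A standalone demonstration (the format argument is made up):

    # Adjacent literals concatenate before the '%' formatting is applied.
    message = ("--enable-foo' implied by 'imply_option at %s:7' conflicts "
               "with '--disable-foo' from the command-line" % 'moz.configure')
    assert 'conflicts with' in message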
@@ -4,7 +4,6 @@

 from __future__ import absolute_import, print_function, unicode_literals

-from StringIO import StringIO
 import contextlib
 import os
 import sys
@@ -4,8 +4,6 @@

 from __future__ import absolute_import, print_function, unicode_literals

-import sys
-
 from mozunit import main
 from mozbuild.util import (
     exec_,
@@ -156,10 +154,10 @@ class TestMozConfigure(BaseConfigureTest):
                 {'PATH': '/usr/bin', 'MAKENSISU': '/usr/bin/makensis'})
             return sandbox._value_for(sandbox['nsis_version'])

-        with self.assertRaises(SystemExit) as e:
+        with self.assertRaises(SystemExit):
             check_nsis_version('v2.5')

-        with self.assertRaises(SystemExit) as e:
+        with self.assertRaises(SystemExit):
             check_nsis_version('v3.0a2')

         self.assertEquals(check_nsis_version('v3.0b1'), '3.0b1')
@@ -875,14 +875,18 @@ class TestCommandLineHelper(unittest.TestCase):

         bar = Option('--bar',
                      possible_origins=('mozconfig',))
-        with self.assertRaisesRegexp(InvalidOptionError,
-                                     "--bar can not be set by command-line. Values are accepted from: mozconfig"):
+        with self.assertRaisesRegexp(
+            InvalidOptionError,
+            "--bar can not be set by command-line. Values are accepted from: mozconfig"
+        ):
             helper.handle(bar)

         baz = Option(env='BAZ',
                      possible_origins=('implied',))
-        with self.assertRaisesRegexp(InvalidOptionError,
-                                     "BAZ=1 can not be set by environment. Values are accepted from: implied"):
+        with self.assertRaisesRegexp(
+            InvalidOptionError,
+            "BAZ=1 can not be set by environment. Values are accepted from: implied"
+        ):
             helper.handle(baz)

@@ -120,7 +120,8 @@ class TestToolkitMozConfigure(BaseConfigureTest):

         self.assertEqual(
             out.getvalue(),
-            'ERROR: Yasm is required to build with vpx, but you do not appear to have Yasm installed.\n'
+            ('ERROR: Yasm is required to build with vpx, but you do not appear '
+             'to have Yasm installed.\n'),
         )

         out.truncate(0)
@@ -129,7 +130,8 @@ class TestToolkitMozConfigure(BaseConfigureTest):

         self.assertEqual(
             out.getvalue(),
-            'ERROR: Yasm is required to build with jpeg and vpx, but you do not appear to have Yasm installed.\n'
+            ('ERROR: Yasm is required to build with jpeg and vpx, but you do not appear '
+             'to have Yasm installed.\n'),
         )

         out.truncate(0)
@@ -138,7 +140,8 @@ class TestToolkitMozConfigure(BaseConfigureTest):

         self.assertEqual(
             out.getvalue(),
-            'ERROR: Yasm is required to build with jpeg, libav and vpx, but you do not appear to have Yasm installed.\n'
+            ('ERROR: Yasm is required to build with jpeg, libav and vpx, but you do not appear '
+             'to have Yasm installed.\n'),
         )

         out.truncate(0)
@@ -14,7 +14,6 @@ from mozbuild.frontend.context import (
     Path,
 )
 from mozbuild.frontend.data import (
-    ChromeManifestEntry,
     ComputedFlags,
     ConfigFileSubstitution,
     Defines,
@@ -23,7 +22,6 @@ from mozbuild.frontend.data import (
     FinalTargetPreprocessedFiles,
     GeneratedFile,
     GeneratedSources,
-    HostDefines,
     HostProgram,
     HostRustLibrary,
     HostRustProgram,
@@ -52,7 +50,6 @@ from mozbuild.frontend.reader import (
     BuildReaderError,
     SandboxValidationError,
 )
-from mozpack.chrome import manifest

 from mozbuild.test.common import MockConfig

@@ -533,18 +530,22 @@ class TestEmitterBasic(unittest.TestCase):
         LOCALIZED_FILES as an objdir path produces an error.
         """
         reader = self.reader('localized-files-not-localized-generated')
-        with self.assertRaisesRegexp(SandboxValidationError,
-                                     'Objdir file listed in LOCALIZED_FILES not in LOCALIZED_GENERATED_FILES:'):
-            objs = self.read_topsrcdir(reader)
+        with self.assertRaisesRegexp(
+            SandboxValidationError,
+            'Objdir file listed in LOCALIZED_FILES not in LOCALIZED_GENERATED_FILES:'
+        ):
+            self.read_topsrcdir(reader)

     def test_localized_generated_files_final_target_files(self):
         """Test that using LOCALIZED_GENERATED_FILES and then putting the output in
         FINAL_TARGET_FILES as an objdir path produces an error.
         """
         reader = self.reader('localized-generated-files-final-target-files')
-        with self.assertRaisesRegexp(SandboxValidationError,
-                                     'Outputs of LOCALIZED_GENERATED_FILES cannot be used in FINAL_TARGET_FILES:'):
-            objs = self.read_topsrcdir(reader)
+        with self.assertRaisesRegexp(
+            SandboxValidationError,
+            'Outputs of LOCALIZED_GENERATED_FILES cannot be used in FINAL_TARGET_FILES:'
+        ):
+            self.read_topsrcdir(reader)

     def test_generated_files_method_names(self):
         reader = self.reader('generated-files-method-names')
@@ -748,8 +749,11 @@ class TestEmitterBasic(unittest.TestCase):
         """
         reader = self.reader('test-manifest-dupes')

-        with self.assertRaisesRegexp(SandboxValidationError, 'bar.js appears multiple times '
-                                     'in a test manifest under a support-files field, please omit the duplicate entry.'):
+        with self.assertRaisesRegexp(
+            SandboxValidationError,
+            'bar.js appears multiple times '
+            'in a test manifest under a support-files field, please omit the duplicate entry.'
+        ):
             self.read_topsrcdir(reader)

     def test_test_manifest_absolute_support_files(self):
@@ -1056,7 +1060,7 @@ class TestEmitterBasic(unittest.TestCase):
                 SandboxValidationError,
                 'Path specified in LOCAL_INCLUDES.*resolves to the '
                 'topsrcdir or topobjdir'):
-            objs = self.read_topsrcdir(reader)
+            self.read_topsrcdir(reader)

         reader = self.reader('local_includes-invalid/objdir')

@@ -1064,7 +1068,7 @@ class TestEmitterBasic(unittest.TestCase):
                 SandboxValidationError,
                 'Path specified in LOCAL_INCLUDES.*resolves to the '
                 'topsrcdir or topobjdir'):
-            objs = self.read_topsrcdir(reader)
+            self.read_topsrcdir(reader)

     def test_local_includes_file(self):
         """Test that a filename can't be used in LOCAL_INCLUDES."""
@@ -1073,7 +1077,7 @@ class TestEmitterBasic(unittest.TestCase):
         with self.assertRaisesRegexp(
                 SandboxValidationError,
                 'Path specified in LOCAL_INCLUDES is a filename'):
-            objs = self.read_topsrcdir(reader)
+            self.read_topsrcdir(reader)

     def test_generated_includes(self):
         """Test that GENERATED_INCLUDES is emitted correctly."""
@@ -1420,8 +1424,10 @@ class TestEmitterBasic(unittest.TestCase):
     def test_final_target_pp_files_non_srcdir(self):
         '''Test that non-srcdir paths in FINAL_TARGET_PP_FILES throws errors.'''
         reader = self.reader('final-target-pp-files-non-srcdir')
-        with self.assertRaisesRegexp(SandboxValidationError,
-                                     'Only source directory paths allowed in FINAL_TARGET_PP_FILES:'):
+        with self.assertRaisesRegexp(
+            SandboxValidationError,
+            'Only source directory paths allowed in FINAL_TARGET_PP_FILES:'
+        ):
             self.read_topsrcdir(reader)

     def test_localized_files(self):
@@ -1444,9 +1450,11 @@ class TestEmitterBasic(unittest.TestCase):
         """Test that LOCALIZED_FILES errors if a path does not start with
         `en-US/` or contain `locales/en-US/`."""
         reader = self.reader('localized-files-no-en-us')
-        with self.assertRaisesRegexp(SandboxValidationError,
-                                     'LOCALIZED_FILES paths must start with `en-US/` or contain `locales/en-US/`: foo.js'):
-            objs = self.read_topsrcdir(reader)
+        with self.assertRaisesRegexp(
+            SandboxValidationError,
+            'LOCALIZED_FILES paths must start with `en-US/` or contain `locales/en-US/`: foo.js'
+        ):
+            self.read_topsrcdir(reader)

     def test_localized_pp_files(self):
         """Test that LOCALIZED_PP_FILES works properly."""
@@ -1665,8 +1673,10 @@ class TestEmitterBasic(unittest.TestCase):
         from GENERATED_FILES is an error.
         """
         reader = self.reader('test-symbols-file-objdir-missing-generated')
-        with self.assertRaisesRegexp(SandboxValidationError,
-                                     'Objdir file specified in SYMBOLS_FILE not in GENERATED_FILES:'):
+        with self.assertRaisesRegexp(
+            SandboxValidationError,
+            'Objdir file specified in SYMBOLS_FILE not in GENERATED_FILES:'
+        ):
             self.read_topsrcdir(reader)

@@ -247,7 +247,7 @@ class TestBuildReader(unittest.TestCase):
     def test_error_error_func_ok(self):
         reader = self.reader('reader-error-error-func', error_is_fatal=False)

-        contexts = list(reader.read_topsrcdir())
+        list(reader.read_topsrcdir())

     def test_error_empty_list(self):
         reader = self.reader('reader-error-empty-list')
@@ -4,8 +4,6 @@

 from __future__ import unicode_literals

-import os
-import shutil
 import unittest

 from mozunit import main
@@ -30,7 +28,6 @@ from mozbuild.frontend.context import (
 )

 from mozbuild.test.common import MockConfig
-from types import StringTypes

 import mozpack.path as mozpath

@@ -49,7 +49,8 @@ class TestAndroidVersionCode(unittest.TestCase):
         self.assertTrue('underflow' in cm.exception.message)

     def test_android_version_code_v1_running_low(self):
-        '''Verify there is an informative message if one asks for v1 codes that are close to overflow.'''
+        '''Verify there is an informative message if one asks for v1
+        codes that are close to overflow.'''
         with self.assertRaises(ValueError) as cm:
             overflow = '20290801000000'
             android_version_code_v1(overflow, cpu_arch='armeabi', min_sdk=9, max_sdk=None)
@@ -65,8 +66,9 @@ class TestAndroidVersionCode(unittest.TestCase):
     def test_android_version_code_v0_relative_v1(self):
         '''Verify that the first v1 code is greater than the equivalent v0 code.'''
         buildid = '20150801000000'
-        self.assertGreater(android_version_code_v1(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None),
-                           android_version_code_v0(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None))
+        self.assertGreater(
+            android_version_code_v1(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None),
+            android_version_code_v0(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None))


 if __name__ == '__main__':
@@ -23,7 +23,6 @@ from mozbuild.base import (
     BadEnvironmentException,
     MachCommandBase,
     MozbuildObject,
-    ObjdirMismatchException,
     PathArgument,
 )

@@ -30,7 +30,7 @@ class TestReadOnlyNamespace(unittest.TestCase):
             ['bar', 'foo'])

         with self.assertRaises(AttributeError):
-            value = test.missing
+            test.missing

         with self.assertRaises(Exception):
             test.foo = 2
@@ -56,7 +56,7 @@ class TestReadOnlyDict(unittest.TestCase):
         self.assertEqual(test['foo'], 1)

         with self.assertRaises(KeyError):
-            value = test['missing']
+            test['missing']

         with self.assertRaises(Exception):
             test['baz'] = True
@@ -1,7 +1,5 @@
 import unittest

-import sys
-import os.path
 import mozunit

 from mozbuild.preprocessor import Expression, Context
@@ -8,8 +8,6 @@ import unittest
 import os
 import sys
 import os.path
-import time
-import inspect
 from filecmp import dircmp
 from tempfile import mkdtemp
 from shutil import rmtree, copy2
@@ -1,13 +1,7 @@
 import unittest

-from StringIO import StringIO
-import os
-import sys
-import os.path
 import mozunit

-from mozbuild.preprocessor import Preprocessor
-
 from mozbuild.vendor_rust import VendorRust

@@ -2,7 +2,6 @@ import unittest

 from StringIO import StringIO
 import os
-import sys
 import os.path
 import mozunit

@@ -139,7 +139,6 @@ class TestMozconfigLoader(unittest.TestCase):

         srcdir = self.get_temp_dir()
         curdir = self.get_temp_dir()
-        dirs = [srcdir, curdir]
         loader = MozconfigLoader(srcdir)

         path = os.path.join(srcdir, relative_mozconfig)
@@ -277,24 +277,24 @@ class TestHierarchicalStringList(unittest.TestCase):
                          "<type 'bool'>")

     def test_del_exports(self):
-        with self.assertRaises(MozbuildDeletionError) as mde:
+        with self.assertRaises(MozbuildDeletionError):
             self.EXPORTS.foo += ['bar.h']
             del self.EXPORTS.foo

     def test_unsorted(self):
-        with self.assertRaises(UnsortedError) as ee:
+        with self.assertRaises(UnsortedError):
             self.EXPORTS += ['foo.h', 'bar.h']

-        with self.assertRaises(UnsortedError) as ee:
+        with self.assertRaises(UnsortedError):
             self.EXPORTS.foo = ['foo.h', 'bar.h']

-        with self.assertRaises(UnsortedError) as ee:
+        with self.assertRaises(UnsortedError):
             self.EXPORTS.foo += ['foo.h', 'bar.h']

     def test_reassign(self):
         self.EXPORTS.foo = ['foo.h']

-        with self.assertRaises(KeyError) as ee:
+        with self.assertRaises(KeyError):
             self.EXPORTS.foo = ['bar.h']

     def test_walk(self):
@@ -503,7 +503,7 @@ class TestStrictOrderingOnAppendListWithFlagsFactory(unittest.TestCase):
         l['a'] = 'foo'

         with self.assertRaises(Exception):
-            c = l['c']
+            l['c']

         self.assertEqual(l['a'].foo, False)
         l['a'].foo = True
@@ -134,8 +134,9 @@ class SupportFilesConverter(object):
             # directory for the benefit of tests specifying 'install-to-subdir'.
             key = field, pattern, out_dir
             if key in info.seen:
-                raise ValueError("%s appears multiple times in a test manifest under a %s field,"
-                                 " please omit the duplicate entry." % (pattern, field))
+                raise ValueError(
+                    "%s appears multiple times in a test manifest under a %s field,"
+                    " please omit the duplicate entry." % (pattern, field))
             info.seen.add(key)
             if key in seen:
                 continue
@@ -23,7 +23,6 @@ import time
 import types

 from collections import (
-    defaultdict,
     Iterable,
     OrderedDict,
 )
@@ -661,8 +660,10 @@ def StrictOrderingOnAppendListWithFlagsFactory(flags):
                     (self._flags_type._flags, other._flags_type._flags))
             intersection = set(self._flags.keys()) & set(other._flags.keys())
             if intersection:
-                raise ValueError('Cannot update flags: both lists of strings with flags configure %s' %
-                                 intersection)
+                raise ValueError(
+                    'Cannot update flags: both lists of strings with flags configure %s' %
+                    intersection
+                )
             self._flags.update(other._flags)

         def extend(self, l):
@@ -4,10 +4,8 @@

 from __future__ import absolute_import, print_function, unicode_literals

-from distutils.version import LooseVersion
 import logging
 from mozbuild.base import (
-    BuildEnvironmentNotFoundException,
     MozbuildObject,
 )
 import mozfile
@@ -63,7 +61,7 @@ Please set a repository url with --repo on either googlesource or github.''' % h
         req.raise_for_status()
         try:
             info = req.json()
-        except ValueError as e:
+        except ValueError:
             # As of 2017 May, googlesource sends 4 garbage characters
             # at the beginning of the json response. Work around this.
             # https://bugs.chromium.org/p/chromium/issues/detail?id=718550
@@ -4,10 +4,8 @@

 from __future__ import absolute_import, print_function, unicode_literals

-from distutils.version import LooseVersion
 import logging
 from mozbuild.base import (
-    BuildEnvironmentNotFoundException,
     MozbuildObject,
 )
 import mozfile
@@ -52,7 +50,7 @@ Please set a repository url with --repo on either googlesource or github.''' % h

     def upstream_gitlab_commit(self, revision):
         '''Query the github api for a git commit id and timestamp.'''
-        gitlab_api = 'https://code.videolan.org/api/v4/projects/videolan%2Fdav1d/repository/commits'
+        gitlab_api = 'https://code.videolan.org/api/v4/projects/videolan%2Fdav1d/repository/commits'  # noqa
         url = mozpath.join(gitlab_api, revision)
         self.log(logging.INFO, 'fetch', {'url': url},
                  'Fetching commit id from {url}')
@@ -11,7 +11,6 @@ from mozbuild.base import (
     BuildEnvironmentNotFoundException,
     MozbuildObject,
 )
-import mozfile
 import mozpack.path as mozpath
 import os
 import re
@@ -124,8 +123,11 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
             self.run_process(args=[cargo, 'install', 'cargo-vendor'],
                              append_env=env)
         elif not self.check_cargo_vendor_version(cargo):
-            self.log(logging.INFO, 'cargo_vendor', {
-            }, 'cargo-vendor >= 0.1.23 required; force-reinstalling (this may take a few minutes)...')
+            self.log(
+                logging.INFO, 'cargo_vendor', {},
+                ('cargo-vendor >= 0.1.23 required; '
+                 'force-reinstalling (this may take a few minutes)...')
+            )
             env = self.check_openssl()
             self.run_process(args=[cargo, 'install', '--force', 'cargo-vendor'],
                              append_env=env)
@@ -198,8 +200,9 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
     RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST = {
         # MIT
         'deque': '6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb',
-        # we're whitelisting this fuchsia crate because it doesn't get built in the final product but has a license-file that needs ignoring
-        'fuchsia-cprng' : '03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b',
+        # we're whitelisting this fuchsia crate because it doesn't get built in the final
+        # product but has a license-file that needs ignoring
+        'fuchsia-cprng': '03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b',
     }

     @staticmethod
@@ -346,15 +349,17 @@ license file's hash.
         vendor_dir = mozpath.join(self.topsrcdir, relative_vendor_dir)

         # We use check_call instead of mozprocess to ensure errors are displayed.
-        # We do an |update -p| here to regenerate the Cargo.lock file with minimal changes. See bug 1324462
+        # We do an |update -p| here to regenerate the Cargo.lock file with minimal
+        # changes. See bug 1324462
         subprocess.check_call([cargo, 'update', '-p', 'gkrust'], cwd=self.topsrcdir)

         subprocess.check_call([cargo, 'vendor', '--quiet', '--sync',
                                'Cargo.lock'] + [vendor_dir], cwd=self.topsrcdir)

         if not self._check_licenses(vendor_dir):
-            self.log(logging.ERROR, 'license_check_failed', {},
-                     '''The changes from `mach vendor rust` will NOT be added to version control.''')
+            self.log(
+                logging.ERROR, 'license_check_failed', {},
+                '''The changes from `mach vendor rust` will NOT be added to version control.''')
             sys.exit(1)

         self.repository.add_remove_files(vendor_dir)
@@ -396,4 +401,6 @@ The changes from `mach vendor rust` will NOT be added to version control.
 Please consider finding ways to reduce the size of the vendored packages.
 For instance, check the vendored packages for unusually large test or
 benchmark files that don't need to be published to crates.io and submit
-a pull request upstream to ignore those files when publishing.'''.format(size=cumulative_added_size))
+a pull request upstream to ignore those files when publishing.'''.format(
+                size=cumulative_added_size)
+            )
@@ -330,7 +330,7 @@ class VirtualenvManager(object):
             try:
                 handle_package(package[1:])
                 return True
-            except:
+            except Exception:
                 print('Error processing command. Ignoring',
                       'because optional. (%s)' % ':'.join(package),
                       file=self.log_handle)
@@ -659,7 +659,9 @@ def verify_python_version(log_handle):

 if __name__ == '__main__':
     if len(sys.argv) < 5:
-        print('Usage: populate_virtualenv.py /path/to/topsrcdir /path/to/topobjdir /path/to/virtualenv /path/to/virtualenv_manifest')
+        print(
+            'Usage: populate_virtualenv.py /path/to/topsrcdir '
+            '/path/to/topobjdir /path/to/virtualenv /path/to/virtualenv_manifest')
         sys.exit(1)

     verify_python_version(sys.stdout)