Mirror of https://github.com/mozilla/gecko-dev.git

Bug 991983 - Emit absolute paths for other sources. r=gps

Parent: 188ce081ec
Commit: bc23a0ddc5
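In short: the frontend emitter now stores every file in SOURCES, HOST_SOURCES, UNIFIED_SOURCES and GENERATED_SOURCES as a normalized absolute path (rooted in the objdir for generated sources, the srcdir otherwise), and the recursive make backend converts those paths back to relative ones when writing makefile variables.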
@@ -457,7 +457,11 @@ class RecursiveMakeBackend(CommonBackend):
             variables = [suffix_map[obj.canonical_suffix]]
             if isinstance(obj, GeneratedSources):
                 variables.append('GARBAGE')
+                base = backend_file.objdir
+            else:
+                base = backend_file.srcdir
             for f in sorted(obj.files):
+                f = mozpath.relpath(f, base)
                 for var in variables:
                     backend_file.write('%s += %s\n' % (var, f))
         elif isinstance(obj, HostSources):
@@ -468,7 +472,8 @@ class RecursiveMakeBackend(CommonBackend):
             }
             var = suffix_map[obj.canonical_suffix]
             for f in sorted(obj.files):
-                backend_file.write('%s += %s\n' % (var, f))
+                backend_file.write('%s += %s\n' % (
+                    var, mozpath.relpath(f, backend_file.srcdir)))
         elif isinstance(obj, VariablePassthru):
             # Sorted so output is consistent and we don't bump mtimes.
             for k, v in sorted(obj.variables.items()):
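A minimal sketch of the relativizing step these backend hunks perform; posixpath stands in for mozbuild's mozpath helper, and the directories and variable name are hypothetical:

    import posixpath

    def emit_source_vars(files, variables, base):
        # Files arrive from the frontend as absolute, normalized paths;
        # the backend rewrites each one relative to base (objdir for
        # generated sources, srcdir otherwise) before appending it to
        # the make variables.
        lines = []
        for f in sorted(files):
            rel = posixpath.relpath(f, base)
            for var in variables:
                lines.append('%s += %s\n' % (var, rel))
        return lines

    # Hypothetical paths and variable name:
    emit_source_vars(['/src/widget/nsWindow.cpp'], ['CPPSRCS'], '/src/widget')
    # -> ['CPPSRCS += nsWindow.cpp\n']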
@@ -712,15 +712,33 @@ class TreeMetadataEmitter(LoggingMixin):
         return sub

     def _process_sources(self, context, passthru):
+        all_sources = {}
+        all_flags = {}
+        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES',
+                       'GENERATED_SOURCES'):
+            srcs = all_sources[symbol] = []
+            context_srcs = context.get(symbol, [])
+            for f in context_srcs:
+                if symbol.startswith('GENERATED_'):
+                    full_path = mozpath.join(context.objdir, f)
+                else:
+                    full_path = mozpath.join(context.srcdir, f)
+                full_path = mozpath.normpath(full_path)
+                srcs.append(full_path)
+                if symbol == 'SOURCES':
+                    flags = context_srcs[f]
+                    if flags:
+                        all_flags[full_path] = flags
+
         for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
-            for src in (context[symbol] or []):
-                if not os.path.exists(mozpath.join(context.srcdir, src)):
+            for src in all_sources[symbol]:
+                if not os.path.exists(src):
                     raise SandboxValidationError('File listed in %s does not '
                         'exist: \'%s\'' % (symbol, src), context)

         no_pgo = context.get('NO_PGO')
-        sources = context.get('SOURCES', [])
-        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
+        no_pgo_sources = [f for f, flags in all_flags.iteritems()
+                          if flags.no_pgo]
         if no_pgo:
             if no_pgo_sources:
                 raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
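A standalone sketch of the new absolutizing pass in _process_sources, again with posixpath standing in for mozpath and hypothetical directories:

    import posixpath

    def absolutize(srcdir, objdir, symbol, f):
        # Generated sources are rooted in the object directory; everything
        # else in the source directory. normpath() yields the canonical
        # absolute path that the os.path.exists() check and the backend's
        # relpath() calls rely on.
        base = objdir if symbol.startswith('GENERATED_') else srcdir
        return posixpath.normpath(posixpath.join(base, f))

    absolutize('/src/widget', '/obj/widget', 'SOURCES', 'nsWindow.cpp')
    # -> '/src/widget/nsWindow.cpp'
    absolutize('/src/widget', '/obj/widget', 'GENERATED_SOURCES', 'events.cpp')
    # -> '/obj/widget/events.cpp'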
@@ -770,27 +788,26 @@ class TreeMetadataEmitter(LoggingMixin):

             # First ensure that we haven't been given filetypes that we don't
             # recognize.
-            for f in context[variable]:
+            for f in all_sources[variable]:
                 ext = mozpath.splitext(f)[1]
                 if ext not in allowed_suffixes:
                     raise SandboxValidationError(
                         '%s has an unknown file type.' % f, context)

             # Now sort the files to let groupby work.
-            sorted_files = sorted(context[variable], key=canonical_suffix_for_file)
-            for canonical_suffix, files in itertools.groupby(sorted_files, canonical_suffix_for_file):
-                if variable.startswith('UNIFIED_'):
-                    files = [mozpath.normpath(mozpath.join(context.srcdir, f))
-                             for f in files]
+            sorted_files = sorted(all_sources[variable],
+                                  key=canonical_suffix_for_file)
+            for canonical_suffix, files in itertools.groupby(
+                    sorted_files, canonical_suffix_for_file):
                 arglist = [context, list(files), canonical_suffix]
                 if variable.startswith('UNIFIED_') and 'FILES_PER_UNIFIED_FILE' in context:
                     arglist.append(context['FILES_PER_UNIFIED_FILE'])
                 yield klass(*arglist)

-        sources_with_flags = [f for f in sources if sources[f].flags]
-        for f in sources_with_flags:
-            ext = mozpath.splitext(f)[1]
-            yield PerSourceFlag(context, f, sources[f].flags)
+        for f, flags in all_flags.iteritems():
+            if flags.flags:
+                ext = mozpath.splitext(f)[1]
+                yield PerSourceFlag(context, f, flags.flags)

     def _process_xpidl(self, context):
         # XPIDL source files get processed and turned into .h and .xpt files.
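Note that the UNIFIED_ special case that joined each file against srcdir disappears from this loop: the central pass in _process_sources above already stores every source, unified or not, as a normalized absolute path.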
@@ -751,9 +751,11 @@ class TestEmitterBasic(unittest.TestCase):
         }
         for suffix, files in expected.items():
             sources = suffix_map[suffix]
-            self.assertEqual(sources.files, files)
+            self.assertEqual(
+                sources.files,
+                [mozpath.join(reader.config.topsrcdir, f) for f in files])

-    def test_sources(self):
+    def test_generated_sources(self):
         """Test that GENERATED_SOURCES works properly."""
         reader = self.reader('generated-sources')
         objs = self.read_topsrcdir(reader)
@@ -776,7 +778,9 @@ class TestEmitterBasic(unittest.TestCase):
         }
         for suffix, files in expected.items():
             sources = suffix_map[suffix]
-            self.assertEqual(sources.files, files)
+            self.assertEqual(
+                sources.files,
+                [mozpath.join(reader.config.topobjdir, f) for f in files])

     def test_host_sources(self):
         """Test that HOST_SOURCES works properly."""
@@ -797,7 +801,9 @@ class TestEmitterBasic(unittest.TestCase):
         }
         for suffix, files in expected.items():
             sources = suffix_map[suffix]
-            self.assertEqual(sources.files, files)
+            self.assertEqual(
+                sources.files,
+                [mozpath.join(reader.config.topsrcdir, f) for f in files])

     def test_unified_sources(self):
         """Test that UNIFIED_SOURCES works properly."""
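The test changes encode the new contract: emitted source objects now carry absolute paths, so the expected file lists are joined against topsrcdir (or topobjdir for GENERATED_SOURCES) before comparison. The rename of test_sources to test_generated_sources also brings the method name in line with its GENERATED_SOURCES docstring.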