Mirror of https://github.com/mozilla/gecko-dev.git
bug 1160185 - support GENERATED_FILES in EXPORTS. r=glandium
This change allows specifying objdir-relative paths in EXPORTS to enable exporting entries from GENERATED_FILES. Objdir paths in EXPORTS that are not in GENERATED_FILES will raise an exception. Example:

```
EXPORTS += ['!g.h', 'f.h']
GENERATED_FILES += ['g.h']
```

Given the implementation, this should also work for FINAL_TARGET_FILES, FINAL_TARGET_PP_FILES, and TESTING_FILES, but those are not well tested.

This patch also renames the install manifest for '_tests' to match the directory name, for convenience in some code I refactored.

--HG--
extra : commitid : CwayzXtxv1O
extra : rebase_source : 5fb6f461fc740da9bce14bbdbfabdfe618af8803
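For illustration, here is a sketch of what the recursive-make backend is expected to emit for the hypothetical `g.h` from the example above, based on the backend test expectations added later in this patch: a plain srcdir entry such as `f.h` still goes through the `dist_include` install manifest, while the objdir entry is routed through `INSTALL_TARGETS` in `backend.mk`, roughly like:

```
# Sketch only; names taken from the example above, shape from the test expectations.
GENERATED_FILES += g.h
EXTRA_MDDEPEND_FILES += g.h.pp
dist_include_FILES += g.h
dist_include_DEST := $(DEPTH)/dist/include/
dist_include_TARGET := export
INSTALL_TARGETS += dist_include
```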
This commit is contained in:
Parent
993136c2c9
Commit
47dba5ed8c
@@ -136,7 +136,7 @@ $(addprefix install-,$(filter dist/%,$(install_manifests))): install-dist/%: $(i
 install-dist_%: install-dist/% ;
 
 install-_tests: $(install_manifest_depends)
-	$(call py_action,process_install_manifest,$(if $(NO_REMOVE),--no-remove )_tests _build_manifests/install/tests)
+	$(call py_action,process_install_manifest,$(if $(NO_REMOVE),--no-remove )_tests _build_manifests/install/_tests)
 
 # For compatibility
 .PHONY: install-tests
@@ -1,10 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# We'd like this to be defined in a future GENERATED_EXPORTS list.
-# Bug 1160185 has a few proposals for this.
-INSTALL_TARGETS += xpcaccevents
-xpcaccevents_FILES := xpcAccEvents.h
-xpcaccevents_DEST = $(DIST)/include
-xpcaccevents_TARGET := export
@@ -24,6 +24,10 @@ SOURCES += [
     '!xpcAccEvents.cpp',
 ]
 
+EXPORTS += [
+    '!xpcAccEvents.h',
+]
+
 LOCAL_INCLUDES += [
     '/accessible/base',
     '/accessible/generic',
@@ -401,7 +401,7 @@ class RecursiveMakeBackend(CommonBackend):
             'dist_private',
             'dist_sdk',
             'dist_xpi-stage',
-            'tests',
+            '_tests',
             'xpidl',
         ]}
 
@@ -509,9 +509,6 @@ class RecursiveMakeBackend(CommonBackend):
         elif isinstance(obj, Defines):
             self._process_defines(obj, backend_file)
 
-        elif isinstance(obj, Exports):
-            self._process_exports(obj, obj.exports, backend_file)
-
         elif isinstance(obj, GeneratedFile):
             dep_file = "%s.pp" % obj.output
             backend_file.write('GENERATED_FILES += %s\n' % obj.output)
@@ -587,7 +584,7 @@ class RecursiveMakeBackend(CommonBackend):
             self._process_linked_libraries(obj, backend_file)
 
         elif isinstance(obj, FinalTargetFiles):
-            self._process_final_target_files(obj, obj.files)
+            self._process_final_target_files(obj, obj.files, backend_file)
 
         elif isinstance(obj, FinalTargetPreprocessedFiles):
             self._process_final_target_pp_files(obj, obj.files, backend_file)
@@ -852,7 +849,7 @@ class RecursiveMakeBackend(CommonBackend):
 
         # Catch duplicate inserts.
         try:
-            self._install_manifests['tests'].add_optional_exists(manifest_stem)
+            self._install_manifests['_tests'].add_optional_exists(manifest_stem)
         except ValueError:
             pass
 
@@ -956,27 +953,15 @@ class RecursiveMakeBackend(CommonBackend):
             backend_file.write(' %s' % define)
         backend_file.write('\n')
 
-    def _process_exports(self, obj, exports, backend_file):
-        # This may not be needed, but is present for backwards compatibility
-        # with the old make rules, just in case.
-        if not obj.dist_install:
-            return
-
-        for source, dest in self._walk_hierarchy(obj, exports):
-            self._install_manifests['dist_include'].add_symlink(source, dest)
-
-            if not os.path.exists(source):
-                raise Exception('File listed in EXPORTS does not exist: %s' % source)
-
     def _process_test_harness_files(self, obj, backend_file):
         for path, files in obj.srcdir_files.iteritems():
             for source in files:
                 dest = '%s/%s' % (path, mozpath.basename(source))
-                self._install_manifests['tests'].add_symlink(source, dest)
+                self._install_manifests['_tests'].add_symlink(source, dest)
 
         for path, patterns in obj.srcdir_pattern_files.iteritems():
             for p in patterns:
-                self._install_manifests['tests'].add_pattern_symlink(p[0], p[1], path)
+                self._install_manifests['_tests'].add_pattern_symlink(p[0], p[1], path)
 
         for path, files in obj.objdir_files.iteritems():
             prefix = 'TEST_HARNESS_%s' % path.replace('/', '_')
@@ -1134,14 +1119,14 @@ INSTALL_TARGETS += %(prefix)s
         # the manifest is listed as a duplicate.
         for source, (dest, is_test) in obj.installs.items():
             try:
-                self._install_manifests['tests'].add_symlink(source, dest)
+                self._install_manifests['_tests'].add_symlink(source, dest)
             except ValueError:
                 if not obj.dupe_manifest and is_test:
                     raise
 
         for base, pattern, dest in obj.pattern_installs:
             try:
-                self._install_manifests['tests'].add_pattern_symlink(base,
+                self._install_manifests['_tests'].add_pattern_symlink(base,
                     pattern, dest)
             except ValueError:
                 if not obj.dupe_manifest:
@@ -1149,7 +1134,7 @@ INSTALL_TARGETS += %(prefix)s
 
         for dest in obj.external_installs:
             try:
-                self._install_manifests['tests'].add_optional_exists(dest)
+                self._install_manifests['_tests'].add_optional_exists(dest)
             except ValueError:
                 if not obj.dupe_manifest:
                     raise
@@ -1298,24 +1283,40 @@ INSTALL_TARGETS += %(prefix)s
         # Process library-based defines
         self._process_defines(obj.defines, backend_file)
 
-    def _process_final_target_files(self, obj, files):
+    def _process_final_target_files(self, obj, files, backend_file):
         target = obj.install_target
-        if target.startswith('dist/bin'):
-            install_manifest = self._install_manifests['dist_bin']
-            reltarget = mozpath.relpath(target, 'dist/bin')
-        elif target.startswith('dist/xpi-stage'):
-            install_manifest = self._install_manifests['dist_xpi-stage']
-            reltarget = mozpath.relpath(target, 'dist/xpi-stage')
-        elif target.startswith('_tests'):
-            install_manifest = self._install_manifests['tests']
-            reltarget = mozpath.relpath(target, '_tests')
+        for path in (
+            'dist/bin',
+            'dist/xpi-stage',
+            '_tests',
+            'dist/include',
+        ):
+            manifest = path.replace('/', '_')
+            if target.startswith(path):
+                install_manifest = self._install_manifests[manifest]
+                reltarget = mozpath.relpath(target, path)
+                break
         else:
             raise Exception("Cannot install to " + target)
 
         for path, files in files.walk():
+            target_var = (mozpath.join(target, path)
+                          if path else target).replace('/', '_')
+            have_objdir_files = False
             for f in files:
+                if not isinstance(f, ObjDirPath):
                     dest = mozpath.join(reltarget, path, mozpath.basename(f))
                     install_manifest.add_symlink(f.full_path, dest)
+                else:
+                    backend_file.write('%s_FILES += %s\n' % (
+                        target_var, self._pretty_path(f, backend_file)))
+                    have_objdir_files = True
+            if have_objdir_files:
+                backend_file.write('%s_DEST := $(DEPTH)/%s\n'
+                                   % (target_var,
+                                      mozpath.join(target, path)))
+                backend_file.write('%s_TARGET := export\n' % target_var)
+                backend_file.write('INSTALL_TARGETS += %s\n' % target_var)
 
     def _process_final_target_pp_files(self, obj, files, backend_file):
         # We'd like to install these via manifests as preprocessed files.
@@ -1268,7 +1268,7 @@ VARIABLES = {
         into account the values of ``AC_DEFINE`` instead of ``AC_SUBST``.
         """, None),
 
-    'EXPORTS': (HierarchicalStringList, list,
+    'EXPORTS': (ContextDerivedTypedHierarchicalStringList(Path), list,
        """List of files to be exported, and in which subdirectories.
 
        ``EXPORTS`` is generally used to list the include files to be exported to
@@ -1281,6 +1281,10 @@ VARIABLES = {
 
           EXPORTS += ['foo.h']
           EXPORTS.mozilla.dom += ['bar.h']
 
+       Entries in ``EXPORTS`` are paths, so objdir paths may be used, but
+       any files listed from the objdir must also be listed in
+       ``GENERATED_FILES``.
+
        """, None),
 
     'PROGRAM' : (unicode, unicode,
@@ -194,21 +194,6 @@ class Defines(BaseDefines):
 class HostDefines(BaseDefines):
     pass
 
-class Exports(ContextDerived):
-    """Context derived container object for EXPORTS, which is a
-    HierarchicalStringList.
-
-    We need an object derived from ContextDerived for use in the backend, so
-    this object fills that role. It just has a reference to the underlying
-    HierarchicalStringList, which is created when parsing EXPORTS.
-    """
-    __slots__ = ('exports', 'dist_install')
-
-    def __init__(self, context, exports, dist_install=True):
-        ContextDerived.__init__(self, context)
-        self.exports = exports
-        self.dist_install = dist_install
-
 class TestHarnessFiles(ContextDerived):
     """Sandbox container object for TEST_HARNESS_FILES,
     which is a HierarchicalStringList.
@@ -832,6 +817,19 @@ class TestingFiles(FinalTargetFiles):
         return '_tests'
 
 
+class Exports(FinalTargetFiles):
+    """Context derived container object for EXPORTS, which is a
+    HierarchicalStringList.
+
+    We need an object derived from ContextDerived for use in the backend, so
+    this object fills that role. It just has a reference to the underlying
+    HierarchicalStringList, which is created when parsing EXPORTS.
+    """
+    @property
+    def install_target(self):
+        return 'dist/include'
+
+
 class GeneratedFile(ContextDerived):
     """Represents a generated file."""
 
@@ -608,12 +608,9 @@ class TreeMetadataEmitter(LoggingMixin):
         for obj in self._process_sources(context, passthru):
             yield obj
 
-        exports = context.get('EXPORTS')
-        if exports:
-            yield Exports(context, exports,
-                          dist_install=dist_install is not False)
-
+        generated_files = set()
         for obj in self._process_generated_files(context):
+            generated_files.add(obj.output)
             yield obj
 
         for obj in self._process_test_harness_files(context):
@@ -653,6 +650,7 @@ class TreeMetadataEmitter(LoggingMixin):
 
         components = []
         for var, cls in (
+            ('EXPORTS', Exports),
             ('FINAL_TARGET_FILES', FinalTargetFiles),
             ('FINAL_TARGET_PP_FILES', FinalTargetPreprocessedFiles),
             ('TESTING_FILES', TestingFiles),
@@ -674,11 +672,23 @@ class TreeMetadataEmitter(LoggingMixin):
             if mozpath.split(base)[0] == 'res':
                 has_resources = True
             for f in files:
+                if (var == 'FINAL_TARGET_PP_FILES' and
+                        not isinstance(f, SourcePath)):
+                    raise SandboxValidationError(
+                        ('Only source directory paths allowed in ' +
+                         'FINAL_TARGET_PP_FILES: %s')
+                        % (f,), context)
+                if not isinstance(f, ObjDirPath):
                     path = f.full_path
                     if not os.path.exists(path):
                         raise SandboxValidationError(
                             'File listed in %s does not exist: %s'
                             % (var, path), context)
+                else:
+                    if mozpath.basename(f.full_path) not in generated_files:
+                        raise SandboxValidationError(
+                            ('Objdir file listed in %s not in ' +
+                             'GENERATED_FILES: %s') % (var, path), context)
 
         # Addons (when XPI_NAME is defined) and Applications (when
         # DIST_SUBDIR is defined) use a different preferences directory
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['!bar.h', 'foo.h']
+EXPORTS.mozilla += ['!mozilla2.h', 'mozilla1.h']
+EXPORTS.mozilla.dom += ['!dom2.h', '!dom3.h', 'dom1.h']
+EXPORTS.gfx += ['gfx.h']
+
+GENERATED_FILES += ['bar.h']
+GENERATED_FILES += ['mozilla2.h']
+GENERATED_FILES += ['dom2.h']
+GENERATED_FILES += ['dom3.h']
@@ -393,6 +393,49 @@ class TestRecursiveMakeBackend(BackendTester):
         self.maxDiff = None
         self.assertEqual(lines, expected)
 
+    def test_exports_generated(self):
+        """Ensure EXPORTS that are listed in GENERATED_FILES
+        are handled properly."""
+        env = self._consume('exports-generated', RecursiveMakeBackend)
+
+        # EXPORTS files should appear in the dist_include install manifest.
+        m = InstallManifest(path=mozpath.join(env.topobjdir,
+            '_build_manifests', 'install', 'dist_include'))
+        self.assertEqual(len(m), 4)
+        self.assertIn('foo.h', m)
+        self.assertIn('mozilla/mozilla1.h', m)
+        self.assertIn('mozilla/dom/dom1.h', m)
+        self.assertIn('gfx/gfx.h', m)
+        # EXPORTS files that are also GENERATED_FILES should be handled as
+        # INSTALL_TARGETS.
+        backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+        lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+        expected = [
+            'GENERATED_FILES += bar.h',
+            'EXTRA_MDDEPEND_FILES += bar.h.pp',
+            'GENERATED_FILES += mozilla2.h',
+            'EXTRA_MDDEPEND_FILES += mozilla2.h.pp',
+            'GENERATED_FILES += dom2.h',
+            'EXTRA_MDDEPEND_FILES += dom2.h.pp',
+            'GENERATED_FILES += dom3.h',
+            'EXTRA_MDDEPEND_FILES += dom3.h.pp',
+            'dist_include_FILES += bar.h',
+            'dist_include_DEST := $(DEPTH)/dist/include/',
+            'dist_include_TARGET := export',
+            'INSTALL_TARGETS += dist_include',
+            'dist_include_mozilla_FILES += mozilla2.h',
+            'dist_include_mozilla_DEST := $(DEPTH)/dist/include/mozilla',
+            'dist_include_mozilla_TARGET := export',
+            'INSTALL_TARGETS += dist_include_mozilla',
+            'dist_include_mozilla_dom_FILES += dom2.h',
+            'dist_include_mozilla_dom_FILES += dom3.h',
+            'dist_include_mozilla_dom_DEST := $(DEPTH)/dist/include/mozilla/dom',
+            'dist_include_mozilla_dom_TARGET := export',
+            'INSTALL_TARGETS += dist_include_mozilla_dom',
+        ]
+        self.maxDiff = None
+        self.assertEqual(lines, expected)
+
     def test_resources(self):
         """Ensure RESOURCE_FILES is handled properly."""
         env = self._consume('resources', RecursiveMakeBackend)
@@ -454,7 +497,7 @@ class TestRecursiveMakeBackend(BackendTester):
         """Pattern matches in test manifests' support-files should be recorded."""
         env = self._consume('test-manifests-written', RecursiveMakeBackend)
         m = InstallManifest(path=mozpath.join(env.topobjdir,
-            '_build_manifests', 'install', 'tests'))
+            '_build_manifests', 'install', '_tests'))
 
         # This is not the most robust test in the world, but it gets the job
         # done.
@@ -700,7 +743,7 @@ class TestRecursiveMakeBackend(BackendTester):
         env = self._consume('test-manifests-duplicate-support-files',
             RecursiveMakeBackend)
 
-        p = os.path.join(env.topobjdir, '_build_manifests', 'install', 'tests')
+        p = os.path.join(env.topobjdir, '_build_manifests', 'install', '_tests')
         m = InstallManifest(p)
         self.assertIn('testing/mochitest/tests/support-file.txt', m)
 
@@ -755,7 +798,7 @@ class TestRecursiveMakeBackend(BackendTester):
         man_dir = mozpath.join(env.topobjdir, '_build_manifests', 'install')
         self.assertTrue(os.path.isdir(man_dir))
 
-        full = mozpath.join(man_dir, 'tests')
+        full = mozpath.join(man_dir, '_tests')
         self.assertTrue(os.path.exists(full))
 
         m = InstallManifest(path=full)
@@ -0,0 +1,8 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['foo.h']
+EXPORTS.mozilla += ['mozilla1.h']
+EXPORTS.mozilla += ['!mozilla2.h']
+
+GENERATED_FILES += ['mozilla2.h']
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['foo.h']
+EXPORTS += ['!bar.h']
@@ -0,0 +1,6 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['foo.h']
+EXPORTS.mozilla += ['mozilla1.h']
+EXPORTS.mozilla += ['mozilla2.h']
@@ -0,0 +1,7 @@
+# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+    '!foo.js',
+]
@@ -9,6 +9,9 @@ import unittest
 
 from mozunit import main
 
+from mozbuild.frontend.context import (
+    ObjDirPath,
+)
 from mozbuild.frontend.data import (
     AndroidResDirs,
     BrandingFiles,
@@ -254,10 +257,42 @@ class TestEmitterBasic(unittest.TestCase):
             ('vpx', ['mem.h', 'mem2.h']),
         ]
         for (expect_path, expect_headers), (actual_path, actual_headers) in \
-                zip(expected, [(path, list(seq)) for path, seq in objs[0].exports.walk()]):
+                zip(expected, [(path, list(seq)) for path, seq in objs[0].files.walk()]):
             self.assertEqual(expect_path, actual_path)
             self.assertEqual(expect_headers, actual_headers)
 
+    def test_exports_missing(self):
+        '''
+        Missing files in EXPORTS is an error.
+        '''
+        reader = self.reader('exports-missing')
+        with self.assertRaisesRegexp(SandboxValidationError,
+                'File listed in EXPORTS does not exist:'):
+            objs = self.read_topsrcdir(reader)
+
+    def test_exports_missing_generated(self):
+        '''
+        An objdir file in EXPORTS that is not in GENERATED_FILES is an error.
+        '''
+        reader = self.reader('exports-missing-generated')
+        with self.assertRaisesRegexp(SandboxValidationError,
+                'Objdir file listed in EXPORTS not in GENERATED_FILES:'):
+            objs = self.read_topsrcdir(reader)
+
+    def test_exports_generated(self):
+        reader = self.reader('exports-generated')
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        self.assertIsInstance(objs[0], GeneratedFile)
+        self.assertIsInstance(objs[1], Exports)
+        exports = [(path, list(seq)) for path, seq in objs[1].files.walk()]
+        self.assertEqual(exports,
+                         [('', ['foo.h']),
+                          ('mozilla', ['mozilla1.h', '!mozilla2.h'])])
+        path, files = exports[1]
+        self.assertIsInstance(files[1], ObjDirPath)
+
     def test_test_harness_files(self):
         reader = self.reader('test-harness-files')
         objs = self.read_topsrcdir(reader)
@@ -834,6 +869,13 @@ class TestEmitterBasic(unittest.TestCase):
         reader = self.reader('dist-files-missing')
         self.read_topsrcdir(reader)
 
+    def test_final_target_pp_files_non_srcdir(self):
+        '''Test that non-srcdir paths in FINAL_TARGET_PP_FILES throws errors.'''
+        reader = self.reader('final-target-pp-files-non-srcdir')
+        with self.assertRaisesRegexp(SandboxValidationError,
+                'Only source directory paths allowed in FINAL_TARGET_PP_FILES:'):
+            objs = self.read_topsrcdir(reader)
+
     def test_android_res_dirs(self):
         """Test that ANDROID_RES_DIRS works properly."""
         reader = self.reader('android-res-dirs')