Bug 1207893 - Refactor how build backend execution is summarized. r=gps

- Make all backends report the time spent in their own execution
- Change how the data is collected for the reader and emitter such that
  each of them is aware of its own data, instead of everything being
  tracked by the backend.

This is meant to open the door to multiple backends running from the
same execution of config.status.
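
To sketch the intent of the refactoring (an illustrative example only, not the actual mozbuild code; the Phase and PhaseSummary names are made up), each phase times its own work and exposes it through a summary() method, while the driver merely aggregates and prints:

import time

class PhaseSummary(object):
    # Hypothetical stand-in for mozbuild.base.ExecutionSummary: it carries the
    # wall-clock time of one phase plus a human-readable message template.
    def __init__(self, message, execution_time):
        self.execution_time = execution_time
        self._message = message

    def __str__(self):
        return self._message.format(execution_time=self.execution_time)

class Phase(object):
    # Hypothetical phase (reader, emitter or backend): it measures its own
    # execution time instead of the backend tracking everything centrally.
    def __init__(self, name):
        self._name = name
        self._execution_time = 0.0

    def run(self):
        start = time.time()
        sum(range(100000))  # stand-in for real work
        self._execution_time += time.time() - start

    def summary(self):
        return PhaseSummary(self._name + ' executed in {execution_time:.2f}s',
                            self._execution_time)

# The driver (config.status in the real code) prints each phase's summary and
# derives the untracked time from the difference with the total wall time.
phases = [Phase('reader'), Phase('emitter'), Phase('backend')]
wall_start = time.time()
for phase in phases:
    phase.run()
tracked = 0.0
for phase in phases:
    s = phase.summary()
    print(s)
    tracked += s.execution_time
print('Untracked: {:.2f}s'.format(time.time() - wall_start - tracked))
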
Mike Hommey 2015-09-15 12:00:12 +09:00
Parent 0e156c41d5
Commit 64341c67d8
13 changed files with 165 additions and 221 deletions

View file

@@ -24,6 +24,7 @@ from ..frontend.data import (
)
from ..makeutil import Makefile
from ..util import ensureParentDir
from mozbuild.base import ExecutionSummary
def pretty_print(element):
@@ -38,22 +39,17 @@ class AndroidEclipseBackend(CommonBackend):
"""Backend that generates Android Eclipse project files.
"""
def _init(self):
CommonBackend._init(self)
def detailed(summary):
s = 'Wrote {:d} Android Eclipse projects to {:s}; ' \
'{:d} created; {:d} updated'.format(
summary.created_count + summary.updated_count,
mozpath.join(self.environment.topobjdir, 'android_eclipse'),
summary.created_count,
summary.updated_count)
return s
# This is a little kludgy and could be improved with a better API.
self.summary.backend_detailed_summary = types.MethodType(detailed,
self.summary)
def summary(self):
return ExecutionSummary(
'AndroidEclipse backend executed in {execution_time:.2f}s\n'
'Wrote {projects:d} Android Eclipse projects to {path:s}; '
'{created:d} created; {updated:d} updated',
execution_time=self._execution_time,
projects=self._created_count + self._updated_count,
path=mozpath.join(self.environment.topobjdir, 'android_eclipse'),
created=self._created_count,
updated=self._updated_count,
)
def consume_object(self, obj):
"""Write out Android Eclipse project files."""
@@ -257,7 +253,7 @@ class AndroidEclipseBackend(CommonBackend):
# When we re-create the build backend, we kill everything that was there.
if os.path.isdir(project_directory):
self.summary.updated_count += 1
self._updated_count += 1
else:
self.summary.created_count += 1
self._created_count += 1
copier.copy(project_directory, skip_if_older=False, remove_unaccounted=True)

View file

@@ -21,102 +21,9 @@ import mozpack.path as mozpath
from ..preprocessor import Preprocessor
from ..pythonutil import iter_modules_in_path
from ..util import FileAvoidWrite
from ..frontend.data import (
ContextDerived,
ReaderSummary,
)
from ..frontend.data import ContextDerived
from .configenvironment import ConfigEnvironment
class BackendConsumeSummary(object):
"""Holds state about what a backend did.
This is used primarily to print a summary of what the backend did
so people know what's going on.
"""
def __init__(self):
# How many moz.build files were read. This includes included files.
self.mozbuild_count = 0
# The number of derived objects from the read moz.build files.
self.object_count = 0
# The number of backend files created.
self.created_count = 0
# The number of backend files updated.
self.updated_count = 0
# The number of unchanged backend files.
self.unchanged_count = 0
# The number of deleted backend files.
self.deleted_count = 0
# The total wall time this backend spent consuming objects. If
# the iterable passed into consume() is a generator, this includes the
# time spent to read moz.build files.
self.wall_time = 0.0
# CPU time spent during the interval captured by wall_time.
self.cpu_time = 0.0
# The total wall time spent executing moz.build files. This is just
# the read and execute time. It does not cover consume time.
self.mozbuild_execution_time = 0.0
# The total wall time spent emitting objects from sandboxes.
self.emitter_execution_time = 0.0
# The total wall time spent in the backend. This counts the time the
# backend writes out files, etc.
self.backend_execution_time = 0.0
# How much wall time the system spent doing other things. This is
# wall_time - mozbuild_execution_time - emitter_execution_time -
# backend_execution_time.
self.other_time = 0.0
# Mapping of changed file paths to diffs of the changes.
self.file_diffs = {}
@property
def reader_summary(self):
return 'Finished reading {:d} moz.build files in {:.2f}s'.format(
self.mozbuild_count,
self.mozbuild_execution_time)
@property
def emitter_summary(self):
return 'Processed into {:d} build config descriptors in {:.2f}s'.format(
self.object_count, self.emitter_execution_time)
@property
def backend_summary(self):
return 'Backend executed in {:.2f}s'.format(self.backend_execution_time)
def backend_detailed_summary(self):
"""Backend summary to be supplied by BuildBackend implementations."""
return None
@property
def total_summary(self):
efficiency_value = self.cpu_time / self.wall_time if self.wall_time else 100
return 'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: ' \
'{:.0%}; Untracked: {:.2f}s'.format(
self.wall_time, self.cpu_time, efficiency_value,
self.other_time)
def summaries(self):
yield self.reader_summary
yield self.emitter_summary
yield self.backend_summary
detailed = self.backend_detailed_summary()
if detailed:
yield detailed
yield self.total_summary
from mozbuild.base import ExecutionSummary
class BuildBackend(LoggingMixin):
@@ -135,7 +42,6 @@ class BuildBackend(LoggingMixin):
self.populate_logger()
self.environment = environment
self.summary = BackendConsumeSummary()
# Files whose modification should cause a new read and backend
# generation.
@@ -155,8 +61,44 @@ class BuildBackend(LoggingMixin):
self._environments = {}
self._environments[environment.topobjdir] = environment
# The number of backend files created.
self._created_count = 0
# The number of backend files updated.
self._updated_count = 0
# The number of unchanged backend files.
self._unchanged_count = 0
# The number of deleted backend files.
self._deleted_count = 0
# The total wall time spent in the backend. This counts the time the
# backend writes out files, etc.
self._execution_time = 0.0
# Mapping of changed file paths to diffs of the changes.
self.file_diffs = {}
self._init()
def summary(self):
return ExecutionSummary(
self.__class__.__name__.replace('Backend', '') +
' backend executed in {execution_time:.2f}s\n '
'{total:d} total backend files; '
'{created:d} created; '
'{updated:d} updated; '
'{unchanged:d} unchanged; '
'{deleted:d} deleted',
execution_time=self._execution_time,
total=self._created_count + self._updated_count +
self._unchanged_count,
created=self._created_count,
updated=self._updated_count,
unchanged=self._unchanged_count,
deleted=self._deleted_count)
def _init(self):
"""Hook point for child classes to perform actions during __init__.
@@ -173,24 +115,14 @@ class BuildBackend(LoggingMixin):
base class consumes objects and calls methods (possibly) implemented by
child classes.
"""
cpu_start = time.clock()
time_start = time.time()
backend_time = 0.0
for obj in objs:
self.summary.object_count += 1
obj_start = time.time()
self.consume_object(obj)
backend_time += time.time() - obj_start
self._execution_time += time.time() - obj_start
if isinstance(obj, ContextDerived):
self.backend_input_files |= obj.context_all_paths
if isinstance(obj, ReaderSummary):
self.summary.mozbuild_count = obj.total_file_count
self.summary.mozbuild_execution_time = obj.total_sandbox_execution_time
self.summary.emitter_execution_time = obj.total_emitter_execution_time
# Pull in all loaded Python as dependencies so any Python changes that
# could influence our output result in a rescan.
self.backend_input_files |= set(iter_modules_in_path(
@@ -198,14 +130,14 @@ class BuildBackend(LoggingMixin):
finished_start = time.time()
self.consume_finished()
backend_time += time.time() - finished_start
self._execution_time += time.time() - finished_start
# Purge backend files created in previous run, but not created anymore
delete_files = self._backend_output_list - self._backend_output_files
for path in delete_files:
try:
os.unlink(mozpath.join(self.environment.topobjdir, path))
self.summary.deleted_count += 1
self._deleted_count += 1
except OSError:
pass
# Remove now empty directories
@@ -216,24 +148,14 @@ class BuildBackend(LoggingMixin):
pass
# Write out the list of backend files generated, if it changed.
if self.summary.deleted_count or self.summary.created_count or \
if self._deleted_count or self._created_count or \
not os.path.exists(self._backend_output_list_file):
with open(self._backend_output_list_file, 'w') as fh:
fh.write('\n'.join(sorted(self._backend_output_files)))
elif self.summary.updated_count:
elif self._updated_count:
with open(self._backend_output_list_file, 'a'):
os.utime(self._backend_output_list_file, None)
self.summary.cpu_time = time.clock() - cpu_start
self.summary.wall_time = time.time() - time_start
self.summary.backend_execution_time = backend_time
self.summary.other_time = self.summary.wall_time - \
self.summary.mozbuild_execution_time - \
self.summary.emitter_execution_time - \
self.summary.backend_execution_time
return self.summary
@abstractmethod
def consume_object(self, obj):
"""Consumes an individual TreeMetadata instance.
@@ -250,7 +172,7 @@ class BuildBackend(LoggingMixin):
"""Context manager to write a file.
This is a glorified wrapper around FileAvoidWrite with integration to
update the BackendConsumeSummary on this instance.
update the summary data on this instance.
Example usage:
@@ -276,13 +198,13 @@ class BuildBackend(LoggingMixin):
self._backend_output_files.add(mozpath.relpath(fh.name, self.environment.topobjdir))
existed, updated = fh.close()
if not existed:
self.summary.created_count += 1
self._created_count += 1
elif updated:
self.summary.updated_count += 1
self._updated_count += 1
if fh.diff:
self.summary.file_diffs[fh.name] = fh.diff
self.file_diffs[fh.name] = fh.diff
else:
self.summary.unchanged_count += 1
self._unchanged_count += 1
@contextmanager
def _get_preprocessor(self, obj):
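
The counter updates in _write_file above follow a simple pattern: a file that did not exist counts as created, a rewritten file as updated (with its diff recorded), and anything else as unchanged. A minimal runnable sketch of that bookkeeping, using a hypothetical stand-in for FileAvoidWrite:

class FakeAvoidWrite(object):
    # Hypothetical stand-in for mozpack's FileAvoidWrite: close() reports
    # whether the file already existed and whether its content changed.
    def __init__(self, name, existed, updated, diff=None):
        self.name = name
        self.diff = diff
        self._result = (existed, updated)

    def close(self):
        return self._result

created = updated = unchanged = 0
file_diffs = {}
for fh in (FakeAvoidWrite('new.mk', False, True),
           FakeAvoidWrite('changed.mk', True, True, diff='-old\n+new'),
           FakeAvoidWrite('same.mk', True, False)):
    existed, was_updated = fh.close()
    if not existed:
        created += 1
    elif was_updated:
        updated += 1
        if fh.diff:
            file_diffs[fh.name] = fh.diff
    else:
        unchanged += 1
print(created, updated, unchanged)  # 1 1 1
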

View file

@@ -15,6 +15,7 @@ from .common import CommonBackend
from ..frontend.data import (
Defines,
)
from mozbuild.base import ExecutionSummary
# TODO Have ./mach eclipse generate the workspace and index it:
# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse
@@ -41,15 +42,15 @@ class CppEclipseBackend(CommonBackend):
# Note: We need the C Pre Processor (CPP) flags, not the CXX flags
self._cppflags = self.environment.substs.get('CPPFLAGS', '')
def detailed(summary):
return ('Generated Cpp Eclipse workspace in "%s".\n' + \
'If missing, import the project using File > Import > General > Existing Project into workspace\n' + \
'\n' + \
'Run with: eclipse -data %s\n') \
% (self._workspace_dir, self._workspace_dir)
self.summary.backend_detailed_summary = types.MethodType(detailed,
self.summary)
def summary(self):
return ExecutionSummary(
'CppEclipse backend executed in {execution_time:.2f}s\n'
'Generated Cpp Eclipse workspace in "{workspace:s}".\n'
'If missing, import the project using File > Import > General > Existing Project into workspace\n'
'\n'
'Run with: eclipse -data {workspace:s}\n',
execution_time=self._execution_time,
workspace=self._workspace_dir)
def _get_workspace_path(self):
return CppEclipseBackend.get_workspace_path(self.environment.topsrcdir, self.environment.topobjdir)

View file

@@ -376,26 +376,8 @@ class RecursiveMakeBackend(CommonBackend):
self._backend_files = {}
self._idl_dirs = set()
def detailed(summary):
s = '{:d} total backend files; ' \
'{:d} created; {:d} updated; {:d} unchanged; ' \
'{:d} deleted; {:d} -> {:d} Makefile'.format(
summary.created_count + summary.updated_count +
summary.unchanged_count,
summary.created_count,
summary.updated_count,
summary.unchanged_count,
summary.deleted_count,
summary.makefile_in_count,
summary.makefile_out_count)
return s
# This is a little kludgy and could be improved with a better API.
self.summary.backend_detailed_summary = types.MethodType(detailed,
self.summary)
self.summary.makefile_in_count = 0
self.summary.makefile_out_count = 0
self._makefile_in_count = 0
self._makefile_out_count = 0
self._test_manifests = {}
@@ -428,6 +410,13 @@ class RecursiveMakeBackend(CommonBackend):
'tools': set(),
}
def summary(self):
summary = super(RecursiveMakeBackend, self).summary()
summary.extend('; {makefile_in:d} -> {makefile_out:d} Makefile',
makefile_in=self._makefile_in_count,
makefile_out=self._makefile_out_count)
return summary
def _get_backend_file_for(self, obj):
if obj.objdir not in self._backend_files:
self._backend_files[obj.objdir] = \
@@ -802,7 +791,7 @@ class RecursiveMakeBackend(CommonBackend):
if not stub:
self.log(logging.DEBUG, 'substitute_makefile',
{'path': makefile}, 'Substituting makefile: {path}')
self.summary.makefile_in_count += 1
self._makefile_in_count += 1
for tier, skiplist in self._may_skip.items():
if bf.relobjdir in skiplist:
@@ -1451,7 +1440,7 @@ INSTALL_TARGETS += %(prefix)s
# the autogenerated one automatically.
self.backend_input_files.add(obj.input_path)
self.summary.makefile_out_count += 1
self._makefile_out_count += 1
def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources,
unified_ipdl_cppsrcs_mapping):

View file

@@ -27,6 +27,7 @@ from ..frontend.data import (
Sources,
UnifiedSources,
)
from mozbuild.base import ExecutionSummary
MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
@@ -101,12 +102,12 @@ class VisualStudioBackend(CommonBackend):
self._paths_to_configs = {}
self._libs_to_paths = {}
def detailed(summary):
return 'Generated Visual Studio solution at %s' % (
os.path.join(self._out_dir, 'mozilla.sln'))
self.summary.backend_detailed_summary = types.MethodType(detailed,
self.summary)
def summary(self):
return ExecutionSummary(
'VisualStudio backend executed in {execution_time:.2f}s\n'
'Generated Visual Studio solution at {path:s}',
execution_time=self._execution_time,
path=os.path.join(self._out_dir, 'mozilla.sln'))
def consume_object(self, obj):
# Just acknowledge everything.

View file

@@ -843,3 +843,22 @@ class PathArgument(object):
def objdir_path(self):
return mozpath.join(self.topobjdir, self.relpath())
class ExecutionSummary(dict):
"""Helper for execution summaries."""
def __init__(self, summary_format, **data):
self._summary_format = ''
assert 'execution_time' in data
self.extend(summary_format, **data)
def extend(self, summary_format, **data):
self._summary_format += summary_format
self.update(data)
def __str__(self):
return self._summary_format.format(**self)
def __getattr__(self, key):
return self[key]
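
For reference, a short usage example of the class above (assuming mozbuild is importable; the numbers are made up). extend() is how RecursiveMakeBackend.summary() appends its Makefile counts to the base class summary, and __getattr__ lets callers read fields such as execution_time directly:

from mozbuild.base import ExecutionSummary

summary = ExecutionSummary(
    'Backend executed in {execution_time:.2f}s',
    execution_time=1.23)

# Append more text and data without losing what is already there.
summary.extend('; {makefile_in:d} -> {makefile_out:d} Makefile',
               makefile_in=12, makefile_out=10)

print(summary)                 # Backend executed in 1.23s; 12 -> 10 Makefile
print(summary.execution_time)  # 1.23, via __getattr__ delegating to the dict
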

View file

@@ -11,6 +11,7 @@ from __future__ import absolute_import, print_function
import logging
import os
import sys
import time
from optparse import OptionParser
@@ -133,6 +134,9 @@ def config_status(topobjdir='.', topsrcdir='.',
from mozbuild.backend.fastermake import FasterMakeBackend
backend_cls = FasterMakeBackend
cpu_start = time.clock()
time_start = time.time()
the_backend = backend_cls(env)
reader = BuildReader(env)
@@ -150,13 +154,28 @@ def config_status(topobjdir='.', topsrcdir='.',
log_manager.enable_unstructured()
print('Reticulating splines...', file=sys.stderr)
summary = the_backend.consume(definitions)
the_backend.consume(definitions)
for line in summary.summaries():
print(line, file=sys.stderr)
execution_time = 0.0
for obj in (reader, emitter, the_backend):
summary = obj.summary()
print(summary, file=sys.stderr)
execution_time += summary.execution_time
cpu_time = time.clock() - cpu_start
wall_time = time.time() - time_start
efficiency = cpu_time / wall_time if wall_time else 100
untracked = wall_time - execution_time
print(
'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: '
'{:.0%}; Untracked: {:.2f}s'.format(
wall_time, cpu_time, efficiency, untracked),
file=sys.stderr
)
if options.diff:
for path, diff in sorted(summary.file_diffs.items()):
for path, diff in sorted(the_backend.file_diffs.items()):
print('\n'.join(diff))
# Advertise Visual Studio if appropriate.
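
The efficiency and untracked figures printed by the hunk above are straightforward derived quantities; a worked example with made-up numbers:

wall_time = 10.0        # total wall-clock time measured in config_status
cpu_time = 7.5          # CPU time over the same interval (time.clock())
execution_time = 9.0    # sum of the reader, emitter and backend summaries

efficiency = cpu_time / wall_time if wall_time else 100
untracked = wall_time - execution_time
print('Efficiency: {:.0%}; Untracked: {:.2f}s'.format(efficiency, untracked))
# -> Efficiency: 75%; Untracked: 1.00s
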

View file

@@ -88,7 +88,6 @@ class Context(KeyedDefaultDict):
# a list to be a problem.
self._all_paths = []
self.config = config
self.execution_time = 0
self._sandbox = None
KeyedDefaultDict.__init__(self, self._factory)

View file

@@ -43,17 +43,6 @@ class TreeMetadata(object):
self._ack = True
class ReaderSummary(TreeMetadata):
"""A summary of what the reader did."""
def __init__(self, total_file_count, total_sandbox_execution_time,
total_emitter_execution_time):
TreeMetadata.__init__(self)
self.total_file_count = total_file_count
self.total_sandbox_execution_time = total_sandbox_execution_time
self.total_emitter_execution_time = total_emitter_execution_time
class ContextDerived(TreeMetadata):
"""Build object derived from a single Context instance.

View file

@@ -63,7 +63,6 @@ from .data import (
PreprocessedTestWebIDLFile,
PreprocessedWebIDLFile,
Program,
ReaderSummary,
Resources,
SharedLibrary,
SimpleProgram,
@@ -95,6 +94,8 @@ from .context import (
TemplateContext,
)
from mozbuild.base import ExecutionSummary
class TreeMetadataEmitter(LoggingMixin):
"""Converts the executed mozbuild files into data structures.
@@ -136,19 +137,27 @@ class TreeMetadataEmitter(LoggingMixin):
# Add security/nss manually, since it doesn't have a subconfigure.
self._external_paths.add('security/nss')
self._emitter_time = 0.0
self._object_count = 0
def summary(self):
return ExecutionSummary(
'Processed into {object_count:d} build config descriptors in '
'{execution_time:.2f}s',
execution_time=self._emitter_time,
object_count=self._object_count)
def emit(self, output):
"""Convert the BuildReader output into data structures.
The return value from BuildReader.read_topsrcdir() (a generator) is
typically fed into this function.
"""
file_count = 0
sandbox_execution_time = 0.0
emitter_time = 0.0
contexts = {}
def emit_objs(objs):
for o in objs:
self._object_count += 1
yield o
if not o._ack:
raise Exception('Unhandled object of type %s' % type(o))
@@ -166,14 +175,10 @@ class TreeMetadataEmitter(LoggingMixin):
start = time.time()
# We need to expand the generator for the timings to work.
objs = list(self.emit_from_context(out))
emitter_time += time.time() - start
self._emitter_time += time.time() - start
for o in emit_objs(objs): yield o
# Update the stats.
file_count += len(out.all_paths)
sandbox_execution_time += out.execution_time
else:
raise Exception('Unhandled output type: %s' % type(out))
@@ -182,12 +187,10 @@ class TreeMetadataEmitter(LoggingMixin):
if self.config.substs.get('COMPILE_ENVIRONMENT', True):
start = time.time()
objs = list(self._emit_libs_derived(contexts))
emitter_time += time.time() - start
self._emitter_time += time.time() - start
for o in emit_objs(objs): yield o
yield ReaderSummary(file_count, sandbox_execution_time, emitter_time)
def _emit_libs_derived(self, contexts):
# First do FINAL_LIBRARY linkage.
for lib in (l for libs in self._libs.values() for l in libs):

View file

@@ -6,7 +6,6 @@ from __future__ import absolute_import, unicode_literals
import gyp
import sys
import time
import os
import mozpack.path as mozpath
from mozpack.files import FileFinder
@@ -83,8 +82,6 @@ def read_from_gyp(config, path, output, vars, non_unified_sources = set()):
processor.
"""
time_start = time.time()
# gyp expects plain str instead of unicode. The frontend code gives us
# unicode strings, so convert them.
path = encode(path)
@@ -234,6 +231,4 @@ def read_from_gyp(config, path, output, vars, non_unified_sources = set()):
context['DEFINES']['_UNICODE'] = True
context['DISABLE_STL_WRAPPING'] = True
context.execution_time = time.time() - time_start
yield context
time_start = time.time()

View file

@@ -78,6 +78,9 @@ from .context import (
TemplateContext,
)
from mozbuild.base import ExecutionSummary
if sys.version_info.major == 2:
text_type = unicode
type_type = types.TypeType
@@ -869,6 +872,16 @@ class BuildReader(object):
self._execution_stack = []
self._finder = finder
self._execution_time = 0.0
self._file_count = 0
def summary(self):
return ExecutionSummary(
'Finished reading {file_count:d} moz.build files in '
'{execution_time:.2f}s',
file_count=self._file_count,
execution_time=self._execution_time)
def read_topsrcdir(self):
"""Read the tree of linked moz.build files.
@@ -1103,7 +1116,8 @@ class BuildReader(object):
sandbox = MozbuildSandbox(context, metadata=metadata,
finder=self._finder)
sandbox.exec_file(path)
context.execution_time = time.time() - time_start
self._execution_time += time.time() - time_start
self._file_count += len(context.all_paths)
# Yield main context before doing any processing. This gives immediate
# consumers an opportunity to change state before our remaining
@@ -1141,6 +1155,7 @@ class BuildReader(object):
raise SandboxValidationError('Cannot find %s.' % source,
context)
non_unified_sources.add(source)
time_start = time.time()
for gyp_context in read_from_gyp(context.config,
mozpath.join(curdir, gyp_dir.input),
mozpath.join(context.objdir,
@@ -1149,6 +1164,8 @@ class BuildReader(object):
non_unified_sources = non_unified_sources):
gyp_context.update(gyp_dir.sandbox_vars)
gyp_contexts.append(gyp_context)
self._file_count += len(gyp_context.all_paths)
self._execution_time += time.time() - time_start
for gyp_context in gyp_contexts:
context['DIRS'].append(mozpath.relpath(gyp_context.objdir, context.objdir))

View file

@@ -27,7 +27,6 @@ from mozbuild.frontend.data import (
JsPreferenceFile,
LocalInclude,
Program,
ReaderSummary,
Resources,
SimpleProgram,
Sources,
@@ -79,17 +78,12 @@ class TestEmitterBasic(unittest.TestCase):
objs = list(ack(o) for o in emitter.emit(reader.read_topsrcdir()))
self.assertGreater(len(objs), 0)
self.assertIsInstance(objs[-1], ReaderSummary)
filtered = []
for obj in objs:
if filter_common and isinstance(obj, DirectoryTraversal):
continue
# Always filter ReaderSummary because it's asserted above.
if isinstance(obj, ReaderSummary):
continue
filtered.append(obj)
return filtered