[android] Add local results details pages. (RELAND)

Bug: 772032,775448
Change-Id: I784723c37a3d4e2c4adeec407170e5d0094c439a
Reviewed-on: https://chromium-review.googlesource.com/723659
Reviewed-by: Zhiling Huang <hzl@chromium.org>
Commit-Queue: John Budorick <jbudorick@chromium.org>
Cr-Original-Commit-Position: refs/heads/master@{#509511}
Cr-Mirrored-From: https://chromium.googlesource.com/chromium/src
Cr-Mirrored-Commit: 611646dc1b2211c58095b11c263864fedbd40fe9
John Budorick 2017-10-17 20:39:46 +00:00 committed by Commit Bot
Parent 8ea0d216ae
Commit ecf037c8ef
25 changed files with 738 additions and 404 deletions

View File

@ -65,6 +65,9 @@ def CommonChecks(input_api, output_api):
J('pylib', 'local', 'device',
'local_device_instrumentation_test_run_test.py'),
J('pylib', 'local', 'device', 'local_device_test_run_test.py'),
J('pylib', 'output', 'local_output_manager_test.py'),
J('pylib', 'output', 'noop_output_manager_test.py'),
J('pylib', 'output', 'remote_output_manager_test.py'),
J('pylib', 'results', 'json_results_test.py'),
J('pylib', 'symbols', 'elf_symbolizer_unittest.py'),
J('pylib', 'utils', 'decorators_test.py'),

View File

@ -1,86 +0,0 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from devil.android import logcat_monitor
from devil.utils import reraiser_thread
from pylib.utils import logdog_helper
class LogdogLogcatMonitor(logcat_monitor.LogcatMonitor):
"""Logcat monitor that writes logcat to a logdog stream.
The logdog stream client will return a url which contains the logcat.
"""
def __init__(self, adb, stream_name, clear=True, filter_specs=None,
deobfuscate_func=None):
super(LogdogLogcatMonitor, self).__init__(adb, clear, filter_specs)
self._logcat_url = ''
self._logdog_stream = None
self._stream_client = None
self._stream_name = stream_name
self._deobfuscate_func = deobfuscate_func or (lambda lines: lines)
def GetLogcatURL(self):
"""Return logcat url.
The default logcat url is '', if failed to create stream_client.
"""
return self._logcat_url
def Stop(self):
"""Stops the logcat monitor.
Close the logdog stream as well.
"""
try:
super(LogdogLogcatMonitor, self)._StopRecording()
if self._logdog_stream:
self._logdog_stream.close()
except Exception as e: # pylint: disable=broad-except
logging.exception('Unknown Error: %s.', e)
def Start(self):
"""Starts the logdog logcat monitor.
Clears the logcat if |clear| was set in |__init__|.
"""
if self._clear:
self._adb.Logcat(clear=True)
self._logdog_stream = logdog_helper.open_text(self._stream_name)
self._logcat_url = logdog_helper.get_viewer_url(self._stream_name)
logging.info('Logcat will be saved to %s', self._logcat_url)
self._StartRecording()
def _StartRecording(self):
"""Starts recording logcat to file.
Write logcat to stream at the same time.
"""
def record_to_stream():
if self._logdog_stream:
for data in self._adb.Logcat(filter_specs=self._filter_specs,
logcat_format='threadtime',
iter_timeout=0.08):
if self._stop_recording_event.isSet():
return
if data:
data = '\n'.join(self._deobfuscate_func([data]))
self._logdog_stream.write(data + '\n')
if self._stop_recording_event.isSet():
return
self._stop_recording_event.clear()
if not self._record_thread:
self._record_thread = reraiser_thread.ReraiserThread(record_to_stream)
self._record_thread.start()
def Close(self):
"""Override parent's close method."""
pass
def __del__(self):
"""Override parent's delete method."""
pass

View File

@ -16,8 +16,14 @@ class Environment(object):
machine.
"""
def __init__(self):
pass
def __init__(self, output_manager):
"""Environment constructor.
Args:
output_manager: Instance of |output_manager.OutputManager| used to
save test output.
"""
self._output_manager = output_manager
def SetUp(self):
raise NotImplementedError
@ -32,3 +38,6 @@ class Environment(object):
def __exit__(self, _exc_type, _exc_val, _exc_tb):
self.TearDown()
@property
def output_manager(self):
return self._output_manager
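
For illustration, a subclass now receives the output manager alongside its other arguments, forwards it to the base constructor, and reaches the archiver through the output_manager property above. A minimal sketch (the class name and the ignored arguments are illustrative, not part of this CL):

from pylib.base import environment


class ExampleEnvironment(environment.Environment):
  """Illustrative subclass; real environments also consume the parsed args."""

  def __init__(self, args, output_manager, error_func):
    del args, error_func  # Unused in this sketch.
    super(ExampleEnvironment, self).__init__(output_manager)

  def SetUp(self):
    pass

  def TearDown(self):
    pass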

View File

@ -6,12 +6,14 @@ from pylib import constants
from pylib.local.device import local_device_environment
from pylib.local.machine import local_machine_environment
def CreateEnvironment(args, error_func):
def CreateEnvironment(args, output_manager, error_func):
if args.environment == 'local':
if args.command not in constants.LOCAL_MACHINE_TESTS:
return local_device_environment.LocalDeviceEnvironment(args, error_func)
return local_device_environment.LocalDeviceEnvironment(
args, output_manager, error_func)
else:
return local_machine_environment.LocalMachineEnvironment(args, error_func)
return local_machine_environment.LocalMachineEnvironment(
args, output_manager, error_func)
error_func('Unable to create %s environment.' % args.environment)

View File

@ -0,0 +1,156 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import logging
import os
import tempfile
from devil.utils import reraiser_thread
class Datatype(object):
HTML = 'html'
IMAGE = 'image'
TEXT = 'text'
class OutputManager(object):
def __init__(self):
"""OutputManager Constructor.
This class provides a simple interface to save test output. Subclasses
of this will allow users to save test results in the cloud or locally.
"""
self._allow_upload = False
self._thread_group = None
@contextlib.contextmanager
def ArchivedTempfile(
self, out_filename, out_subdir, datatype=Datatype.TEXT):
"""Archive file contents asynchonously and then deletes file.
Args:
out_filename: Name for saved file.
out_subdir: Directory to save |out_filename| to.
datatype: Datatype of file.
Returns:
An ArchivedFile file. This file will be uploaded asynchronously when the
context manager exits. AFTER the context manager exits, you can get the link
to where the file will be stored using the Link() API. You can use typical
file APIs to write and flush the ArchivedFile. You can also use file.name
to get the local filepath to where the underlying file exists. If you do
this, you are responsible for flushing the file before exiting the context
manager.
"""
if not self._allow_upload:
raise Exception('Must run |SetUp| before attempting to upload!')
f = self._CreateArchivedFile(out_filename, out_subdir, datatype)
try:
yield f
finally:
f.PrepareArchive()
def archive():
try:
f.Archive()
finally:
f.Delete()
thread = reraiser_thread.ReraiserThread(func=archive)
thread.start()
self._thread_group.Add(thread)
def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
"""Returns an instance of ArchivedFile."""
raise NotImplementedError
def SetUp(self):
self._allow_upload = True
self._thread_group = reraiser_thread.ReraiserThreadGroup()
def TearDown(self):
self._allow_upload = False
logging.info('Finishing archiving output.')
self._thread_group.JoinAll()
def __enter__(self):
self.SetUp()
return self
def __exit__(self, _exc_type, _exc_val, _exc_tb):
self.TearDown()
class ArchivedFile(object):
def __init__(self, out_filename, out_subdir, datatype):
self._out_filename = out_filename
self._out_subdir = out_subdir
self._datatype = datatype
self._f = tempfile.NamedTemporaryFile(delete=False)
self._ready_to_archive = False
@property
def name(self):
return self._f.name
def write(self, *args, **kwargs):
if self._ready_to_archive:
raise Exception('Cannot write to file after archiving has begun!')
self._f.write(*args, **kwargs)
def flush(self, *args, **kwargs):
if self._ready_to_archive:
raise Exception('Cannot flush file after archiving has begun!')
self._f.flush(*args, **kwargs)
def Link(self):
"""Returns location of archived file."""
if not self._ready_to_archive:
raise Exception('Cannot get link to archived file before archiving '
'has begun')
return self._Link()
def _Link(self):
"""Note for when overriding this function.
This function will be called before the file has finished being archived,
so it must be able to determine the exact location of the archived file
ahead of time.
"""
raise NotImplementedError
def PrepareArchive(self):
"""Meant to be called synchronously to prepare file for async archiving."""
self.flush()
self._ready_to_archive = True
self._PrepareArchive()
def _PrepareArchive(self):
"""Note for when overriding this function.
This function is needed for things such as computing the location of
content addressed files. This is called after the file is written but
before archiving has begun.
"""
pass
def Archive(self):
"""Archives file."""
if not self._ready_to_archive:
raise Exception('File is not ready to archive. Be sure you are not '
'writing to the file and PrepareArchive has been called')
self._Archive()
def _Archive(self):
raise NotImplementedError
def Delete(self):
"""Deletes the backing file."""
os.remove(self.name)
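
The contract is easiest to see from the calling side: the manager must be set up first, the file is written (or handed off via .name) inside the inner context, and Link() becomes valid only after that context exits, even though the upload may still be in flight. A minimal sketch, using the LocalOutputManager subclass added later in this CL and a hypothetical output directory:

from pylib.base import output_manager
from pylib.output import local_output_manager

# Hypothetical output directory; any writable path works for the sketch.
out_manager = local_output_manager.LocalOutputManager(
    output_dir='/tmp/example_out')
with out_manager:  # __enter__/__exit__ call SetUp() and TearDown().
  with out_manager.ArchivedTempfile(
      'example_log.txt', 'example_subdir',
      output_manager.Datatype.TEXT) as f:
    f.write('hello from the test run\n')
  # The file is archived on a background thread once the inner context
  # exits; TearDown() (via the outer context) joins those threads.
  print(f.Link())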

View File

@ -0,0 +1,16 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from pylib import constants
from pylib.output import local_output_manager
from pylib.output import remote_output_manager
def CreateOutputManager(args):
if args.local_output:
return local_output_manager.LocalOutputManager(
output_dir=constants.GetOutDirectory())
else:
return remote_output_manager.RemoteOutputManager(
bucket=args.gs_results_bucket)
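
As a rough sketch of the dispatch, the factory only inspects args.local_output and args.gs_results_bucket. The namespace below is hand-built for illustration, and resolving the local case assumes constants.GetOutDirectory() can locate a build output directory (e.g. via CHROMIUM_OUTPUT_DIR):

import argparse

from pylib.base import output_manager_factory

# Hand-built stand-ins for the parsed test runner arguments.
local_args = argparse.Namespace(local_output=True, gs_results_bucket=None)
remote_args = argparse.Namespace(local_output=False,
                                 gs_results_bucket='example-bucket')

local_manager = output_manager_factory.CreateOutputManager(local_args)
remote_manager = output_manager_factory.CreateOutputManager(remote_args)
# local_manager archives under the build output directory;
# remote_manager targets the (hypothetical) 'example-bucket' bucket.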

View File

@ -0,0 +1,14 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path
import unittest
class OutputManagerTestCase(unittest.TestCase):
def assertUsableTempFile(self, archived_tempfile):
self.assertTrue(bool(archived_tempfile.name))
self.assertTrue(os.path.exists(archived_tempfile.name))
self.assertTrue(os.path.isfile(archived_tempfile.name))

View File

@ -287,7 +287,6 @@ class GtestTestInstance(test_instance.TestInstance):
self._extract_test_list_from_filter = args.extract_test_list_from_filter
self._filter_tests_lock = threading.Lock()
self._shard_timeout = args.shard_timeout
self._should_save_logcat = bool(args.json_results_file)
self._store_tombstones = args.store_tombstones
self._total_external_shards = args.test_launcher_total_shards
self._suite = args.suite_name[0]
@ -446,12 +445,6 @@ class GtestTestInstance(test_instance.TestInstance):
def shard_timeout(self):
return self._shard_timeout
# TODO(jbudorick): Remove this once mikecase lands
# https://codereview.chromium.org/2933993002/
@property
def should_save_logcat(self):
return self._should_save_logcat
@property
def store_tombstones(self):
return self._store_tombstones

View File

@ -504,8 +504,6 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self._symbolizer = None
self._enable_java_deobfuscation = False
self._deobfuscator = None
self._gs_results_bucket = None
self._should_save_logcat = None
self._initializeLogAttributes(args)
self._edit_shared_prefs = []
@ -701,9 +699,6 @@ class InstrumentationTestInstance(test_instance.TestInstance):
self.apk_under_test.path if self.apk_under_test else None,
args.enable_relocation_packing)
self._gs_results_bucket = args.gs_results_bucket
self._should_save_logcat = bool(args.json_results_file)
def _initializeEditPrefsAttributes(self, args):
if not hasattr(args, 'shared_prefs_file') or not args.shared_prefs_file:
return
@ -759,10 +754,6 @@ class InstrumentationTestInstance(test_instance.TestInstance):
def flags(self):
return self._flags
@property
def gs_results_bucket(self):
return self._gs_results_bucket
@property
def junit3_runner_class(self):
return self._junit3_runner_class
@ -775,10 +766,6 @@ class InstrumentationTestInstance(test_instance.TestInstance):
def junit4_runner_supports_listing(self):
return self._junit4_runner_supports_listing
@property
def should_save_logcat(self):
return self._should_save_logcat
@property
def package_info(self):
return self._package_info

View File

@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import datetime
import functools
import logging
@ -10,7 +9,6 @@ import os
import shutil
import tempfile
import threading
import time
import devil_chromium
from devil import base_error
@ -22,11 +20,9 @@ from devil.android.sdk import adb_wrapper
from devil.utils import file_utils
from devil.utils import parallelizer
from pylib import constants
from pylib.android import logdog_logcat_monitor
from pylib.base import environment
from pylib.utils import instrumentation_tracing
from py_trace_event import trace_event
from py_utils import contextlib_ext
LOGCAT_FILTERS = [
@ -86,43 +82,10 @@ def handle_shard_failures_with(on_failure):
return decorator
# TODO(jbudorick): Reconcile this with the output manager logic in
# https://codereview.chromium.org/2933993002/ once that lands.
@contextlib.contextmanager
def OptionalPerTestLogcat(
device, test_name, condition, additional_filter_specs=None,
deobfuscate_func=None):
"""Conditionally capture logcat and stream it to logdog.
Args:
device: (DeviceUtils) the device from which logcat should be captured.
test_name: (str) the test name to use in the stream name.
condition: (bool) whether or not to capture the logcat.
additional_filter_specs: (list) additional logcat filters.
deobfuscate_func: (callable) an optional unary function that
deobfuscates logcat lines. The callable should take an iterable
of logcat lines and return a list of deobfuscated logcat lines.
Yields:
A LogdogLogcatMonitor instance whether condition is true or not,
though it may not be active.
"""
stream_name = 'logcat_%s_%s_%s' % (
test_name,
time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
device.serial)
filter_specs = LOGCAT_FILTERS + (additional_filter_specs or [])
logmon = logdog_logcat_monitor.LogdogLogcatMonitor(
device.adb, stream_name, filter_specs=filter_specs,
deobfuscate_func=deobfuscate_func)
with contextlib_ext.Optional(logmon, condition):
yield logmon
class LocalDeviceEnvironment(environment.Environment):
def __init__(self, args, _error_func):
super(LocalDeviceEnvironment, self).__init__()
def __init__(self, args, output_manager, _error_func):
super(LocalDeviceEnvironment, self).__init__(output_manager)
self._blacklist = (device_blacklist.Blacklist(args.blacklist_file)
if args.blacklist_file
else None)

View File

@ -13,6 +13,7 @@ import time
from devil.android import crash_handler
from devil.android import device_errors
from devil.android import device_temp_file
from devil.android import logcat_monitor
from devil.android import ports
from devil.utils import reraiser_thread
from incremental_install import installer
@ -453,15 +454,24 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
for f in flags:
logging.info(' %s', f)
with local_device_environment.OptionalPerTestLogcat(
device, hash(tuple(test)),
self._test_instance.should_save_logcat) as logmon:
with contextlib_ext.Optional(
trace_event.trace(str(test)),
self._env.trace_output):
output = self._delegate.Run(
test, device, flags=' '.join(flags),
timeout=timeout, retries=0)
stream_name = 'logcat_%s_%s_%s' % (
hash(tuple(test)),
time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
device.serial)
with self._env.output_manager.ArchivedTempfile(
stream_name, 'logcat') as logcat_file:
with logcat_monitor.LogcatMonitor(
device.adb,
filter_specs=local_device_environment.LOGCAT_FILTERS,
output_file=logcat_file.name) as logmon:
with contextlib_ext.Optional(
trace_event.trace(str(test)),
self._env.trace_output):
output = self._delegate.Run(
test, device, flags=' '.join(flags),
timeout=timeout, retries=0)
logmon.Close()
if self._test_instance.enable_xml_result_parsing:
try:
@ -475,7 +485,6 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
str(e))
gtest_xml = None
logcat_url = logmon.GetLogcatURL()
test_artifacts_url = self._UploadTestArtifacts(device,
test_artifacts_dir)
@ -500,8 +509,8 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
tombstones_url = None
for r in results:
if self._test_instance.should_save_logcat:
r.SetLink('logcat', logcat_url)
if logcat_file:
r.SetLink('logcat', logcat_file.Link())
if self._test_instance.gs_test_artifacts_bucket:
r.SetLink('test_artifacts', test_artifacts_url)

View File

@ -10,7 +10,6 @@ import os
import posixpath
import re
import sys
import tempfile
import time
from devil.android import crash_handler
@ -18,19 +17,20 @@ from devil.android import device_errors
from devil.android import device_temp_file
from devil.android import flag_changer
from devil.android.sdk import shared_prefs
from devil.android import logcat_monitor
from devil.android.tools import system_app
from devil.utils import reraiser_thread
from incremental_install import installer
from pylib import valgrind_tools
from pylib.base import base_test_result
from pylib.base import output_manager
from pylib.constants import host_paths
from pylib.instrumentation import instrumentation_test_instance
from pylib.local.device import local_device_environment
from pylib.local.device import local_device_test_run
from pylib.utils import google_storage_helper
from pylib.utils import instrumentation_tracing
from pylib.utils import logdog_helper
from pylib.utils import shared_preference_utils
from py_trace_event import trace_event
from py_trace_event import trace_time
from py_utils import contextlib_ext
@ -121,7 +121,8 @@ _CURRENT_FOCUS_CRASH_RE = re.compile(
class LocalDeviceInstrumentationTestRun(
local_device_test_run.LocalDeviceTestRun):
def __init__(self, env, test_instance):
super(LocalDeviceInstrumentationTestRun, self).__init__(env, test_instance)
super(LocalDeviceInstrumentationTestRun, self).__init__(
env, test_instance)
self._flag_changers = {}
self._ui_capture_dir = dict()
self._replace_package_contextmanager = None
@ -360,12 +361,9 @@ class LocalDeviceInstrumentationTestRun(
extras['coverageFile'] = coverage_device_file
# Save screenshot if screenshot dir is specified (save locally) or if
# a GS bucket is passed (save in cloud).
screenshot_device_file = None
if (self._test_instance.screenshot_dir or
self._test_instance.gs_results_bucket):
screenshot_device_file = device_temp_file.DeviceTempFile(
device.adb, suffix='.png', dir=device.GetExternalStoragePath())
extras[EXTRA_SCREENSHOT_FILE] = screenshot_device_file.name
screenshot_device_file = device_temp_file.DeviceTempFile(
device.adb, suffix='.png', dir=device.GetExternalStoragePath())
extras[EXTRA_SCREENSHOT_FILE] = screenshot_device_file.name
extras[EXTRA_UI_CAPTURE_DIR] = self._ui_capture_dir[device]
@ -440,19 +438,26 @@ class LocalDeviceInstrumentationTestRun(
time_ms = lambda: int(time.time() * 1e3)
start_ms = time_ms()
with local_device_environment.OptionalPerTestLogcat(
device, test_name.replace('#', '.'),
self._test_instance.should_save_logcat,
additional_filter_specs=['%s:I' % _TAG],
deobfuscate_func=self._test_instance.MaybeDeobfuscateLines) as logmon:
with _LogTestEndpoints(device, test_name):
with contextlib_ext.Optional(
trace_event.trace(test_name),
self._env.trace_output):
output = device.StartInstrumentation(
target, raw=True, extras=extras, timeout=timeout, retries=0)
stream_name = 'logcat_%s_%s_%s' % (
test_name.replace('#', '.'),
time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
device.serial)
with self._env.output_manager.ArchivedTempfile(
stream_name, 'logcat') as logcat_file:
with logcat_monitor.LogcatMonitor(
device.adb,
filter_specs=local_device_environment.LOGCAT_FILTERS,
output_file=logcat_file.name,
transform_func=self._test_instance.MaybeDeobfuscateLines) as logmon:
with _LogTestEndpoints(device, test_name):
with contextlib_ext.Optional(
trace_event.trace(test_name),
self._env.trace_output):
output = device.StartInstrumentation(
target, raw=True, extras=extras, timeout=timeout, retries=0)
logmon.Close()
logcat_url = logmon.GetLogcatURL()
duration_ms = time_ms() - start_ms
with contextlib_ext.Optional(
@ -514,8 +519,8 @@ class LocalDeviceInstrumentationTestRun(
step()
for result in results:
if logcat_url:
result.SetLink('logcat', logcat_url)
if logcat_file:
result.SetLink('logcat', logcat_file.Link())
# Update the result name if the test used flags.
if flags_to_add:
@ -544,15 +549,8 @@ class LocalDeviceInstrumentationTestRun(
if any(r.GetType() not in (base_test_result.ResultType.PASS,
base_test_result.ResultType.SKIP)
for r in results):
with contextlib_ext.Optional(
tempfile_ext.NamedTemporaryDirectory(),
self._test_instance.screenshot_dir is None and
self._test_instance.gs_results_bucket) as screenshot_host_dir:
screenshot_host_dir = (
self._test_instance.screenshot_dir or screenshot_host_dir)
self._SaveScreenshot(device, screenshot_host_dir,
screenshot_device_file, test_display_name,
results)
self._SaveScreenshot(device, screenshot_device_file, test_display_name,
results)
logging.info('detected failure in %s. raw output:', test_display_name)
for l in output:
@ -580,13 +578,13 @@ class LocalDeviceInstrumentationTestRun(
include_stack_symbols=False,
wipe_tombstones=True,
tombstone_symbolizer=self._test_instance.symbolizer)
stream_name = 'tombstones_%s_%s' % (
tombstone_filename = 'tombstones_%s_%s' % (
time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
device.serial)
tombstones_url = logdog_helper.text(
stream_name, '\n'.join(resolved_tombstones))
result.SetLink('tombstones', tombstones_url)
with self._env.output_manager.ArchivedTempfile(
tombstone_filename, 'tombstones') as tombstone_file:
tombstone_file.write('\n'.join(resolved_tombstones))
result.SetLink('tombstones', tombstone_file.Link())
if self._env.concurrent_adb:
post_test_step_thread_group.JoinAll()
return results, None
@ -702,41 +700,23 @@ class LocalDeviceInstrumentationTestRun(
with open(trace_host_file, 'a') as host_handle:
host_handle.write(java_trace_json)
def _SaveScreenshot(self, device, screenshot_host_dir, screenshot_device_file,
test_name, results):
if screenshot_host_dir:
screenshot_host_file = os.path.join(
screenshot_host_dir,
'%s-%s.png' % (
test_name,
time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime())))
def _SaveScreenshot(self, device, screenshot_device_file, test_name, results):
screenshot_filename = '%s-%s.png' % (
test_name, time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
if device.FileExists(screenshot_device_file.name):
try:
device.PullFile(screenshot_device_file.name, screenshot_host_file)
finally:
screenshot_device_file.close()
logging.info(
'Saved screenshot for %s to %s.',
test_name, screenshot_host_file)
if self._test_instance.gs_results_bucket:
link = google_storage_helper.upload(
google_storage_helper.unique_name(
'screenshot', device=device),
screenshot_host_file,
bucket=('%s/screenshots' %
self._test_instance.gs_results_bucket))
for result in results:
result.SetLink('post_test_screenshot', link)
with self._env.output_manager.ArchivedTempfile(
screenshot_filename, 'screenshot',
output_manager.Datatype.IMAGE) as screenshot_host_file:
try:
device.PullFile(screenshot_device_file.name,
screenshot_host_file.name)
finally:
screenshot_device_file.close()
for result in results:
result.SetLink('post_test_screenshot', screenshot_host_file.Link())
def _ProcessRenderTestResults(
self, device, render_tests_device_output_dir, results):
# If GS results bucket is specified, will archive render result images.
# If render image dir is specified, will pull the render result image from
# the device and leave in the directory.
if not (bool(self._test_instance.gs_results_bucket) or
bool(self._test_instance.render_results_dir)):
return
failure_images_device_dir = posixpath.join(
render_tests_device_output_dir, 'failures')
@ -749,85 +729,58 @@ class LocalDeviceInstrumentationTestRun(
golden_images_device_dir = posixpath.join(
render_tests_device_output_dir, 'goldens')
with contextlib_ext.Optional(
tempfile_ext.NamedTemporaryDirectory(),
not bool(self._test_instance.render_results_dir)) as render_temp_dir:
render_host_dir = (
self._test_instance.render_results_dir or render_temp_dir)
for failure_filename in device.ListDirectory(failure_images_device_dir):
if not os.path.exists(render_host_dir):
os.makedirs(render_host_dir)
with self._env.output_manager.ArchivedTempfile(
'fail_%s' % failure_filename, 'render_tests',
output_manager.Datatype.IMAGE) as failure_image_host_file:
device.PullFile(
posixpath.join(failure_images_device_dir, failure_filename),
failure_image_host_file.name)
failure_link = failure_image_host_file.Link()
# Pull all render test results from device.
device.PullFile(failure_images_device_dir, render_host_dir)
if device.FileExists(diff_images_device_dir):
device.PullFile(diff_images_device_dir, render_host_dir)
else:
logging.error('Diff images not found on device.')
if device.FileExists(golden_images_device_dir):
device.PullFile(golden_images_device_dir, render_host_dir)
else:
logging.error('Golden images not found on device.')
# Upload results to Google Storage.
if self._test_instance.gs_results_bucket:
self._UploadRenderTestResults(render_host_dir, results)
def _UploadRenderTestResults(self, render_host_dir, results):
render_tests_bucket = (
self._test_instance.gs_results_bucket + '/render_tests')
for failure_filename in os.listdir(
os.path.join(render_host_dir, 'failures')):
m = RE_RENDER_IMAGE_NAME.match(failure_filename)
if not m:
logging.warning('Unexpected file in render test failures: %s',
failure_filename)
continue
failure_filepath = os.path.join(
render_host_dir, 'failures', failure_filename)
failure_link = google_storage_helper.upload_content_addressed(
failure_filepath, bucket=render_tests_bucket)
golden_filepath = os.path.join(
render_host_dir, 'goldens', failure_filename)
if os.path.exists(golden_filepath):
golden_link = google_storage_helper.upload_content_addressed(
golden_filepath, bucket=render_tests_bucket)
golden_image_device_file = posixpath.join(
golden_images_device_dir, failure_filename)
if device.PathExists(golden_image_device_file):
with self._env.output_manager.ArchivedTempfile(
'golden_%s' % failure_filename, 'render_tests',
output_manager.Datatype.IMAGE) as golden_image_host_file:
device.PullFile(
golden_image_device_file, golden_image_host_file)
golden_link = golden_image_host_file.Link()
else:
golden_link = ''
diff_filepath = os.path.join(
render_host_dir, 'diffs', failure_filename)
if os.path.exists(diff_filepath):
diff_link = google_storage_helper.upload_content_addressed(
diff_filepath, bucket=render_tests_bucket)
diff_image_device_file = posixpath.join(
diff_images_device_dir, failure_filename)
if device.PathExists(diff_image_device_file):
with self._env.output_manager.ArchivedTempfile(
'diff_%s' % failure_filename, 'render_tests',
output_manager.Datatype.IMAGE) as diff_image_host_file:
device.PullFile(
diff_image_device_file, diff_image_host_file)
diff_link = diff_image_host_file.Link()
else:
diff_link = ''
with tempfile.NamedTemporaryFile(suffix='.html') as temp_html:
jinja2_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(_JINJA_TEMPLATE_DIR),
trim_blocks=True)
template = jinja2_env.get_template(_JINJA_TEMPLATE_FILENAME)
# pylint: disable=no-member
processed_template_output = template.render(
test_name=failure_filename,
failure_link=failure_link,
golden_link=golden_link,
diff_link=diff_link)
jinja2_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(_JINJA_TEMPLATE_DIR),
trim_blocks=True)
template = jinja2_env.get_template(_JINJA_TEMPLATE_FILENAME)
# pylint: disable=no-member
processed_template_output = template.render(
test_name=failure_filename,
failure_link=failure_link,
golden_link=golden_link,
diff_link=diff_link)
temp_html.write(processed_template_output)
temp_html.flush()
html_results_link = google_storage_helper.upload_content_addressed(
temp_html.name,
bucket=render_tests_bucket,
content_type='text/html')
for result in results:
result.SetLink(failure_filename, html_results_link)
with self._env.output_manager.ArchivedTempfile(
'%s.html' % failure_filename, 'render_tests',
output_manager.Datatype.HTML) as html_results:
html_results.write(processed_template_output)
html_results.flush()
for result in results:
result.SetLink(failure_filename, html_results.Link())
#override
def _ShouldRetry(self, test, result):

View File

@ -9,8 +9,8 @@ from pylib.base import environment
class LocalMachineEnvironment(environment.Environment):
def __init__(self, _args, _error_func):
super(LocalMachineEnvironment, self).__init__()
def __init__(self, _args, output_manager, _error_func):
super(LocalMachineEnvironment, self).__init__(output_manager)
devil_chromium.Initialize(
output_directory=constants.GetOutDirectory())

View File

@ -0,0 +1,3 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

View File

@ -0,0 +1,45 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import time
import os
import shutil
import urllib
from pylib.base import output_manager
class LocalOutputManager(output_manager.OutputManager):
"""Saves and manages test output files locally in output directory.
Location files will be saved in {output_dir}/TEST_RESULTS_{timestamp}.
"""
def __init__(self, output_dir):
super(LocalOutputManager, self).__init__()
timestamp = time.strftime(
'%Y_%m_%dT%H_%M_%S', time.localtime())
self._output_root = os.path.abspath(os.path.join(
output_dir, 'TEST_RESULTS_%s' % timestamp))
#override
def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
return LocalArchivedFile(
out_filename, out_subdir, datatype, self._output_root)
class LocalArchivedFile(output_manager.ArchivedFile):
def __init__(self, out_filename, out_subdir, datatype, out_root):
super(LocalArchivedFile, self).__init__(
out_filename, out_subdir, datatype)
self._output_path = os.path.join(out_root, out_subdir, out_filename)
def _Link(self):
return 'file://%s' % urllib.quote(self._output_path)
def _Archive(self):
if not os.path.exists(os.path.dirname(self._output_path)):
os.makedirs(os.path.dirname(self._output_path))
shutil.copy(self.name, self._output_path)
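
Since name points at a real temporary file, callers can also let another component write the file directly, which is exactly what the gtest and instrumentation runs in this CL do by passing logcat_file.name to LogcatMonitor; the caller is then responsible for flushing before the context exits. A small sketch of that pattern, with an illustrative output directory:

from pylib.output import local_output_manager

manager = local_output_manager.LocalOutputManager(output_dir='/tmp/out')
with manager:
  with manager.ArchivedTempfile('external.txt', 'misc') as archived:
    # Hand the underlying path to code that writes the file itself.
    with open(archived.name, 'w') as external_writer:
      external_writer.write('written without calling archived.write()\n')
    # Closing the handle above flushes it, satisfying the caller's
    # responsibility before the ArchivedTempfile context exits.
  # Link() is a file:// URL under {output_dir}/TEST_RESULTS_{timestamp}/misc/.
  print(archived.Link())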

View File

@ -0,0 +1,34 @@
#! /usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=protected-access
import tempfile
import shutil
import unittest
from pylib.base import output_manager
from pylib.base import output_manager_test_case
from pylib.output import local_output_manager
class LocalOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
def setUp(self):
self._output_dir = tempfile.mkdtemp()
self._output_manager = local_output_manager.LocalOutputManager(
self._output_dir)
def testUsableTempFile(self):
self.assertUsableTempFile(
self._output_manager._CreateArchivedFile(
'test_file', 'test_subdir', output_manager.Datatype.TEXT))
def tearDown(self):
shutil.rmtree(self._output_dir)
if __name__ == '__main__':
unittest.main()

View File

@ -0,0 +1,42 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from pylib.base import output_manager
# TODO(jbudorick): This class is currently mostly unused.
# Add a --bot-mode argument that all bots pass. If --bot-mode and
# --local-output args are both not passed to test runner then use this
# as the output manager impl.
# pylint: disable=no-self-use
class NoopOutputManager(output_manager.OutputManager):
def __init__(self):
super(NoopOutputManager, self).__init__()
#override
def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
del out_filename, out_subdir, datatype
return NoopArchivedFile()
class NoopArchivedFile(output_manager.ArchivedFile):
def __init__(self):
super(NoopArchivedFile, self).__init__(None, None, None)
def Link(self):
"""NoopArchivedFiles are not retained."""
return ''
def _Link(self):
pass
def Archive(self):
"""NoopArchivedFiles are not retained."""
pass
def _Archive(self):
pass
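
As a quick illustration of the no-op behaviour described in the TODO above, the manager accepts writes through the same interface but archives nothing and returns an empty link:

from pylib.output import noop_output_manager

manager = noop_output_manager.NoopOutputManager()
with manager:
  with manager.ArchivedTempfile('dropped.txt', 'anywhere') as f:
    f.write('this output is intentionally discarded\n')
print(repr(f.Link()))  # Prints '': nothing is uploaded or retained.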

View File

@ -0,0 +1,27 @@
#! /usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=protected-access
import unittest
from pylib.base import output_manager
from pylib.base import output_manager_test_case
from pylib.output import noop_output_manager
class NoopOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
def setUp(self):
self._output_manager = noop_output_manager.NoopOutputManager()
def testUsableTempFile(self):
self.assertUsableTempFile(
self._output_manager._CreateArchivedFile(
'test_file', 'test_subdir', output_manager.Datatype.TEXT))
if __name__ == '__main__':
unittest.main()

View File

@ -0,0 +1,91 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import hashlib
import os
from pylib.base import output_manager
from pylib.output import noop_output_manager
from pylib.utils import logdog_helper
from pylib.utils import google_storage_helper
class RemoteOutputManager(output_manager.OutputManager):
def __init__(self, bucket):
"""Uploads output files to Google Storage or LogDog.
Files will be uploaded to either Google Storage or LogDog, depending on the
datatype.
Args:
bucket: Bucket to use when saving to Google Storage.
"""
super(RemoteOutputManager, self).__init__()
self._bucket = bucket
#override
def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
if datatype == output_manager.Datatype.TEXT:
try:
logdog_helper.get_logdog_client()
return LogdogArchivedFile(out_filename, out_subdir, datatype)
except RuntimeError:
return noop_output_manager.NoopArchivedFile()
else:
if self._bucket is None:
return noop_output_manager.NoopArchivedFile()
return GoogleStorageArchivedFile(
out_filename, out_subdir, datatype, self._bucket)
class LogdogArchivedFile(output_manager.ArchivedFile):
def __init__(self, out_filename, out_subdir, datatype):
super(LogdogArchivedFile, self).__init__(out_filename, out_subdir, datatype)
self._stream_name = '%s_%s' % (out_subdir, out_filename)
def _Link(self):
return logdog_helper.get_viewer_url(self._stream_name)
def _Archive(self):
with open(self.name, 'r') as f:
logdog_helper.text(self._stream_name, f.read())
class GoogleStorageArchivedFile(output_manager.ArchivedFile):
def __init__(self, out_filename, out_subdir, datatype, bucket):
super(GoogleStorageArchivedFile, self).__init__(
out_filename, out_subdir, datatype)
self._bucket = bucket
self._upload_path = None
self._content_addressed = None
def _PrepareArchive(self):
self._content_addressed = (self._datatype in (
output_manager.Datatype.HTML,
output_manager.Datatype.IMAGE))
if self._content_addressed:
sha1 = hashlib.sha1()
with open(self.name, 'rb') as f:
sha1.update(f.read())
self._upload_path = sha1.hexdigest()
else:
self._upload_path = os.path.join(self._out_subdir, self._out_filename)
def _Link(self):
return google_storage_helper.get_url_link(
self._upload_path, self._bucket)
def _Archive(self):
if (self._content_addressed and
google_storage_helper.exists(self._upload_path, self._bucket)):
return
content_type = None
if self._datatype == output_manager.Datatype.HTML:
content_type = 'text/html'
google_storage_helper.upload(
self._upload_path, self.name, self._bucket, content_type)
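
For illustration, the dispatch in _CreateArchivedFile keys purely on the datatype: TEXT goes to LogDog (falling back to a no-op file when no LogDog client is available), while IMAGE and HTML go to Google Storage under a content-addressed (SHA-1) name. The sketch below only exercises that dispatch, since actually archiving needs LogDog or Google Storage credentials; the bucket name is hypothetical and the protected call mirrors the unit tests in this CL:

from pylib.base import output_manager
from pylib.output import remote_output_manager

manager = remote_output_manager.RemoteOutputManager(bucket='example-bucket')

# pylint: disable=protected-access
text_file = manager._CreateArchivedFile(
    'log.txt', 'logs', output_manager.Datatype.TEXT)
image_file = manager._CreateArchivedFile(
    'shot.png', 'screenshots', output_manager.Datatype.IMAGE)
# text_file is a LogdogArchivedFile (or a NoopArchivedFile without LogDog);
# image_file is a GoogleStorageArchivedFile whose upload name becomes the
# SHA-1 of its contents once PrepareArchive() runs.

# Clean up the backing temp files, since this bypasses ArchivedTempfile.
text_file.Delete()
image_file.Delete()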

View File

@ -0,0 +1,34 @@
#! /usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=protected-access
import unittest
from pylib.base import output_manager
from pylib.base import output_manager_test_case
from pylib.constants import host_paths
from pylib.output import remote_output_manager
with host_paths.SysPath(host_paths.PYMOCK_PATH):
import mock # pylint: disable=import-error
@mock.patch('pylib.utils.google_storage_helper')
class RemoteOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
def setUp(self):
self._output_manager = remote_output_manager.RemoteOutputManager(
'this-is-a-fake-bucket')
def testUsableTempFile(self, google_storage_helper_mock):
del google_storage_helper_mock
self.assertUsableTempFile(
self._output_manager._CreateArchivedFile(
'test_file', 'test_subdir', output_manager.Datatype.TEXT))
if __name__ == '__main__':
unittest.main()

View File

@ -75,9 +75,11 @@
{% include 'template/table.html' %}
{% endfor %}
</div>
</br>
<a href="{{feedback_url}}" target="_blank"><b>Feedback</b></a>
</body>
{% if feedback_url %}
</br>
<a href="{{feedback_url}}" target="_blank"><b>Feedback</b></a>
</body>
{%- endif %}
<script>
sortSuiteTableByFailedTestCases();
showSuiteTableOnlyWithReplaceState();

View File

@ -266,20 +266,27 @@ def create_suite_table(results_dict):
def feedback_url(result_details_link):
url_args = urllib.urlencode([
url_args = [
('labels', 'Pri-2,Type-Bug,Restrict-View-Google'),
('summary', 'Result Details Feedback:'),
('components', 'Test>Android'),
('comment', 'Please check out: %s' % result_details_link)])
]
if result_details_link:
url_args.append(('comment', 'Please check out: %s' % result_details_link))
url_args = urllib.urlencode(url_args)
return 'https://bugs.chromium.org/p/chromium/issues/entry?%s' % url_args
def results_to_html(results_dict, cs_base_url, bucket, test_name,
builder_name, build_number):
"""Convert list of test results into html format."""
builder_name, build_number, local_output):
"""Convert list of test results into html format.
test_rows_header, test_rows = create_test_table(results_dict, cs_base_url,
test_name)
Args:
local_output: Whether this results file is uploaded to Google Storage or
just a local file.
"""
test_rows_header, test_rows = create_test_table(
results_dict, cs_base_url, test_name)
suite_rows_header, suite_rows, suite_row_footer = create_suite_table(
results_dict)
@ -298,21 +305,35 @@ def results_to_html(results_dict, cs_base_url, bucket, test_name,
main_template = JINJA_ENVIRONMENT.get_template(
os.path.join('template', 'main.html'))
dest = google_storage_helper.unique_name(
'%s_%s_%s' % (test_name, builder_name, build_number))
result_details_link = google_storage_helper.get_url_link(
dest, '%s/html' % bucket)
return (main_template.render( # pylint: disable=no-member
{'tb_values': [suite_table_values, test_table_values],
'feedback_url': feedback_url(result_details_link)
}), dest, result_details_link)
if local_output:
html_render = main_template.render( # pylint: disable=no-member
{
'tb_values': [suite_table_values, test_table_values],
'feedback_url': feedback_url(None),
})
return (html_render, None, None)
else:
dest = google_storage_helper.unique_name(
'%s_%s_%s' % (test_name, builder_name, build_number))
result_details_link = google_storage_helper.get_url_link(
dest, '%s/html' % bucket)
html_render = main_template.render( # pylint: disable=no-member
{
'tb_values': [suite_table_values, test_table_values],
'feedback_url': feedback_url(result_details_link),
})
return (html_render, dest, result_details_link)
def result_details(json_path, cs_base_url, bucket, test_name,
builder_name, build_number):
"""Get result details from json path and then convert results to html."""
def result_details(json_path, test_name, cs_base_url, bucket=None,
builder_name=None, build_number=None, local_output=False):
"""Get result details from json path and then convert results to html.
Args:
local_output: Whether the results are saved to a local file instead of
being uploaded to Google Storage.
"""
with open(json_path) as json_file:
json_object = json.loads(json_file.read())
@ -324,8 +345,8 @@ def result_details(json_path, cs_base_url, bucket, test_name,
for testsuite_run in json_object['per_iteration_data']:
for test, test_runs in testsuite_run.iteritems():
results_dict[test].extend(test_runs)
return results_to_html(results_dict, cs_base_url, bucket,
test_name, builder_name, build_number)
return results_to_html(results_dict, cs_base_url, bucket, test_name,
builder_name, build_number, local_output)
def upload_to_google_bucket(html, bucket, dest):
@ -352,18 +373,18 @@ def main():
required=True)
parser.add_argument(
'-o', '--output-json',
help='(Swarming Merge Script API)'
' Output JSON file to create.')
help='(Swarming Merge Script API) '
'Output JSON file to create.')
parser.add_argument(
'--build-properties',
help='(Swarming Merge Script API) '
'Build property JSON file provided by recipes.')
parser.add_argument(
'--summary-json',
help='(Swarming Merge Script API)'
' Summary of shard state running on swarming.'
' (Output of the swarming.py collect'
' --task-summary-json=XXX command.)')
help='(Swarming Merge Script API) '
'Summary of shard state running on swarming. '
'(Output of the swarming.py collect '
'--task-summary-json=XXX command.)')
parser.add_argument(
'positional', nargs='*',
help='output.json from shards.')
@ -420,13 +441,12 @@ def main():
# Link to result details presentation page is a part of the page.
result_html_string, dest, result_details_link = result_details(
json_file, args.cs_base_url, args.bucket,
args.test_name, builder_name, build_number)
json_file, args.test_name, args.cs_base_url, args.bucket,
builder_name, build_number)
result_details_link_2 = upload_to_google_bucket(
result_html_string.encode('UTF-8'),
args.bucket, dest)
assert result_details_link == result_details_link_2, (
'Result details links do not match. The link returned by get_url_link'
' should be the same as that returned by upload.')
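
To make the new local_output path concrete, here is roughly how the test runner change later in this CL drives it: render the HTML from an existing json-results file and write it wherever you like, since no Google Storage destination or link is computed. The JSON path, test name, and output path below are hypothetical:

from pylib.results.presentation import test_results_presentation

html, dest, link = test_results_presentation.result_details(
    json_path='/tmp/out/gtest_results.json',  # a json-results file
    test_name='base_unittests',
    cs_base_url='http://cs.chromium.org',
    local_output=True)
assert dest is None and link is None  # No GS name or link in local mode.
with open('/tmp/out/test_results_presentation.html', 'w') as f:
  f.write(html.encode('UTF-8'))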

View File

@ -9,7 +9,6 @@ Due to logdog not having image or HTML viewer, those instead should be uploaded
to Google Storage directly using this module.
"""
import hashlib
import logging
import os
import sys
@ -61,21 +60,6 @@ def upload(name, filepath, bucket, content_type=None, authenticated_link=True):
return get_url_link(name, bucket, authenticated_link)
def upload_content_addressed(
filepath, bucket, content_type=None, authenticated_link=True):
"""Uploads data to Google Storage with filename as sha1 hash.
If file already exists in bucket with hash name, nothing is uploaded.
"""
sha1 = hashlib.sha1()
with open(filepath, 'rb') as f:
sha1.update(f.read())
sha1_hash = sha1.hexdigest()
if not exists(sha1_hash, bucket):
upload(sha1_hash, filepath, bucket, content_type, authenticated_link)
return get_url_link(sha1_hash, bucket, authenticated_link)
@decorators.NoRaiseException(default_return_value=False)
def exists(name, bucket):
bucket = _format_bucket_name(bucket)
@ -89,6 +73,7 @@ def exists(name, bucket):
return False
# TODO(jbudorick): Delete this function. Only one user of it.
def unique_name(basename, suffix='', timestamp=True, device=None):
"""Helper function for creating a unique name for a file to store in GS.

View File

@ -36,10 +36,13 @@ from devil.utils import run_tests_helper
from pylib import constants
from pylib.base import base_test_result
from pylib.base import environment_factory
from pylib.base import output_manager
from pylib.base import output_manager_factory
from pylib.base import test_instance_factory
from pylib.base import test_run_factory
from pylib.results import json_results
from pylib.results import report_results
from pylib.results.presentation import test_results_presentation
from pylib.utils import logdog_helper
from pylib.utils import logging_utils
@ -154,6 +157,12 @@ def AddCommonOptions(parser):
default='local', choices=constants.VALID_ENVIRONMENTS,
help='Test environment to run in (default: %(default)s).')
parser.add_argument(
'--local-output',
action='store_true',
help='Whether to archive test output locally and generate '
'a local results detail page.')
class FastLocalDevAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
namespace.verbose_count = max(namespace.verbose_count, 1)
@ -828,69 +837,85 @@ def RunTestsInPlatformMode(args):
### Set up test objects.
env = environment_factory.CreateEnvironment(args, infra_error)
out_manager = output_manager_factory.CreateOutputManager(args)
env = environment_factory.CreateEnvironment(
args, out_manager, infra_error)
test_instance = test_instance_factory.CreateTestInstance(args, infra_error)
test_run = test_run_factory.CreateTestRun(
args, env, test_instance, infra_error)
### Run.
with out_manager:
with json_writer, logcats_uploader, env, test_instance, test_run:
with json_writer, logcats_uploader, env, test_instance, test_run:
repetitions = (xrange(args.repeat + 1) if args.repeat >= 0
else itertools.count())
result_counts = collections.defaultdict(
lambda: collections.defaultdict(int))
iteration_count = 0
for _ in repetitions:
raw_results = test_run.RunTests()
if not raw_results:
continue
repetitions = (xrange(args.repeat + 1) if args.repeat >= 0
else itertools.count())
result_counts = collections.defaultdict(
lambda: collections.defaultdict(int))
iteration_count = 0
for _ in repetitions:
raw_results = test_run.RunTests()
if not raw_results:
continue
all_raw_results.append(raw_results)
all_raw_results.append(raw_results)
iteration_results = base_test_result.TestRunResults()
for r in reversed(raw_results):
iteration_results.AddTestRunResults(r)
all_iteration_results.append(iteration_results)
iteration_results = base_test_result.TestRunResults()
for r in reversed(raw_results):
iteration_results.AddTestRunResults(r)
all_iteration_results.append(iteration_results)
iteration_count += 1
for r in iteration_results.GetAll():
result_counts[r.GetName()][r.GetType()] += 1
report_results.LogFull(
results=iteration_results,
test_type=test_instance.TestType(),
test_package=test_run.TestPackage(),
annotation=getattr(args, 'annotations', None),
flakiness_server=getattr(args, 'flakiness_dashboard_server',
None))
if args.break_on_failure and not iteration_results.DidRunPass():
break
iteration_count += 1
for r in iteration_results.GetAll():
result_counts[r.GetName()][r.GetType()] += 1
report_results.LogFull(
results=iteration_results,
test_type=test_instance.TestType(),
test_package=test_run.TestPackage(),
annotation=getattr(args, 'annotations', None),
flakiness_server=getattr(args, 'flakiness_dashboard_server',
None))
if args.break_on_failure and not iteration_results.DidRunPass():
break
if iteration_count > 1:
# display summary results
# only display results for a test if at least one test did not pass
all_pass = 0
tot_tests = 0
for test_name in result_counts:
tot_tests += 1
if any(result_counts[test_name][x] for x in (
base_test_result.ResultType.FAIL,
base_test_result.ResultType.CRASH,
base_test_result.ResultType.TIMEOUT,
base_test_result.ResultType.UNKNOWN)):
logging.critical(
'%s: %s',
test_name,
', '.join('%s %s' % (str(result_counts[test_name][i]), i)
for i in base_test_result.ResultType.GetTypes()))
else:
all_pass += 1
if iteration_count > 1:
# display summary results
# only display results for a test if at least one test did not pass
all_pass = 0
tot_tests = 0
for test_name in result_counts:
tot_tests += 1
if any(result_counts[test_name][x] for x in (
base_test_result.ResultType.FAIL,
base_test_result.ResultType.CRASH,
base_test_result.ResultType.TIMEOUT,
base_test_result.ResultType.UNKNOWN)):
logging.critical(
'%s: %s',
test_name,
', '.join('%s %s' % (str(result_counts[test_name][i]), i)
for i in base_test_result.ResultType.GetTypes()))
else:
all_pass += 1
logging.critical('%s of %s tests passed in all %s runs',
str(all_pass),
str(tot_tests),
str(iteration_count))
logging.critical('%s of %s tests passed in all %s runs',
str(all_pass),
str(tot_tests),
str(iteration_count))
if args.local_output and args.json_results_file:
with out_manager.ArchivedTempfile(
'test_results_presentation.html',
'test_results_presentation',
output_manager.Datatype.HTML) as results_detail_file:
result_html_string, _, _ = test_results_presentation.result_details(
json_path=args.json_results_file,
test_name=args.command,
cs_base_url='http://cs.chromium.org',
local_output=True)
results_detail_file.write(result_html_string)
results_detail_file.flush()
logging.critical('TEST RESULTS: %s', results_detail_file.Link())
if args.command == 'perf' and (args.steps or args.single_step):
return 0

View File

@ -124,12 +124,12 @@ gyp/util/md5_check.py
incremental_install/__init__.py
incremental_install/installer.py
pylib/__init__.py
pylib/android/__init__.py
pylib/android/logdog_logcat_monitor.py
pylib/base/__init__.py
pylib/base/base_test_result.py
pylib/base/environment.py
pylib/base/environment_factory.py
pylib/base/output_manager.py
pylib/base/output_manager_factory.py
pylib/base/test_collection.py
pylib/base/test_exception.py
pylib/base/test_instance.py
@ -165,6 +165,10 @@ pylib/local/machine/local_machine_environment.py
pylib/local/machine/local_machine_junit_test_run.py
pylib/monkey/__init__.py
pylib/monkey/monkey_test_instance.py
pylib/output/__init__.py
pylib/output/local_output_manager.py
pylib/output/noop_output_manager.py
pylib/output/remote_output_manager.py
pylib/perf/__init__.py
pylib/perf/perf_test_instance.py
pylib/results/__init__.py
@ -172,6 +176,9 @@ pylib/results/flakiness_dashboard/__init__.py
pylib/results/flakiness_dashboard/json_results_generator.py
pylib/results/flakiness_dashboard/results_uploader.py
pylib/results/json_results.py
pylib/results/presentation/__init__.py
pylib/results/presentation/standard_gtest_merge.py
pylib/results/presentation/test_results_presentation.py
pylib/results/report_results.py
pylib/symbols/__init__.py
pylib/symbols/deobfuscator.py