[android] Make build_type a singleton.
BUG=260494
TEST=None
NOTRY=True

Review URL: https://chromiumcodereview.appspot.com/22933005

git-svn-id: http://src.chromium.org/svn/trunk/src/build@217855 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Parent: fbb88fd872
Commit: 9c2576b64c
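The change applies one pattern throughout the diff below: instead of threading a build_type argument through every constructor and helper, callers read the build type from a single process-wide setting in pylib constants. A minimal sketch of the before/after shape, assuming a runner class of this general form (OldRunner/NewRunner and their fields are illustrative, not code from this CL):

import os

from pylib import constants


# Before: every object carried its own copy of the build type.
class OldRunner(object):
  def __init__(self, device, tool, build_type):
    self.build_type = build_type                      # duplicated state
    self.out_dir = os.path.join('out', build_type)


# After: the build type is set once at startup and read where needed.
class NewRunner(object):
  def __init__(self, device, tool):
    self.out_dir = os.path.join('out', constants.GetBuildType())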
@@ -30,13 +30,11 @@ NET_TEST_SERVER_PORT_INFO_FILE = 'net-test-server-ports'
 class BaseTestRunner(object):
   """Base class for running tests on a single device."""

-  def __init__(self, device, tool, build_type, push_deps=True,
-               cleanup_test_files=False):
+  def __init__(self, device, tool, push_deps=True, cleanup_test_files=False):
     """
     Args:
       device: Tests will run on the device of this ID.
       tool: Name of the Valgrind tool.
-      build_type: 'Release' or 'Debug'.
       push_deps: If True, push all dependencies to the device.
       cleanup_test_files: Whether or not to cleanup test files on device.
     """
@@ -55,7 +53,6 @@ class BaseTestRunner(object):
     # starting it in TestServerThread.
     self.test_server_spawner_port = 0
     self.test_server_port = 0
-    self.build_type = build_type
     self._push_deps = push_deps
     self._cleanup_test_files = cleanup_test_files

@@ -130,7 +127,7 @@ class BaseTestRunner(object):

   def _ForwardPorts(self, port_pairs):
     """Forwards a port."""
-    Forwarder.Map(port_pairs, self.adb, self.build_type, self.tool)
+    Forwarder.Map(port_pairs, self.adb, constants.GetBuildType(), self.tool)

   def _UnmapPorts(self, port_pairs):
     """Unmap previously forwarded ports."""
@@ -194,8 +191,7 @@ class BaseTestRunner(object):
         [(self.test_server_spawner_port, self.test_server_spawner_port)])
     self._spawning_server = SpawningServer(self.test_server_spawner_port,
                                            self.adb,
-                                           self.tool,
-                                           self.build_type)
+                                           self.tool)
     self._spawning_server.Start()
     server_ready, error_msg = ports.IsHttpServerConnectable(
         '127.0.0.1', self.test_server_spawner_port, path='/ping',

@@ -335,7 +335,6 @@ def _GetAttachedDevices(wait_for_debugger=False, test_device=None):

 def RunTests(tests, runner_factory, wait_for_debugger, test_device,
              shard=True,
-             build_type='Debug',
              test_timeout=DEFAULT_TIMEOUT,
              setup_timeout=DEFAULT_TIMEOUT,
              num_retries=2):
@@ -352,7 +351,6 @@ def RunTests(tests, runner_factory, wait_for_debugger, test_device,
          shared test collection.
       - Replicating tests will copy all tests to each test runner through a
          unique test collection for each test runner.
-    build_type: Either 'Debug' or 'Release'.
     test_timeout: Watchdog timeout in seconds for running tests.
     setup_timeout: Watchdog timeout in seconds for creating and cleaning up
                    test runners.

@@ -100,7 +100,7 @@ def _GetServerTypeCommandLine(server_type):
 class TestServerThread(threading.Thread):
   """A thread to run the test server in a separate process."""

-  def __init__(self, ready_event, arguments, adb, tool, build_type):
+  def __init__(self, ready_event, arguments, adb, tool):
     """Initialize TestServerThread with the following argument.

     Args:
@@ -108,7 +108,6 @@ class TestServerThread(threading.Thread):
       arguments: dictionary of arguments to run the test server.
       adb: instance of AndroidCommands.
       tool: instance of runtime error detection tool.
-      build_type: 'Release' or 'Debug'.
     """
     threading.Thread.__init__(self)
     self.wait_event = threading.Event()
@@ -128,7 +127,6 @@ class TestServerThread(threading.Thread):
     self.pipe_in = None
     self.pipe_out = None
     self.command_line = []
-    self.build_type = build_type

   def _WaitToStartAndGetPortFromTestServer(self):
     """Waits for the Python test server to start and gets the port it is using.
@@ -251,7 +249,8 @@ class TestServerThread(threading.Thread):
     else:
       self.is_ready = _CheckPortStatus(self.host_port, True)
     if self.is_ready:
-      Forwarder.Map([(0, self.host_port)], self.adb, self.build_type, self.tool)
+      Forwarder.Map([(0, self.host_port)], self.adb, constants.GetBuildType(),
+                    self.tool)
       # Check whether the forwarder is ready on the device.
       self.is_ready = False
       device_port = Forwarder.DevicePortForHostPort(self.host_port)
@@ -333,8 +332,7 @@ class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
         ready_event,
         json.loads(test_server_argument_json),
         self.server.adb,
-        self.server.tool,
-        self.server.build_type)
+        self.server.tool)
     self.server.test_server_instance.setDaemon(True)
     self.server.test_server_instance.start()
     ready_event.wait()
@@ -401,14 +399,14 @@ class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
 class SpawningServer(object):
   """The class used to start/stop a http server."""

-  def __init__(self, test_server_spawner_port, adb, tool, build_type):
+  def __init__(self, test_server_spawner_port, adb, tool):
     logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
     self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
                                             SpawningServerRequestHandler)
     self.server.adb = adb
     self.server.tool = tool
     self.server.test_server_instance = None
-    self.server.build_type = build_type
+    self.server.build_type = constants.GetBuildType()

   def _Listen(self):
     logging.info('Starting test server spawner')

@@ -83,6 +83,17 @@ ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
 UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'


+def GetBuildType():
+  try:
+    return os.environ['CHROMIUM_BUILD_TYPE']
+  except KeyError:
+    raise Exception('The build type has not been set')
+
+
+def SetBuildType(build_type):
+  os.environ['CHROMIUM_BUILD_TYPE'] = build_type
+
+
 def _GetADBPath():
   if os.environ.get('ANDROID_SDK_ROOT'):
     return 'adb'

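The two new functions above are the whole singleton: the value is stored in os.environ['CHROMIUM_BUILD_TYPE'], so it is set once per process, readable from anywhere in pylib, and inherited by child processes that receive a copy of the environment. A hedged usage sketch (the main() wrapper and the 'Debug' literal are illustrative, not code from this CL):

import os

from pylib import constants


def main():
  # An entry point publishes the build type exactly once...
  constants.SetBuildType('Debug')

  # ...and any consumer reads the same value without it being passed
  # down through constructor arguments.
  product_dir = os.path.join('out', constants.GetBuildType())
  print(product_dir)  # -> out/Debug

  # The backing store is the process environment, so the value is also
  # visible to subprocesses launched with an inherited environment.
  assert os.environ['CHROMIUM_BUILD_TYPE'] == 'Debug'

  # Calling GetBuildType() before SetBuildType() raises
  # 'The build type has not been set'.


if __name__ == '__main__':
  main()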
@@ -29,10 +29,11 @@ class DeviceStatsMonitor(object):
   RESULT_VIEWER_PATH = os.path.abspath(os.path.join(
       os.path.dirname(os.path.realpath(__file__)), 'device_stats_monitor.html'))

-  def __init__(self, adb, hz, build_type):
+  def __init__(self, adb, hz):
     self._adb = adb
     host_path = os.path.abspath(os.path.join(
-        constants.DIR_SOURCE_ROOT, 'out', build_type, 'device_stats_monitor'))
+        constants.DIR_SOURCE_ROOT, 'out', constants.GetBuildType(),
+        'device_stats_monitor'))
     self._adb.PushIfNeeded(host_path, DeviceStatsMonitor.DEVICE_PATH)
     self._hz = hz

@@ -14,14 +14,12 @@ class FakeDns(object):
   """Wrapper class for the fake_dns tool."""
   _FAKE_DNS_PATH = constants.TEST_EXECUTABLE_DIR + '/fake_dns'

-  def __init__(self, adb, build_type):
+  def __init__(self, adb):
     """
     Args:
       adb: the AndroidCommands to use.
-      build_type: 'Release' or 'Debug'.
     """
     self._adb = adb
-    self._build_type = build_type
     self._fake_dns = None
     self._original_dns = None

@@ -32,7 +30,7 @@ class FakeDns(object):
       subprocess instance connected to the fake_dns process on the device.
     """
     self._adb.PushIfNeeded(
-        os.path.join(constants.DIR_SOURCE_ROOT, 'out', self._build_type,
+        os.path.join(constants.DIR_SOURCE_ROOT, 'out', constants.GetBuildType(),
                      'fake_dns'),
         FakeDns._FAKE_DNS_PATH)
     return subprocess.Popen(

@@ -69,14 +69,14 @@ _ISOLATE_SCRIPT = os.path.join(
     constants.DIR_SOURCE_ROOT, 'tools', 'swarm_client', 'isolate.py')


-def _GenerateDepsDirUsingIsolate(suite_name, build_type):
+def _GenerateDepsDirUsingIsolate(suite_name):
   """Generate the dependency dir for the test suite using isolate.

   Args:
     suite_name: Name of the test suite (e.g. base_unittests).
-    build_type: Release/Debug
   """
-  product_dir = os.path.join(cmd_helper.OutDirectory.get(), build_type)
+  product_dir = os.path.join(cmd_helper.OutDirectory.get(),
+                             constants.GetBuildType())
   assert os.path.isabs(product_dir)

   if os.path.isdir(constants.ISOLATE_DEPS_DIR):
@@ -142,7 +142,8 @@ def _GenerateDepsDirUsingIsolate(suite_name, build_type):
         shutil.move(os.path.join(root, filename), paks_dir)

   # Move everything in PRODUCT_DIR to top level.
-  deps_product_dir = os.path.join(constants.ISOLATE_DEPS_DIR, 'out', build_type)
+  deps_product_dir = os.path.join(constants.ISOLATE_DEPS_DIR, 'out',
+                                  constants.GetBuildType())
   if os.path.isdir(deps_product_dir):
     for p in os.listdir(deps_product_dir):
       shutil.move(os.path.join(deps_product_dir, p), constants.ISOLATE_DEPS_DIR)
@@ -270,19 +271,17 @@ def Setup(test_options):
   if not ports.ResetTestServerPortAllocation():
     raise Exception('Failed to reset test server port.')

-  test_package = test_package_apk.TestPackageApk(test_options.suite_name,
-                                                 test_options.build_type)
+  test_package = test_package_apk.TestPackageApk(test_options.suite_name)
   if not os.path.exists(test_package.suite_path):
     test_package = test_package_exe.TestPackageExecutable(
-        test_options.suite_name, test_options.build_type)
+        test_options.suite_name)
     if not os.path.exists(test_package.suite_path):
       raise Exception(
           'Did not find %s target. Ensure it has been built.'
           % test_options.suite_name)
   logging.warning('Found target %s', test_package.suite_path)

-  _GenerateDepsDirUsingIsolate(test_options.suite_name,
-                               test_options.build_type)
+  _GenerateDepsDirUsingIsolate(test_options.suite_name)

   # Constructs a new TestRunner with the current options.
   def TestRunnerFactory(device, shard_index):

@@ -7,7 +7,6 @@
 import collections

 GTestOptions = collections.namedtuple('GTestOptions', [
-    'build_type',
     'tool',
     'cleanup_test_files',
     'push_deps',

@@ -23,14 +23,14 @@ from test_package import TestPackage
 class TestPackageApk(TestPackage):
   """A helper class for running APK-based native tests."""

-  def __init__(self, suite_name, build_type):
+  def __init__(self, suite_name):
     """
     Args:
       suite_name: Name of the test suite (e.g. base_unittests).
-      build_type: 'Release' or 'Debug'.
     """
     TestPackage.__init__(self, suite_name)
-    product_dir = os.path.join(cmd_helper.OutDirectory.get(), build_type)
+    product_dir = os.path.join(cmd_helper.OutDirectory.get(),
+                               constants.GetBuildType())
     if suite_name == 'content_browsertests':
       self.suite_path = os.path.join(
           product_dir, 'apks', '%s.apk' % suite_name)

@@ -22,14 +22,14 @@ class TestPackageExecutable(TestPackage):

   _TEST_RUNNER_RET_VAL_FILE = 'gtest_retval'

-  def __init__(self, suite_name, build_type):
+  def __init__(self, suite_name):
     """
     Args:
       suite_name: Name of the test suite (e.g. base_unittests).
-      build_type: 'Release' or 'Debug'.
     """
     TestPackage.__init__(self, suite_name)
-    product_dir = os.path.join(cmd_helper.OutDirectory.get(), build_type)
+    product_dir = os.path.join(cmd_helper.OutDirectory.get(),
+                               constants.GetBuildType())
     self.suite_path = os.path.join(product_dir, suite_name)
     self._symbols_dir = os.path.join(product_dir, 'lib.target')

@@ -33,7 +33,6 @@ class TestRunner(base_test_runner.BaseTestRunner):
     """

     super(TestRunner, self).__init__(device, test_options.tool,
-                                     test_options.build_type,
                                      test_options.push_deps,
                                      test_options.cleanup_test_files)

@@ -196,7 +196,6 @@ def InstrumentationSetup(host_driven_test_root, official_build,
     return test_runner.HostDrivenTestRunner(
         device, shard_index,
         instrumentation_options.tool,
-        instrumentation_options.build_type,
         instrumentation_options.push_deps,
         instrumentation_options.cleanup_test_files)

@@ -24,6 +24,7 @@ import os
 import time

 from pylib import android_commands
+from pylib import constants
 from pylib.base import base_test_result
 from pylib.instrumentation import test_package
 from pylib.instrumentation import test_result
@@ -55,11 +56,10 @@ class HostDrivenTestCase(object):
     self.instrumentation_options = instrumentation_options
     self.ports_to_forward = []

-  def SetUp(self, device, shard_index, build_type, push_deps,
+  def SetUp(self, device, shard_index, push_deps,
             cleanup_test_files):
     self.device_id = device
     self.shard_index = shard_index
-    self.build_type = build_type
     self.adb = android_commands.AndroidCommands(self.device_id)
     self.push_deps = push_deps
     self.cleanup_test_files = cleanup_test_files
@@ -69,7 +69,7 @@ class HostDrivenTestCase(object):

   def GetOutDir(self):
     return os.path.join(os.environ['CHROME_SRC'], 'out',
-                        self.build_type)
+                        constants.GetBuildType())

   def Run(self):
     logging.info('Running host-driven test: %s', self.tagged_name)

@@ -49,7 +49,7 @@ class HostDrivenTestRunner(base_test_runner.BaseTestRunner):
   """

   #override
-  def __init__(self, device, shard_index, tool, build_type, push_deps,
+  def __init__(self, device, shard_index, tool, push_deps,
                cleanup_test_files):
     """Creates a new HostDrivenTestRunner.

@@ -57,13 +57,12 @@ class HostDrivenTestRunner(base_test_runner.BaseTestRunner):
       device: Attached android device.
       shard_index: Shard index.
       tool: Name of the Valgrind tool.
-      build_type: 'Release' or 'Debug'.
       push_deps: If True, push all dependencies to the device.
       cleanup_test_files: Whether or not to cleanup test files on device.
     """

-    super(HostDrivenTestRunner, self).__init__(device, tool, build_type,
-                                               push_deps, cleanup_test_files)
+    super(HostDrivenTestRunner, self).__init__(device, tool, push_deps,
+                                               cleanup_test_files)

     # The shard index affords the ability to create unique port numbers (e.g.
     # DEFAULT_PORT + shard_index) if the test so wishes.
@@ -87,8 +86,8 @@ class HostDrivenTestRunner(base_test_runner.BaseTestRunner):
     exception_raised = False

     try:
-      test.SetUp(self.device, self.shard_index, self.build_type,
-                 self._push_deps, self._cleanup_test_files)
+      test.SetUp(self.device, self.shard_index, self._push_deps,
+                 self._cleanup_test_files)
     except Exception:
       logging.exception(
           'Caught exception while trying to run SetUp() for test: ' +

@@ -7,7 +7,6 @@
 import collections

 InstrumentationOptions = collections.namedtuple('InstrumentationOptions', [
-    'build_type',
     'tool',
     'cleanup_test_files',
     'push_deps',

@@ -62,7 +62,6 @@ class TestRunner(base_test_runner.BaseTestRunner):
         Can be optionally requested by a test case.
     """
     super(TestRunner, self).__init__(device, test_options.tool,
-                                     test_options.build_type,
                                      test_options.push_deps,
                                      test_options.cleanup_test_files)
     self._lighttp_port = constants.LIGHTTPD_RANDOM_PORT_FIRST + shard_index

@@ -7,7 +7,6 @@
 import collections

 MonkeyOptions = collections.namedtuple('MonkeyOptions', [
-    'build_type',
     'verbose_count',
     'package_name',
     'activity_name',

@@ -14,7 +14,7 @@ class TestRunner(base_test_runner.BaseTestRunner):
   """A TestRunner instance runs a monkey test on a single device."""

   def __init__(self, test_options, device, shard_index):
-    super(TestRunner, self).__init__(device, None, test_options.build_type)
+    super(TestRunner, self).__init__(device, None)
     self.options = test_options

   def _LaunchMonkeyTest(self):

@@ -7,7 +7,6 @@
 import collections

 UIAutomatorOptions = collections.namedtuple('UIAutomatorOptions', [
-    'build_type',
     'tool',
     'cleanup_test_files',
     'push_deps',

@@ -25,7 +25,6 @@ class TestRunner(instr_test_runner.TestRunner):
     """
     # Create an InstrumentationOptions object to pass to the super class
     instrumentation_options = instr_test_options.InstrumentationOptions(
-        test_options.build_type,
         test_options.tool,
         test_options.cleanup_test_files,
         test_options.push_deps,

@@ -13,10 +13,10 @@ from pylib import constants
 import flakiness_dashboard_results_uploader


-def _LogToFile(results, test_type, suite_name, build_type):
+def _LogToFile(results, test_type, suite_name):
   """Log results to local files which can be used for aggregation later."""
   log_file_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out',
-                               build_type, 'test_logs')
+                               constants.GetBuildType(), 'test_logs')
   if not os.path.exists(log_file_path):
     os.mkdir(log_file_path)
   full_file_name = os.path.join(
@@ -64,7 +64,7 @@ def _LogToFlakinessDashboard(results, test_type, test_package,


 def LogFull(results, test_type, test_package, annotation=None,
-            build_type='Debug', flakiness_server=None):
+            flakiness_server=None):
   """Log the tests results for the test suite.

   The results will be logged three different ways:
@@ -80,7 +80,6 @@ def LogFull(results, test_type, test_package, annotation=None,
                   'ContentShellTest' for instrumentation tests)
     annotation: If instrumenation test type, this is a list of annotations
                 (e.g. ['Smoke', 'SmallTest']).
-    build_type: Release/Debug
     flakiness_server: If provider, upload the results to flakiness dashboard
                       with this URL.
   """
@@ -104,7 +103,7 @@ def LogFull(results, test_type, test_package, annotation=None,
     suite_name = annotation[0]
   else:
     suite_name = test_package
-  _LogToFile(results, test_type, suite_name, build_type)
+  _LogToFile(results, test_type, suite_name)

   if flakiness_server:
     _LogToFlakinessDashboard(results, test_type, test_package,

@@ -94,6 +94,7 @@ def AddCommonOptions(option_parser):
 def ProcessCommonOptions(options):
   """Processes and handles all common options."""
   run_tests_helper.SetLogLevel(options.verbose_count)
+  constants.SetBuildType(options.build_type)


 def AddGTestOptions(option_parser):

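The hunk above is where the singleton gets populated: ProcessCommonOptions() now calls constants.SetBuildType(options.build_type) before any test runners are constructed. A rough sketch of that wiring, using a hypothetical --build-type flag in place of however the real option parser fills in options.build_type:

import optparse
import sys

from pylib import constants


def _ParseArgs(argv):
  parser = optparse.OptionParser()
  # Hypothetical flag for illustration; the real script derives
  # options.build_type from its own command-line options.
  parser.add_option('--build-type', dest='build_type', default='Debug')
  options, _ = parser.parse_args(argv)
  return options


def ProcessCommonOptions(options):
  # Mirrors the hunk above: publish the build type once, up front.
  constants.SetBuildType(options.build_type)


if __name__ == '__main__':
  ProcessCommonOptions(_ParseArgs(sys.argv[1:]))
  print(constants.GetBuildType())  # -> Debug unless overridden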
@@ -275,7 +276,6 @@ def ProcessInstrumentationOptions(options, error_func):
                                    '%s.jar' % options.test_apk)

   return instrumentation_test_options.InstrumentationOptions(
-      options.build_type,
       options.tool,
       options.cleanup_test_files,
       options.push_deps,
@@ -344,7 +344,6 @@ def ProcessUIAutomatorOptions(options, error_func):
                              '_java.jar')

   return uiautomator_test_options.UIAutomatorOptions(
-      options.build_type,
       options.tool,
       options.cleanup_test_files,
       options.push_deps,
@@ -411,7 +410,6 @@ def ProcessMonkeyTestOptions(options, error_func):
     category = options.category.split(',')

   return monkey_test_options.MonkeyOptions(
-      options.build_type,
       options.verbose_count,
       options.package_name,
       options.activity_name,
@@ -467,7 +465,6 @@ def _RunGTests(options, error_func):
   # TODO(gkanwar): Move this into ProcessGTestOptions once we require -s for
   # the gtest command.
   gtest_options = gtest_test_options.GTestOptions(
-      options.build_type,
       options.tool,
       options.cleanup_test_files,
       options.push_deps,
@@ -480,7 +477,6 @@ def _RunGTests(options, error_func):
   results, test_exit_code = test_dispatcher.RunTests(
       tests, runner_factory, False, options.test_device,
       shard=True,
-      build_type=options.build_type,
       test_timeout=None,
       num_retries=options.num_retries)

@@ -491,7 +487,6 @@ def _RunGTests(options, error_func):
       results=results,
       test_type='Unit test',
       test_package=suite_name,
-      build_type=options.build_type,
       flakiness_server=options.flakiness_dashboard_server)

   if os.path.isdir(constants.ISOLATE_DEPS_DIR):
@@ -514,7 +509,6 @@ def _RunInstrumentationTests(options, error_func):
       tests, runner_factory, options.wait_for_debugger,
       options.test_device,
       shard=True,
-      build_type=options.build_type,
       test_timeout=None,
       num_retries=options.num_retries)

@@ -530,7 +524,6 @@ def _RunInstrumentationTests(options, error_func):
       tests, runner_factory, False,
       options.test_device,
       shard=True,
-      build_type=options.build_type,
       test_timeout=None,
       num_retries=options.num_retries)

@@ -545,7 +538,6 @@ def _RunInstrumentationTests(options, error_func):
       test_type='Instrumentation',
       test_package=os.path.basename(options.test_apk),
       annotation=options.annotations,
-      build_type=options.build_type,
       flakiness_server=options.flakiness_dashboard_server)

   return exit_code
@@ -560,7 +552,6 @@ def _RunUIAutomatorTests(options, error_func):
   results, exit_code = test_dispatcher.RunTests(
       tests, runner_factory, False, options.test_device,
       shard=True,
-      build_type=options.build_type,
       test_timeout=None,
       num_retries=options.num_retries)

@@ -569,7 +560,6 @@ def _RunUIAutomatorTests(options, error_func):
       test_type='UIAutomator',
       test_package=os.path.basename(options.test_jar),
       annotation=options.annotations,
-      build_type=options.build_type,
       flakiness_server=options.flakiness_dashboard_server)

   return exit_code
@@ -587,8 +577,7 @@ def _RunMonkeyTests(options, error_func):
   report_results.LogFull(
       results=results,
       test_type='Monkey',
-      test_package='Monkey',
-      build_type=options.build_type)
+      test_package='Monkey')

   return exit_code