[benchmark] Add tests for Benchmark_Driver

The imports are a bit sketchy because `Benchmark_Driver` doesn't have a `.py` extension, so the module has to be loaded manually with `imp.load_source` (the byte-compiled `Benchmark_Driverc` file this leaves behind is now ignored). :-/

Extracted `parse_args` from `main` and added test coverage for argument parsing.
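
For reference, the same trick can be written without the deprecated `imp` module; a minimal sketch using `importlib`, assuming Python 3.5+ (the actual test file below targets Python 2 and uses `imp.load_source`):

import importlib.machinery
import importlib.util
import os

# Load the extension-less script as a regular module.
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'Benchmark_Driver')
loader = importlib.machinery.SourceFileLoader('Benchmark_Driver', path)
spec = importlib.util.spec_from_loader('Benchmark_Driver', loader)
Benchmark_Driver = importlib.util.module_from_spec(spec)
loader.exec_module(Benchmark_Driver)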
Pavol Vaskovic 2018-08-16 19:50:42 +02:00
Parent 0b990a82a5
Commit 69d5d5e732
3 changed files with 113 additions and 14 deletions

.gitignore

@@ -53,3 +53,4 @@ compile_commands.json
 SortedCFDatabase.def
 htmlcov
 .coverage
+/benchmark/scripts/Benchmark_Driverc

benchmark/scripts/Benchmark_Driver

@@ -300,16 +300,17 @@ def positive_int(value):
     return ivalue

-def main():
+def parse_args(args):
+    """Parse command line arguments and set default values."""
     parser = argparse.ArgumentParser(
         epilog='Example: ./Benchmark_Driver run -i 5 -f Prefix -f .*Suffix.*'
     )
     subparsers = parser.add_subparsers(
         title='Swift benchmark driver commands',
-        help='See COMMAND -h for additional arguments', metavar='<command>')
+        help='See COMMAND -h for additional arguments', metavar='COMMAND')
-    parent_parser = argparse.ArgumentParser(add_help=False)
-    benchmarks_group = parent_parser.add_mutually_exclusive_group()
+    shared_benchmarks_parser = argparse.ArgumentParser(add_help=False)
+    benchmarks_group = shared_benchmarks_parser.add_mutually_exclusive_group()
     benchmarks_group.add_argument(
         'benchmarks',
         default=[],
@@ -318,22 +319,22 @@ def main():
         '-f', '--filter', dest='filters', action='append',
         help='run all tests whose name match regular expression PATTERN, ' +
              'multiple filters are supported', metavar="PATTERN")
-    parent_parser.add_argument(
+    shared_benchmarks_parser.add_argument(
         '-t', '--tests',
         help='directory containing Benchmark_O{,none,size} ' +
              '(default: DRIVER_DIR)',
         default=DRIVER_DIR)
-    run_parser = subparsers.add_parser(
-        'run',
-        help='Run benchmarks and output results to stdout',
-        parents=[parent_parser])
-    run_parser.add_argument(
+    shared_benchmarks_parser.add_argument(
         '-o', '--optimization',
         metavar='OPT',
         choices=['O', 'Onone', 'Osize'],
         help='optimization level to use: {O,Onone,Osize}, (default: O)',
         default='O')
+    run_parser = subparsers.add_parser(
+        'run',
+        help='Run benchmarks and output results to stdout',
+        parents=[shared_benchmarks_parser])
+    run_parser.add_argument(
         '-i', '--iterations',
         help='number of times to run each test (default: 1)',
@@ -343,7 +344,7 @@ def main():
         help='log results to directory (default: no logging)')
     run_parser.add_argument(
         '--swift-repo',
-        help='absolute path to Swift source repo for branch comparison')
+        help='absolute path to the Swift source repository')
     run_parser.set_defaults(func=run)
     compare_parser = subparsers.add_parser(
@@ -354,7 +355,7 @@ def main():
         help='directory containing benchmark logs')
     compare_parser.add_argument(
         '--swift-repo', required=True,
-        help='absolute path to Swift source repo')
+        help='absolute path to the Swift source repository')
     compare_parser.add_argument(
         '--compare-script', required=True,
         help='absolute path to compare script')
@@ -364,7 +365,11 @@ def main():
              'branch (default: master)')
     compare_parser.set_defaults(func=compare)

-    args = parser.parse_args()
+    return parser.parse_args(args)
+
+
+def main():
+    args = parse_args(sys.argv[1:])
     return args.func(args)
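
Returning the parsed `Namespace` from `parse_args(args)` instead of reading `sys.argv` inside `main` is what makes the new tests possible: argument parsing can be exercised in-process, without spawning the driver. A small usage sketch (assuming `-i` is parsed with the `positive_int` helper shown above):

args = parse_args(['run', 'AngryPhonebook', '-i', '3'])
args.benchmarks    # ['AngryPhonebook']
args.iterations    # 3
args.optimization  # 'O' (the default)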

benchmark/scripts/test_Benchmark_Driver.py

@@ -0,0 +1,93 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# ===--- test_Benchmark_Driver.py ----------------------------------------===//
+#
+#  This source file is part of the Swift.org open source project
+#
+#  Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
+#  Licensed under Apache License v2.0 with Runtime Library Exception
+#
+#  See https://swift.org/LICENSE.txt for license information
+#  See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
+#
+# ===---------------------------------------------------------------------===//
+
+import os
+import unittest
+from imp import load_source
+
+from test_utils import captured_output
+
+# `import Benchmark_Driver` doesn't work because the script lacks
+# the '.py' extension, so load it as a module manually instead.
+Benchmark_Driver = load_source(
+    'Benchmark_Driver', os.path.join(os.path.dirname(
+        os.path.abspath(__file__)), 'Benchmark_Driver'))
+# from Benchmark_Driver import parse_args
+parse_args = Benchmark_Driver.parse_args
+
+
+class Test_parse_args(unittest.TestCase):
+    def assert_contains(self, texts, output):
+        """Assert that every string in `texts` occurs in `output`."""
+        assert not isinstance(texts, str)  # a lone string would iterate per character
+        for text in texts:
+            self.assertIn(text, output)
+
+    def test_requires_command_argument(self):
+        with captured_output() as (_, err):
+            self.assertRaises(SystemExit, parse_args, [])
+        self.assert_contains(['usage:', 'COMMAND', 'too few arguments'],
+                             err.getvalue())
+
+    def test_command_help_lists_commands(self):
+        with captured_output() as (out, _):
+            self.assertRaises(SystemExit, parse_args, ['-h'])
+        self.assert_contains(['COMMAND', 'run', 'compare'],
+                             out.getvalue())
+
+    def test_run_benchmarks_by_name_or_ordinal(self):
+        benchmarks = ['AngryPhonebook', '42']
+        self.assertEqual(
+            parse_args(['run'] + benchmarks).benchmarks, benchmarks)
+
+    def test_run_benchmarks_matching_pattern(self):
+        regexes = ['Prefix', '.*Suffix.*']
+        filters = ['-f', regexes[0], '-f', regexes[1]]
+        self.assertEqual(parse_args(['run'] + filters).filters, regexes)
+
+    def test_run_benchmarks_and_filters_are_exclusive(self):
+        with captured_output() as (_, err):
+            self.assertRaises(SystemExit,
+                              parse_args, 'run -f Filter1 Benchmark1'.split())
+        self.assert_contains(
+            ['error',
+             'argument BENCHMARK: not allowed with argument -f/--filter'],
+            err.getvalue())
+
+    def test_tests_location(self):
+        here = os.path.dirname(os.path.abspath(__file__))
+        self.assertEqual(parse_args(['run']).tests, here)
+        tests = '/benchmarks/are/here'
+        self.assertEqual(parse_args(['run', '-t', tests]).tests, tests)
+
+    def test_optimization_argument(self):
+        self.assertEqual(parse_args(['run']).optimization, 'O')
+        self.assertEqual(
+            parse_args(['run', '-o', 'O']).optimization, 'O')
+        self.assertEqual(
+            parse_args(['run', '-o', 'Onone']).optimization, 'Onone')
+        self.assertEqual(
+            parse_args(['run', '-o', 'Osize']).optimization, 'Osize')
+
+        with captured_output() as (_, err):
+            self.assertRaises(SystemExit,
+                              parse_args, ['run', '-o', 'bogus'])
+        self.assert_contains(
+            ['error:',
+             "argument -o/--optimization: invalid choice: 'bogus'",
+             "(choose from 'O', 'Onone', 'Osize')"],
+            err.getvalue())
+
+
+if __name__ == '__main__':
+    unittest.main()
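
`captured_output` is imported from `test_utils`, which is not part of this diff. A minimal sketch of such a helper, assuming it is a context manager that swaps `sys.stdout`/`sys.stderr` for `StringIO` buffers and yields them as an `(out, err)` pair:

import sys
from contextlib import contextmanager
from StringIO import StringIO  # Python 2; use io.StringIO on Python 3


@contextmanager
def captured_output():
    # Swap in fresh buffers for the duration of the with-block.
    new_out, new_err = StringIO(), StringIO()
    old_out, old_err = sys.stdout, sys.stderr
    try:
        sys.stdout, sys.stderr = new_out, new_err
        yield sys.stdout, sys.stderr
    finally:
        # Always restore the real streams, even when the code under
        # test raises (e.g. the SystemExit from argparse errors).
        sys.stdout, sys.stderr = old_out, old_err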