'''
Created on May 19, 2011

@author: bungeman
'''
import re
import math

# bench representation algorithm constant names
ALGORITHM_AVERAGE = 'avg'
ALGORITHM_MEDIAN = 'med'
ALGORITHM_MINIMUM = 'min'
ALGORITHM_25TH_PERCENTILE = '25th'

# Regular expressions used throughout. Raw string literals keep the regex
# backslash escapes (\s, \d, ...) out of Python's string-escape processing.
PER_SETTING_RE = r'([^\s=]+)(?:=(\S+))?'
SETTINGS_RE = r'skia bench:((?:\s+' + PER_SETTING_RE + ')*)'
BENCH_RE = r'running bench (?:\[\d+ \d+\] )?\s*(\S+)'
TIME_RE = r'(?:(\w*)msecs = )?\s*((?:\d+\.\d+)(?:,\d+\.\d+)*)'
# non-per-tile benches have configs that don't end with ']' or '>'
CONFIG_RE = r'(\S+[^\]>]): ((?:' + TIME_RE + r'\s+)+)'
# per-tile bench lines are in the following format. Note that there are
# non-averaged bench numbers in separate lines, which we ignore now due to
# their inaccuracy.
TILE_RE = (r' tile_(\S+): tile \[\d+,\d+\] out of \[\d+,\d+\] <averaged>:'
           ' ((?:' + TIME_RE + r'\s+)+)')
# for extracting tile layout
TILE_LAYOUT_RE = r' out of \[(\d+),(\d+)\] <averaged>: '

PER_SETTING_RE_COMPILED = re.compile(PER_SETTING_RE)
SETTINGS_RE_COMPILED = re.compile(SETTINGS_RE)
BENCH_RE_COMPILED = re.compile(BENCH_RE)
TIME_RE_COMPILED = re.compile(TIME_RE)
CONFIG_RE_COMPILED = re.compile(CONFIG_RE)
TILE_RE_COMPILED = re.compile(TILE_RE)
TILE_LAYOUT_RE_COMPILED = re.compile(TILE_LAYOUT_RE)
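
# Illustrative sketch (an addition, not part of the original tooling):
# hypothetical lines shaped to fit the patterns above, to make the expected
# input format concrete. The literal strings are assumptions, not verbatim
# Skia bench output.
def _regex_format_examples():
    assert SETTINGS_RE_COMPILED.search('skia bench: alpha=0x80 antialias=1')
    assert BENCH_RE_COMPILED.search('running bench [640 480] rects')
    # a config line: "<config>: <type>msecs = <comma-separated float times>"
    assert CONFIG_RE_COMPILED.search('  8888: cmsecs =  9.10  gmsecs =  9.20  ')
    # a per-tile line also records the tile position and the tile grid size
    assert TILE_RE_COMPILED.search(
        ' tile_256x256: tile [0,0] out of [4,3] <averaged>: cmsecs =  1.25  ')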

class BenchDataPoint:
    """A single data point produced by bench.

    (str, str, str, float, {str:str}, str, [floats])"""
    def __init__(self, bench, config, time_type, time, settings,
                 tile_layout='', per_tile_values=None):
        if per_tile_values is None:  # avoid a shared mutable default list
            per_tile_values = []
        self.bench = bench
        self.config = config
        self.time_type = time_type
        self.time = time
        self.settings = settings
        # how tiles cover the whole picture. '5x3' means 5 columns and 3 rows.
        self.tile_layout = tile_layout
        # list of per_tile bench values, if applicable
        self.per_tile_values = per_tile_values

    def __repr__(self):
        return "BenchDataPoint(%s, %s, %s, %s, %s)" % (
                   str(self.bench),
                   str(self.config),
                   str(self.time_type),
                   str(self.time),
                   str(self.settings),
               )

class _ExtremeType(object):
    """Instances of this class compare greater or less than other objects."""
    def __init__(self, cmpr, rep):
        object.__init__(self)
        self._cmpr = cmpr
        self._rep = rep

    def __cmp__(self, other):
        if isinstance(other, self.__class__) and other._cmpr == self._cmpr:
            return 0
        return self._cmpr

    def __repr__(self):
        return self._rep

Max = _ExtremeType(1, "Max")
Min = _ExtremeType(-1, "Min")
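
# Illustrative sketch (an addition): under Python 2's __cmp__ protocol the
# sentinels above compare beyond any number, which is how LinearRegression
# below seeds its running min_x/max_x before scanning the points.
def _extreme_examples():
    assert max(Min, 5) == 5  # Min compares less than everything else
    assert min(Max, 5) == 5  # Max compares greater than everything else
    assert Max == _ExtremeType(1, "Max")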

class _ListAlgorithm(object):
    """Algorithm for selecting the representation value from a given list.

    representation is one of the ALGORITHM_XXX representation types."""
    def __init__(self, data, representation=None):
        if not representation:
            representation = ALGORITHM_AVERAGE  # default algorithm
        self._data = data
        self._len = len(data)
        if representation == ALGORITHM_AVERAGE:
            self._rep = sum(self._data) / self._len
        else:
            self._data.sort()
            if representation == ALGORITHM_MINIMUM:
                self._rep = self._data[0]
            else:
                # for percentiles, we use the value below which x% of values
                # are found, which allows for better detection of quantum
                # behaviors.
                if representation == ALGORITHM_MEDIAN:
                    x = int(round(0.5 * self._len + 0.5))
                elif representation == ALGORITHM_25TH_PERCENTILE:
                    x = int(round(0.25 * self._len + 0.5))
                else:
                    raise Exception("invalid representation algorithm %s!" %
                                    representation)
                self._rep = self._data[x - 1]

    def compute(self):
        return self._rep
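
# Illustrative sketch (an addition): picking a single representative value
# from a list of per-iteration times with each supported algorithm. A copy is
# passed because _ListAlgorithm sorts its input in place.
def _list_algorithm_examples():
    times = [5.0, 1.0, 4.0, 2.0, 3.0]
    assert _ListAlgorithm(list(times)).compute() == 3.0  # average is default
    assert _ListAlgorithm(list(times), ALGORITHM_MINIMUM).compute() == 1.0
    assert _ListAlgorithm(list(times), ALGORITHM_MEDIAN).compute() == 3.0
    assert _ListAlgorithm(list(times), ALGORITHM_25TH_PERCENTILE).compute() == 2.0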

def _ParseAndStoreTimes(config_re_compiled, is_per_tile, line, bench,
                        value_dic, layout_dic, representation=None):
    """Parses a bench time line with the given regex and adds data to value_dic.

    config_re_compiled: precompiled regular expression for parsing the config
        line.
    is_per_tile: boolean indicating whether this is a per-tile bench.
        If so, we add tile layout into layout_dic as well.
    line: input string line to parse.
    bench: name of bench for the time values.
    value_dic: dictionary to store bench values. See bench_dic in parse() below.
    layout_dic: dictionary to store tile layouts. See parse() for descriptions.
    representation: should match one of the ALGORITHM_XXX types."""

    for config in config_re_compiled.finditer(line):
        current_config = config.group(1)
        tile_layout = ''
        if is_per_tile:  # per-tile bench, add name prefix
            current_config = 'tile_' + current_config
            layouts = TILE_LAYOUT_RE_COMPILED.search(line)
            if layouts and len(layouts.groups()) == 2:
                tile_layout = '%sx%s' % layouts.groups()
        times = config.group(2)
        for new_time in TIME_RE_COMPILED.finditer(times):
            current_time_type = new_time.group(1)
            iters = [float(i) for i in
                     new_time.group(2).strip().split(',')]
            value_dic.setdefault(bench, {}).setdefault(
                current_config, {}).setdefault(current_time_type, []).append(
                    _ListAlgorithm(iters, representation).compute())
            layout_dic.setdefault(bench, {}).setdefault(
                current_config, {}).setdefault(current_time_type, tile_layout)

def parse(settings, lines, representation=None):
    """Parses bench output into a useful data structure.

    ({str:str}, __iter__ -> str) -> [BenchDataPoint]
    representation is one of the ALGORITHM_XXX types."""

    benches = []
    current_bench = None
    bench_dic = {}  # [bench][config][time_type] -> [list of bench values]
    # [bench][config][time_type] -> tile_layout
    layout_dic = {}

    for line in lines:

        # see if this line is a settings line
        settingsMatch = SETTINGS_RE_COMPILED.search(line)
        if settingsMatch:
            settings = dict(settings)
            for settingMatch in PER_SETTING_RE_COMPILED.finditer(settingsMatch.group(1)):
                if settingMatch.group(2):
                    settings[settingMatch.group(1)] = settingMatch.group(2)
                else:
                    settings[settingMatch.group(1)] = True

        # see if this line starts a new bench
        new_bench = BENCH_RE_COMPILED.search(line)
        if new_bench:
            current_bench = new_bench.group(1)

        # add configs on this line to the bench_dic
        if current_bench:
            if line.startswith(' tile_'):
                _ParseAndStoreTimes(TILE_RE_COMPILED, True, line, current_bench,
                                    bench_dic, layout_dic, representation)
            else:
                _ParseAndStoreTimes(CONFIG_RE_COMPILED, False, line,
                                    current_bench,
                                    bench_dic, layout_dic, representation)

    # append benches to the list, using the total time as the final bench value
    for bench in bench_dic:
        for config in bench_dic[bench]:
            for time_type in bench_dic[bench][config]:
                tile_layout = ''
                per_tile_values = []
                if len(bench_dic[bench][config][time_type]) > 1:
                    # per-tile values, extract tile_layout
                    per_tile_values = bench_dic[bench][config][time_type]
                    tile_layout = layout_dic[bench][config][time_type]
                benches.append(BenchDataPoint(
                    bench,
                    config,
                    time_type,
                    sum(bench_dic[bench][config][time_type]),
                    settings,
                    tile_layout,
                    per_tile_values))

    return benches
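
# Illustrative sketch (an addition): driving parse() end to end. The input
# lines are hypothetical, shaped to fit the regexes at the top of this file
# rather than copied from real Skia bench output.
def _parse_example():
    lines = [
        'skia bench: alpha=0x80 antialias=1\n',
        'running bench [640 480] rects\n',
        '  8888: cmsecs =  9.10  gmsecs =  9.20  \n',
    ]
    points = parse({}, lines, ALGORITHM_MINIMUM)
    assert sorted((p.bench, p.config, p.time_type, p.time) for p in points) == \
           [('rects', '8888', 'c', 9.1), ('rects', '8888', 'g', 9.2)]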

class LinearRegression:
    """Linear regression data based on a set of data points.

    ([(Number,Number)])
    There must be at least two points for this to make sense."""
    def __init__(self, points):
        n = len(points)
        max_x = Min
        min_x = Max

        Sx = 0.0
        Sy = 0.0
        Sxx = 0.0
        Sxy = 0.0
        Syy = 0.0
        for point in points:
            x = point[0]
            y = point[1]
            max_x = max(max_x, x)
            min_x = min(min_x, x)

            Sx += x
            Sy += y
            Sxx += x*x
            Sxy += x*y
            Syy += y*y

        denom = n*Sxx - Sx*Sx
        if denom != 0.0:
            B = (n*Sxy - Sx*Sy) / denom
        else:
            B = 0.0
        a = (1.0/n)*(Sy - B*Sx)

        se2 = 0
        sB2 = 0
        sa2 = 0
        if n >= 3 and denom != 0.0:
            se2 = (1.0/(n*(n-2)) * (n*Syy - Sy*Sy - B*B*denom))
            sB2 = (n*se2) / denom
            sa2 = sB2 * (1.0/n) * Sxx

        self.slope = B
        self.intercept = a
        self.serror = math.sqrt(max(0, se2))
        self.serror_slope = math.sqrt(max(0, sB2))
        self.serror_intercept = math.sqrt(max(0, sa2))
        self.max_x = max_x
        self.min_x = min_x

    def __repr__(self):
        return "LinearRegression(%s, %s, %s, %s, %s)" % (
                   str(self.slope),
                   str(self.intercept),
                   str(self.serror),
                   str(self.serror_slope),
                   str(self.serror_intercept),
               )

    def find_min_slope(self):
        """Finds the minimal slope given one standard deviation."""
        slope = self.slope
        intercept = self.intercept
        error = self.serror
        regr_start = self.min_x
        regr_end = self.max_x
        regr_width = regr_end - regr_start

        if slope < 0:
            lower_left_y = slope*regr_start + intercept - error
            upper_right_y = slope*regr_end + intercept + error
            return min(0, (upper_right_y - lower_left_y) / regr_width)

        elif slope > 0:
            upper_left_y = slope*regr_start + intercept + error
            lower_right_y = slope*regr_end + intercept - error
            return max(0, (lower_right_y - upper_left_y) / regr_width)

        return 0
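
# Illustrative sketch (an addition): a regression over hypothetical, perfectly
# linear points recovers the slope and intercept with zero standard error, so
# find_min_slope() returns the slope itself.
def _linear_regression_example():
    regr = LinearRegression([(1.0, 3.0), (2.0, 5.0), (3.0, 7.0)])
    assert abs(regr.slope - 2.0) < 1e-9
    assert abs(regr.intercept - 1.0) < 1e-9
    assert abs(regr.find_min_slope() - 2.0) < 1e-9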

def CreateRevisionLink(revision_number):
    """Returns HTML displaying the given revision number and linking to
    that revision's change page at code.google.com, e.g.
    http://code.google.com/p/skia/source/detail?r=2056
    """
    return '<a href="http://code.google.com/p/skia/source/detail?r=%s">%s</a>'%(
        revision_number, revision_number)

def main():
    foo = [[0.0, 0.0], [0.0, 1.0], [0.0, 2.0], [0.0, 3.0]]
    LinearRegression(foo)

if __name__ == "__main__":
    main()