Refactor awfy driver to add asmjs-ubench support.

David Anderson 2013-05-06 17:34:48 -07:00, committed by Mozilla
Parent 3880324227
Commit 5297bab098
4 changed files, 165 additions and 98 deletions

View file

@@ -7,6 +7,12 @@ updateURL = http://www.arewefastyet.com/????
 # for chromebooks:
 #flags = hardfp
 
+[native]
+cc = clang
+cxx = clang++
+options = -O2 -m32
+mode = clang
+
 [v8]
 source = v8
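The new [native] section is what the NativeCompiler class added below in builders.py reads through ConfigParser. A minimal sketch of that lookup, assuming the driver's awfy.config contains the section above:

    import ConfigParser  # Python 2 module name, as used by the driver

    config = ConfigParser.RawConfigParser()
    config.read('awfy.config')

    cc = config.get('native', 'cc')                    # "clang"
    cxx = config.get('native', 'cxx')                  # "clang++"
    args = config.get('native', 'options').split(' ')  # ['-O2', '-m32']
    mode = config.get('native', 'mode')                # mode label reported to AWFY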

View file

@@ -9,45 +9,91 @@ import ConfigParser
 import submitter
 import utils
 
-def ss_v8(shell, env, args):
-    return run_sunspider('SunSpider', 'v8-v7', 5, shell, env, args)
+class Benchmark(object):
+    def __init__(self, name, folder):
+        self.name = name
+        self.folder = folder
 
-def ss_SunSpider(shell, env, args):
-    return run_sunspider('SunSpider', 'sunspider-0.9.1', 20, shell, env, args)
+    def run(self, submit, native, modes):
+        with utils.chdir(self.folder):
+            return self._run(submit, native, modes)
 
-def ss_Kraken(shell, env, args):
-    return run_sunspider('kraken', 'kraken-1.1', 5, shell, env, args)
+    def _run(self, submit, native, modes):
+        for mode in modes:
+            try:
+                tests = None
+                print('Running ' + self.name + ' under ' + mode.shell + ' ' + ' '.join(mode.args))
+                tests = self.benchmark(mode.shell, mode.env, mode.args)
+            except:
+                print('Failed to run ' + self.name + '!')
+                pass
+            if tests:
+                submit.AddTests(tests, self.name, mode.name)
 
-def ss_Assorted(shell, env, args):
-    return run_sunspider('Assorted', 'assorted', 3, shell, env, args)
+class AsmJSMicro(Benchmark):
+    def __init__(self):
+        super(AsmJSMicro, self).__init__('asmjs-ubench', 'asmjs-ubench')
 
-def run_sunspider(folder, suite, runs, shell, env, args):
-    with utils.chdir(folder):
-        return _sunspider(suite, runs, shell, env, args)
+    def _run(self, submit, native, modes):
+        # Run the C++ mode.
+        full_args = ['python', 'harness.py', '--native']
+        full_args += ['--cc="' + native.cc + '"']
+        full_args += ['--cxx="' + native.cxx + '"']
+        full_args += ['--'] + native.args
+        print(' '.join(full_args))
 
-def v8_v8(shell, env, args):
-    with utils.chdir('v8-v7'):
-        return _v8(shell, env, args)
+        p = subprocess.Popen(full_args, stdout=subprocess.PIPE)
+        output = p.communicate()[0]
+        print(output)
+        tests = self.parse(output)
+        submit.AddTests(tests, self.name, native.mode)
 
-def v8_octane(shell, env, args):
-    with utils.chdir('octane'):
-        return _v8(shell, env, args)
+        # Run normal benchmarks.
+        super(AsmJSMicro, self)._run(submit, native, modes)
 
-def _v8(shell, env, args):
-    full_args = [shell]
-    if args:
-        full_args.extend(args)
-    full_args.append('run.js')
+    def benchmark(self, shell, env, args):
+        full_args = ['python', 'harness.py', shell, '--'] + args
+        print(' '.join(full_args))
+        p = subprocess.Popen(full_args, stdout=subprocess.PIPE, env=env)
+        output = p.communicate()[0]
+        return self.parse(output)
 
-    p = subprocess.Popen(full_args, stdout=subprocess.PIPE, env=env)
-    output = p.communicate()[0]
+    def parse(self, output):
+        total = 0.0
+        tests = []
+        for line in output.splitlines():
+            m = re.search("(.+) - (\d+(\.\d+)?)", line)
+            if not m:
+                continue
+            name = m.group(1)
+            score = m.group(2)
+            total += float(score)
+            tests.append({ 'name': name, 'time': score })
+        tests.append({ 'name': '__total__', 'time': total })
+        return tests
 
-    tests = []
-    lines = output.splitlines()
+class Octane(Benchmark):
+    def __init__(self):
+        super(Octane, self).__init__('octane', 'octane')
 
-    for x in lines:
-        m = re.search("(.+): (\d+)", x)
-        if m != None:
+    def benchmark(self, shell, env, args):
+        full_args = [shell]
+        if args:
+            full_args.extend(args)
+        full_args.append('run.js')
+        print(os.getcwd())
+        p = subprocess.Popen(full_args, stdout=subprocess.PIPE, env=env)
+        output = p.communicate()[0]
+        tests = []
+        lines = output.splitlines()
+        for x in lines:
+            m = re.search("(.+): (\d+)", x)
+            if not m:
+                continue
             name = m.group(1)
             score = m.group(2)
             if name[0:5] == "Score":
@@ -55,69 +101,59 @@ def _v8(shell, env, args):
             tests.append({ 'name': name, 'time': score})
             print(score + ' - ' + name)
-    return tests
+        return tests
 
-def _sunspider(suite, runs, shell, env, args):
-    if args != None:
-        args = '--args=' + ' '.join(args)
-    else:
-        args = ''
+class SunSpider(Benchmark):
+    def __init__(self, name, folder, suite, runs):
+        super(SunSpider, self).__init__(name, folder)
+        self.suite = suite
+        self.runs = runs
 
-    if suite == "assorted":
-        p = subprocess.Popen(["hg", "pull", "-u"], stdout=subprocess.PIPE)
-        p.communicate()
+    def benchmark(self, shell, env, args):
+        if args != None:
+            args = '--args=' + ' '.join(args)
+        else:
+            args = ''
 
-    p = subprocess.Popen(["./sunspider",
-                          "--shell=" + shell,
-                          "--runs=" + str(runs),
-                          "--suite=" + suite,
-                          args],
-                          stdout=subprocess.PIPE,
-                          env=env)
-    output = p.communicate()[0]
+        if self.suite == "assorted":
+            p = subprocess.Popen(["hg", "pull", "-u"], stdout=subprocess.PIPE)
+            p.communicate()
 
-    tests = []
+        p = subprocess.Popen(["./sunspider",
+                              "--shell=" + shell,
+                              "--runs=" + str(self.runs),
+                              "--suite=" + self.suite,
+                              args],
+                              stdout=subprocess.PIPE,
+                              env=env)
+        output = p.communicate()[0]
 
-    lines = output.splitlines()
-    found = False
-    for x in lines:
-        if x == "--------------------------------------------" or \
-           x == "-----------------------------------------------":
-            found = True
-        if x[0:5] == "Total":
-            m = re.search(":\s+(\d+\.\d+)ms", x)
-            tests.append({ 'name': '__total__', 'time': m.group(1)})
-            print(m.group(1) + ' - __total__')
-        elif found == True and x[0:4] == "    ":
-            m = re.search("    (.+):\s+(\d+\.\d+)ms", x)
-            if m != None:
-                tests.append({ 'name': m.group(1), 'time': m.group(2)})
-                print(m.group(2) + ' - ' + m.group(1))
+        tests = []
 
-    if found == False:
-        raise Exception("output marker not found")
+        lines = output.splitlines()
+        found = False
+        for x in lines:
+            if x == "--------------------------------------------" or \
+               x == "-----------------------------------------------":
+                found = True
+            if x[0:5] == "Total":
+                m = re.search(":\s+(\d+\.\d+)ms", x)
+                tests.append({ 'name': '__total__', 'time': m.group(1)})
+                print(m.group(1) + ' - __total__')
+            elif found == True and x[0:4] == "    ":
+                m = re.search("    (.+):\s+(\d+\.\d+)ms", x)
+                if m != None:
+                    tests.append({ 'name': m.group(1), 'time': m.group(2)})
+                    print(m.group(2) + ' - ' + m.group(1))
 
-    return tests
+        if found == False:
+            print(output)
+            raise Exception("output marker not found")
 
-Benchmarks = {
-    'v8real': v8_v8,
-    'kraken': ss_Kraken,
-    'ss': ss_SunSpider,
-    #'v8': ss_v8,
-    'misc': ss_Assorted,
-    'octane': v8_octane
-}
-
-def RunAndSubmitAll(shell, env, args, submitter, mode):
-    for suite in Benchmarks:
-        args_ = args
-        if not args:
-            args_ = []
-        print('Running ' + suite + ' under ' + shell + ' ' + ' '.join(args_))
-        fun = Benchmarks[suite]
-        try:
-            tests = fun(shell, env, args)
-            submitter.AddTests(tests, suite, mode)
-        except:
-            pass
+        return tests
 
+#Benchmarks = [SunSpider('ss', 'SunSpider', 'sunspider-0.9.1', 20),
+#              SunSpider('kraken', 'kraken', 'kraken-1.1', 5),
+#              SunSpider('misc', 'Assorted', 'assorted', 3),
+#              AsmJSMicro()]
+Benchmarks = [AsmJSMicro()]
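For orientation, AsmJSMicro.parse() expects the asmjs-ubench harness to print one "name - time" pair per line and folds them into a result list with a synthetic __total__ entry. A small sketch on made-up harness output (the benchmark names below are placeholders, not real asmjs-ubench tests):

    # Hypothetical harness output; real names come from asmjs-ubench itself.
    sample = "copy - 120.5\nprimes - 98\n"

    tests = AsmJSMicro().parse(sample)
    # -> [{'name': 'copy', 'time': '120.5'},
    #     {'name': 'primes', 'time': '98'},
    #     {'name': '__total__', 'time': 218.5}]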

View file

@@ -173,13 +173,9 @@ class MozillaInbound(Mozilla):
     def __init__(self, conf):
         super(MozillaInbound, self).__init__(conf, 'mi')
         self.modes = [
-            {
-                'mode': 'ti',
-                'args': ['-m', '-n', '--no-ion']
-            },
             {
                 'mode': 'jmim',
-                'args': ['--ion', '-m', '-n', '--ion-parallel-compile=on']
+                'args': ['--ion-parallel-compile=on', '--no-jm']
             }
         ]
@@ -192,3 +188,15 @@ class MozillaBaselineCompiler(Mozilla):
                 'args': ['--ion', '--no-jm', '-n', '--ion-parallel-compile=on']
             }
         ]
+
+class NativeCompiler(Engine):
+    def __init__(self, conf):
+        super(NativeCompiler, self).__init__(conf)
+        self.cc = conf.get('native', 'cc')
+        self.cxx = conf.get('native', 'cxx')
+        self.args = conf.get('native', 'options').split(' ')
+        self.mode = conf.get('native', 'mode')
+
+        output = Run([self.cxx, '--version'])
+        self.signature = output.splitlines()[0].strip()
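NativeCompiler identifies the toolchain by the first line of `cxx --version`. Run is the driver's own helper; assuming it simply captures stdout, the probe amounts to something like:

    import subprocess

    # Rough equivalent of Run([cxx, '--version']): capture the tool's stdout.
    p = subprocess.Popen(['clang++', '--version'], stdout=subprocess.PIPE)
    output = p.communicate()[0]
    signature = output.splitlines()[0].strip()  # e.g. "clang version 3.3 (...)"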

View file

@@ -5,8 +5,9 @@ import submitter
 import builders
 import sys
 import resource
-import benchmark
+from benchmark import Benchmarks
 import utils
+from collections import namedtuple
 
 config = ConfigParser.RawConfigParser()
 config.read('awfy.config')
@@ -18,7 +19,6 @@ resource.setrlimit(resource.RLIMIT_DATA, (-1, -1))
 KnownEngines = [
     builders.V8(config),
     builders.MozillaInbound(config),
-    builders.MozillaBaselineCompiler(config),
     builders.Nitro(config)
 ]
 Engines = []
@@ -40,8 +40,14 @@ for e in KnownEngines:
 if NumUpdated == 0:
     sys.exit(0)
 
-submit = submitter.Submitter(config)
-submit.Start()
+# The native compiler is a special thing, for now.
+native = builders.NativeCompiler(config)
+
+# A mode is a configuration of an engine we just built.
+Mode = namedtuple('Mode', ['shell', 'args', 'env', 'name', 'cset'])
+
+# Make a list of all modes.
+modes = []
 
 for entry in Engines:
     e = entry[0]
     cset = entry[1]
@@ -57,9 +63,20 @@ for entry in Engines:
         elif m['args']:
             args = list(m['args'])
         else:
-            args = None
-
-        submit.AddEngine(m['mode'], cset)
-        benchmark.RunAndSubmitAll(shell, env, args, submit, m['mode'])
+            args = []
+
+        mode = Mode(shell, args, env, m['mode'], cset)
+        modes.append(mode)
+
+# Inform AWFY of each mode we found.
+submit = submitter.Submitter(config)
+submit.Start()
+for mode in modes:
+    submit.AddEngine(mode.name, mode.cset)
+submit.AddEngine(native.mode, native.signature)
+
+# Run through each benchmark.
+for benchmark in Benchmarks:
+    benchmark.run(submit, native, modes)
 
 submit.Finish(1)
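After the refactor, every built engine/flag combination becomes a Mode record and each Benchmark subclass receives the whole list, so a new suite only has to implement benchmark() and parse(). A condensed sketch of how a Mode is built and inspected (the shell path and changeset below are placeholders, not values from this commit):

    import os
    from collections import namedtuple

    Mode = namedtuple('Mode', ['shell', 'args', 'env', 'name', 'cset'])

    # Placeholder values; the real driver fills these in from the builders.
    mode = Mode(shell='/path/to/js',
                args=['--ion-parallel-compile=on', '--no-jm'],
                env=os.environ.copy(),
                name='jmim',
                cset='0123456789ab')

    print(mode.name + ': ' + mode.shell + ' ' + ' '.join(mode.args))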