add lua-binarytrees benchmark

Parent: 8e73154fd9
Commit: 5b2691eca1

tests/lua/binarytrees.lua (new file)
@@ -0,0 +1,50 @@
+-- The Computer Language Benchmarks Game
+-- http://benchmarksgame.alioth.debian.org/
+-- contributed by Mike Pall
+
+local function BottomUpTree(item, depth)
+  if depth > 0 then
+    local i = item + item
+    depth = depth - 1
+    local left, right = BottomUpTree(i-1, depth), BottomUpTree(i, depth)
+    return { item, left, right }
+  else
+    return { item }
+  end
+end
+
+local function ItemCheck(tree)
+  if tree[2] then
+    return tree[1] + ItemCheck(tree[2]) - ItemCheck(tree[3])
+  else
+    return tree[1]
+  end
+end
+
+local N = tonumber(arg and arg[1]) or 0
+local mindepth = 4
+local maxdepth = mindepth + 2
+if maxdepth < N then maxdepth = N end
+
+do
+  local stretchdepth = maxdepth + 1
+  local stretchtree = BottomUpTree(0, stretchdepth)
+  io.write(string.format("stretch tree of depth %d\t check: %d\n",
+    stretchdepth, ItemCheck(stretchtree)))
+end
+
+local longlivedtree = BottomUpTree(0, maxdepth)
+
+for depth=mindepth,maxdepth,2 do
+  local iterations = 2 ^ (maxdepth - depth + mindepth)
+  local check = 0
+  for i=1,iterations do
+    check = check + ItemCheck(BottomUpTree(1, depth)) +
+            ItemCheck(BottomUpTree(-1, depth))
+  end
+  io.write(string.format("%d\t trees of depth %d\t check: %d\n",
+    iterations*2, depth, check))
+end
+
+io.write(string.format("long lived tree of depth %d\t check: %d\n",
+  maxdepth, ItemCheck(longlivedtree)))
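As a quick sanity check on the tree arithmetic (derived from the code above, not output recorded in the commit): ItemCheck(BottomUpTree(i, d)) evaluates to i when d == 0 and to i - 1 for any d >= 1, so each iteration of the inner loop adds (1 - 1) + (-1 - 1) = -2 to check. Running the script under a standalone interpreter as `lua binarytrees.lua 4` (mindepth 4, maxdepth 6, stretch depth 7) should therefore print:

    stretch tree of depth 7     check: -1
    128     trees of depth 4    check: -128
    32      trees of depth 6    check: -32
    long lived tree of depth 6  check: -1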
@@ -12596,6 +12596,15 @@ elif 'benchmark' in str(sys.argv):
   TEST_REPS = 2
   TOTAL_TESTS = 8
 
+  # standard arguments for timing:
+  # 0: no runtime, just startup
+  # 1: very little runtime
+  # 2: 0.5 seconds
+  # 3: 1 second
+  # 4: 5 seconds
+  # 5: 10 seconds
+  DEFAULT_ARG = '4'
+
   tests_done = 0
   total_times = map(lambda x: 0., range(TOTAL_TESTS))
   total_native_times = map(lambda x: 0., range(TOTAL_TESTS))
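A side note on the totals above (an observation about the existing Python 2 code, not something this commit changes): map() returns a real list in Python 2, so total_times and total_native_times can presumably be indexed and updated in place as tests run. Under Python 3, where map() returns an iterator, the equivalent would be a list comprehension:

    total_times = [0. for _ in range(TOTAL_TESTS)]         # a list in both Python 2 and 3
    total_native_times = [0. for _ in range(TOTAL_TESTS)]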
@@ -12633,15 +12642,9 @@ elif 'benchmark' in str(sys.argv):
     print ' JavaScript: mean: %.3f (+-%.3f) secs median: %.3f range: %.3f-%.3f (noise: %3.3f%%) (%d runs)' % (mean, std, median, min(times), max(times), 100*std/mean, reps)
     print ' Native : mean: %.3f (+-%.3f) secs median: %.3f range: %.3f-%.3f (noise: %3.3f%%) JS is %.2f X slower' % (mean_native, std_native, median_native, min(native_times), max(native_times), 100*std_native/mean_native, final)
 
-  def do_benchmark(self, name, src, expected_output='FAIL', args=[], emcc_args=[], native_args=[], shared_args=[], force_c=False, reps=TEST_REPS, native_exec=None, output_parser=None):
-    # standard arguments for timing:
-    # 0: no runtime, just startup
-    # 1: very little runtime
-    # 2: 0.5 seconds
-    # 3: 1 second
-    # 4: 5 seconds
-    # 5: 10 seconds
-    args = args or ['4']
+  def do_benchmark(self, name, src, expected_output='FAIL', args=[], emcc_args=[], native_args=[], shared_args=[], force_c=False, reps=TEST_REPS, native_exec=None, output_parser=None, args_processor=None):
+    args = args or [DEFAULT_ARG]
+    if args_processor: args = args_processor(args)
 
     dirname = self.get_dir()
     filename = os.path.join(dirname, name + '.c' + ('' if force_c else 'pp'))
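To make the new argument flow concrete, here is a minimal standalone sketch of how an explicit args list, the DEFAULT_ARG fallback, and args_processor compose (resolve_args is a hypothetical name; its two-line body mirrors the lines added above):

    DEFAULT_ARG = '4'

    def resolve_args(args=None, args_processor=None):
      args = args or [DEFAULT_ARG]                    # missing or empty args fall back to the default
      if args_processor: args = args_processor(args)  # a processor may append or rewrite arguments
      return args

    print(resolve_args())                                              # ['4']
    print(resolve_args(['scimark.lua']))                               # ['scimark.lua']
    print(resolve_args(['binarytrees.lua'], lambda a: a + ['14.72']))  # ['binarytrees.lua', '14.72']

Because the fallback is a truthiness test, an explicitly passed empty list also selects [DEFAULT_ARG].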
@@ -12962,26 +12965,46 @@ elif 'benchmark' in str(sys.argv):
     src = open(path_from_root('tests', 'life.c'), 'r').read()
     self.do_benchmark('life', src, '''--------------------------------''', shared_args=['-std=c99'], force_c=True)
 
-  def test_nbody_java(self): # tests xmlvm compiled java, including bitcasts of doubles, i64 math, etc.
+  def test_java_nbody(self): # tests xmlvm compiled java, including bitcasts of doubles, i64 math, etc.
     args = [path_from_root('tests', 'nbody-java', x) for x in os.listdir(path_from_root('tests', 'nbody-java')) if x.endswith('.c')] + \
            ['-I' + path_from_root('tests', 'nbody-java')]
     self.do_benchmark('nbody_java', '', '''Time(s)''',
                       force_c=True, emcc_args=args + ['-s', 'PRECISE_I64_MATH=1', '--llvm-lto', '0'], native_args=args + ['-lgc', '-std=c99', '-target', 'x86_64-pc-linux-gnu', '-lm'])
 
-  def test_lua(self):
-    shutil.copyfile(path_from_root('tests', 'lua', 'scimark.lua'), 'scimark.lua')
+  def lua(self, benchmark, expected, output_parser=None, args_processor=None):
+    shutil.copyfile(path_from_root('tests', 'lua', benchmark), benchmark)
     emcc_args = self.get_library('lua', [os.path.join('src', 'lua'), os.path.join('src', 'liblua.a')], make=['make', 'generic'], configure=None) + \
-                ['--embed-file', 'scimark.lua']
+                ['--embed-file', benchmark]
     shutil.copyfile(emcc_args[0], emcc_args[0] + '.bc')
     emcc_args[0] += '.bc'
     native_args = self.get_library('lua_native', [os.path.join('src', 'lua'), os.path.join('src', 'liblua.a')], make=['make', 'generic'], configure=None, native=True)
 
-    def parser(output):
-      return 1.0/float(re.search('\nSciMark +([\d\.]+) ', output).group(1))
-
-    self.do_benchmark('lua', '', '''[small problem sizes]''',
-                      force_c=True, args=['scimark.lua'], emcc_args=emcc_args, native_args=native_args, native_exec=os.path.join('building', 'lua_native', 'src', 'lua'),
-                      output_parser=parser)
+    self.do_benchmark('lua', '', expected,
+                      force_c=True, args=[benchmark], emcc_args=emcc_args, native_args=native_args, native_exec=os.path.join('building', 'lua_native', 'src', 'lua'),
+                      output_parser=output_parser, args_processor=args_processor)
+
+  def test_lua_scimark(self):
+    def output_parser(output):
+      return 1.0/float(re.search('\nSciMark +([\d\.]+) ', output).group(1))
+
+    self.lua('scimark.lua', '[small problem sizes]', output_parser=output_parser)
+
+  def test_lua_binarytrees(self):
+    def args_processor(args):
+      arg = int(DEFAULT_ARG)
+      if arg == 0:
+        return args + ['0']
+      elif arg == 1:
+        return args + ['9.5']
+      elif arg == 2:
+        return args + ['11.99']
+      elif arg == 3:
+        return args + ['12.85']
+      elif arg == 4:
+        return args + ['14.72']
+      elif arg == 5:
+        return args + ['15.82']
+    self.lua('binarytrees.lua', 'long lived tree of depth', args_processor=args_processor)
 
   def test_zlib(self):
     src = open(path_from_root('tests', 'zlib', 'benchmark.c'), 'r').read()
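Why test_lua_scimark returns a reciprocal: the harness's statistics treat the parsed value like a runtime, where smaller is better, but SciMark reports a higher-is-better score, so the parser inverts it. A quick check with a made-up output line (only the regular expression comes from the commit; the sample text is hypothetical):

    import re

    sample = 'Lua SciMark results\nSciMark 24.22 [composite score]\n'  # hypothetical output
    score = float(re.search('\nSciMark +([\d\.]+) ', sample).group(1))
    print(1.0 / score)  # ~0.0413 -- lower now means faster, like a time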
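Why the binarytrees arguments are fractional strings like '14.72' rather than whole depths (an inference from binarytrees.lua above, not stated in the commit): the script reads N with tonumber() and never rounds it, and Lua's numeric for accepts fractional limits, so a fractional N scales the per-depth iteration count 2^(maxdepth - depth + mindepth) continuously and slightly deepens the stretch and long-lived trees. That makes total runtime a nearly continuous function of the argument, which is what the 0-5 timing scale needs. The arithmetic, checked in Python:

    # iterations at depth 4 with mindepth 4, for two nearby arguments
    print(2 ** (14.72 - 4 + 4))  # about 2.7e4 -- Lua's inner loop runs the floor of this
    print(2 ** (14 - 4 + 4))     # 16384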