Properly start and exit the proxy server before running the benchmarks;
Parent: 044a9924de
Commit: dcdf2ba2e3
@@ -1,15 +1,14 @@
-import benchmarks_local
-import benchmarks_remote
-import benchmarks_shell
-
 def getBenchmark(benchmark):
     section, name = benchmark.split(".")
     if section == "local":
+        import benchmarks_local
         return benchmarks_local.getBenchmark(name)
     elif section == "remote":
+        import benchmarks_remote
         return benchmarks_remote.getBenchmark(name)
     elif section == "shell":
+        import benchmarks_shell
         return benchmarks_shell.getBenchmark(name)
     else:
         raise Exception("Unknown benchmark type")
 
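Note on the hunk above: with the imports moved into the dispatch branches, importing this dispatcher module no longer executes benchmarks_local, benchmarks_remote, or benchmarks_shell at load time; their module-level server-probing code (removed in the next two hunks) previously ran as an import side effect. A self-contained sketch of the deferred-import behavior, using a standard-library module as a stand-in:

    # Hypothetical demo, not code from this repo.
    import sys

    def use_decimal(value):
        # This import statement executes on the first call, not when
        # the enclosing module loads, so any top-level side effects of
        # the imported module are deferred until the value is needed.
        import decimal
        return decimal.Decimal(value)

    print("decimal" in sys.modules)  # usually False at this point
    use_decimal("1.5")
    print("decimal" in sys.modules)  # True: the lazy import ran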
@@ -125,10 +125,3 @@ def getBenchmark(name):
     if name == "unity-webgl":
         return UnityWebGL()
     raise Exception("Unknown benchmark")
-
-# Test if server is running and start server if needed.
-s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-result = s.connect_ex(("localhost", 8000))
-s.close()
-if result > 0:
-    subprocess.Popen(["python", "server.py"])
@@ -463,10 +463,3 @@ def getBenchmark(name):
         if name == b.name():
             return b()
     raise Exception("Unknown benchmark")
-
-# Test if server is running and start server if needed.
-s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-result = s.connect_ex(("localhost", 8000))
-s.close()
-if result > 0:
-    subprocess.Popen(["python", "server.py"])
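The previous two hunks delete the same module-level block from the local and remote benchmark lists: a TCP probe of localhost:8000 followed by an untracked subprocess.Popen. socket.connect_ex returns 0 when a listener accepts the connection and a nonzero errno otherwise, so result > 0 meant no server was running yet. A sketch of that probe-and-spawn idiom (the helper name spawn_server_if_absent is illustrative):

    import socket
    import subprocess

    def spawn_server_if_absent(port=8000):
        # connect_ex is connect() that reports an errno instead of
        # raising; 0 means something already listens on the port.
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        result = probe.connect_ex(("localhost", port))
        probe.close()
        if result != 0:
            # The handle is dropped by the caller, so the child outlives
            # the benchmark run: the leak this commit fixes by making the
            # driver own the server process instead.
            return subprocess.Popen(["python", "server.py"])
        return None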
@@ -1,6 +1,7 @@
 #!/usr/bin/env python2
 
 import json
+import subprocess
 import sys
 import traceback
 
@@ -101,31 +102,46 @@ for engine_path in options.engines:
         print('Exception: ' + repr(e))
         traceback.print_exc(file=sys.stdout)
 
-# Run every benchmark for every build and config
-benchmarks = [benchmarks.getBenchmark(i) for i in options.benchmarks]
-for benchmark in benchmarks:
-    for engine_path in engines:
-        info = engineInfo.getInfo(engine_path)
-        executor = executors.getExecutor(info)
-
-        for config_name in options.configs:
-            config = configs.getConfig(config_name, info)
-            if config.omit():
-                continue
-
-            try:
-                results = executor.run(benchmark, config)
-                if not results:
-                    continue
-            except Exception as e:
-                print('Failed to run ' + engine_path + ' - ' + benchmark.version + ' - ' + config_name + '!')
-                print('Exception: ' + repr(e))
-                import traceback
-                traceback.print_exc()
-                continue
-
-            mode = submitter.mode(info["engine_type"], config_name)
-            submitter.addTests(results, benchmark.suite, benchmark.version, mode)
-
-if not options.session:
-    submitter.finish()
+class AutoSpawnServer:
+    def __init__(self):
+        self.server = None
+
+    def __enter__(self):
+        print("Starting proxy server.")
+        self.server = subprocess.Popen(['python', 'server.py'])
+
+    def __exit__(self, type, value, traceback):
+        print("Terminating proxy server.")
+        if self.server:
+            self.server.terminate()
+        self.server = None
+
+with AutoSpawnServer():
+    # Run every benchmark for every build and config
+    benchmarks = [benchmarks.getBenchmark(i) for i in options.benchmarks]
+    for benchmark in benchmarks:
+        for engine_path in engines:
+            info = engineInfo.getInfo(engine_path)
+            executor = executors.getExecutor(info)
+
+            for config_name in options.configs:
+                config = configs.getConfig(config_name, info)
+                if config.omit():
+                    continue
+
+                try:
+                    results = executor.run(benchmark, config)
+                    if not results:
+                        continue
+                except Exception as e:
+                    print('Failed to run ' + engine_path + ' - ' + benchmark.version + ' - ' + config_name + '!')
+                    print('Exception: ' + repr(e))
+                    import traceback
+                    traceback.print_exc()
+                    continue
+
+                mode = submitter.mode(info["engine_type"], config_name)
+                submitter.addTests(results, benchmark.suite, benchmark.version, mode)
+
+    if not options.session:
+        submitter.finish()
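In the hunk above, AutoSpawnServer wraps the entire benchmark loop in a with statement, so __exit__ runs, and the proxy is terminated, whether the loop finishes normally or raises. One caveat: terminate() alone does not reap the child; a follow-up wait() avoids leaving a zombie. A runnable sketch of the same pattern with that addition (ManagedProcess is an illustrative name, not code from this repo):

    import subprocess

    class ManagedProcess:
        def __init__(self, argv):
            self.argv = argv
            self.proc = None

        def __enter__(self):
            self.proc = subprocess.Popen(self.argv)
            return self.proc

        def __exit__(self, exc_type, exc_value, tb):
            if self.proc:
                self.proc.terminate()
                self.proc.wait()  # reap the child to avoid a zombie
                self.proc = None
            return False  # propagate exceptions from the with-body

    # __exit__ runs on normal completion and on exceptions alike:
    with ManagedProcess(["python", "-c", "import time; time.sleep(60)"]):
        pass  # the benchmark loop would run here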
@@ -56,7 +56,7 @@ class FakeHandler(SimpleHTTPRequestHandler):
         if self.path.startswith("/submit"):
             return self.captureResults(query)
         else:
-            return self.retrieveOffline();
+            return self.retrieveOffline()
 
     def retrieveOffline(self):
         path = self.translate_path(self.path)
@@ -106,10 +106,16 @@ class FakeHandler(SimpleHTTPRequestHandler):
 
     def captureResults(self, query):
         queryParsed = urlparse.parse_qs(query)
-        fp = open("slave/results", "w");
-        fp.write(queryParsed["results"][0]);
+        fp = open("slave/results", "w")
+        fp.write(queryParsed["results"][0])
         fp.close()
-        return False
+
+        content = "Results successfully captured!"
+        self.send_response(200)
+        self.send_header("Content-Length", len(content))
+        self.end_headers()
+        self.wfile.write(bytes(content))
+        return True
 
     def translatePath(self, old_host, old_path):
         global translates, benchmarks
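The final hunk makes captureResults acknowledge the submitting page instead of returning False after writing the results file: it now sends a complete HTTP response (status line, Content-Length header, blank line, body) and returns True. A minimal sketch of that response sequence, written for Python 2 to match the script's shebang; the handler class, port, and message are illustrative:

    from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

    class AckHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            content = "Results successfully captured!"
            self.send_response(200)                           # status line
            self.send_header("Content-Length", len(content))  # header
            self.end_headers()                                # blank line
            self.wfile.write(content)                         # body

    if __name__ == "__main__":
        HTTPServer(("localhost", 8001), AckHandler).serve_forever()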