2017-08-25 22:05:18 +03:00
|
|
|
#!/usr/bin/env python
|
2019-07-16 20:46:06 +03:00
|
|
|
|
|
|
|
from __future__ import absolute_import, print_function
|
|
|
|
|
2015-07-01 01:34:09 +03:00
|
|
|
from argparse import ArgumentParser
|
|
|
|
from collections import defaultdict
|
2018-12-26 22:35:30 +03:00
|
|
|
import datetime
|
2015-07-01 01:34:09 +03:00
|
|
|
import json
|
|
|
|
import os
|
|
|
|
import sys
|
2018-12-26 22:35:30 +03:00
|
|
|
import time
|
2015-07-01 01:34:09 +03:00
|
|
|
|
|
|
|
import requests
|
|
|
|
|
|
|
|
here = os.path.abspath(os.path.dirname(__file__))
|
|
|
|
|
2018-12-26 22:35:30 +03:00
|
|
|
ACTIVE_DATA_URL = "https://activedata.allizom.org/query"
|
2015-08-28 00:37:26 +03:00
|
|
|
PERCENTILE = 0.5 # ignore the bottom PERCENTILE*100% of numbers
|
2015-07-01 01:34:09 +03:00
|
|
|
|
2016-12-19 20:09:10 +03:00
|
|
|
def query_activedata(suite, e10s, platforms=None):
    """Query ActiveData for average per-test durations.

    Asks for the mean ``result.duration`` of every test in *suite* over
    runs from the last seven days, optionally restricted to *platforms*
    and filtered on whether the run was e10s or not.

    :param suite: suite name matched against ``run.suite``.
    :param e10s: if True, keep only e10s runs; if False, exclude them.
    :param platforms: optional list of ``build.platform`` values to match.
    :returns: the decoded ``data`` payload of the ActiveData response,
        a list of ``[test, duration]`` pairs (duration in seconds).
    :raises requests.HTTPError: on a non-2xx response.
    """
    if platforms:
        platform_clause = ', "build.platform":%s' % json.dumps(platforms)
    else:
        platform_clause = ''

    # only consider runs from the last week
    cutoff = datetime.datetime.now() - datetime.timedelta(days=7)
    cutoff_timestamp = time.mktime(cutoff.timetuple())

    # runs are tagged with run.type == "e10s"; negate the clause to
    # select the non-e10s population instead
    e10s_clause = '"eq":{"run.type":"e10s"}'
    if not e10s:
        e10s_clause = '"not":{%s}' % e10s_clause

    query = """
{
    "from":"unittest",
    "limit":200000,
    "groupby":["result.test"],
    "select":{"value":"result.duration","aggregate":"average"},
    "where":{"and":[
        {"eq":{"run.suite":"%s"%s}},
        {%s},
        {"gt":{"run.timestamp":%s}}
    ]}
}
""" % (suite, platform_clause, e10s_clause, cutoff_timestamp)

    response = requests.post(ACTIVE_DATA_URL, data=query, stream=True)
    response.raise_for_status()
    return response.json()["data"]
|
|
|
|
|
2015-07-13 21:02:53 +03:00
|
|
|
def write_runtimes(data, suite, indir=here, outdir=here):
    """Merge new duration data into ``<suite>.runtimes.json`` in *outdir*.

    Tests in the slower half (per ``PERCENTILE``) get individual entries
    under ``runtimes``; the faster half is collapsed into a single
    ``excluded_test_average`` value. Existing entries from *indir* are
    kept unless the new duration drifted noticeably, and entries for
    tests no longer present are dropped.

    :param data: mapping (or iterable of pairs) of test name -> average
        duration in seconds, as returned by ``query_activedata``.
    :param suite: suite name; determines the input/output file names.
    :param indir: directory holding an existing runtimes file to merge with.
    :param outdir: directory to write the updated file to (created if missing).
    """
    data = dict(data)

    outfilename = os.path.join(outdir, "%s.runtimes.json" % suite)
    infilename = os.path.join(indir, "%s.runtimes.json" % suite)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # read in existing data, if any
    indata = None
    if os.path.exists(infilename):
        with open(infilename, 'r') as f:
            indata = json.loads(f.read()).get('runtimes')

    # identify a threshold of durations, below which we ignore
    # (durations are converted from seconds to integer milliseconds)
    runtimes = []
    # NOTE: .values()/.items() instead of the Python-2-only
    # .itervalues()/.iteritems(), matching the file's py3-forward style
    for result in data.values():
        duration = int(result * 1000) if result else 0
        if duration:
            runtimes.append(duration)
    runtimes.sort()
    # guard against an empty list (no measurable durations), which
    # previously raised IndexError here
    threshold = runtimes[int(len(runtimes) * PERCENTILE)] if runtimes else 0

    # split the durations into two groups; omitted and specified
    omitted = []
    specified = indata if indata else {}
    current_tests = set()  # set: O(1) membership checks below
    for test, duration in data.items():
        current_tests.add(test)
        duration = int(duration * 1000) if duration else 0
        if duration > 0 and duration < threshold:
            omitted.append(duration)
            if test in specified:
                del specified[test]
        elif duration >= threshold and test != "automation.py":
            original = specified.get(test, 0)
            if not original or abs(original - duration) > (original / 20):
                # only overwrite existing data when it drifted by more
                # than ~5% (original/20); smaller changes are just churn
                specified[test] = duration

    # delete any test references no longer needed
    for test in set(specified) - current_tests:
        del specified[test]

    # average duration of the omitted (fast) tests; 0 when nothing was
    # omitted (previously a ZeroDivisionError)
    avg = int(sum(omitted) / len(omitted)) if omitted else 0

    results = {'excluded_test_average': avg,
               'runtimes': specified}

    with open(outfilename, 'w') as f:
        f.write(json.dumps(results, indent=2, sort_keys=True))
|
2015-07-01 01:34:09 +03:00
|
|
|
|
|
|
|
|
|
|
|
def cli(args=None):
    """Command line entry point: generate a runtimes file for one suite.

    :param args: argument list to parse; defaults to ``sys.argv[1:]``,
        read at call time rather than frozen at import time (the old
        ``args=sys.argv[1:]`` default captured argv when the module loaded).
    :raises ValueError: if no suite is given, or more than one suite is given.
    """
    if args is None:
        args = sys.argv[1:]

    parser = ArgumentParser()
    parser.add_argument('-o', '--output-directory', dest='outdir',
                        default=here, help="Directory to save runtime data.")

    parser.add_argument('-i', '--input-directory', dest='indir',
                        default=here, help="Directory from which to read current runtime data.")

    parser.add_argument('-p', '--platforms', default=None,
                        help="Comma separated list of platforms from which to generate data.")

    parser.add_argument('-s', '--suite', dest='suite', default=None,
                        help="Suite for which to generate data.")

    parser.add_argument('--disable-e10s', dest='e10s', default=True,
                        action='store_false', help="Generate runtimes for non-e10s tests.")

    args = parser.parse_args(args)

    if not args.suite:
        raise ValueError("Must specify suite with the -s argument")
    if ',' in args.suite:
        raise ValueError("Passing multiple suites is not supported")

    if args.platforms:
        args.platforms = args.platforms.split(',')

    data = query_activedata(args.suite, args.e10s, args.platforms)

    # e10s runs get their own runtimes file, keyed by a suffixed suite name
    suite = args.suite
    if args.e10s:
        suite = '%s-e10s' % suite

    if not data:
        print("Not creating runtimes file as no data was found")
    else:
        write_runtimes(data, suite, indir=args.indir, outdir=args.outdir)
|
2015-07-01 01:34:09 +03:00
|
|
|
|
|
|
|
# Script entry point: cli() returns None on success, so sys.exit exits
# with status 0; a raised ValueError/HTTPError propagates with a traceback.
if __name__ == "__main__":
    sys.exit(cli())
|