import json
import os

from django.conf import settings


class SampleData(object):
|
|
|
|
|
2014-05-31 01:49:41 +04:00
|
|
|
@classmethod
|
2015-06-15 23:34:37 +03:00
|
|
|
def get_perf_data(cls, filename):
|
|
|
|
with open("{0}/sample_data/artifacts/performance/{1}".format(
|
|
|
|
os.path.dirname(__file__), filename)) as f:
|
2015-03-03 04:59:49 +03:00
|
|
|
return json.loads(f.read())
|
2014-05-31 01:49:41 +04:00
|
|
|
|
2015-06-15 23:34:37 +03:00
|
|
|
@classmethod
|
|
|
|
def get_talos_perf_data(cls):
|
|
|
|
return cls.get_perf_data('talos_perf.json')
|
|
|
|
|
2015-09-23 21:48:24 +03:00
|
|
|
@classmethod
|
|
|
|
def get_minimal_talos_perf_data(cls):
|
|
|
|
return cls.get_perf_data('talos_perf_minimal.json')
|
|
|
|
|
2013-04-12 02:56:55 +04:00
|
|
|
def __init__(self):
|
|
|
|
self.job_data_file = "{0}/sample_data/job_data.txt".format(
|
|
|
|
os.path.dirname(__file__)
|
2013-08-20 00:45:44 +04:00
|
|
|
)
|
2013-04-12 02:56:55 +04:00
|
|
|
|
2013-11-14 05:24:24 +04:00
|
|
|
self.resultset_data_file = "{0}/sample_data/resultset_data.json".format(
|
|
|
|
os.path.dirname(__file__)
|
|
|
|
)
|
|
|
|
|
2013-06-05 02:51:52 +04:00
|
|
|
self.logs_dir = "{0}/sample_data/logs".format(
|
|
|
|
os.path.dirname(__file__)
|
2013-08-20 00:45:44 +04:00
|
|
|
)
|
2015-09-25 23:31:16 +03:00
|
|
|
|
2015-10-27 00:38:19 +03:00
|
|
|
self.talos_logs_dir = "{0}/sample_data/artifacts/performance/talos_logs".format(
|
2014-10-14 03:45:10 +04:00
|
|
|
os.path.dirname(__file__)
|
|
|
|
)
|
2013-08-20 00:45:44 +04:00
|
|
|
|
|
|
|
with open("{0}/sample_data/artifacts/structured_log_artifact.json".format(
|
|
|
|
os.path.dirname(__file__))) as f:
|
|
|
|
self.structured_log_artifact = f.readlines()
|
|
|
|
|
|
|
|
with open("{0}/sample_data/artifacts/job_artifact.json".format(
|
|
|
|
os.path.dirname(__file__))) as f:
|
|
|
|
self.job_artifact = f.readlines()
|
2013-04-12 02:56:55 +04:00
|
|
|
|
2015-05-05 03:36:08 +03:00
|
|
|
with open("{0}/sample_data/artifacts/text_log_summary.json".format(
|
|
|
|
os.path.dirname(__file__))) as f:
|
|
|
|
self.text_log_summary = json.load(f)
|
|
|
|
|
2015-09-25 23:31:16 +03:00
|
|
|
with open("{0}/sample_data/pulse_consumer/job_data.json".format(
|
|
|
|
os.path.dirname(__file__))) as f:
|
|
|
|
self.pulse_jobs = json.load(f)
|
|
|
|
|
2013-04-12 02:56:55 +04:00
|
|
|
self.job_data = []
|
2013-11-14 05:24:24 +04:00
|
|
|
self.resultset_data = []
|
2013-04-12 02:56:55 +04:00
|
|
|
|
|
|
|
self.initialize_data()
|
|
|
|
|
|
|
|
def initialize_data(self):
|
|
|
|
with open(self.job_data_file) as f:
|
|
|
|
for line in f.readlines():
|
2013-08-20 00:45:44 +04:00
|
|
|
self.job_data.append(json.loads(line.strip()))
|
2013-04-12 02:56:55 +04:00
|
|
|
|
2013-11-14 05:24:24 +04:00
|
|
|
with open(self.resultset_data_file) as f:
|
|
|
|
self.resultset_data = json.loads(f.read())
|
2013-11-14 09:31:59 +04:00
|
|
|
|
|
|
|
# ensure that the repository values for all the revisions have the
|
|
|
|
# same name as the db test name in settings. If this is not
|
|
|
|
# the same, the tests will not pass.
|
2013-11-14 05:24:24 +04:00
|
|
|
for rs in self.resultset_data:
|
|
|
|
for rev in rs["revisions"]:
|
2015-09-16 15:00:34 +03:00
|
|
|
rev["repository"] = settings.TREEHERDER_TEST_PROJECT
|
2013-11-14 05:24:24 +04:00
|
|
|
|
2013-06-05 02:51:52 +04:00
|
|
|
def get_log_path(self, name):
|
|
|
|
"""Returns the full path to a log file"""
|
2014-01-31 23:35:46 +04:00
|
|
|
return "{0}/{1}".format(self.logs_dir, name)
|
|
|
|
|
2015-10-27 00:38:19 +03:00
|
|
|
def get_talos_logs(self):
|
|
|
|
"""Returns a list of full paths to talos log files"""
|
|
|
|
files = os.listdir(self.talos_logs_dir)
|
2014-10-14 03:45:10 +04:00
|
|
|
for i, f in enumerate(files):
|
2015-10-27 00:38:19 +03:00
|
|
|
files[i] = 'file://{0}/{1}'.format(self.talos_logs_dir, f)
|
2014-10-14 03:45:10 +04:00
|
|
|
return files
|