import json

from tests.sampledata import SampleData
from treeherder.client import TreeherderClient
from treeherder.model import models


def post_collection(project, th_collection):
    client = TreeherderClient(server_url='http://localhost')
    return client.post_collection(project, th_collection)
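

# A minimal usage sketch for ``post_collection``. ``TreeherderJobCollection``
# and the builder methods below are assumed from the thclient submission API
# of this era; the revision, project, and guid values are illustrative only.
#
#     from treeherder.client import TreeherderJobCollection
#
#     tjc = TreeherderJobCollection()
#     tj = tjc.get_job()
#     tj.add_revision('45f8637cb9f7')
#     tj.add_project('mozilla-central')
#     tj.add_job_guid('d22c74d4aa6d2a1dcba96d95dccbd5fdca70cf33')
#     tj.add_state('completed')
#     tjc.add(tj)
#
#     post_collection('mozilla-central', tjc)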


def do_job_ingestion(jm, job_data, sample_resultset, verify_data=True):
    """
    Ingest ``job_data``, a list of JSON job blobs.

    ``verify_data`` - whether or not to run the ingested jobs
    through the verifier.
    """
    jm.store_result_set_data(sample_resultset)

    max_index = len(sample_resultset) - 1
    resultset_index = 0

    # Structures to test if we stored everything
    build_platforms_ref = set()
    machine_platforms_ref = set()
    machines_ref = set()
    options_ref = set()
    job_types_ref = set()
    products_ref = set()
    result_sets_ref = set()
    log_urls_ref = set()
    coalesced_job_guids = {}
    coalesced_replacements = []
    artifacts_ref = {}

    blobs = []
    for blob in job_data:

        if resultset_index > max_index:
            resultset_index = 0

        # Modify job structure to sync with the resultset sample data
        if 'sources' in blob:
            del blob['sources']

        blob['revision'] = sample_resultset[resultset_index]['revision']

        blobs.append(blob)

        resultset_index += 1

        # Build data structures to confirm everything is stored
        # as expected
        if verify_data:
            job = blob['job']
            job_guid = job['job_guid']

            # Platform reference keys take the form
            # "<os_name>-<platform>-<architecture>".
            build_platforms_ref.add(
                "-".join([
                    job.get('build_platform', {}).get('os_name', 'unknown'),
                    job.get('build_platform', {}).get('platform', 'unknown'),
                    job.get('build_platform', {}).get('architecture', 'unknown')
                ]))

            machine_platforms_ref.add(
                "-".join([
                    job.get('machine_platform', {}).get('os_name', 'unknown'),
                    job.get('machine_platform', {}).get('platform', 'unknown'),
                    job.get('machine_platform', {}).get('architecture', 'unknown')
                ]))

            machines_ref.add(job.get('machine', 'unknown'))

            # ``option_collection`` is a dict keyed by option name; default to
            # an empty dict (not a list) so ``.keys()`` is always valid.
            options_ref = options_ref.union(job.get('option_collection', {}).keys())

            job_types_ref.add(job.get('name', 'unknown'))
            products_ref.add(job.get('product_name', 'unknown'))
            result_sets_ref.add(blob['revision'])

            for log_data in job.get('log_references', []):
                log_urls_ref.add(log_data['url'])

            artifact_name = job.get('artifact', {}).get('name')
            if artifact_name:
                artifacts_ref[artifact_name] = job.get('artifact')

            coalesced = blob.get('coalesced', [])
            if coalesced:
                coalesced_job_guids[job_guid] = coalesced
                coalesced_replacements.append('%s')

    # Store the modified json blobs
    jm.store_job_data(blobs)
    if verify_data:
        # Confirm stored data matches what's in the reference data structs
        verify_build_platforms(build_platforms_ref)
        verify_machine_platforms(machine_platforms_ref)
        verify_machines(machines_ref)
        verify_options(options_ref)
        verify_job_types(job_types_ref)
        verify_products(products_ref)
        verify_result_sets(jm, result_sets_ref)
        verify_log_urls(jm, log_urls_ref)
        verify_artifacts(jm, artifacts_ref)
        verify_coalesced(jm, coalesced_job_guids, coalesced_replacements)
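

# For reference, a minimal sketch of the job blob shape that
# ``do_job_ingestion`` and the verifiers below consume. The field names are
# taken from the accesses in this module; the literal values are illustrative
# only.
_EXAMPLE_JOB_BLOB = {
    'revision': '45f8637cb9f7',  # filled in from the sample resultset
    'coalesced': [],             # optional list of coalesced job guids
    'job': {
        'job_guid': 'd22c74d4aa6d2a1dcba96d95dccbd5fdca70cf33',
        'name': 'mochitest-1',
        'product_name': 'firefox',
        'machine': 'tst-linux64-ec2-137',
        'build_platform': {
            'os_name': 'linux', 'platform': 'linux64',
            'architecture': 'x86_64'},
        'machine_platform': {
            'os_name': 'linux', 'platform': 'linux64',
            'architecture': 'x86_64'},
        'option_collection': {'opt': True},
        'log_references': [
            {'name': 'buildbot_text', 'url': 'http://example.com/log.txt'}],
        'artifact': {},          # optional; dict with name/type/blob
    },
}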


def verify_build_platforms(build_platforms_ref):
    build_platforms_set = set()
    for build_platform in models.BuildPlatform.objects.all():
        build_platforms_set.add(
            "-".join([
                build_platform.os_name,
                build_platform.platform,
                build_platform.architecture
            ]))

    assert build_platforms_ref.issubset(build_platforms_set)


def verify_machine_platforms(machine_platforms_ref):
    machine_platforms_set = set()
    for machine_platform in models.MachinePlatform.objects.all():
        machine_platforms_set.add(
            "-".join([
                machine_platform.os_name,
                machine_platform.platform,
                machine_platform.architecture
            ]))

    assert machine_platforms_ref.issubset(machine_platforms_set)


def verify_machines(machines_ref):
    machines = models.Machine.objects.all().values_list('name', flat=True)
    assert machines_ref.issubset(machines)


def verify_options(options_ref):
    options = models.Option.objects.all().values_list('name', flat=True)
    assert options_ref.issubset(options)


def verify_job_types(job_types_ref):
    job_types = models.JobType.objects.all().values_list('name', flat=True)
    assert job_types_ref.issubset(job_types)


def verify_products(products_ref):
    products = models.Product.objects.all().values_list('name', flat=True)
    assert products_ref.issubset(products)


def verify_result_sets(jm, result_sets_ref):
    revisions = jm.get_dhub().execute(
        proc='jobs.selects.get_all_result_set_revisions',
        key_column='long_revision',
        return_type='set'
    )

    assert result_sets_ref.issubset(revisions)


def verify_log_urls(jm, log_urls_ref):
    log_urls = set(models.JobLog.objects.values_list('url', flat=True))

    assert log_urls_ref.issubset(log_urls)


def verify_artifacts(jm, artifacts_ref):
    artifacts = jm.get_dhub().execute(
        proc='jobs.selects.get_all_artifacts',
        key_column='name',
        return_type='dict'
    )

    for key in artifacts:
        assert artifacts[key]['name'] == artifacts_ref[key]['name']
        assert artifacts[key]['type'] == artifacts_ref[key]['type']
        # The stored blob is serialized JSON; the reference blob is the
        # original Python object.
        assert json.loads(artifacts[key]['blob']) == artifacts_ref[key]['blob']
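
# An artifact reference entry, sketched for illustration (assumed shape, based
# on the fields compared above; the name is a hypothetical example):
#     artifacts_ref['Job Info'] = {
#         'name': 'Job Info',
#         'type': 'json',
#         'blob': {...},   # stored serialized, compared via json.loads()
#     }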


def verify_coalesced(jm, coalesced_job_guids, coalesced_replacements):
    # list() so the truthiness check and the query below behave the same on
    # Python 3, where .keys() returns a view.
    coalesced_job_guid_list = list(coalesced_job_guids.keys())

    if coalesced_job_guid_list:
        rep_str = ','.join(coalesced_replacements)
        data = jm.get_dhub().execute(
            proc='jobs.selects.get_jobs_by_coalesced_guids',
            replace=[rep_str],
            placeholders=coalesced_job_guid_list
        )

        coalesced_job_guids_stored = {}
        for datum in data:
            if datum['job_coalesced_to_guid'] not in coalesced_job_guids_stored:
                coalesced_job_guids_stored[datum['job_coalesced_to_guid']] = []
            coalesced_job_guids_stored[datum['job_coalesced_to_guid']].append(
                datum['job_guid']
            )

        assert coalesced_job_guids_stored == coalesced_job_guids
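
# The mapping being verified pairs the guid of the job that was kept with the
# guids of the jobs coalesced into it, e.g. (illustrative guids):
#     {'guid-of-kept-job': ['guid-of-coalesced-job-1',
#                           'guid-of-coalesced-job-2']}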


def load_exp(filename):
    """
    Load an expected-result JSON file and return its contents as an object.

    If the file doesn't exist, it will be created, but the test will
    fail due to the missing content. This is to make it easier during
    test development.
    """
    path = SampleData().get_log_path(filename)
    with open(path, "a+") as f:
        try:
            # "a+" may position the stream at EOF (it does on Python 3);
            # rewind before reading.
            f.seek(0)
            return json.load(f)
        except ValueError:
            # if it's not parse-able, return an empty dict
            return {}
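

# A minimal usage sketch (hypothetical file name and variable): compare a
# parser's output against a stored expectation.
#
#     exp = load_exp("mochitest-pass.log.exp.json")
#     assert parsed_log == exp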