Mirror of https://github.com/mozilla/treeherder.git
Bug 1175295 - Add support for submitting generic performance artifacts
Parent: 77558b2e86
Commit: d3bd244b65
New file: schemas/performance-artifact.json
@@ -0,0 +1,76 @@
{
    "definitions": {
        "framework_schema": {
            "properties": {
                "name": {
                    "title": "Framework name",
                    "type": "string"
                }
            },
            "type": "object"
        },
        "subtest_schema": {
            "properties": {
                "name": {
                    "title": "Subtest name",
                    "type": "string"
                },
                "value": {
                    "description": "Summary value for subtest",
                    "title": "Subtest value",
                    "type": "number"
                }
            },
            "required": [
                "name",
                "value"
            ],
            "type": "object"
        },
        "suite_schema": {
            "properties": {
                "name": {
                    "title": "Suite name",
                    "type": "string"
                },
                "subtests": {
                    "items": {
                        "$ref": "#/definitions/subtest_schema"
                    },
                    "title": "Subtests",
                    "type": "array"
                },
                "value": {
                    "title": "Suite value",
                    "type": "number"
                }
            },
            "required": [
                "name",
                "subtests"
            ],
            "type": "object"
        }
    },
    "description": "Structure for submitting performance data as part of a job",
    "id": "https://treeherder.mozilla.org/schemas/v1/performance-artifact.json#",
    "properties": {
        "framework": {
            "$ref": "#/definitions/framework_schema"
        },
        "suites": {
            "description": "List of suite-level data submitted as part of this structure",
            "items": {
                "$ref": "#/definitions/suite_schema"
            },
            "title": "Performance suites",
            "type": "array"
        }
    },
    "required": [
        "framework",
        "suites"
    ],
    "title": "Perfherder Schema",
    "type": "object"
}
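For reference, a minimal payload that conforms to this schema, validated the same way the ETL code below does it. This snippet is illustrative only and not part of the commit; the values mirror the "cheezburger" example used in the tests further down.

# Illustrative only (not part of this diff): validate an example generic
# performance artifact against the new Perfherder schema.
import json
import os

from jsonschema import validate

PERFHERDER_SCHEMA = json.load(open(os.path.join('schemas',
                                                'performance-artifact.json')))

example_artifact = {
    "framework": {"name": "cheezburger"},
    "suites": [{
        "name": "cheezburger metrics",
        "value": 10.0,
        "subtests": [
            {"name": "test1", "value": 20.0},
            {"name": "test2", "value": 30.0}
        ]
    }]
}

# raises jsonschema.ValidationError if the payload does not conform
validate(example_artifact, PERFHERDER_SCHEMA)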
New file: schemas/talos-artifact.json
@@ -0,0 +1,29 @@
{
    "description": "Legacy schema for the submission of Talos data to Perfherder (DO NOT USE UNLESS YOU'RE TALOS)",
    "id": "https://treeherder.mozilla.org/schemas/v1/talos-artifact.json#",
    "properties": {
        "results": {
            "type": "object"
        },
        "test_aux": {
            "type": "object"
        },
        "test_build": {
            "type": "object"
        },
        "test_machine": {
            "type": "object"
        },
        "testrun": {
            "type": "object"
        }
    },
    "required": [
        "results",
        "test_build",
        "testrun",
        "test_machine"
    ],
    "title": "Talos Schema",
    "type": "object"
}
@@ -2,6 +2,7 @@ from tests.sampledata import SampleData
from tests.test_utils import post_collection
from treeherder.client.thclient import client
from treeherder.perf.models import (PerformanceDatum,
                                    PerformanceFramework,
                                    PerformanceSignature)


@@ -42,3 +43,52 @@ def test_post_talos_artifact(test_project, test_repository, result_set_stored,
    # talos (we have validation elsewhere for the actual data adapters)
    assert PerformanceSignature.objects.count() == 2
    assert PerformanceDatum.objects.count() == 2


def test_post_perf_artifact(test_project, test_repository, result_set_stored,
                            mock_post_json):
    test_repository.save()

    PerformanceSignature.objects.all().delete()
    PerformanceDatum.objects.all().delete()
    PerformanceFramework.objects.all().delete()

    PerformanceFramework.objects.get_or_create(name='cheezburger')

    tjc = client.TreeherderJobCollection()
    job_guid = 'd22c74d4aa6d2a1dcba96d95dccbd5fdca70cf33'
    tj = client.TreeherderJob({
        'project': test_repository.name,
        'revision_hash': result_set_stored[0]['revision_hash'],
        'job': {
            'job_guid': job_guid,
            'state': 'completed',
            'project': test_repository.name,
            'option_collection': {'opt': True},
            'artifacts': [{
                'blob': {
                    "framework": {"name": "cheezburger"},
                    "suites": [{
                        "name": "cheezburger metrics",
                        "value": 10.0,
                        "subtests": [
                            {"name": "test1", "value": 20.0},
                            {"name": "test2", "value": 30.0}
                        ]
                    }]
                },
                'type': 'json',
                'name': 'performance_data',
                'job_guid': job_guid
            }]
        }
    })

    tjc.add(tj)

    post_collection(test_project, tjc)

    # we'll just validate that we got the expected number of results for
    # talos (we have validation elsewhere for the actual data adapters)
    assert PerformanceSignature.objects.all().count() == 3
    assert PerformanceDatum.objects.all().count() == 3
@@ -1,23 +1,27 @@
import copy
import json

from django.test import TestCase

from tests.sampledata import SampleData
-from treeherder.etl.perf import load_perf_artifacts
+from treeherder.etl.perf import (load_perf_artifacts,
+                                 load_talos_artifacts)
from treeherder.model.models import (MachinePlatform,
                                     Option,
                                     OptionCollection,
                                     Repository,
                                     RepositoryGroup)
from treeherder.perf.models import (PerformanceDatum,
                                    PerformanceFramework,
                                    PerformanceSignature)


-class TalosDataAdapterTest(TestCase):
+class PerfDataAdapterTest(TestCase):

    OPTION_HASH = "my_option_hash"
    REPO_NAME = 'mozilla-central'
    MACHINE_PLATFORM = "win7"
    JOB_GUID = "oqiwy0q847365qiu"

    def setUp(self):
        repo_group = RepositoryGroup.objects.create(name='mygroup')

@@ -32,10 +36,10 @@ class TalosDataAdapterTest(TestCase):
        }
        Repository.objects.create(**repo_args)
        option, _ = Option.objects.get_or_create(name='asan')
-        OptionCollection.objects.get_or_create(
-            option_collection_hash=self.OPTION_HASH,
-            option=option)
-        MachinePlatform.objects.get_or_create(
+        self.option_collection, _ = OptionCollection.objects.get_or_create(
+            option_collection_hash=self.OPTION_HASH,
+            option=option)
+        self.platform, _ = MachinePlatform.objects.get_or_create(
            os_name="win",
            platform=self.MACHINE_PLATFORM,
            architecture="x86",

@@ -43,7 +47,98 @@ class TalosDataAdapterTest(TestCase):
            'active_status': "active"
        })

-    def test_load(self):
+    def _get_job_and_reference_data(self):
        job_data = {
            self.JOB_GUID: {
                "id": 1,
                "result_set_id": 1,
                "push_timestamp": 1402692388
            }
        }

        reference_data = {
            "option_collection_hash": self.OPTION_HASH,
            "machine_platform": self.MACHINE_PLATFORM,
            "property1": "value1",
            "property2": "value2",
            "property3": "value3"
        }

        return (job_data, reference_data)

    def _verify_signature_datum(self, framework_name, suitename, testname,
                                value):

        signature = PerformanceSignature.objects.get(
            suite=suitename,
            test=testname)
        self.assertEqual(str(signature.framework), str(framework_name))
        self.assertEqual(str(signature.option_collection),
                         str(self.option_collection))
        self.assertEqual(str(signature.platform),
                         str(self.platform))

        datum = PerformanceDatum.objects.get(signature=signature)
        self.assertEqual(datum.value, value)

    def test_load_generic_data(self):
        framework_name = "cheezburger"

        PerformanceDatum.objects.all().delete()
        PerformanceSignature.objects.all().delete()
        PerformanceFramework.objects.all().delete()
        PerformanceFramework.objects.get_or_create(name=framework_name)

        (job_data, reference_data) = self._get_job_and_reference_data()
        datum = {
            "job_guid": self.JOB_GUID,
            "name": "test",
            "type": "test",
            "blob": {
                "framework": {"name": framework_name},
                "suites": [
                    {
                        "name": "cheezburger metrics",
                        "value": 10.0,
                        "subtests": [
                            {
                                "name": "test1",
                                "value": 20.0
                            },
                            {
                                "name": "test2",
                                "value": 30.0
                            }
                        ]
                    }
                ]
            }
        }

        # the perf data adapter expects unserialized performance data
        submit_datum = copy.copy(datum)
        submit_datum['blob'] = json.dumps(submit_datum['blob'])

        load_perf_artifacts(self.REPO_NAME, reference_data, job_data,
                            submit_datum)
        self.assertEqual(3, PerformanceSignature.objects.all().count())
        self.assertEqual(1, PerformanceFramework.objects.all().count())
        framework = PerformanceFramework.objects.all()[0]
        self.assertEqual(framework_name, framework.name)

        perf_datum = datum['blob']

        # verify summary, then subtests
        self._verify_signature_datum(perf_datum['framework']['name'],
                                     perf_datum['suites'][0]['name'], '', 10.0)
        for subtest in perf_datum['suites'][0]['subtests']:
            self._verify_signature_datum(perf_datum['framework']['name'],
                                         perf_datum['suites'][0]['name'],
                                         subtest['name'], subtest['value'])

    def test_load_talos_data(self):

        PerformanceFramework.objects.get_or_create(name='talos')

        talos_perf_data = SampleData.get_talos_perf_data()
        for talos_datum in talos_perf_data:

@@ -56,33 +151,19 @@
            PerformanceSignature.objects.all().delete()
            PerformanceDatum.objects.all().delete()

+            (job_data, reference_data) = self._get_job_and_reference_data()

            datum = {
-                "job_guid": 'oqiwy0q847365qiu',
+                "job_guid": self.JOB_GUID,
                "name": "test",
                "type": "test",
                "blob": talos_datum
            }

-            job_data = {
-                "oqiwy0q847365qiu": {
-                    "id": 1,
-                    "result_set_id": 1,
-                    "push_timestamp": 1402692388
-                }
-            }
-
-            reference_data = {
-                "option_collection_hash": self.OPTION_HASH,
-                "machine_platform": self.MACHINE_PLATFORM,
-                "property1": "value1",
-                "property2": "value2",
-                "property3": "value3"
-            }
-
            # Mimic production environment, the blobs are serialized
            # when the web service receives them
            datum['blob'] = json.dumps({'talos_data': [datum['blob']]})
-            load_perf_artifacts(self.REPO_NAME, reference_data, job_data, datum)
+            load_talos_artifacts(self.REPO_NAME, reference_data, job_data, datum)

            # base: subtests + one extra result for the summary series
            expected_result_count = len(talos_datum["results"]) + 1
treeherder/etl/perf.py:
@@ -1,6 +1,7 @@
import datetime
import logging
import math
import os
from hashlib import sha1

import simplejson as json

@@ -9,16 +10,15 @@ from jsonschema import validate
from treeherder.model.models import (MachinePlatform,
                                     OptionCollection,
                                     Repository)
-from treeherder.perf.models import (PerformanceFramework,
-                                    PerformanceSignature,
-                                    PerformanceDatum)
+from treeherder.perf.models import (PerformanceDatum,
+                                    PerformanceFramework,
+                                    PerformanceSignature)

logger = logging.getLogger(__name__)


PERFORMANCE_ARTIFACT_TYPES = set([
    'performance',
    'performance_data',
    'talos_data'
])

@@ -28,30 +28,17 @@ SIGNIFICANT_REFERENCE_DATA_KEYS = ['option_collection_hash',
                                   'machine_platform']


-TALOS_SCHEMA = {
-    "title": "Talos Schema",
-    "type": "object",
-
-    "properties": {
-        "test_machine": {"type": "object"},
-        "testrun": {"type": "object"},
-        "results": {"type": "object"},
-        "test_build": {"type": "object"},
-        "test_aux": {"type": "object"}
-    },
-
-    "required": ["results", "test_build", "testrun", "test_machine"]
-}
+PERFHERDER_SCHEMA = json.load(open(os.path.join('schemas',
+                                                'performance-artifact.json')))
+TALOS_SCHEMA = json.load(open(os.path.join('schemas',
+                                           'talos-artifact.json')))


def _transform_signature_properties(properties, significant_keys=None):
    if significant_keys is None:
        significant_keys = SIGNIFICANT_REFERENCE_DATA_KEYS
-    transformed_properties = {}
-    keys = properties.keys()
-    for k in keys:
-        if k in significant_keys:
-            transformed_properties[k] = properties[k]
+    transformed_properties = {k: v for k, v in properties.iteritems() if
+                              k in significant_keys}

    # HACK: determine if e10s is in job_group_symbol, and add an "e10s"
    # property to a 'test_options' property if so (we should probably

@@ -79,6 +66,99 @@ def _get_signature_hash(signature_properties):
    return sha.hexdigest()


def load_perf_artifacts(project_name, reference_data, job_data, datum):
    perf_datum = json.loads(datum['blob'])
    validate(perf_datum, PERFHERDER_SCHEMA)

    if 'e10s' in reference_data.get('job_group_symbol', ''):
        extra_properties = {'test_options': ['e10s']}
    else:
        extra_properties = {}

    # transform the reference data so it only contains what we actually
    # care about (for calculating the signature hash reproducibly), then
    # get the associated models
    reference_data = _transform_signature_properties(reference_data)
    option_collection = OptionCollection.objects.get(
        option_collection_hash=reference_data['option_collection_hash'])
    # there may be multiple machine platforms with the same platform: use
    # the first
    platform = MachinePlatform.objects.filter(
        platform=reference_data['machine_platform'])[0]
    repository = Repository.objects.get(
        name=project_name)

    # data for performance series
    job_guid = datum["job_guid"]
    job_id = job_data[job_guid]['id']
    result_set_id = job_data[job_guid]['result_set_id']
    push_timestamp = datetime.datetime.fromtimestamp(
        job_data[job_guid]['push_timestamp'])

    framework = PerformanceFramework.objects.get(name=perf_datum['framework']['name'])
    for suite in perf_datum['suites']:
        subtest_signatures = []
        for subtest in suite['subtests']:
            subtest_properties = {
                'suite': suite['name'],
                'test': subtest['name']
            }
            subtest_properties.update(reference_data)
            subtest_signature_hash = _get_signature_hash(
                subtest_properties)
            subtest_signatures.append(subtest_signature_hash)

            signature, _ = PerformanceSignature.objects.get_or_create(
                signature_hash=subtest_signature_hash,
                defaults={
                    'test': subtest['name'],
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'framework': framework,
                    'extra_properties': extra_properties
                })
            PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': subtest['value']})

        # if we have a summary value, create or get its signature and insert
        # it too
        if suite['value']:
            # summary series
            extra_summary_properties = {
                'subtest_signatures': sorted(subtest_signatures)
            }
            extra_summary_properties.update(extra_properties)
            summary_properties = {'suite': suite['name']}
            summary_properties.update(reference_data)
            summary_properties.update(extra_summary_properties)
            summary_signature_hash = _get_signature_hash(
                summary_properties)

            signature, _ = PerformanceSignature.objects.get_or_create(
                signature_hash=summary_signature_hash,
                defaults={
                    'test': '',
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'framework': framework,
                    'extra_properties': extra_summary_properties
                })
            PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': suite['value']})


def _calculate_summary_value(results):
    # needed only for legacy talos blobs which don't provide a suite
    # summary value

@@ -117,7 +197,7 @@ def _calculate_test_value(replicates):
    return value


-def load_perf_artifacts(project_name, reference_data, job_data, datum):
+def load_talos_artifacts(project_name, reference_data, job_data, datum):
    if 'e10s' in reference_data.get('job_group_symbol', ''):
        extra_properties = {'test_options': ['e10s']}
    else:
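Note that _get_signature_hash itself is not changed by this commit; only its final "return sha.hexdigest()" line appears as context above. As a rough, hypothetical sketch of the "reproducible signature hash" idea the comments refer to (not the actual helper from the codebase), it amounts to hashing the significant properties in a stable order:

# Hypothetical sketch only: the real _get_signature_hash body is not part
# of this diff. The idea is that hashing the transformed, significant
# properties in a sorted, deterministic order yields the same signature
# hash for the same suite/test/platform/option combination every time.
from hashlib import sha1

def sketch_signature_hash(signature_properties):
    sha = sha1()
    for key in sorted(signature_properties.keys()):
        value = signature_properties[key]
        if isinstance(value, list):
            # e.g. subtest_signatures, already sorted by the caller
            value = ','.join(value)
        sha.update(('%s:%s;' % (key, value)).encode('utf-8'))
    return sha.hexdigest()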
@@ -3,7 +3,9 @@ import zlib

import simplejson as json

-from treeherder.etl.perf import (load_perf_artifacts, PERFORMANCE_ARTIFACT_TYPES)
+from treeherder.etl.perf import (PERFORMANCE_ARTIFACT_TYPES,
+                                 load_perf_artifacts,
+                                 load_talos_artifacts)
from treeherder.model import utils

from .base import TreeherderModelBase

@@ -117,7 +119,10 @@ class ArtifactsModel(TreeherderModelBase):
        del ref_data['signature']

        # adapt and load data into placeholder structures
-        load_perf_artifacts(self.project, ref_data, job_data, perf_data)
+        if perf_data['name'] == 'talos_data':
+            load_talos_artifacts(self.project, ref_data, job_data, perf_data)
+        else:
+            load_perf_artifacts(self.project, ref_data, job_data, perf_data)

    def load_job_artifacts(self, artifact_data, job_id_lookup):
        """