Add API to get manifest run status (#7549)

Joel Maher 2022-11-01 19:48:31 -07:00 committed by GitHub
Parent 241e02b1b8
Commit dd8fdf043f
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 244 additions and 0 deletions
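
As a quick usage sketch (not part of the commit itself): the new endpoint takes a required manifest query parameter (comma-separated manifest paths, each containing a '/') and an optional date parameter in YYYY-MM-DD form that defaults to today. The host, port and /api/ prefix below are assumptions for a local Treeherder instance.

# Usage sketch only: host, port and the /api/ prefix are assumptions for a
# local Treeherder instance; the query parameters come from the view added
# further down in this commit.
import requests

resp = requests.get(
    "http://localhost:8000/api/groupsummary/",
    params={"manifest": "/test", "date": "2022-10-01"},
)
resp.raise_for_status()
# The response is a list of objects with "manifest" and "results" keys; each
# result carries job_type_name, job_result and job_count.
print(resp.json())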

View file

@@ -963,6 +963,63 @@ def test_run_data(bug_data):
    return {'test_runs': test_runs, 'push_time': time}


@pytest.fixture
def group_data(transactional_db, eleven_job_blobs, create_jobs):
    query_string = '?manifest=/test&date=2022-10-01'

    jt = []
    jt.append(
        th_models.JobType.objects.create(name="test-windows10-64-2004-qr/opt-mochitest-plain-1")
    )
    jt.append(
        th_models.JobType.objects.create(name="test-windows10-64-2004-qr/opt-mochitest-plain-2")
    )
    jt.append(
        th_models.JobType.objects.create(name="test-windows10-64-2004-qr/opt-mochitest-plain-swr-1")
    )
    g1 = th_models.Group.objects.create(name="/test")

    for i in range(3):
        job = eleven_job_blobs[i]
        job['job'].update(
            {
                'taskcluster_task_id': 'V3SVuxO8TFy37En_6HcXL%s' % i,
                'taskcluster_retry_id': '0',
                'name': jt[i].name,
            }
        )
        j = create_jobs([job])[0]
        # creating the job also creates the job log; we want the last job log entry
        job_log = th_models.JobLog.objects.last()
        th_models.GroupStatus.objects.create(status=1, job_log=job_log, group=g1)

    return {
        'date': j.submit_time,
        'manifest': '/test',
        'query_string': query_string,
        'expected': [
            {
                'manifest': '/test',
                'results': [
                    {
                        "job_type_name": 'test-windows10-64-2004-qr/opt-mochitest-plain',
                        "job_result": "success",
                        "job_count": 2,
                    },
                    {
                        "job_type_name": 'test-windows10-64-2004-qr/opt-mochitest-plain-swr',
                        "job_result": "success",
                        "job_count": 1,
                    },
                ],
            }
        ],
    }


@pytest.fixture
def generate_enough_perf_datum(test_repository, test_perf_signature):
    # generate enough data for a proper alert to be generated (with enough

View file

@@ -0,0 +1,61 @@
import datetime

from django.urls import reverse


# test date (future date, no data)
def test_future_date(group_data, client):
    expected = [{"manifest": "/test", "results": []}]
    today = datetime.datetime.today().date()
    tomorrow = today + datetime.timedelta(days=1)

    url = reverse('groupsummary') + "?date=%s&manifest=/test" % tomorrow
    resp = client.get(url)
    assert resp.status_code == 200
    assert resp.json() == expected


# test date (today/recent) data
def test_default_date(group_data, client):
    expected = [{"manifest": "/test", "results": []}]
    url = reverse('groupsummary') + "?manifest=/test"
    resp = client.get(url)
    assert resp.status_code == 200
    assert resp.json() == expected


# test manifests missing, blank, no '/', single string, commas
def test_invalid_manifest(group_data, client):
    expected = "invalid url query parameter manifest: 'asdf'"
    resp = client.get(reverse('groupsummary') + "?manifest=asdf")
    assert resp.status_code == 400
    assert resp.json() == expected


def test_manifest_blank(group_data, client):
    expected = "invalid url query parameter manifest: ''"
    resp = client.get(reverse('groupsummary') + "?manifest=")
    assert resp.status_code == 400
    assert resp.json() == expected


def test_missing_manifest(group_data, client):
    expected = "invalid url query parameter manifest: None"
    resp = client.get(reverse('groupsummary') + "")
    assert resp.status_code == 400
    assert resp.json() == expected


# test data, summarized by manifest
# test jobname chunk removal and aggregation
def test_summarized(group_data, client):
    expected = group_data['expected']
    url = (
        reverse('groupsummary') + "?manifest=/test&date=%s" % str(group_data['date']).split(' ')[0]
    )
    resp = client.get(url)
    assert resp.status_code == 200
    assert resp.json() == expected

View file

@@ -0,0 +1,94 @@
import datetime
import logging
import re

from django.db.models import Count
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.status import HTTP_400_BAD_REQUEST

from treeherder.model.models import (
    JobLog,
)
from treeherder.webapp.api.serializers import GroupNameSerializer

logger = logging.getLogger(__name__)


class SummaryByGroupName(generics.ListAPIView):
    """
    This yields group names/status summary for the given group and day.
    """

    serializer_class = GroupNameSerializer
    queryset = None

    def list(self, request):
        manifests = None
        if 'manifest' in request.query_params:
            manifests = [
                x.strip()
                for x in request.query_params['manifest'].split(',')
                if x and '/' in x.strip()
            ]
        if not manifests or len(manifests) == 0:
            if 'manifest' in request.query_params:
                error = (
                    "invalid url query parameter manifest: '%s'" % request.query_params['manifest']
                )
            else:
                error = "invalid url query parameter manifest: None"
            return Response(data=error, status=HTTP_400_BAD_REQUEST)

        date = None
        if 'date' in request.query_params:
            date = request.query_params['date']
        if not date or not re.match(r'^[0-9]{4}-[0-9]{2}-[0-9]{2}$', date):
            date = str(datetime.datetime.today().date())
        date = datetime.datetime.strptime(date, "%Y-%m-%d")
        tomorrow = date + datetime.timedelta(days=1)

        self.queryset = (
            JobLog.objects.filter(job__push__time__range=(str(date.date()), str(tomorrow.date())))
            .filter(job__repository_id__in=(1, 77))
            .filter(groups__name__in=manifests)
            .values(
                'groups__name',
                'job__job_type__name',
                'job__result',
            )
            .annotate(job_count=Count('job_id'))
            .values('groups__name', 'job__job_type__name', 'job__result', 'job_count')
            .order_by('job__job_type__name')
        )
        serializer = self.get_serializer(self.queryset, many=True)

        summary = {}
        for item in serializer.data:
            if item['group_name'] not in summary:
                summary[item['group_name']] = {}
            if item['job_type_name'] not in summary[item['group_name']]:
                summary[item['group_name']][item['job_type_name']] = {}
            if item['job_result'] not in summary[item['group_name']][item['job_type_name']]:
                summary[item['group_name']][item['job_type_name']][item['job_result']] = 0
            summary[item['group_name']][item['job_type_name']][item['job_result']] += item[
                'job_count'
            ]

        data = []
        for m in manifests:
            mdata = []
            # emit manifests with no data as an empty results list
            if m not in summary:
                data.append({"manifest": m, "results": []})
                continue

            for d in summary[m]:
                for r in summary[m][d]:
                    mdata.append(
                        {"job_type_name": d, "job_result": r, "job_count": summary[m][d][r]}
                    )
            data.append({"manifest": m, "results": mdata})

        return Response(data=data)
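
For reference, a minimal standalone sketch (not part of the commit) of the summarization that list() performs. The input rows are shaped like the serializer output for the group_data fixture earlier in this diff; the trailing chunk numbers are already stripped at that point, so the two mochitest-plain chunks share one job type name and fold into a single result.

# Standalone sketch of the aggregation step in SummaryByGroupName.list();
# the real view feeds serializer rows built from the ORM query above.
rows = [
    {"group_name": "/test", "job_result": "success", "job_count": 1,
     "job_type_name": "test-windows10-64-2004-qr/opt-mochitest-plain"},
    {"group_name": "/test", "job_result": "success", "job_count": 1,
     "job_type_name": "test-windows10-64-2004-qr/opt-mochitest-plain"},
    {"group_name": "/test", "job_result": "success", "job_count": 1,
     "job_type_name": "test-windows10-64-2004-qr/opt-mochitest-plain-swr"},
]

summary = {}
for item in rows:
    by_type = summary.setdefault(item["group_name"], {})
    by_result = by_type.setdefault(item["job_type_name"], {})
    by_result[item["job_result"]] = by_result.get(item["job_result"], 0) + item["job_count"]

data = []
for manifest in ["/test"]:
    results = [
        {"job_type_name": jt, "job_result": res, "job_count": count}
        for jt, by_result in summary.get(manifest, {}).items()
        for res, count in by_result.items()
    ]
    data.append({"manifest": manifest, "results": results})

# data now matches the 'expected' payload in the group_data fixture above:
# mochitest-plain aggregates to job_count 2, mochitest-plain-swr stays at 1.
print(data)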

View file

@@ -309,6 +309,34 @@ class FailuresSerializer(serializers.ModelSerializer):
        fields = ('bug_id', 'bug_count')


class JobTypeNameField(serializers.Field):
    """Removes the ending chunk number"""

    def to_representation(self, value):
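        # e.g. "test-windows10-64-2004-qr/opt-mochitest-plain-2" -> "test-windows10-64-2004-qr/opt-mochitest-plain"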
        parts = value.split("-")
        try:
            _ = int(parts[-1])
            return '-'.join(parts[:-1])
        except ValueError:
            return value


class GroupNameSerializer(serializers.ModelSerializer):
    group_name = serializers.CharField(source="groups__name")
    job_type_name = JobTypeNameField(source="job__job_type__name")
    job_result = serializers.CharField(source="job__result")
    job_count = serializers.IntegerField()

    class Meta:
        model = models.JobLog
        fields = (
            'group_name',
            'job_type_name',
            'job_result',
            'job_count',
        )


class TestSuiteField(serializers.Field):
    """Removes all characters from test_suite that's also found in platform"""

View file

@@ -9,6 +9,7 @@ from treeherder.webapp.api import (
    changelog,
    classification,
    csp_report,
    groups,
    infra_compare,
    intermittents_view,
    investigated_test,
@@ -145,6 +146,7 @@ default_router.register(r'auth', auth.AuthViewSet, basename='auth')
default_router.register(r'changelog', changelog.ChangelogViewSet, basename='changelog')

urlpatterns = [
    re_path(r'^groupsummary/$', groups.SummaryByGroupName.as_view(), name='groupsummary'),
    re_path(r'^project/(?P<project>[\w-]{0,50})/', include(project_bound_router.urls)),
    re_path(r'^', include(default_router.urls)),
    re_path(r'^failures/$', intermittents_view.Failures.as_view(), name='failures'),

View file

@@ -21,6 +21,8 @@ export const bugDetailsEndpoint = '/failuresbybug/';
export const graphsEndpoint = '/failurecount/';
export const groupSummary = '/groupsummary/';
export const deployedRevisionUrl = '/revision.txt';
export const loginCallbackUrl = '/login';