diff --git a/tests/webapp/api/test_job_details_api.py b/tests/webapp/api/test_job_details_api.py
index 042477de0..4c3c5d14f 100644
--- a/tests/webapp/api/test_job_details_api.py
+++ b/tests/webapp/api/test_job_details_api.py
@@ -1,7 +1,8 @@
 from django.core.urlresolvers import reverse
 
 from treeherder.model.models import (Job,
-                                     JobDetail)
+                                     JobDetail,
+                                     Repository)
 
 
 def test_job_details(test_repository, webapp):
@@ -24,11 +25,24 @@ def test_job_details(test_repository, webapp):
     }
 
     # create some job details for some fake jobs
-    i = 0
+    test_repository2 = Repository.objects.create(
+        repository_group=test_repository.repository_group,
+        name=test_repository.name + '_2',
+        dvcs_type=test_repository.dvcs_type,
+        url=test_repository.url + '_2',
+        codebase=test_repository.codebase)
+
+    i = 1
     for (job_guid, params) in details.iteritems():
-        print job_guid
+        if i < 3:
+            repository = test_repository
+        else:
+            # renumber last
+            repository = test_repository2
+            i = 1
+        print (i, repository)
         job = Job.objects.create(guid=job_guid,
-                                 repository=test_repository,
+                                 repository=repository,
                                  project_specific_id=i)
         JobDetail.objects.create(
             job=job, **params)
@@ -41,14 +55,35 @@ def test_job_details(test_repository, webapp):
     for result in resp.json['results']:
         job_guid = result['job_guid']
         del result['job_guid']
+        del result['job_id']
         assert result == details[job_guid]
 
     # filter to just get one guid at a time
-    for (guid, detail) in details.iteritems():
-        resp = webapp.get(reverse('jobdetail-list') + '?job__guid={}'.format(
-            guid))
-        assert resp.status_int == 200
-        assert len(resp.json['results']) == 1
-        result = resp.json['results'][0]
-        del result['job_guid']
-        assert result == details[guid]
+    for guid_identifier in ['job_guid', 'job__guid']:
+        for (guid, detail) in details.iteritems():
+            resp = webapp.get(reverse('jobdetail-list') + '?{}={}'.format(
+                guid_identifier, guid))
+            assert resp.status_int == 200
+            assert len(resp.json['results']) == 1
+            result = resp.json['results'][0]
+            del result['job_guid']
+            del result['job_id']
+            assert result == details[guid]
+
+    # filter to get the first and second with job_id__in and repository
+    resp = webapp.get(reverse('jobdetail-list') +
+                      '?repository={}&job_id__in=1,2'.format(
+                          test_repository.name))
+    assert resp.status_int == 200
+    assert len(resp.json['results']) == 2
+    assert set([v['job_guid'] for v in resp.json['results']]) == set(
+        ['abcd', 'efgh'])
+
+    # filter to get the last element with job_id__in and repository
+    resp = webapp.get(reverse('jobdetail-list') +
+                      '?repository={}&job_id__in=1,2'.format(
+                          test_repository2.name))
+    assert resp.status_int == 200
+    assert len(resp.json['results']) == 1
+    assert set([v['job_guid'] for v in resp.json['results']]) == set(
+        ['ijkl'])
diff --git a/treeherder/webapp/api/jobs.py b/treeherder/webapp/api/jobs.py
index f6cd43cdf..d059b6583 100644
--- a/treeherder/webapp/api/jobs.py
+++ b/treeherder/webapp/api/jobs.py
@@ -1,3 +1,4 @@
+import django_filters
 from rest_framework import (filters,
                             pagination,
                             viewsets)
@@ -307,11 +308,28 @@ class JobDetailViewSet(viewsets.ReadOnlyModelViewSet):
     '''
     Endpoint for retrieving metadata (e.g. links to artifacts, file sizes) associated with a particular job
     '''
-    queryset = JobDetail.objects.all().select_related('job__guid')
+    queryset = JobDetail.objects.all().select_related('job__guid',
+                                                      'job__project_specific_id',
+                                                      'job__repository__name')
     serializer_class = serializers.JobDetailSerializer
 
+    class JobDetailFilter(filters.FilterSet):
+
+        class NumberInFilter(django_filters.filters.BaseInFilter,
+                             django_filters.NumberFilter):
+            pass
+
+        job_id__in = NumberInFilter(name='job__project_specific_id',
+                                    lookup_expr='in')
+        job_guid = django_filters.CharFilter(name='job__guid')
+        job__guid = django_filters.CharFilter(name='job__guid')  # for backwards compat
+        repository = django_filters.CharFilter(name='job__repository__name')
+
+        class Meta:
+            model = JobDetail
+
     filter_backends = (filters.DjangoFilterBackend, filters.OrderingFilter)
-    filter_fields = ['job__guid']
+    filter_class = JobDetailFilter
 
     class JobDetailPagination(pagination.LimitOffsetPagination):
         default_limit = 100
diff --git a/treeherder/webapp/api/serializers.py b/treeherder/webapp/api/serializers.py
index 1fdea7340..6dbc7b9d2 100644
--- a/treeherder/webapp/api/serializers.py
+++ b/treeherder/webapp/api/serializers.py
@@ -174,7 +174,10 @@ class JobDetailSerializer(serializers.ModelSerializer):
     job_guid = serializers.SlugRelatedField(
         slug_field="guid", source="job",
         queryset=models.Job.objects.all())
+    job_id = serializers.SlugRelatedField(
+        slug_field="project_specific_id", source="job",
+        queryset=models.Job.objects.all())
 
     class Meta:
         model = models.JobDetail
-        fields = ['job_guid', 'title', 'value', 'url']
+        fields = ['job_id', 'job_guid', 'title', 'value', 'url']
diff --git a/ui/js/models/job_detail.js b/ui/js/models/job_detail.js
index acaf09cdf..d93964780 100644
--- a/ui/js/models/job_detail.js
+++ b/ui/js/models/job_detail.js
@@ -8,7 +8,7 @@ treeherder.factory('ThJobDetailModel', [
         var timeout = config.timeout || null;
 
         return $http.get(thUrl.getRootUrl("/jobdetail/"), {
-            params: { job_guid: jobGuid },
+            params: { job_guid: jobGuid },
             timeout: timeout
         }).then(function(response) {
            return response.data.results;