зеркало из https://github.com/mozilla/treeherder.git
Merge pull request #128 from mozilla/resultset-date-range
Resultset date range
This commit is contained in:
Коммит
4d13a3d00d
|
@ -28,6 +28,7 @@ test.log
|
|||
treeherder*.log
|
||||
treeherder.log.*
|
||||
LOGFILE
|
||||
supervisor*.log
|
||||
|
||||
# Unit test / coverage reports
|
||||
.coverage
|
||||
|
|
|
@ -13,6 +13,7 @@ WebTest==1.3.4
|
|||
WebOb==1.2
|
||||
|
||||
mock==1.0b1
|
||||
django-extensions==1.3.3
|
||||
|
||||
|
||||
# in order to be able to run bin/generate-vendor-lib.py
|
||||
|
|
|
@ -47,7 +47,39 @@ def test_create_bug_job_map(eleven_jobs_processed, mock_message_broker, jm):
|
|||
reverse("bug-job-map-list", kwargs={"project": jm.project}),
|
||||
bug_job_map_obj
|
||||
)
|
||||
|
||||
|
||||
user.delete()
|
||||
|
||||
assert (bug_job_map_obj,) == jm.get_bug_job_map_list(0, 1)
|
||||
|
||||
|
||||
def test_create_bug_job_map_dup(eleven_jobs_processed, mock_message_broker, jm):
    """
    test creating the same bug map skips it
    """
    client = APIClient()
    user = User.objects.create(username="MyName", is_staff=True)
    client.force_authenticate(user=user)

    job = jm.get_job_list(0, 1)[0]

    bug_job_map_obj = {
        "job_id": job["id"],
        "bug_id": 1,
        "type": "manual"
    }

    # POST the identical mapping twice; the second insert is expected to
    # be skipped by the API rather than create a duplicate row.
    for _ in range(2):
        client.post(
            reverse("bug-job-map-list", kwargs={"project": jm.project}),
            bug_job_map_obj
        )

    user.delete()

    # exactly one mapping row must exist despite the duplicate POST
    assert (bug_job_map_obj,) == jm.get_bug_job_map_list(0, 1)
|
||||
|
@ -122,7 +154,7 @@ def test_bug_job_map_delete(webapp, eleven_jobs_processed,
|
|||
"pk": pk
|
||||
})
|
||||
)
|
||||
|
||||
|
||||
user.delete()
|
||||
|
||||
content = json.loads(resp.content)
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
import pytest
|
||||
from django.core.urlresolvers import reverse
|
||||
from treeherder.webapp.api.resultset import ResultSetViewSet
|
||||
|
||||
from thclient import TreeherderResultSetCollection
|
||||
from tests import test_utils
|
||||
|
||||
from treeherder.webapp.api import utils
|
||||
|
||||
def test_resultset_list(webapp, eleven_jobs_processed, jm):
|
||||
"""
|
||||
|
@ -15,11 +15,13 @@ def test_resultset_list(webapp, eleven_jobs_processed, jm):
|
|||
reverse("resultset-list", kwargs={"project": jm.project})
|
||||
)
|
||||
|
||||
assert resp.status_int == 200
|
||||
assert isinstance(resp.json, list)
|
||||
rs_list = resp.json
|
||||
results = resp.json['results']
|
||||
meta = resp.json['meta']
|
||||
|
||||
assert len(rs_list) == 10
|
||||
assert resp.status_int == 200
|
||||
assert isinstance(results, list)
|
||||
|
||||
assert len(results) == 10
|
||||
exp_keys = set([
|
||||
u'id',
|
||||
u'repository_id',
|
||||
|
@ -34,9 +36,16 @@ def test_resultset_list(webapp, eleven_jobs_processed, jm):
|
|||
u'job_counts',
|
||||
u'platforms'
|
||||
])
|
||||
for rs in rs_list:
|
||||
for rs in results:
|
||||
assert set(rs.keys()) == exp_keys
|
||||
|
||||
assert(meta == {
|
||||
u'count': 10,
|
||||
u'filter_params': {},
|
||||
u'repository':
|
||||
u'test_treeherder'
|
||||
})
|
||||
|
||||
|
||||
def test_resultset_list_bad_project(webapp, jm):
|
||||
"""
|
||||
|
@ -64,7 +73,71 @@ def test_resultset_list_empty_rs_still_show(webapp, initial_data,
|
|||
reverse("resultset-list", kwargs={"project": jm.project}),
|
||||
)
|
||||
assert resp.status_int == 200
|
||||
assert len(resp.json) == 10
|
||||
assert len(resp.json['results']) == 10
|
||||
|
||||
|
||||
def test_resultset_list_filter_by_revision(webapp, eleven_jobs_processed, jm):
    """
    test retrieving a resultset list, filtered by a revision range
    (``fromchange``/``tochange``), like old tbpl.
    """
    resp = webapp.get(
        reverse("resultset-list", kwargs={"project": jm.project}),
        {"fromchange": "21fb3eed1b5f", "tochange": "909f55c626a8"}
    )
    assert resp.status_int == 200
    results = resp.json['results']
    meta = resp.json['meta']
    # the revision range is translated by the API into a push_timestamp
    # window; four resultsets were pushed inside it
    assert len(results) == 4
    assert set([rs["revision"] for rs in results]) == set(
        ["909f55c626a8", "71d49fee325a", "bb57e9f67223", "21fb3eed1b5f"]
    )
    assert(meta == {
        u'count': 4,
        u'fromchange': u'21fb3eed1b5f',
        u'filter_params': {
            u'push_timestamp__gte': 1384363842,
            u'push_timestamp__lte': 1384365942
        },
        u'repository': u'test_treeherder',
        u'tochange': u'909f55c626a8'}
    )
|
||||
|
||||
|
||||
def test_resultset_list_filter_by_date(webapp, initial_data,
                                       sample_resultset, jm):
    """
    test retrieving a resultset list, filtered by a date range
    """
    # spread five resultsets across consecutive days so the
    # startdate/enddate filter has a known window to select from
    push_dates = ["2013-08-09", "2013-08-10", "2013-08-11",
                  "2013-08-12", "2013-08-13"]
    for idx, datestr in zip(range(3, 8), push_dates):
        sample_resultset[idx]["push_timestamp"] = utils.to_timestamp(datestr)

    jm.store_result_set_data(sample_resultset)

    resp = webapp.get(
        reverse("resultset-list", kwargs={"project": jm.project}),
        {"startdate": "2013-08-10", "enddate": "2013-08-13"}
    )
    assert resp.status_int == 200
    results = resp.json['results']
    meta = resp.json['meta']
    # the four resultsets pushed on 08-10..08-13 fall inside the window
    assert len(results) == 4
    assert set([rs["revision"] for rs in results]) == set(
        ["909f55c626a8", "71d49fee325a", "bb57e9f67223", "668424578a0d"]
    )
    assert(meta == {
        u'count': 4,
        u'enddate': u'2013-08-13',
        u'filter_params': {
            u'push_timestamp__gte': 1376118000.0,
            u'push_timestamp__lt': 1376463600.0
        },
        u'repository': u'test_treeherder',
        u'startdate': u'2013-08-10'}
    )
|
||||
|
||||
|
||||
def test_resultset_detail(webapp, eleven_jobs_processed, jm):
|
||||
|
|
|
@ -2,6 +2,8 @@ import json
|
|||
import MySQLdb
|
||||
import time
|
||||
|
||||
from _mysql_exceptions import IntegrityError
|
||||
|
||||
from warnings import filterwarnings, resetwarnings
|
||||
from django.conf import settings
|
||||
|
||||
|
@ -65,7 +67,8 @@ class JobsModel(TreeherderModelBase):
|
|||
"id": "rs.id",
|
||||
"revision_hash": "rs.revision_hash",
|
||||
"revision": "revision.revision",
|
||||
"author": "revision.author"
|
||||
"author": "revision.author",
|
||||
"push_timestamp": "rs.push_timestamp"
|
||||
},
|
||||
"bug_job_map": {
|
||||
"job_id": "job_id",
|
||||
|
@ -315,15 +318,19 @@ class JobsModel(TreeherderModelBase):
|
|||
"""
|
||||
Store a new relation between the given job and bug ids.
|
||||
"""
|
||||
self.get_jobs_dhub().execute(
|
||||
proc='jobs.inserts.insert_bug_job_map',
|
||||
placeholders=[
|
||||
job_id,
|
||||
bug_id,
|
||||
assignment_type
|
||||
],
|
||||
debug_show=self.DEBUG
|
||||
)
|
||||
try:
|
||||
self.get_jobs_dhub().execute(
|
||||
proc='jobs.inserts.insert_bug_job_map',
|
||||
placeholders=[
|
||||
job_id,
|
||||
bug_id,
|
||||
assignment_type
|
||||
],
|
||||
debug_show=self.DEBUG
|
||||
)
|
||||
except IntegrityError as e:
|
||||
raise JobDataIntegrityError(e)
|
||||
|
||||
|
||||
def delete_bug_job_map(self, job_id, bug_id):
|
||||
"""
|
||||
|
@ -1469,10 +1476,21 @@ class JobsModel(TreeherderModelBase):
|
|||
'revision_ids':revision_id_lookup
|
||||
}
|
||||
|
||||
def get_revision_timestamp(self, rev):
    """Get the push timestamp of the resultset for a revision"""
    lookup = self.get_revision_resultset_lookup([rev])
    return lookup[rev]["push_timestamp"]
|
||||
|
||||
|
||||
class JobDataError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
class JobDataIntegrityError(IntegrityError):
|
||||
pass
|
||||
|
||||
|
||||
class JobData(dict):
|
||||
"""
|
||||
Encapsulates data access from incoming test data structure.
|
||||
|
|
|
@ -218,6 +218,7 @@
|
|||
jg.`symbol` as job_group_symbol,
|
||||
jg.`description` as job_group_description,
|
||||
j.`who`,
|
||||
j.failure_classification_id,
|
||||
j.`result_set_id`,
|
||||
j.`result`,
|
||||
j.`state`,
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from treeherder.model.derived.jobs import JobDataIntegrityError
|
||||
from rest_framework import viewsets
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
@ -18,10 +19,20 @@ class BugJobMapViewSet(viewsets.ViewSet):
|
|||
job_id, bug_id = map(int, (request.DATA['job_id'],
|
||||
request.DATA['bug_id']))
|
||||
|
||||
jm.insert_bug_job_map(job_id, bug_id,
|
||||
request.DATA['type'])
|
||||
try:
|
||||
jm.insert_bug_job_map(job_id, bug_id,
|
||||
request.DATA['type'])
|
||||
except JobDataIntegrityError as e:
|
||||
code, msg = e.args
|
||||
if "Duplicate" in msg:
|
||||
return Response(
|
||||
{"message": "Bug job map skipped: {0}".format(msg)},
|
||||
409
|
||||
)
|
||||
else:
|
||||
raise e
|
||||
|
||||
return Response({"message": "Bug job map stored"})
|
||||
return Response({"message": "Bug job map saved"})
|
||||
|
||||
@with_jobs
|
||||
def destroy(self, request, project, jm, pk=None):
|
||||
|
|
|
@ -6,7 +6,8 @@ from rest_framework.decorators import link
|
|||
from rest_framework.reverse import reverse
|
||||
from treeherder.model.derived import DatasetNotFoundError
|
||||
from treeherder.webapp.api.utils import (UrlQueryFilter, with_jobs,
|
||||
oauth_required, get_option)
|
||||
oauth_required, get_option,
|
||||
to_timestamp)
|
||||
|
||||
|
||||
class ResultSetViewSet(viewsets.ViewSet):
|
||||
|
@ -23,7 +24,43 @@ class ResultSetViewSet(viewsets.ViewSet):
|
|||
|
||||
"""
|
||||
|
||||
filter = UrlQueryFilter(request.QUERY_PARAMS)
|
||||
# make a mutable copy of these params
|
||||
filter_params = request.QUERY_PARAMS.copy()
|
||||
|
||||
# This will contain some meta data about the request and results
|
||||
meta = {}
|
||||
|
||||
# support ranges for date as well as revisions(changes) like old tbpl
|
||||
for param in ["fromchange", "tochange", "startdate", "enddate"]:
|
||||
v = filter_params.get(param, None)
|
||||
if v:
|
||||
del(filter_params[param])
|
||||
meta[param] = v
|
||||
|
||||
# translate these params into our own filtering mechanism
|
||||
if 'fromchange' in meta:
|
||||
filter_params.update({
|
||||
"push_timestamp__gte": jm.get_revision_timestamp(meta['fromchange'])
|
||||
})
|
||||
if 'tochange' in meta:
|
||||
filter_params.update({
|
||||
"push_timestamp__lte": jm.get_revision_timestamp(meta['tochange'])
|
||||
})
|
||||
if 'startdate' in meta:
|
||||
filter_params.update({
|
||||
"push_timestamp__gte": to_timestamp(meta['startdate'])
|
||||
})
|
||||
if 'enddate' in meta:
|
||||
|
||||
# add a day because we aren't supplying a time, just a date. So
|
||||
# we're doing ``less than``, rather than ``less than or equal to``.
|
||||
filter_params.update({
|
||||
"push_timestamp__lt": to_timestamp(meta['enddate']) + 86400
|
||||
})
|
||||
|
||||
meta['filter_params'] = filter_params
|
||||
|
||||
filter = UrlQueryFilter(filter_params)
|
||||
|
||||
offset_id = filter.pop("id__lt", 0)
|
||||
count = filter.pop("count", 10)
|
||||
|
@ -35,7 +72,15 @@ class ResultSetViewSet(viewsets.ViewSet):
|
|||
full,
|
||||
filter.conditions
|
||||
)
|
||||
return Response(self.get_resultsets_with_jobs(jm, objs, full, {}))
|
||||
|
||||
results = self.get_resultsets_with_jobs(jm, objs, full, {})
|
||||
meta['count'] = len(results)
|
||||
meta['repository'] = project
|
||||
|
||||
return Response({
|
||||
'meta': meta,
|
||||
'results': results
|
||||
})
|
||||
|
||||
@with_jobs
|
||||
def retrieve(self, request, project, jm, pk=None):
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
from collections import defaultdict
|
||||
import time
|
||||
import datetime
|
||||
|
||||
import simplejson as json
|
||||
import oauth2 as oauth
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
@ -220,3 +221,10 @@ def get_option(obj, option_collections):
|
|||
else:
|
||||
return None
|
||||
|
||||
|
||||
def to_timestamp(datestr):
    """get a timestamp from a datestr like 2014-03-31"""
    # parse the date in local time and convert to seconds since the epoch
    parsed = datetime.datetime.strptime(datestr, "%Y-%m-%d")
    return time.mktime(parsed.timetuple())
|
||||
|
|
Загрузка…
Ссылка в новой задаче