Merge branch 'master' of ssh://github.com/mozilla/treeherder-service into cycle-data

Jonathan Eads 2014-04-18 13:08:58 -07:00
Parents 5394adfd25 829d941092
Commit cc152027db
12 changed files with 312 additions and 50 deletions

.gitignore (vendored)
View file

@@ -28,6 +28,7 @@ test.log
treeherder*.log
treeherder.log.*
LOGFILE
supervisor*.log
# Unit test / coverage reports
.coverage

View file

@@ -40,17 +40,6 @@ priority=998
stdout_logfile=/var/log/celery/worker.log
stderr_logfile=/var/log/celery/worker_err.log
[program:celery_gevent]
command=/home/vagrant/treeherder-service/bin/run_celery_worker_gevent
user=vagrant
autostart=true
autorestart=true
startsecs=10
stopwaitsecs = 600
priority=998
stdout_logfile=/var/log/celery/worker_gevent.log
stderr_logfile=/var/log/celery/worker_gevent_err.log
[program:celerymon]
command=/home/vagrant/treeherder-service/bin/run_celery_monitor
user=vagrant
@@ -61,3 +50,4 @@ stopwaitsecs = 600
priority=997
stdout_logfile=/var/log/celery/celerymon.log
stderr_logfile=/var/log/celery/celerymon_err.log

View file

@@ -0,0 +1,19 @@
[supervisord]
http_port=/var/tmp/supervisor.sock ; (default is to run a UNIX domain socket server)
loglevel=info ; (logging level;default info; others: debug,warn)
nodaemon=true ; (start in foreground if true;default false)
[supervisorctl]
serverurl=unix:///var/tmp/supervisor.sock ; use a unix:// URL for a unix socket
[program:celery_gevent]
command=/home/vagrant/treeherder-service/bin/run_celery_worker_gevent
user=vagrant
autostart=true
autorestart=true
startsecs=10
stopwaitsecs = 600
priority=998
stdout_logfile=/var/log/celery/worker_gevent.log
stderr_logfile=/var/log/celery/worker_gevent_err.log

View file

@@ -13,6 +13,7 @@ WebTest==1.3.4
WebOb==1.2
mock==1.0b1
django-extensions==1.3.3
# in order to be able to run bin/generate-vendor-lib.py

View file

@@ -47,7 +47,39 @@ def test_create_bug_job_map(eleven_jobs_processed, mock_message_broker, jm):
reverse("bug-job-map-list", kwargs={"project": jm.project}),
bug_job_map_obj
)
user.delete()
assert (bug_job_map_obj,) == jm.get_bug_job_map_list(0, 1)
def test_create_bug_job_map_dup(eleven_jobs_processed, mock_message_broker, jm):
"""
test creating the same bug map skips it
"""
client = APIClient()
user = User.objects.create(username="MyName", is_staff=True)
client.force_authenticate(user=user)
job = jm.get_job_list(0, 1)[0]
bug_job_map_obj = {
"job_id": job["id"],
"bug_id": 1,
"type": "manual"
}
client.post(
reverse("bug-job-map-list", kwargs={"project": jm.project}),
bug_job_map_obj
)
client.post(
reverse("bug-job-map-list", kwargs={"project": jm.project}),
bug_job_map_obj
)
user.delete()
assert (bug_job_map_obj,) == jm.get_bug_job_map_list(0, 1)
@@ -122,7 +154,7 @@ def test_bug_job_map_delete(webapp, eleven_jobs_processed,
"pk": pk
})
)
user.delete()
content = json.loads(resp.content)

View file

@@ -1,10 +1,10 @@
import pytest
from django.core.urlresolvers import reverse
from treeherder.webapp.api.resultset import ResultSetViewSet
from thclient import TreeherderResultSetCollection
from tests import test_utils
from treeherder.webapp.api import utils
def test_resultset_list(webapp, eleven_jobs_processed, jm):
"""
@@ -15,11 +15,13 @@ def test_resultset_list(webapp, eleven_jobs_processed, jm):
reverse("resultset-list", kwargs={"project": jm.project})
)
assert resp.status_int == 200
assert isinstance(resp.json, list)
rs_list = resp.json
results = resp.json['results']
meta = resp.json['meta']
assert len(rs_list) == 10
assert resp.status_int == 200
assert isinstance(results, list)
assert len(results) == 10
exp_keys = set([
u'id',
u'repository_id',
@@ -34,9 +36,16 @@ def test_resultset_list(webapp, eleven_jobs_processed, jm):
u'job_counts',
u'platforms'
])
for rs in rs_list:
for rs in results:
assert set(rs.keys()) == exp_keys
assert(meta == {
u'count': 10,
u'filter_params': {},
u'repository':
u'test_treeherder'
})
def test_resultset_list_bad_project(webapp, jm):
"""
@@ -64,7 +73,71 @@ def test_resultset_list_empty_rs_still_show(webapp, initial_data,
reverse("resultset-list", kwargs={"project": jm.project}),
)
assert resp.status_int == 200
assert len(resp.json) == 10
assert len(resp.json['results']) == 10
def test_resultset_list_filter_by_revision(webapp, eleven_jobs_processed, jm):
"""
test retrieving a resultset list, filtered by a revision range
"""
resp = webapp.get(
reverse("resultset-list", kwargs={"project": jm.project}),
{"fromchange": "21fb3eed1b5f", "tochange": "909f55c626a8"}
)
assert resp.status_int == 200
results = resp.json['results']
meta = resp.json['meta']
assert len(results) == 4
assert set([rs["revision"] for rs in results]) == set(
["909f55c626a8","71d49fee325a","bb57e9f67223","21fb3eed1b5f"]
)
assert(meta == {
u'count': 4,
u'fromchange': u'21fb3eed1b5f',
u'filter_params': {
u'push_timestamp__gte': 1384363842,
u'push_timestamp__lte': 1384365942
},
u'repository': u'test_treeherder',
u'tochange': u'909f55c626a8'}
)
def test_resultset_list_filter_by_date(webapp, initial_data,
sample_resultset, jm):
"""
test retrieving a resultset list, filtered by a date range
"""
sample_resultset[3]["push_timestamp"] = utils.to_timestamp("2013-08-09")
sample_resultset[4]["push_timestamp"] = utils.to_timestamp("2013-08-10")
sample_resultset[5]["push_timestamp"] = utils.to_timestamp("2013-08-11")
sample_resultset[6]["push_timestamp"] = utils.to_timestamp("2013-08-12")
sample_resultset[7]["push_timestamp"] = utils.to_timestamp("2013-08-13")
jm.store_result_set_data(sample_resultset)
resp = webapp.get(
reverse("resultset-list", kwargs={"project": jm.project}),
{"startdate": "2013-08-10", "enddate": "2013-08-13"}
)
assert resp.status_int == 200
results = resp.json['results']
meta = resp.json['meta']
assert len(results) == 4
assert set([rs["revision"] for rs in results]) == set(
["909f55c626a8","71d49fee325a","bb57e9f67223","668424578a0d"]
)
assert(meta == {
u'count': 4,
u'enddate': u'2013-08-13',
u'filter_params': {
u'push_timestamp__gte': 1376118000.0,
u'push_timestamp__lt': 1376463600.0
},
u'repository': u'test_treeherder',
u'startdate': u'2013-08-10'}
)
def test_resultset_detail(webapp, eleven_jobs_processed, jm):

View file

@@ -190,6 +190,14 @@ PLATFORMS_BUILDERNAME = [
#// ** B2G **
{
'regex': re.compile('b2g.*_emulator-kk', re.IGNORECASE),
'attributes': {
'os': 'b2g',
'os_platform': 'b2g-emu-kk',
'arch': 'x86',
}
},
{
'regex': re.compile('b2g.*_emulator-jb', re.IGNORECASE),
'attributes': {
@@ -282,36 +290,45 @@ TEST_NAME_BUILDERNAME = [
{"regex": re.compile('-br-haz'), "desc": "Static Rooting Hazard Analysis, Full Browser"},
{"regex": re.compile('-sh-haz'), "desc": "Static Rooting Hazard Analysis, JS Shell"},
{"regex": re.compile('xulrunner'), "desc": "XULRunner Nightly"},
{"regex": re.compile('b2g.*_flame_eng_nightly'), "desc": "Flame Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_hamachi_eng_nightly'), "desc": "Hamachi Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_helix_eng_nightly'), "desc": "Helix Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_inari_eng_nightly'), "desc": "Inari Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_leo_eng_nightly'), "desc": "Leo Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_nexus-4_eng_nightly'), "desc": "Nexus 4 Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_tarako_eng_nightly'), "desc": "Tarako Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_unagi_eng_nightly'), "desc": "Unagi Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_eng_nightly'), "desc": "Unknown B2G Device Image Nightly (Engineering)"},
{"regex": re.compile('b2g.*_buri-limited-memory_nightly'), "desc": "Buri Limited Memory Device Image Nightly"},
{"regex": re.compile('b2g.*_emulator.*_nightly'), "desc": "B2G Emulator Image Nightly"},
{"regex": re.compile('b2g.*_flame_nightly'), "desc": "Flame Device Image Nightly"},
{"regex": re.compile('b2g.*_hamachi_nightly'), "desc": "Hamachi Device Image Nightly"},
{"regex": re.compile('b2g.*_helix_nightly'), "desc": "Helix Device Image Nightly"},
{"regex": re.compile('b2g.*_inari_nightly'), "desc": "Inari Device Image Nightly"},
{"regex": re.compile('b2g.*_leo_nightly'), "desc": "Leo Device Image Nightly"},
{"regex": re.compile('b2g.*_nexus-4_nightly'), "desc": "Nexus 4 Device Image Nightly"},
{"regex": re.compile('b2g.*_tarako_nightly'), "desc": "Tarako Device Image Nightly"},
{"regex": re.compile('b2g.*_unagi_nightly'), "desc": "Unagi Device Image Nightly"},
{"regex": re.compile('b2g.*_wasabi_nightly'), "desc": "Wasabi Device Image Nightly"},
{"regex": re.compile('b2g.*_nightly'), "desc": "Unknown B2G Device Image Nightly"},
{"regex": re.compile('(?:l10n|localizer) nightly'), "desc": "L10n Nightly"},
{"regex": re.compile('nightly'), "desc": "Nightly"},
{"regex": re.compile('b2g.*_flame_eng_(?:dep|periodic)'), "desc": "Flame Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_hamachi_eng_(?:dep|periodic)'), "desc": "Hamachi Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_helix_eng_(?:dep|periodic)'), "desc": "Helix Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_inari_eng_(?:dep|periodic)'), "desc": "Inari Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_leo_eng_(?:dep|periodic)'), "desc": "Leo Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_nexus-4_eng_(?:dep|periodic)'), "desc": "Nexus 4 Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_tarako_eng_(?:dep|periodic)'), "desc": "Tarako Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_unagi_eng_(?:dep|periodic)'), "desc": "Unagi Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_eng_(?:dep|periodic)'), "desc": "Unknown B2G Device Image Build (Engineering)"},
{"regex": re.compile('b2g.*_emulator.*_dep'), "desc": "B2G Emulator Image Build"},
{"regex": re.compile('b2g.*_buri-limited-memory_(?:dep|periodic)'), "desc": "Buri Limited Memory Device Image Build"},
{"regex": re.compile('b2g.*_emulator.*_(?:dep|periodic)'), "desc": "B2G Emulator Image Build"},
{"regex": re.compile('b2g.*_flame_(?:dep|periodic)'), "desc": "Flame Device Image Build"},
{"regex": re.compile('b2g.*_hamachi_(?:dep|periodic)'), "desc": "Hamachi Device Image Build"},
{"regex": re.compile('b2g.*_helix_(?:dep|periodic)'), "desc": "Helix Device Image Build"},
{"regex": re.compile('b2g.*_inari_(?:dep|periodic)'), "desc": "Inari Device Image Build"},
{"regex": re.compile('b2g.*_leo_(?:dep|periodic)'), "desc": "Leo Device Image Build"},
{"regex": re.compile('b2g.*_nexus-4_(?:dep|periodic)'), "desc": "Nexus 4 Device Image Build"},
{"regex": re.compile('b2g.*_tarako_(?:dep|periodic)'), "desc": "Tarako Device Image Build"},
{"regex": re.compile('b2g.*_unagi_(?:dep|periodic)'), "desc": "Unagi Device Image Build"},
{"regex": re.compile('b2g.*_wasabi_(?:dep|periodic)'), "desc": "Wasabi Device Image Build"},
{"regex": re.compile('b2g.*_(?:dep|periodic)'), "desc": "Unknown B2G Device Image Build"},
@@ -333,16 +350,25 @@ TEST_NAME_BUILDERNAME = [
#// These are generally sorted in the same order as how they are sorted in
#// Config.js, though some exceptions are needed to avoid false-positives.
{"regex": re.compile('mozbase'), "desc": "Mozbase Unit Tests"},
{"regex": re.compile('mochitest-e10s-browser-chrome'), "desc": "Mochitest e10s Browser Chrome"},
{"regex": re.compile('mochitest-e10s-devtools-chrome'), "desc": "Mochitest e10s DevTools Browser Chrome"},
{"regex": re.compile('mochitest-e10s-other'), "desc": "Mochitest e10s Other"},
{"regex": re.compile('mochitest-e10s'), "desc": "Mochitest e10s"},
{"regex": re.compile('mochitest-browser-chrome'), "desc": "Mochitest Browser Chrome"},
{"regex": re.compile('mochitest-devtools-chrome'), "desc": "Mochitest DevTools Browser Chrome"},
{"regex": re.compile('mochitest-metro-chrome'), "desc": "Mochitest Metro Browser Chrome"},
{"regex": re.compile('mochitest-other'), "desc": "Mochitest Other"},
{"regex": re.compile('mochitest-gl'), "desc": "Mochitest WebGL"},
{"regex": re.compile('mochitest'), "desc": "Mochitest"},
{"regex": re.compile('robocop'), "desc": "Robocop"},
{"regex": re.compile('crashtest-ipc'), "desc": "Crashtest-IPC"},
{"regex": re.compile('crashtest-e10s'), "desc": "Crashtest e10s"},
{"regex": re.compile('crashtest-ipc'), "desc": "Crashtest IPC"},
{"regex": re.compile('crashtest'), "desc": "Crashtest"},
{"regex": re.compile('jsreftest-e10s'), "desc": "JSReftest e10s"},
{"regex": re.compile('jsreftest'), "desc": "JSReftest"},
{"regex": re.compile('reftest-ipc'), "desc": "Reftest-IPC"},
{"regex": re.compile('reftest-e10s'), "desc": "Reftest e10s"},
{"regex": re.compile('reftest-ipc'), "desc": "Reftest IPC"},
{"regex": re.compile('reftest-omtc'), "desc": "Reftest OMTC"},
{"regex": re.compile('reftest-no-accel'), "desc": "Reftest Unaccelerated"},
{"regex": re.compile('reftest'), "desc": "Reftest"},
{"regex": re.compile('cppunit'), "desc": "CPP Unit Tests"},
@@ -363,12 +389,14 @@ TEST_NAME_BUILDERNAME = [
# when updating, please take care to ensure the ``testname`` AND the
# ``groupname`` exist in the ``SYMBOLS`` dict as well.
GROUP_NAMES = {
"Flame Device Image Build": "Flame Device Image",
"Flame Device Image Build (Engineering)": "Flame Device Image",
"Flame Device Image Nightly": "Flame Device Image",
"Flame Device Image Nightly (Engineering)": "Flame Device Image",
"Hamachi Device Image Build": "Buri/Hamachi Device Image",
"Hamachi Device Image Build (Engineering)": "Buri/Hamachi Device Image",
"Buri Limited Memory Device Image Build": "Buri/Hamachi Device Image",
"Hamachi Device Image Nightly": "Buri/Hamachi Device Image",
"Hamachi Device Image Nightly (Engineering)": "Buri/Hamachi Device Image",
"Buri Limited Memory Device Image Nightly": "Buri/Hamachi Device Image",
"Helix Device Image Build": "Helix Device Image",
"Helix Device Image Build (Engineering)": "Helix Device Image",
"Helix Device Image Nightly": "Helix Device Image",
@@ -382,7 +410,13 @@ GROUP_NAMES = {
"Leo Device Image Nightly": "Leo Device Image",
"Leo Device Image Nightly (Engineering)": "Leo Device Image",
"Nexus 4 Device Image Build": "Nexus 4 Device Image",
"Nexus 4 Device Image Build (Engineering)": "Nexus 4 Device Image",
"Nexus 4 Device Image Nightly": "Nexus 4 Device Image",
"Nexus 4 Device Image Nightly (Engineering)": "Nexus 4 Device Image",
"Tarako Device Image Build": "Tarako Device Image",
"Tarako Device Image Build (Engineering)": "Tarako Device Image",
"Tarako Device Image Nightly": "Tarako Device Image",
"Tarako Device Image Nightly (Engineering)": "Tarako Device Image",
"Unagi Device Image Build": "Unagi Device Image",
"Unagi Device Image Build (Engineering)": "Unagi Device Image",
"Unagi Device Image Nightly": "Unagi Device Image",
@@ -398,15 +432,24 @@ GROUP_NAMES = {
"Mochitest": "Mochitest",
"Mochitest WebGL": "Mochitest",
"Mochitest Browser Chrome": "Mochitest",
"Mochitest DevTools Browser Chrome": "Mochitest",
"Mochitest Metro Browser Chrome": "Mochitest",
"Mochitest Other": "Mochitest",
"Mochitest e10s": "Mochitest e10s",
"Mochitest e10s Browser Chrome": "Mochitest e10s",
"Mochitest e10s DevTools Browser Chrome": "Mochitest e10s",
"Mochitest e10s Other": "Mochitest e10s",
"Robocop": "Mochitest",
"Crashtest": "Reftest",
"Crashtest-IPC": "Reftest",
"Crashtest IPC": "Reftest",
"Reftest": "Reftest",
"Reftest Unaccelerated": "Reftest",
"Reftest-IPC": "Reftest",
"Reftest IPC": "Reftest",
"Reftest OMTC": "Reftest",
"JSReftest": "Reftest",
"Crashtest e10s": "Reftest e10s",
"JSReftest e10s": "Reftest e10s",
"Reftest e10s": "Reftest e10s",
"SpiderMonkey ARM Simulator Build": "SpiderMonkey",
"SpiderMonkey DTrace Build": "SpiderMonkey",
"SpiderMonkey Fail-On-Warnings Build": "SpiderMonkey",
@@ -470,14 +513,18 @@ SYMBOLS = {
"L10n Nightly" : "N",
"L10n Repack": "L10n",
"B2G Emulator Image Build": "B",
"B2G Emulator Image Nightly": "N",
# // B2G device image builds (grouped by device in the UI)
"Flame Device Image": "Flame",
"Flame Device Image Build": "B",
"Flame Device Image Build (Engineering)": "Be",
"Flame Device Image Nightly": "N",
"Flame Device Image Nightly (Engineering)": "Ne",
"Buri/Hamachi Device Image": "Buri/Hamachi",
"Hamachi Device Image Build": "B",
"Hamachi Device Image Build (Engineering)": "Be",
"Buri Limited Memory Device Image Build": "Bm",
"Hamachi Device Image Nightly": "N",
"Hamachi Device Image Nightly (Engineering)": "Ne",
"Buri Limited Memory Device Image Nightly": "Nm",
"Helix Device Image": "Helix",
"Helix Device Image Build": "B",
"Helix Device Image Build (Engineering)": "Be",
@@ -495,7 +542,14 @@ SYMBOLS = {
"Leo Device Image Nightly (Engineering)": "Ne",
"Nexus 4 Device Image": "Nexus 4",
"Nexus 4 Device Image Build": "B",
"Nexus 4 Device Image Build (Engineering)": "Be",
"Nexus 4 Device Image Nightly": "N",
"Nexus 4 Device Image Nightly (Engineering)": "Ne",
"Tarako Device Image": "Tarako",
"Tarako Device Image Build": "B",
"Tarako Device Image Build (Engineering)": "Be",
"Tarako Device Image Nightly": "N",
"Tarako Device Image Nightly (Engineering)": "Ne",
"Unagi Device Image": "Unagi",
"Unagi Device Image Build": "B",
"Unagi Device Image Build (Engineering)": "Be",
@@ -518,15 +572,24 @@ SYMBOLS = {
#// run across all platforms and therefore benefit from better vertical alignment.
"Mochitest" : "M",
"Mochitest Browser Chrome" : "bc",
"Mochitest DevTools Browser Chrome" : "dt",
"Mochitest Metro Browser Chrome" : "mc",
"Mochitest Other" : "oth",
"Mochitest WebGL" : "gl",
"Mochitest e10s" : "M-e10s",
"Mochitest e10s Browser Chrome" : "bc",
"Mochitest e10s DevTools Browser Chrome" : "dt",
"Mochitest e10s Other" : "oth",
"Robocop" : "rc",
"Crashtest" : "C",
"Crashtest-IPC" : "Cipc",
"Crashtest e10s" : "C",
"Crashtest IPC" : "Cipc",
"JSReftest" : "J",
"JSReftest e10s" : "J",
"Reftest" : "R",
"Reftest-IPC" : "Ripc",
"Reftest e10s" : "R-e10s",
"Reftest IPC" : "Ripc",
"Reftest OMTC" : "Ro",
"Reftest Unaccelerated" : "Ru",
#// All other unit tests, sorted alphabetically by TBPL symbol.
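The comment above GROUP_NAMES is the key constraint in this file: any "desc" produced by a TEST_NAME_BUILDERNAME match must also appear in SYMBOLS, and so must the group it maps to via GROUP_NAMES, which is why this commit adds each new device and e10s job name to all three tables at once. A minimal, hypothetical sketch of how the tables chain together (the excerpts and the buildername below are illustrative only, not the real extraction code in treeherder/etl/buildbot.py):

import re

TEST_NAMES = [  # excerpt of TEST_NAME_BUILDERNAME; more specific patterns come first
    {"regex": re.compile('mochitest-e10s-devtools-chrome'),
     "desc": "Mochitest e10s DevTools Browser Chrome"},
    {"regex": re.compile('mochitest-e10s'), "desc": "Mochitest e10s"},
]
GROUPS = {"Mochitest e10s DevTools Browser Chrome": "Mochitest e10s"}  # excerpt of GROUP_NAMES
SYMS = {"Mochitest e10s DevTools Browser Chrome": "dt",
        "Mochitest e10s": "M-e10s"}                                    # excerpt of SYMBOLS

def classify(buildername):
    # first matching regex wins, then name and group are resolved to symbols
    for entry in TEST_NAMES:
        if entry["regex"].search(buildername):
            name = entry["desc"]
            group = GROUPS.get(name, name)
            return name, group, SYMS.get(name), SYMS.get(group)
    return None

print(classify("Ubuntu 64 mozilla-central opt test mochitest-e10s-devtools-chrome"))
# -> ('Mochitest e10s DevTools Browser Chrome', 'Mochitest e10s', 'dt', 'M-e10s')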

View file

@@ -2,6 +2,8 @@ import json
import MySQLdb
import time
from _mysql_exceptions import IntegrityError
from warnings import filterwarnings, resetwarnings
from django.conf import settings
@@ -65,7 +67,8 @@ class JobsModel(TreeherderModelBase):
"id": "rs.id",
"revision_hash": "rs.revision_hash",
"revision": "revision.revision",
"author": "revision.author"
"author": "revision.author",
"push_timestamp": "rs.push_timestamp"
},
"bug_job_map": {
"job_id": "job_id",
@@ -336,15 +339,19 @@ class JobsModel(TreeherderModelBase):
"""
Store a new relation between the given job and bug ids.
"""
self.get_jobs_dhub().execute(
proc='jobs.inserts.insert_bug_job_map',
placeholders=[
job_id,
bug_id,
assignment_type
],
debug_show=self.DEBUG
)
try:
self.get_jobs_dhub().execute(
proc='jobs.inserts.insert_bug_job_map',
placeholders=[
job_id,
bug_id,
assignment_type
],
debug_show=self.DEBUG
)
except IntegrityError as e:
raise JobDataIntegrityError(e)
def delete_bug_job_map(self, job_id, bug_id):
"""
@@ -1636,10 +1643,21 @@ class JobsModel(TreeherderModelBase):
'revision_ids':revision_id_lookup
}
def get_revision_timestamp(self, rev):
"""Get the push timestamp of the resultset for a revision"""
return self.get_revision_resultset_lookup([rev])[rev][
"push_timestamp"
]
class JobDataError(ValueError):
pass
class JobDataIntegrityError(IntegrityError):
pass
class JobData(dict):
"""
Encapsulates data access from incoming test data structure.

View file

@@ -269,6 +269,7 @@
jg.`symbol` as job_group_symbol,
jg.`description` as job_group_description,
j.`who`,
j.failure_classification_id,
j.`result_set_id`,
j.`result`,
j.`state`,

View file

@@ -1,3 +1,4 @@
from treeherder.model.derived.jobs import JobDataIntegrityError
from rest_framework import viewsets
from rest_framework.response import Response
@@ -18,10 +19,20 @@ class BugJobMapViewSet(viewsets.ViewSet):
job_id, bug_id = map(int, (request.DATA['job_id'],
request.DATA['bug_id']))
jm.insert_bug_job_map(job_id, bug_id,
request.DATA['type'])
try:
jm.insert_bug_job_map(job_id, bug_id,
request.DATA['type'])
except JobDataIntegrityError as e:
code, msg = e.args
if "Duplicate" in msg:
return Response(
{"message": "Bug job map skipped: {0}".format(msg)},
409
)
else:
raise e
return Response({"message": "Bug job map stored"})
return Response({"message": "Bug job map saved"})
@with_jobs
def destroy(self, request, project, jm, pk=None):
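With the try/except above in place, posting the same job/bug pair a second time now comes back as an HTTP 409 with a "skipped" message instead of bubbling up a raw MySQL IntegrityError. A hedged sketch of a client reacting to that; the host, URL layout, and lack of authentication are assumptions for illustration, only the payload fields and the 409-on-duplicate behaviour come from the view code above:

import requests

# Hypothetical endpoint; real calls also need the authentication that the
# tests fake with force_authenticate().
url = "https://treeherder.example.org/api/project/mozilla-central/bug-job-map/"
payload = {"job_id": 12345, "bug_id": 1, "type": "manual"}

resp = requests.post(url, data=payload)
if resp.status_code == 409:
    print("duplicate classification:", resp.json()["message"])
else:
    print(resp.json()["message"])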

View file

@@ -6,7 +6,8 @@ from rest_framework.decorators import link
from rest_framework.reverse import reverse
from treeherder.model.derived import DatasetNotFoundError
from treeherder.webapp.api.utils import (UrlQueryFilter, with_jobs,
oauth_required, get_option)
oauth_required, get_option,
to_timestamp)
class ResultSetViewSet(viewsets.ViewSet):
@@ -23,7 +24,43 @@
"""
filter = UrlQueryFilter(request.QUERY_PARAMS)
# make a mutable copy of these params
filter_params = request.QUERY_PARAMS.copy()
# This will contain some meta data about the request and results
meta = {}
# support ranges for date as well as revisions(changes) like old tbpl
for param in ["fromchange", "tochange", "startdate", "enddate"]:
v = filter_params.get(param, None)
if v:
del(filter_params[param])
meta[param] = v
# translate these params into our own filtering mechanism
if 'fromchange' in meta:
filter_params.update({
"push_timestamp__gte": jm.get_revision_timestamp(meta['fromchange'])
})
if 'tochange' in meta:
filter_params.update({
"push_timestamp__lte": jm.get_revision_timestamp(meta['tochange'])
})
if 'startdate' in meta:
filter_params.update({
"push_timestamp__gte": to_timestamp(meta['startdate'])
})
if 'enddate' in meta:
# add a day because we aren't supplying a time, just a date. So
# we're doing ``less than``, rather than ``less than or equal to``.
filter_params.update({
"push_timestamp__lt": to_timestamp(meta['enddate']) + 86400
})
meta['filter_params'] = filter_params
filter = UrlQueryFilter(filter_params)
offset_id = filter.pop("id__lt", 0)
count = filter.pop("count", 10)
@@ -35,7 +72,15 @@ class ResultSetViewSet(viewsets.ViewSet):
full,
filter.conditions
)
return Response(self.get_resultsets_with_jobs(jm, objs, full, {}))
results = self.get_resultsets_with_jobs(jm, objs, full, {})
meta['count'] = len(results)
meta['repository'] = project
return Response({
'meta': meta,
'results': results
})
@with_jobs
def retrieve(self, request, project, jm, pk=None):
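The net effect of the list() changes is that revision ranges (fromchange/tochange) and date ranges (startdate/enddate) are translated into push_timestamp bounds before UrlQueryFilter runs, and the response is now an envelope with meta (the range params, the derived filter_params, count, and repository) alongside results. A hedged sketch of a client call; the host is made up and the /api/project/<project>/resultset/ path is assumed to be what the resultset-list route resolves to, while the query parameters and response shape come from the view above:

import requests

resp = requests.get(
    "https://treeherder.example.org/api/project/mozilla-central/resultset/",
    params={"fromchange": "21fb3eed1b5f", "tochange": "909f55c626a8"},
)
data = resp.json()
print(data["meta"])   # echoes fromchange/tochange plus derived push_timestamp__gte/__lte
for rs in data["results"]:
    print(rs["id"], rs["revision"], rs["push_timestamp"])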

View file

@@ -1,8 +1,9 @@
from collections import defaultdict
import time
import datetime
import simplejson as json
import oauth2 as oauth
from django.conf import settings
from rest_framework.response import Response
@@ -220,3 +221,10 @@ def get_option(obj, option_collections):
else:
return None
def to_timestamp(datestr):
"""get a timestamp from a datestr like 2014-03-31"""
return time.mktime(datetime.datetime.strptime(
datestr,
"%Y-%m-%d"
).timetuple())
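One caveat about to_timestamp(): time.mktime() interprets the parsed date in the server's local timezone, which is why the date-range test above asserts Pacific-midnight values (1376118000.0 is 2013-08-10 00:00 PDT). A short illustration of how the resultset endpoint uses it, repeating the one-day enddate adjustment; the numeric results assume a US/Pacific local timezone, matching the tests:

# Continuing from the helper above.
start = to_timestamp("2013-08-10")        # 1376118000.0
end = to_timestamp("2013-08-13") + 86400  # 1376463600.0, i.e. the start of 2013-08-14

filter_params = {
    "push_timestamp__gte": start,  # inclusive lower bound
    "push_timestamp__lt": end,     # exclusive upper bound, hence __lt rather than __lte
}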