Remove update_perf
Parent: ec400a9dbf
Commit: b46ab4235f
@@ -1,39 +0,0 @@
from datetime import date
from itertools import groupby
import logging

from django.db.models import Max

import cronjobs
from celery.task.sets import TaskSet

from amo.utils import chunked
from .models import Performance
from . import tasks


task_log = logging.getLogger('z.task')


@cronjobs.register
def update_perf():
    # The baseline is where addon_id is null. Find the latest test run so we
    # can update from all the latest perf results.
    last_update = (Performance.objects.filter(addon=None)
                   .aggregate(max=Max('created'))['max'])
    if not last_update:
        task_log.error('update_perf aborted, no last_update')
        return

    last_update = date(*last_update.timetuple()[:3])

    qs = (Performance.objects.filter(created__gte=last_update)
          .values_list('addon', 'osversion', 'average'))
    results = [(addon, list(rows)) for addon, rows
               in groupby(sorted(qs), key=lambda x: x[0])]

    baseline = dict((os, avg) for _, os, avg in qs.filter(addon=None))

    ts = [tasks.update_perf.subtask(args=[baseline, chunk])
          for chunk in chunked(results, 25)]
    TaskSet(ts).apply_async()
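The removed cron job leans on a subtle property of `itertools.groupby`: it only merges *consecutive* rows with equal keys, which is why the queryset is sorted before grouping. A minimal standalone sketch of that pattern, using invented `(addon_id, osversion, average)` tuples rather than real data:

    from itertools import groupby

    # Hypothetical rows mirroring the values_list() shape above.
    rows = [(2, 'mac', 110), (1, 'win', 120), (1, 'mac', 130)]

    # groupby only merges consecutive equal keys, hence sorted() first.
    results = [(addon, list(group))
               for addon, group in groupby(sorted(rows), key=lambda x: x[0])]
    # results == [(1, [(1, 'mac', 130), (1, 'win', 120)]),
    #             (2, [(2, 'mac', 110)])]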
@@ -1,40 +0,0 @@
import json
import logging

import redisutils
from celeryutils import task

from addons.models import Addon
from .models import Performance

log = logging.getLogger('z.perf.task')


@task(rate_limit='1/s')
def update_perf(baseline, perf, **kw):
    log.info('[%s@%s] Updating perf' %
             (len(perf), update_perf.rate_limit))
    all_deltas = {}
    for addon, rows in perf:
        if addon is None:
            continue
        deltas = dict((os, (avg - baseline[os]) / float(baseline[os]) * 100)
                      for _, os, avg in rows)
        if any(d < 0 for d in deltas.values()):
            slowness = None
            all_deltas[addon] = None
        else:
            slowness = int(sum(deltas.values()) / len(deltas))
            d = dict((k, int(v)) for k, v in deltas.items())
            # Include the average slowness as key 0.
            d[0] = slowness
            all_deltas[addon] = json.dumps(d, separators=(',', ':'))
        Addon.objects.filter(pk=addon).update(ts_slowness=slowness)

    # Add all the calculated values to redis so we can show per-platform perf.
    redis = redisutils.connections['master']
    redis.hmset(Performance.ALL_PLATFORMS, all_deltas)

    for key, val in all_deltas.items():
        if val is None:
            redis.hdel(Performance.ALL_PLATFORMS, key)
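For reference, the per-OS delta in the removed task is the percentage overhead relative to the baseline run on that OS, and the stored `ts_slowness` is the integer mean of those deltas. A worked sketch with invented numbers (the baseline values and rows below are hypothetical, not data from this codebase):

    # Hypothetical baseline averages (ms per OS) and per-add-on rows.
    baseline = {'mac': 100.0, 'win': 200.0}
    rows = [(3615, 'mac', 125), (3615, 'win', 250)]

    # Same formula as the removed task: percent slower than baseline.
    deltas = dict((os, (avg - baseline[os]) / float(baseline[os]) * 100)
                  for _, os, avg in rows)
    # deltas == {'mac': 25.0, 'win': 25.0}

    slowness = int(sum(deltas.values()) / len(deltas))  # == 25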
@@ -6,57 +6,10 @@ from pyquery import PyQuery as pq
import amo.tests
from amo.urlresolvers import reverse
from perf.cron import update_perf
from perf.models import Performance
from addons.models import Addon


class TestPerfIndex(amo.tests.TestCase):
    fixtures = ['base/apps', 'base/addon_3615', 'base/addon_5299_gcal',
                'perf/index']

    def setUp(self):
        super(TestPerfIndex, self).setUp()
        update_perf()
        self.url = reverse('perf.index')
        self._perf_threshold = settings.PERF_THRESHOLD
        settings.PERF_THRESHOLD = 25

    def tearDown(self):
        settings.PERF_THRESHOLD = self._perf_threshold

    def test_get(self):
        # Are you there page?
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        addons = r.context['addons']
        eq_(len(addons), 2)
        qs = Performance.objects.filter(addon__isnull=False)
        eq_([a.id for a in addons],
            [p.addon_id for p in qs.order_by('-average')])

    def test_threshold_filter(self):
        # Threshold is 25, so only 1 add-on will show up
        Addon.objects.get(pk=3615).update(ts_slowness=10)
        Addon.objects.get(pk=5299).update(ts_slowness=50)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        addons = r.context['addons']
        eq_(len(addons), 1)

    def test_empty_perf_table(self):
        Addon.objects.update(ts_slowness=None)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    @patch('perf.tasks.update_perf.subtask')
    def test_last_update_none(self, subtask):
        Performance.objects.all().delete()
        update_perf()
        assert not subtask.called


class TestModels(amo.tests.TestCase):
    fixtures = ['base/apps', 'base/addon_3615', 'base/addon_5299_gcal',
                'perf/index']