Don't override objects on stats models

Wil Clouser 2010-05-12 19:10:52 -07:00
Parent 589cea56da
Commit 8e4e52e46c
6 changed files with 41 additions and 34 deletions
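
The change keeps a plain models.Manager() as the first, and therefore default, manager on each stats model and moves the date-aware StatsManager onto a separate stats attribute. A minimal sketch of the resulting shape, assuming StatsManager and StatsDictField are importable from the stats app's db helpers (the import path and the addon app label are not shown in this diff), with most fields and the caching mixin omitted:

from django.db import models

# Assumed import path for the stats helpers; not shown in this commit.
from stats.db import StatsManager, StatsDictField


class DownloadCount(models.Model):
    # Fields abbreviated; 'addons.Addon' is an assumed app label.
    addon = models.ForeignKey('addons.Addon')
    count = models.PositiveIntegerField()
    date = models.DateField()
    sources = StatsDictField(db_column='src', null=True)

    # Declared first, so it stays the model's default manager.
    objects = models.Manager()
    # Date-aware aggregation helpers are opt-in via the stats manager.
    stats = StatsManager('date')

    class Meta:
        db_table = 'download_counts'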

View file

@@ -82,17 +82,17 @@ def attach_stats(request, addons, date_):
     date_1w, date_2w = date_ - td(days=7), date_ - td(days=14)
 
     # Gather download stats.
-    q = (DownloadCount.objects.filter(addon__in=ids).values('addon')
+    q = (DownloadCount.stats.filter(addon__in=ids).values('addon')
         .annotate(Sum('count')))
     downloads = gather_stats(q, 'addon', 'count__sum', date_)
 
     # Gather active daily user stats.
-    q = (UpdateCount.objects.filter(addon__in=ids).values('addon')
+    q = (UpdateCount.stats.filter(addon__in=ids).values('addon')
         .annotate(Avg('count')))
     adus = gather_stats(q, 'addon', 'count__avg', date_)
 
     # Download data for sparklines.
-    q = (DownloadCount.objects.filter(addon__in=ids, date__gte=date_2w)
+    q = (DownloadCount.stats.filter(addon__in=ids, date__gte=date_2w)
         .order_by('addon', 'date').values_list('addon', 'count'))
     sparks = collections.defaultdict(list)
     for addon_id, count in q:

View file

@@ -30,7 +30,8 @@ class CollectionCount(caching.base.CachingMixin, models.Model):
     count = models.PositiveIntegerField()
     date = models.DateField()
 
-    objects = StatsManager('date')
+    objects = models.Manager()
+    stats = StatsManager('date')
 
     class Meta:
         db_table = 'stats_collections_counts'
@@ -55,7 +56,8 @@ class DownloadCount(caching.base.CachingMixin, models.Model):
     # Leave this out of queries if you can.
     sources = StatsDictField(db_column='src', null=True)
 
-    objects = StatsManager('date')
+    objects = models.Manager()
+    stats = StatsManager('date')
 
     class Meta:
         db_table = 'download_counts'
@@ -73,7 +75,8 @@ class UpdateCount(caching.base.CachingMixin, models.Model):
     oses = StatsDictField(db_column='os', null=True)
     locales = StatsDictField(db_column='locale', null=True)
 
-    objects = StatsManager('date')
+    objects = models.Manager()
+    stats = StatsManager('date')
 
     class Meta:
         db_table = 'update_counts'
@@ -85,7 +88,8 @@ class ShareCount(caching.base.CachingMixin, models.Model):
     service = models.CharField(max_length=255, null=True)
     date = models.DateField()
 
-    objects = StatsManager('date')
+    objects = models.Manager()
+    stats = StatsManager('date')
 
     class Meta:
         db_table = 'stats_share_counts'
@@ -96,7 +100,8 @@ class ShareCountTotal(caching.base.CachingMixin, models.Model):
     count = models.PositiveIntegerField()
     service = models.CharField(max_length=255, null=True)
 
-    objects = caching.base.CachingManager()
+    objects = models.Manager()
+    stats = caching.base.CachingManager()
 
     class Meta:
         db_table = 'stats_share_counts_totals'
@@ -127,7 +132,8 @@ class Contribution(caching.base.CachingMixin, models.Model):
     transaction_id = models.CharField(max_length=255, null=True)
     post_data = StatsDictField(null=True)
 
-    objects = StatsManager('created')
+    objects = models.Manager()
+    stats = StatsManager('created')
 
    class Meta:
         db_table = 'stats_contributions'
@@ -242,7 +248,8 @@ class GlobalStat(caching.base.CachingMixin, models.Model):
     count = models.IntegerField()
     date = models.DateField()
 
-    objects = caching.base.CachingManager()
+    objects = models.Manager()
+    stats = caching.base.CachingManager()
 
     class Meta:
         db_table = 'global_stats'
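
Because objects is now declared first as a plain models.Manager(), it is what Django treats as the model's default manager, so related lookups and generic .objects call sites get ordinary querysets; the summary machinery has to be asked for explicitly. A short, hedged illustration of the split, using Sum, the stock Django aggregate that appears elsewhere in this diff:

from django.db.models import Sum

# Plain row access keeps going through the default manager.
dc = DownloadCount.objects.get(pk=1)

# Aggregation helpers such as summary() are reached through the stats manager.
totals = DownloadCount.stats.filter(addon=4).summary(count_sum=Sum('count'))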

View file

@@ -92,40 +92,40 @@ class TestDbAggregates(test.TestCase):
     fixtures = ['stats/test_models.json']
 
     def test_count(self):
-        qs = DownloadCount.objects.filter(date__range=(
+        qs = DownloadCount.stats.filter(date__range=(
             date(2009, 6, 1), date(2009, 6, 30)))
         s = qs.summary(my_count=Count('count'))
         eq_(s['my_count'], 5, 'unexpected aggregate count')
         eq_(s['my_count'], s['row_count'], 'count and row_count differ')
 
     def test_sum(self):
-        qs = DownloadCount.objects.filter(date__range=(
+        qs = DownloadCount.stats.filter(date__range=(
             date(2009, 6, 1), date(2009, 6, 30)))
         s = qs.summary(count_sum=Sum('count'), source_sum=Sum('sources'))
         eq_(s['count_sum'], 50, 'unexpected aggregate count sum')
         eq_(s['source_sum']['search'], 15, 'unexpected aggregate sources sum')
 
     def test_first(self):
-        qs = DownloadCount.objects.filter(date__range=(
+        qs = DownloadCount.stats.filter(date__range=(
             date(2009, 6, 1), date(2009, 6, 30)))
         s = qs.summary(first_date=First('date'))
         eq_(s['first_date'], date(2009, 6, 28),
             'unexpected aggregate first date')
 
     def test_last(self):
-        qs = DownloadCount.objects.filter(date__range=(
+        qs = DownloadCount.stats.filter(date__range=(
             date(2009, 6, 1), date(2009, 6, 30)))
         s = qs.summary(last_date=Last('date'))
         eq_(s['last_date'], date(2009, 6, 1), 'unexpected aggregate last date')
 
     def test_avg(self):
-        qs = DownloadCount.objects.filter(date__range=(
+        qs = DownloadCount.stats.filter(date__range=(
            date(2009, 6, 1), date(2009, 6, 30)))
         s = qs.summary(my_avg=Avg('count'))
         eq_(s['my_avg'], Decimal('10.0'), 'unexpected aggregate avg value')
 
     def test_dayavg(self):
-        qs = DownloadCount.objects.filter(date__range=(
+        qs = DownloadCount.stats.filter(date__range=(
             date(2009, 6, 1), date(2009, 6, 30)))
         s = qs.summary(my_avg=DayAvg('count'))
         eq_(s['my_avg'].quantize(Decimal('0.1')), Decimal('1.8'),  # 50 / 28days
@@ -136,7 +136,7 @@ class TestDbSummaries(test.TestCase):
     fixtures = ['stats/test_models.json']
 
     def test_period_summary(self):
-        qs = DownloadCount.objects.filter(addon=4,
+        qs = DownloadCount.stats.filter(addon=4,
             date__range=(date(2009, 6, 1), date(2009, 7, 3)))
         s = list(qs.period_summary('day', fill_holes=True))
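
These tests pin down the two entry points the rest of the diff leans on: summary() collapses a stats queryset to a single dict of aggregates, and period_summary() yields one dict per period. A compact sketch of the same calls, using only names that appear in the tests above:

from datetime import date
from django.db.models import Avg, Count, Sum

june = (date(2009, 6, 1), date(2009, 6, 30))
qs = DownloadCount.stats.filter(date__range=june)

# One dict of aggregates for the whole range.
totals = qs.summary(my_count=Count('count'), count_sum=Sum('count'),
                    my_avg=Avg('count'))

# One summary dict per day; fill_holes=True emits rows for days with no data.
days = list(qs.period_summary('day', fill_holes=True))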

View file

@@ -22,7 +22,7 @@ class TestUnknownGen(test.TestCase):
        to this existing dictionary key (which would result in a
        TypeError).
        """
-        qs = UpdateCount.objects.filter(pk=3)
+        qs = UpdateCount.stats.filter(pk=3)
        fields = [('date', 'start'), ('count', DayAvg('count')),
                  ('applications', DayAvg('applications'))]
        stats = qs.daily_summary(**dict(fields))

View file

@@ -13,14 +13,14 @@ class TestDownloadCountModel(test.TestCase):
     fixtures = ['stats/test_models.json']
 
     def test_sources(self):
-        dc = DownloadCount.objects.get(id=1)
+        dc = DownloadCount.stats.get(id=1)
         assert isinstance(dc.sources, StatsDict), 'sources is not a StatsDict'
         assert len(dc.sources) > 0, 'sources is empty'
 
     def test_summary(self):
         # somewhat contrived, but a good test: summarize the entire dataset
-        summary = DownloadCount.objects.all().summary(
+        summary = DownloadCount.stats.all().summary(
             count_sum='count', sources_sum='sources')
         eq_(len(summary), 5, 'unexpected number of keys in summary')
@@ -33,7 +33,7 @@ class TestDownloadCountModel(test.TestCase):
             'zero sources in summary'
 
     def test_remap_special_fields(self):
-        qs = DownloadCount.objects.filter(pk=1)
+        qs = DownloadCount.stats.filter(pk=1)
         days = list(qs.daily_summary(date='start', rows='row_count',
                                      start='count'))
@@ -46,7 +46,7 @@ class TestDownloadCountModel(test.TestCase):
         eq_(days[0]['start'], 10, 'unexpected start value')
 
     def test_weekly_summary(self):
-        qs = DownloadCount.objects.filter(addon=4,
+        qs = DownloadCount.stats.filter(addon=4,
             date__range=(date(2009, 6, 1), date(2009, 7, 3)))
         weeks = list(qs.weekly_summary('count', 'sources'))
@@ -61,7 +61,7 @@ class TestDownloadCountModel(test.TestCase):
             'unexpected sources total in week 5')
 
     def test_monthly_summary(self):
-        qs = DownloadCount.objects.filter(addon=4,
+        qs = DownloadCount.stats.filter(addon=4,
             date__range=(date(2009, 6, 1), date(2009, 9, 30)))
         months = list(qs.monthly_summary('count', 'sources'))
@@ -76,7 +76,7 @@ class TestDownloadCountModel(test.TestCase):
             'unexpected sources total in month 4')
 
     def test_daily_fill_holes(self):
-        qs = DownloadCount.objects.filter(addon=4,
+        qs = DownloadCount.stats.filter(addon=4,
             date__range=(date(2009, 6, 1), date(2009, 6, 7)))
         days = list(qs.daily_summary('count', 'sources', fill_holes=True))
@@ -94,7 +94,7 @@ class TestUpdateCountModel(test.TestCase):
     test_ver = '3.0.9'
 
     def test_serial_types(self):
-        uc = UpdateCount.objects.get(id=1)
+        uc = UpdateCount.stats.get(id=1)
         assert isinstance(uc.versions, StatsDict), 'versions not a StatsDict'
         assert isinstance(uc.statuses, StatsDict), 'statuses not a StatsDict'
@@ -105,7 +105,7 @@ class TestUpdateCountModel(test.TestCase):
         assert len(uc.statuses) > 0, 'statuses is empty'
 
     def test_applications(self):
-        uc = UpdateCount.objects.get(id=1)
+        uc = UpdateCount.stats.get(id=1)
         assert isinstance(uc.applications[self.test_app], dict), \
             'applications item is not a dict'
@@ -113,7 +113,7 @@ class TestUpdateCountModel(test.TestCase):
             'unexpected count for app version'
 
     def test_applications_summary(self):
-        qs = UpdateCount.objects.filter(addon=4,
+        qs = UpdateCount.stats.filter(addon=4,
             date__range=(date(2009, 6, 1), date(2009, 6, 2)))
         summary = qs.summary(apps='applications')
@@ -127,7 +127,7 @@ class TestContributionModel(test.TestCase):
     fixtures = ['stats/test_models.json']
 
     def test_basic(self):
-        c = Contribution.objects.get(id=1)
+        c = Contribution.stats.get(id=1)
         eq_(c.amount, Decimal('1.99'), 'unexpected amount')
         assert isinstance(c.post_data, StatsDict), \
@@ -135,7 +135,7 @@ class TestContributionModel(test.TestCase):
         eq_(c.email, 'nobody@mozilla.com', 'unexpected payer_email')
 
     def test_daily_summary(self):
-        qs = Contribution.objects.filter(addon=4, transaction_id__isnull=False,
+        qs = Contribution.stats.filter(addon=4, transaction_id__isnull=False,
             created__range=(date(2009, 6, 2), date(2009, 6, 3)))
         days = list(qs.daily_summary('amount'))
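
The contribution test drives the same opt-in manager; a brief sketch of the daily roll-up it exercises. The result key names are inferred from the remap test above, so treat them as an assumption:

from datetime import date

qs = Contribution.stats.filter(addon=4, transaction_id__isnull=False,
                               created__range=(date(2009, 6, 2),
                                               date(2009, 6, 3)))

# Each entry should carry the period start plus the summarized field;
# the key names ('start', 'amount') are assumed from the tests, not verified.
daily_amounts = [(day['start'], day['amount'])
                 for day in qs.daily_summary('amount')]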

View file

@@ -33,7 +33,7 @@ def downloads_series(request, addon_id, group, start, end, format):
     # resultkey to fieldname map - stored as a list to maintain order for csv
     fields = [('date', 'start'), ('count', 'count')]
 
-    qs = DownloadCount.objects.filter(addon=addon_id,
+    qs = DownloadCount.stats.filter(addon=addon_id,
             date__range=(start_date, end_date))
     gen = qs.period_summary(group, **dict(fields))
@@ -52,7 +52,7 @@ def usage_series(request, addon_id, group, start, end, format):
     # resultkey to fieldname map - stored as a list to maintain order for csv
     fields = [('date', 'start'), ('count', DayAvg('count'))]
 
-    qs = UpdateCount.objects.filter(addon=addon_id,
+    qs = UpdateCount.stats.filter(addon=addon_id,
             date__range=(start_date, end_date))
     gen = qs.period_summary(group, **dict(fields))
@@ -118,7 +118,7 @@ def sources_series(request, addon_id, group, start, end, format):
     # resultkey to fieldname map - stored as a list to maintain order for csv
     fields = [('date', 'start'), ('count', 'count'), ('sources', 'sources')]
 
-    qs = DownloadCount.objects.filter(addon=addon_id,
+    qs = DownloadCount.stats.filter(addon=addon_id,
             date__range=(start_date, end_date))
     gen = qs.period_summary(group, **dict(fields))
@@ -140,7 +140,7 @@ def usage_breakdown_series(request, addon_id, group,
     # Use DayAvg so days with 0 rows affect the calculation.
     fields = [('date', 'start'), ('count', DayAvg('count')),
               (field, DayAvg(field))]
 
-    qs = UpdateCount.objects.filter(addon=addon_id,
+    qs = UpdateCount.stats.filter(addon=addon_id,
             date__range=(start_date, end_date))
     gen = qs.period_summary(group, **dict(fields))
@@ -206,7 +206,7 @@ def addon_contributions_queryset(addon, start_date, end_date):
     end_date = datetime(end_date.year, end_date.month,
                         end_date.day, 23, 59, 59)
 
-    return Contribution.objects.filter(addon=addon,
+    return Contribution.stats.filter(addon=addon,
                                        transaction_id__isnull=False,
                                        amount__gt=0,
                                        created__range=(start_date, end_date))
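
Every series view above follows the same flow: build a result-key to field map, filter through the stats manager, and hand the queryset to period_summary. A condensed sketch of that flow in a hypothetical helper (the CSV/JSON rendering that normally consumes the generator is left out, the DayAvg import path is assumed, and group is assumed to take the 'day', 'week', or 'month' values seen in the tests):

from datetime import date

from stats.db import DayAvg  # assumed import path for the custom aggregate


def usage_rows(addon_id, group, start_date, end_date):
    # Result key -> field name, kept as a list to preserve CSV column order.
    fields = [('date', 'start'), ('count', DayAvg('count'))]
    qs = UpdateCount.stats.filter(addon=addon_id,
                                  date__range=(start_date, end_date))
    # group is assumed to be 'day', 'week', or 'month'.
    return list(qs.period_summary(group, **dict(fields)))


# Hypothetical call, mirroring the date ranges used in the tests above.
rows = usage_rows(4, 'day', date(2009, 6, 1), date(2009, 6, 7))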