Removed old mkt stats (bug 934611)
This commit is contained in:
Parent
6b413b7225
Commit
fd81633d8c
@@ -841,9 +841,7 @@ class ESTestCase(TestCase):
addons.search.setup_mapping()
stats.search.setup_indexes()
if settings.MARKETPLACE:
import mkt.stats.search
WebappIndexer.setup_mapping()
mkt.stats.search.setup_mkt_indexes()

@classmethod
def tearDownClass(cls):
@@ -24,7 +24,7 @@ from versions.models import Version

from mkt.constants.regions import REGIONS_CHOICES_SLUG
from mkt.monolith.models import MonolithRecord
from mkt.webapps.models import Installed, Webapp
from mkt.webapps.models import Webapp

from . import search
from .models import (AddonCollectionCount, CollectionCount, CollectionStats,
@@ -227,36 +227,6 @@ def _get_daily_jobs(date=None):
'collection_addon_downloads': (lambda:
AddonCollectionCount.objects.filter(date__lte=date).aggregate(
sum=Sum('count'))['sum']),

# Marketplace stats
# TODO: Remove 'apps_count_new' once we fully migrate to the new
# 'apps_added_*' stats.
'apps_count_new': (Addon.objects
.filter(created__range=(date, next_date),
type=amo.ADDON_WEBAPP).count),
# Temporarily pull app install counts from the Monolith tables since
# that's where they are stored. This will go away once Monolith takes
# over this chart. (See bug 910364)
'apps_count_installed': (
MonolithRecord.objects.filter(recorded__range=(date, next_date),
key='install').count),

# Marketplace reviews
'apps_review_count_new': Review.objects
.filter(created__range=(date, next_date),
editorreview=0, addon__type=amo.ADDON_WEBAPP).count,

# New users
'mmo_user_count_total': UserProfile.objects.filter(
created__lt=next_date,
source=amo.LOGIN_SOURCE_MMO_BROWSERID).count,
'mmo_user_count_new': UserProfile.objects.filter(
created__range=(date, next_date),
source=amo.LOGIN_SOURCE_MMO_BROWSERID).count,

# New developers
'mmo_developer_count_total': AddonUser.objects.filter(
addon__type=amo.ADDON_WEBAPP).values('user').distinct().count,
}

# If we're processing today's stats, we'll do some extras. We don't do
@@ -484,11 +454,6 @@ def _get_monolith_jobs(date=None):
'count': Webapp.objects.filter(
created__range=(date, next_date)).count,
}],
'apps_count_installed': [{
'count': Installed.objects.filter(
created__range=(date, next_date),
addon__type=amo.ADDON_WEBAPP).count,
}],
}

# Add various "Apps Added" for all the dimensions we need.
@@ -8,9 +8,8 @@ from nose.tools import eq_

import amo.tests
from addons.models import Addon, AddonUser
from bandwagon.models import CollectionAddon, Collection
from bandwagon.models import Collection, CollectionAddon
from mkt.constants.regions import REGIONS_CHOICES_SLUG
from mkt.webapps.models import Installed
from reviews.models import Review
from stats import cron, tasks
from stats.models import (AddonCollectionCount, Contribution, DownloadCount,
@@ -299,13 +298,6 @@ class TestMonolithStats(amo.tests.TestCase):
'Incorrect count for region %s, premium type %s. '
'Got %d, expected %d.' % (r, p, count, expected_count))

def test_apps_installed(self):
addon = Addon.objects.create(type=amo.ADDON_WEBAPP)
user = UserProfile.objects.create(username='foo')
Installed.objects.create(addon=addon, user=user)
eq_(tasks._get_monolith_jobs()['apps_count_installed'][0]['count'](),
1)

def test_app_reviews(self):
addon = Addon.objects.create(type=amo.ADDON_WEBAPP)
user = UserProfile.objects.create(username='foo')
@@ -37,16 +37,6 @@ for key in keys:
urls.append(url('^%s/$' % key, views.site_stats_report,
name='stats.%s' % key, kwargs={'report': key}))

# These are the URLs that return JSON back to the front end and actually
# do the SQL query. These means that Marketplace is using the same backend as
# AMO to actually produce the statistics.
keys += ['apps_count_new', 'apps_count_installed', 'apps_review_count_new',
'mmo_user_count_new', 'mmo_user_count_total', 'mmo_total_visitors',
'my_apps']
for key in keys:
urls.append(url(global_series[key], views.site_series,
kwargs={'field': key}))

urlpatterns += patterns('', *urls)

collection_stats_urls = patterns('',
@@ -1,55 +1,51 @@
import csv
import cStringIO
import csv
import itertools
import logging
import time
from types import GeneratorType
from datetime import date, timedelta
from types import GeneratorType

from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.core.serializers.json import DjangoJSONEncoder
from django.db import connection
from django.db.models import Avg, Count, Sum, Q
from django.db.models import Avg, Count, Q, Sum
from django.shortcuts import get_object_or_404
from django.utils import simplejson
from django.utils.cache import add_never_cache_headers, patch_cache_control
from django.utils.datastructures import SortedDict
from django.core.serializers.json import DjangoJSONEncoder
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404

import jingo
from product_details import product_details
import waffle

import amo
from access import acl
from addons.decorators import addon_view, addon_view_factory
from addons.models import Addon
from bandwagon.models import Collection
from bandwagon.views import get_collection
from lib.metrics import get_monolith_client
from zadmin.models import SiteEvent

import amo
from amo.decorators import allow_cross_site_request, json_view, login_required
from amo.urlresolvers import reverse
from amo.utils import memoize
from bandwagon.models import Collection
from bandwagon.views import get_collection
from zadmin.models import SiteEvent

from .models import (CollectionCount, Contribution, DownloadCount,
ThemeUserCount, UpdateCount)


logger = logging.getLogger('z.apps.stats.views')


SERIES_GROUPS = ('day', 'week', 'month')
SERIES_GROUPS_DATE = ('date', 'week', 'month') # Backwards compat.
SERIES_FORMATS = ('json', 'csv')
SERIES = ('downloads', 'usage', 'contributions', 'overview',
'sources', 'os', 'locales', 'statuses', 'versions', 'apps')
SERIES = ('downloads', 'usage', 'contributions', 'overview', 'sources', 'os',
'locales', 'statuses', 'versions', 'apps')
COLLECTION_SERIES = ('downloads', 'subscribers', 'ratings')
GLOBAL_SERIES = ('addons_in_use', 'addons_updated', 'addons_downloaded',
'apps_count_installed', 'apps_count_new',
'apps_review_count_new', 'collections_created',
'mmo_user_count_total', 'mmo_user_count_new',
'mmo_total_visitors', 'reviews_created', 'addons_created',
'collections_created', 'reviews_created', 'addons_created',
'users_created', 'my_apps')
@@ -474,12 +470,6 @@ _KEYS = {
'addon_downloads_new': 'addons_downloaded',
'addon_total_updatepings': 'addons_in_use',
'addon_count_new': 'addons_created',
'apps_count_new': 'apps_count_new',
'apps_count_installed': 'apps_count_installed',
'apps_review_count_new': 'apps_review_count_new',
'mmo_user_count_new': 'mmo_user_count_new',
'mmo_user_count_total': 'mmo_user_count_total',
'webtrends_DailyVisitors': 'mmo_total_visitors',
'version_count_new': 'addons_updated',
'user_count_new': 'users_created',
'review_count_new': 'reviews_created',
@@ -489,48 +479,10 @@ _KEYS = {
_CACHED_KEYS = sorted(_KEYS.values())


def _monolith_site_query(period, start, end, field):
fields = {'mmo_total_visitors': 'visits',
'apps_count_installed': 'app_installs',
'apps_review_count_new': 'review_count',
'mmo_user_count_new': 'user_count',
'apps_count_new': 'app_count',
'mmo_user_count_total': 'total_user_count'}

# Getting data from the monolith server.
client = get_monolith_client()

if period == 'date':
period = 'day'

# The start date is not included in the range.
# The end date is included.
start = start + timedelta(days=1)

def _get_data():
for result in client(fields[field], start, end, interval=period,
strict_range=False):
yield {'date': result['date'].strftime('%Y-%m-%d'),
'data': {field: result['count']}}

try:
return list(_get_data()), _CACHED_KEYS
except ValueError, e:
if len(e.args) > 0:
logger.error(e.args[0])
return [], _CACHED_KEYS


# XXX deactivated until we're happy with monolith
#@memoize(prefix='global_stats', time=60 * 60)
#
@memoize(prefix='global_stats', time=60 * 60)
def _site_query(period, start, end, field=None, request=None):
old_version = request and request.GET.get('old_version', '0') or '0'

if waffle.switch_is_active('monolith-stats') and old_version == '0':
res = _monolith_site_query(period, start, end, field)
return res

cursor = connection.cursor()
# Let MySQL make this fast. Make sure we prevent SQL injection with the
# assert.
@@ -91,11 +91,6 @@ maintained incrementally through post_save and post_delete hooks::

./manage.py index_stats # Index all the update and download counts.

./manage.py index_mkt_stats # Index contributions/installs/inapp-payments.

./manage.py index_stats/index_mkt_stats --addons 12345 1234 # Index
specific addons/webapps.

./manage.py cron reindex_collections # Index all the collections.

./manage.py cron reindex_users # Index all the users.
@@ -1,553 +0,0 @@
|
|||
@import 'lib';
|
||||
|
||||
h1.addon {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.statistics {
|
||||
.island {
|
||||
background: none;
|
||||
border: none;
|
||||
box-shadow: none;
|
||||
float: none;
|
||||
}
|
||||
hgroup {
|
||||
clear: both;
|
||||
}
|
||||
.ui-helper-hidden-accessible {
|
||||
display: none !important;
|
||||
}
|
||||
}
|
||||
.html-rtl.statistics #page .header-search {
|
||||
right: auto;
|
||||
left: 20px;
|
||||
}
|
||||
|
||||
.primary.statistics {
|
||||
margin-left: 210px;
|
||||
}
|
||||
.secondary {
|
||||
float: left;
|
||||
width: 192px;
|
||||
|
||||
a {
|
||||
color: $link;
|
||||
}
|
||||
|
||||
ul, ol {
|
||||
padding: 0;
|
||||
list-style: none;
|
||||
margin-top: 3px;
|
||||
}
|
||||
ul, ol, section {
|
||||
color: $medium-gray;
|
||||
font-size: 13px;
|
||||
line-height: 20px;
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
ul ul li a {
|
||||
padding-left: 1em;
|
||||
}
|
||||
li {
|
||||
border: 1px solid $border-black;
|
||||
border-width: 0 0 1px;
|
||||
position: relative;
|
||||
|
||||
&:last-child {
|
||||
border-width: 0;
|
||||
}
|
||||
a {
|
||||
&.selected {
|
||||
color: $dark-gray;
|
||||
font-weight: bold;
|
||||
}
|
||||
&.selected, &:hover {
|
||||
background: $faint-blue;
|
||||
}
|
||||
}
|
||||
}
|
||||
li.secondary-nav a {
|
||||
padding-left: 18px;
|
||||
}
|
||||
li a {
|
||||
display: block;
|
||||
padding: 6px;
|
||||
text-decoration: none;
|
||||
}
|
||||
section section {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.island {
|
||||
float: none;
|
||||
}
|
||||
> aside, > div {
|
||||
margin-bottom: 2em;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Rules for date criteria selection
|
||||
**/
|
||||
|
||||
.island.criteria {
|
||||
z-index: 1000;
|
||||
float: right;
|
||||
text-transform: lowercase;
|
||||
|
||||
ul {
|
||||
line-height: 2.5em;
|
||||
}
|
||||
li {
|
||||
color: $dark-gray;
|
||||
font-weight: bold;
|
||||
display: inline;
|
||||
|
||||
a {
|
||||
font-weight: normal;
|
||||
border: 1px solid transparent;
|
||||
color: $link;
|
||||
padding: 4px 8px;
|
||||
text-decoration: none;
|
||||
white-space: nowrap;
|
||||
|
||||
&:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
}
|
||||
&.selected a {
|
||||
font-weight: bold;
|
||||
color: $orange;
|
||||
}
|
||||
a.inactive {
|
||||
color: $light-gray;
|
||||
cursor: default;
|
||||
|
||||
&:hover {
|
||||
text-decoration: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.html-rtl .island.criteria {
|
||||
float: left;
|
||||
margin: 0 1em 0 0;
|
||||
padding: 0 12px 0 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Three-up stats
|
||||
**/
|
||||
|
||||
.two-up {
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
|
||||
div {
|
||||
float: left;
|
||||
text-align: left;
|
||||
width: 50%;
|
||||
position: relative;
|
||||
padding-left: 2em;
|
||||
color: $dark-gray;
|
||||
|
||||
&:first-child {
|
||||
padding-left: 1em;
|
||||
}
|
||||
&:first-child:after {
|
||||
content: '';
|
||||
display: block;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: 0;
|
||||
border-left: 1px dotted $medium-gray;
|
||||
height: 100%;
|
||||
}
|
||||
b {
|
||||
font-size: 150%;
|
||||
}
|
||||
a {
|
||||
line-height: 2rem;
|
||||
display: block;
|
||||
font-weight: bold;
|
||||
font-size: 140%;
|
||||
}
|
||||
small {
|
||||
line-height: 2rem;
|
||||
font-size: 110%;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* bar-chart tables
|
||||
**/
|
||||
|
||||
.csv-table {
|
||||
.table-box {
|
||||
width: 100%;
|
||||
overflow: auto;
|
||||
}
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
table-layout: fixed;
|
||||
}
|
||||
thead th {
|
||||
white-space: normal;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
th, td {
|
||||
width: 100px;
|
||||
padding: .5em 1em;
|
||||
text-align: right;
|
||||
|
||||
&:first-child {
|
||||
text-align: left;
|
||||
width: 200px;
|
||||
}
|
||||
}
|
||||
th {
|
||||
font-weight: bold;
|
||||
}
|
||||
tbody {
|
||||
display: none;
|
||||
|
||||
tr {
|
||||
&:nth-child(2n+1) {
|
||||
background: rgba($medium-gray,.2);
|
||||
}
|
||||
}
|
||||
}
|
||||
.paginator {
|
||||
float: none;
|
||||
|
||||
.rel {
|
||||
margin-top: 20px;
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* sidestats
|
||||
**/
|
||||
|
||||
aside.highlight {
|
||||
display: block;
|
||||
}
|
||||
|
||||
#stats-note {
|
||||
dt {
|
||||
margin-top: 1em;
|
||||
}
|
||||
dd {
|
||||
font-size: .8em;
|
||||
}
|
||||
code {
|
||||
font-family: $open-stack;
|
||||
background: $light-gray;
|
||||
color: $dark-gray;
|
||||
font-size: .9em;
|
||||
padding: 2px 4px;
|
||||
}
|
||||
}
|
||||
|
||||
.export-data {
|
||||
font: 13px "helvetica neue", arial, helvetica, sans-serif;
|
||||
margin-left: 1em;
|
||||
|
||||
a {
|
||||
margin: 0 2px;
|
||||
|
||||
&:first-child {
|
||||
margin-left: 4px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.stats-export .export-data {
|
||||
margin: 0;
|
||||
|
||||
a {
|
||||
color: $link;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Big table
|
||||
**/
|
||||
|
||||
table.stats-aggregate {
|
||||
width: 100%;
|
||||
margin-top: .5em;
|
||||
}
|
||||
|
||||
table.stats-aggregate thead th {
|
||||
text-align: right;
|
||||
padding-right: 72px;
|
||||
line-height: 90%;
|
||||
}
|
||||
|
||||
table.stats-aggregate thead th:first-child {
|
||||
text-align: left;
|
||||
padding-right: inherit;
|
||||
}
|
||||
|
||||
table.stats-aggregate tbody td {
|
||||
line-height: 120%;
|
||||
font-size: 140%;
|
||||
}
|
||||
|
||||
table.stats-aggregate tbody td.value {
|
||||
padding: 5px 0 3 0px;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
table.stats-aggregate tbody td.label {
|
||||
padding-left: 2px;
|
||||
}
|
||||
|
||||
table.stats-aggregate tbody span.change {
|
||||
display: block;
|
||||
float: right;
|
||||
width: 54px;
|
||||
padding-left: .5em;
|
||||
text-align: left;
|
||||
font-size: 80%;
|
||||
padding-top: 2px;
|
||||
}
|
||||
|
||||
table.stats-aggregate tbody span.change.plus {
|
||||
color: #00774d;
|
||||
}
|
||||
|
||||
table.stats-aggregate tbody span.change.minus {
|
||||
color: #850000;
|
||||
}
|
||||
|
||||
#stats {
|
||||
.loading, .loaded {
|
||||
position:relative;
|
||||
}
|
||||
.loading:after {
|
||||
-moz-transition: opacity .5s;
|
||||
content: "\00a0";
|
||||
display: block;
|
||||
position: absolute;
|
||||
background: #040204 url("../../img/zamboni/loading.gif") no-repeat center center;
|
||||
opacity: .4;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
z-index: 1000;
|
||||
top: 0;
|
||||
left: 0;
|
||||
}
|
||||
.loaded:after {
|
||||
opacity: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
footer {
|
||||
padding-top: 0;
|
||||
border-top: 1px solid $medium-gray;
|
||||
}
|
||||
}
|
||||
|
||||
.chart {
|
||||
position: relative;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
overflow: hidden;
|
||||
border: 2px solid $light-gray;
|
||||
border-radius: 8px;
|
||||
}
|
||||
#head-chart {
|
||||
position: relative;
|
||||
background: #fff;
|
||||
overflow: hidden;
|
||||
height: 384px;
|
||||
}
|
||||
|
||||
.no-data-overlay {
|
||||
display: none;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
color: #fff;
|
||||
|
||||
p {
|
||||
text-align: center;
|
||||
position: relative;
|
||||
top: 35%;
|
||||
color: $light-gray;
|
||||
font-size: 2em;
|
||||
}
|
||||
}
|
||||
.nodata .no-data-overlay {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.loadmessage {
|
||||
position: fixed;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: -2.5em;
|
||||
height: 2em;
|
||||
margin: 0 auto;
|
||||
text-align: center;
|
||||
pointer-events: none;
|
||||
-moz-transition: top .5s;
|
||||
-webkit-transition: top .5s;
|
||||
transition: top .5s;
|
||||
z-index:9000;
|
||||
}
|
||||
|
||||
.loadmessage span {
|
||||
background: url("../../img/zamboni/loading-small.gif") no-repeat 1em center;
|
||||
background-color: #000;
|
||||
padding: .5em 1em;
|
||||
padding-left: 36px;
|
||||
line-height: 2em;
|
||||
color: #fff;
|
||||
opacity: .75;
|
||||
border-radius: 0 0 .75em .75em;
|
||||
}
|
||||
|
||||
.loadmessage.on {
|
||||
top: 0;
|
||||
}
|
||||
|
||||
.loadmessage.off {
|
||||
top: -2.5em;
|
||||
}
|
||||
|
||||
/* Field Menu */
|
||||
#fieldMenu, #fieldList {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
#fieldList label {
|
||||
display: block;
|
||||
white-space: nowrap;
|
||||
padding-right: 2em;
|
||||
}
|
||||
#fieldList label:hover {
|
||||
background: #ccf;
|
||||
}
|
||||
#fieldMenu button {
|
||||
width: 100%;
|
||||
}
|
||||
#fieldList {
|
||||
max-height: 300px;
|
||||
min-width: 160px;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
#custom-criteria {
|
||||
form {
|
||||
overflow: hidden;
|
||||
text-align: center;
|
||||
}
|
||||
fieldset {
|
||||
display: inline-block;
|
||||
vertical-align: top;
|
||||
border: 0;
|
||||
padding: 0;
|
||||
|
||||
&:first-child {
|
||||
margin: 0 8px 0 0;
|
||||
}
|
||||
p {
|
||||
margin: 0 0 1em;
|
||||
text-align: left;
|
||||
}
|
||||
}
|
||||
footer {
|
||||
clear: left;
|
||||
border-top: 0;
|
||||
margin-top: 0;
|
||||
padding: 0;
|
||||
|
||||
p {
|
||||
text-align: left;
|
||||
margin-bottom: 2px;
|
||||
}
|
||||
}
|
||||
h2 {
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
}
|
||||
.html-rtl #custom-criteria {
|
||||
fieldset {
|
||||
&:first-child {
|
||||
margin: 0 0 0 8px;
|
||||
}
|
||||
p {
|
||||
text-align: right;
|
||||
}
|
||||
}
|
||||
footer p {
|
||||
text-align: right;
|
||||
}
|
||||
}
|
||||
|
||||
#stats-permissions p {
|
||||
margin-top: .1em;
|
||||
float: right;
|
||||
}
|
||||
|
||||
#side-nav .active a {
|
||||
background-color: $light-gray;
|
||||
color: $dark-gray;
|
||||
font-weight: bold;
|
||||
|
||||
&:after {
|
||||
color: inherit;
|
||||
}
|
||||
}
|
||||
#side-nav li.secondary-nav.active a {
|
||||
background: $faint-blue;
|
||||
color: $dark-gray;
|
||||
font-weight: bold;
|
||||
|
||||
&:after {
|
||||
color: inherit;
|
||||
}
|
||||
}
|
||||
|
||||
#popup-container {
|
||||
display: none;
|
||||
}
|
||||
.modal {
|
||||
background-color: $white;
|
||||
border: 3px solid $medium-gray;
|
||||
border-radius: 8px;
|
||||
padding: 8px;
|
||||
|
||||
.close {
|
||||
background: url(../../img/impala/banner-close.png) no-repeat;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
height: 25px;
|
||||
margin: 0;
|
||||
overflow: hidden;
|
||||
position: absolute;
|
||||
right: 1em;
|
||||
text-indent: -1000em;
|
||||
top: 1em;
|
||||
width: 25px;
|
||||
|
||||
&:hover {
|
||||
background-position: -25px 0;
|
||||
background-color: #c40000;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.product-select {
|
||||
float: right;
|
||||
margin-top: 15px;
|
||||
}
|
|
@@ -1,453 +0,0 @@
|
|||
(function () {
|
||||
// "use strict";
|
||||
var $doc = z.doc,
|
||||
$chart = $('#head-chart'),
|
||||
$btnZoom = $('#chart-zoomout'),
|
||||
baseConfig = {
|
||||
chart: {
|
||||
renderTo: 'head-chart',
|
||||
zoomType: 'x',
|
||||
events: {
|
||||
selection: function() {
|
||||
$btnZoom.removeClass('inactive')
|
||||
.click(_pd(function(e) {
|
||||
$(this).trigger('zoomout');
|
||||
}));
|
||||
}
|
||||
}
|
||||
},
|
||||
credits: { enabled: false },
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
xAxis: {
|
||||
type: 'datetime',
|
||||
maxZoom: 7 * 24 * 3600000, // seven days
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
tickmarkPlacement: 'on',
|
||||
startOfWeek: 0,
|
||||
dateTimeLabelFormats: {
|
||||
hour: '%e %b<br>%H:%M'
|
||||
}
|
||||
},
|
||||
yAxis: {
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
labels: {
|
||||
formatter: function() {
|
||||
return Highcharts.numberFormat(this.value, 0);
|
||||
}
|
||||
},
|
||||
min: 0,
|
||||
minPadding: 0.05,
|
||||
showFirstLabel: false
|
||||
},
|
||||
legend: {
|
||||
enabled: true
|
||||
},
|
||||
tooltip: { },
|
||||
plotOptions: {
|
||||
line: {
|
||||
lineWidth: 1.5,
|
||||
shadow: false,
|
||||
marker: {
|
||||
enabled: true,
|
||||
radius: 0,
|
||||
states: {
|
||||
hover: {
|
||||
enabled: true,
|
||||
radius: 5
|
||||
}
|
||||
}
|
||||
},
|
||||
states: {
|
||||
hover: {
|
||||
lineWidth: 2
|
||||
}
|
||||
},
|
||||
connectNulls: true
|
||||
}
|
||||
}
|
||||
};
|
||||
Highcharts.setOptions({'lang': {'resetZoom': ''}});
|
||||
var chart;
|
||||
|
||||
// Determine unit used for a given metric.
|
||||
// Missing keys here are the source of `TypeError: b is not a function`.
|
||||
var metricTypes = {
|
||||
'revenue' : 'currency',
|
||||
'sales' : 'sales',
|
||||
'refunds' : 'refunds',
|
||||
'installs' : 'installs',
|
||||
'usage' : 'users',
|
||||
'reviews_created' : 'reviews',
|
||||
'mmo_total_visitors' : 'users',
|
||||
'mmo_user_count_total' : 'users',
|
||||
'mmo_user_count_new' : 'users',
|
||||
'apps_review_count_new' : 'reviews',
|
||||
'apps_count_installed' : 'installs',
|
||||
'apps_count_new' : 'apps',
|
||||
'my_apps' : 'installs'
|
||||
};
|
||||
|
||||
var acceptedGroups = {
|
||||
'day' : true,
|
||||
'week' : true,
|
||||
'month' : true
|
||||
};
|
||||
|
||||
function showNoDataOverlay() {
|
||||
$chart.parent().addClass('nodata');
|
||||
$chart.parent().removeClass('loading');
|
||||
if (chart && chart.destroy) chart.destroy();
|
||||
}
|
||||
|
||||
$doc.bind('changeview', function() {
|
||||
$chart.parent().removeClass('nodata');
|
||||
$chart.addClass('loading');
|
||||
$btnZoom.addClass('inactive').click(_pd);
|
||||
});
|
||||
|
||||
$doc.bind('dataready', function(e, obj) {
|
||||
var view = obj.view,
|
||||
metric = view.metric,
|
||||
group = view.group,
|
||||
data = obj.data,
|
||||
range = normalizeRange(view.range),
|
||||
start = range.start,
|
||||
end = range.end,
|
||||
date_range_days = parseInt((end - start) / 1000 / 3600 / 24, 10),
|
||||
fields = obj.fields ? obj.fields.slice(0,5) : ['count'],
|
||||
series = {},
|
||||
events = obj.events,
|
||||
chartRange = {},
|
||||
t, row, i, field, val,
|
||||
is_overview = metric == 'overview' || metric == 'app_overview',
|
||||
apps_chart = metric == 'my_apps';
|
||||
|
||||
// Allows reuse of non-in-app code.
|
||||
metric = metric.replace('_inapp', '');
|
||||
|
||||
// Let different function handle if metrics aren't a Highcharts
|
||||
// datetime line graph.
|
||||
if (metric in z.StatsManager.nonDateMetrics) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!(group in acceptedGroups)) {
|
||||
group = 'day';
|
||||
}
|
||||
|
||||
// Disable links if they don't fit into the date range.
|
||||
$('.group a, .range a').removeClass('inactive').unbind('click', false);
|
||||
if (group == 'week') {
|
||||
$('a.days-7').addClass('inactive').bind('click', false);
|
||||
} else if (group == 'month') {
|
||||
$('a.days-7, a.days-30').addClass('inactive').bind('click', false);
|
||||
}
|
||||
if (group == 'day') {
|
||||
$('a.group-day').parent().addClass('selected');
|
||||
}
|
||||
if (date_range_days <= 8) {
|
||||
$('a.group-week, a.group-month').addClass('inactive').bind('click', false);
|
||||
}
|
||||
if (date_range_days <= 31) {
|
||||
$('a.group-month').addClass('inactive').bind('click', false);
|
||||
}
|
||||
|
||||
if (obj.data.empty || !data.firstIndex) {
|
||||
showNoDataOverlay();
|
||||
$chart.removeClass('loading');
|
||||
return;
|
||||
}
|
||||
|
||||
// Initialize the empty series object.
|
||||
_.each(fields, function(f) {series[f] = [];});
|
||||
|
||||
// Transmute the data into something Highcharts understands.
|
||||
start = Date.iso(data.firstIndex);
|
||||
z.data = data;
|
||||
var step = '1 ' + group,
|
||||
point,
|
||||
dataSum = 0;
|
||||
|
||||
if (apps_chart) {
|
||||
series = [];
|
||||
for (i = 0; i < data.stats.length; i++) {
|
||||
series.push([]);
|
||||
forEachISODate({start: start, end: end}, '1 '+group, data.stats[i], function(row, d) {
|
||||
val = parseFloat(z.StatsManager.getField(row, 'count'));
|
||||
val = isNaN(val) ? null : val;
|
||||
series[i].push(val);
|
||||
if (val) {
|
||||
dataSum += val;
|
||||
}
|
||||
}, this);
|
||||
}
|
||||
} else {
|
||||
forEachISODate({start: start, end: end}, '1 '+group, data, function(row, d) {
|
||||
for (i = 0; i < fields.length; i++) {
|
||||
field = fields[i];
|
||||
val = parseFloat(z.StatsManager.getField(row, field));
|
||||
val = isNaN(val) ? null : val;
|
||||
series[field].push(val);
|
||||
if (val) {
|
||||
dataSum += val;
|
||||
}
|
||||
}
|
||||
}, this);
|
||||
}
|
||||
|
||||
console.log('series is', series);
|
||||
|
||||
// Display marker if only one data point.
|
||||
baseConfig.plotOptions.line.marker.radius = 3;
|
||||
var count = 0,
|
||||
dateRegex = /\d{4}-\d{2}-\d{2}/;
|
||||
|
||||
for (var key in data) {
|
||||
if (dateRegex.exec(key) && data.hasOwnProperty(key)) {
|
||||
count++;
|
||||
}
|
||||
if (count > 1 || apps_chart) {
|
||||
baseConfig.plotOptions.line.marker.radius = 0;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// highCharts seems to dislike 0 and null data when determining a yAxis range.
|
||||
if (dataSum === 0) {
|
||||
baseConfig.yAxis.max = 10;
|
||||
} else {
|
||||
baseConfig.yAxis.max = null;
|
||||
}
|
||||
|
||||
// Transform xAxis based on time grouping (day, week, month) and range.
|
||||
var dayMsecs = 1 * 24 * 3600 * 1000;
|
||||
var pointInterval = dayMsecs;
|
||||
var dateRangeDays = (end - start) / dayMsecs;
|
||||
baseConfig.xAxis.min = start - dayMsecs; // Fix chart truncation.
|
||||
baseConfig.xAxis.max = end;
|
||||
baseConfig.xAxis.tickInterval = null;
|
||||
if (group == 'week') {
|
||||
$('a.days-7').addClass('inactive').bind('click', false);
|
||||
pointInterval = 7 * dayMsecs;
|
||||
baseConfig.xAxis.maxZoom = 7 * dayMsecs;
|
||||
|
||||
if (dateRangeDays <= 90) {
|
||||
baseConfig.xAxis.tickInterval = 7 * dayMsecs;
|
||||
}
|
||||
} else if (group == 'month') {
|
||||
$('a.days-7, a.days-30').addClass('inactive').bind('click', false);
|
||||
pointInterval = 30 * dayMsecs;
|
||||
baseConfig.xAxis.maxZoom = 31 * dayMsecs;
|
||||
|
||||
if (dateRangeDays <= 365) {
|
||||
baseConfig.xAxis.tickInterval = 30 * dayMsecs;
|
||||
}
|
||||
}
|
||||
|
||||
// Disable group links if they don't fit into the date range.
|
||||
if (group == 'day') {
|
||||
$('a.group-day').parent().addClass('selected');
|
||||
}
|
||||
if (date_range_days <= 8) {
|
||||
$('a.group-week, a.group-month').addClass('inactive').bind('click', false);
|
||||
}
|
||||
if (date_range_days <= 31) {
|
||||
$('a.group-month').addClass('inactive').bind('click', false);
|
||||
}
|
||||
|
||||
// Set minimum max value for yAxis to prevent duplicate yAxis values.
|
||||
var max = 0;
|
||||
for (key in data) {
|
||||
if (data[key].count > max) {
|
||||
max = data[key].count;
|
||||
}
|
||||
}
|
||||
// Chart has minimum 5 ticks so set max to 5 to avoid pigeonholing.
|
||||
if (max < 5 && !apps_chart) {
|
||||
baseConfig.yAxis.max = 5;
|
||||
}
|
||||
|
||||
// Round the start time to the nearest day (truncate the time) and
|
||||
// account for time zone to line up ticks and points on datetime axis.
|
||||
date = new Date(start);
|
||||
date.setHours(0, 0, 0);
|
||||
start = date.getTime() - (date.getTimezoneOffset() * 60000);
|
||||
|
||||
// Populate the chart config object.
|
||||
var chartData = [], id;
|
||||
if (apps_chart) {
|
||||
for (i = 0; i < data.stats.length; i++) {
|
||||
chartData.push({
|
||||
'type' : 'line',
|
||||
'name' : data.stats[i].name,
|
||||
'id' : 'count' + i,
|
||||
'pointInterval' : pointInterval,
|
||||
// Add offset to line up points and ticks on day grouping.
|
||||
'pointStart' : start,
|
||||
'data' : series[i],
|
||||
'visible' : true
|
||||
});
|
||||
}
|
||||
} else {
|
||||
for (i = 0; i < fields.length; i++) {
|
||||
field = fields[i];
|
||||
id = field.split('|').slice(-1)[0];
|
||||
chartData.push({
|
||||
'type' : 'line',
|
||||
'name' : z.StatsManager.getPrettyName(view.metric, id),
|
||||
'id' : id,
|
||||
'pointInterval' : pointInterval,
|
||||
// Add offset to line up points and ticks on day grouping.
|
||||
'pointStart' : start,
|
||||
'data' : series[field],
|
||||
'visible' : !(metric == 'contributions' && id !='total')
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Generate the tooltip function for this chart.
|
||||
// both x and y axis can be displayed differently.
|
||||
var tooltipFormatter = (function() {
|
||||
var xFormatter,
|
||||
yFormatter;
|
||||
function dayFormatter(d) { return Highcharts.dateFormat('%a, %b %e, %Y', new Date(d)); }
|
||||
function weekFormatter(d) { return format(gettext('Week of {0}'), Highcharts.dateFormat('%b %e, %Y', new Date(d))); }
|
||||
function monthFormatter(d) { return Highcharts.dateFormat('%B %Y', new Date(d)); }
|
||||
function currencyFormatter(n) { return '$' + Highcharts.numberFormat(n, 2); }
|
||||
function salesFormatter(n) { return format(gettext('{0} sales'), Highcharts.numberFormat(n, 0)); }
|
||||
function refundsFormatter(n) { return format(gettext('{0} refunds'), Highcharts.numberFormat(n, 0)); }
|
||||
function installsFormatter(n) { return format(gettext('{0} installs'), Highcharts.numberFormat(n, 0)); }
|
||||
function userFormatter(n) { return format(gettext('{0} users'), Highcharts.numberFormat(n, 0)); }
|
||||
function appsFormatter(n) { return format(gettext('{0} apps'), Highcharts.numberFormat(n, 0)); }
|
||||
function reviewsFormatter(n) { return format(gettext('{0} reviews'), Highcharts.numberFormat(n, 0)); }
|
||||
function addEventData(s, date) {
|
||||
var e = events[date];
|
||||
if (e) {
|
||||
s += format('<br><br><b>{type_pretty}</b>', e);
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
// Determine x-axis formatter.
|
||||
if (group == 'week') {
|
||||
baseConfig.xAxis.title.text = gettext('Week');
|
||||
xFormatter = weekFormatter;
|
||||
} else if (group == 'month') {
|
||||
baseConfig.xAxis.title.text = gettext('Month');
|
||||
xFormatter = monthFormatter;
|
||||
} else {
|
||||
baseConfig.xAxis.title.text = gettext('Day');
|
||||
xFormatter = dayFormatter;
|
||||
}
|
||||
|
||||
// Determine y-axis formatter.
|
||||
switch (metricTypes[metric]) {
|
||||
case 'currency': case 'revenue':
|
||||
baseConfig.yAxis.title.text = gettext('Amount Earned');
|
||||
yFormatter = currencyFormatter;
|
||||
break;
|
||||
case 'sales':
|
||||
baseConfig.yAxis.title.text = gettext('Units Sold');
|
||||
yFormatter = salesFormatter;
|
||||
break;
|
||||
case 'refunds':
|
||||
baseConfig.yAxis.title.text = gettext('Units Refunded');
|
||||
yFormatter = refundsFormatter;
|
||||
break;
|
||||
case 'installs': case 'my_apps':
|
||||
baseConfig.yAxis.title.text = gettext('Installs');
|
||||
yFormatter = installsFormatter;
|
||||
break;
|
||||
case 'users':
|
||||
baseConfig.yAxis.title.text = gettext('Users');
|
||||
yFormatter = userFormatter;
|
||||
break;
|
||||
case 'apps':
|
||||
baseConfig.yAxis.title.text = gettext('Apps');
|
||||
yFormatter = appsFormatter;
|
||||
break;
|
||||
case 'reviews':
|
||||
yFormatter = reviewsFormatter;
|
||||
break;
|
||||
}
|
||||
return function() {
|
||||
var ret = '<b>' + this.series.name + '</b><br>' +
|
||||
xFormatter(this.x) + '<br>' +
|
||||
yFormatter(this.y);
|
||||
return addEventData(ret, this.x);
|
||||
};
|
||||
})();
|
||||
|
||||
// Set up the new chart's configuration.
|
||||
var newConfig = $.extend(baseConfig, {'series': chartData});
|
||||
newConfig.tooltip.formatter = tooltipFormatter;
|
||||
|
||||
function makeSiteEventHandler(e) {
|
||||
return function() {
|
||||
var s = format('<h3>{type_pretty}</h3><p>{description}</p>', e);
|
||||
if (e.url) {
|
||||
s += format('<p><a href="{0}" target="_blank">{1}</a></p>', [e.url, gettext('More Info...')]);
|
||||
}
|
||||
$('#exception-note h2').html(format(
|
||||
// L10n: {0} is an ISO-formatted date.
|
||||
gettext('Details for {0}'),
|
||||
e.start
|
||||
));
|
||||
$('#exception-note div').html(s);
|
||||
$chart.trigger('explain-exception');
|
||||
};
|
||||
}
|
||||
|
||||
var pb = [], pl = [];
|
||||
eventColors = ['#DDD','#DDD','#FDFFD0','#D0FFD8'];
|
||||
_.forEach(events, function(e) {
|
||||
pb.push({
|
||||
color: eventColors[e.type],
|
||||
from: Date.iso(e.start).backward('12h'),
|
||||
to: Date.iso(e.end || e.start).forward('12h'),
|
||||
events: {
|
||||
click: makeSiteEventHandler(e)
|
||||
}
|
||||
});
|
||||
});
|
||||
newConfig.xAxis.plotBands = pb;
|
||||
newConfig.xAxis.plotLines = pl;
|
||||
|
||||
|
||||
if (fields.length == 1 && !apps_chart) {
|
||||
newConfig.legend.enabled = false;
|
||||
}
|
||||
|
||||
// Generate a pretty title for the chart.
|
||||
var title;
|
||||
if (typeof obj.view.range == 'string') {
|
||||
var numDays = parseInt(obj.view.range, 10);
|
||||
title = format(csv_keys.chartTitle[metric][0], numDays);
|
||||
} else {
|
||||
// This is a custom range so display a range shorter by one day.
|
||||
end = new Date(end.getTime() - (24 * 60 * 60 * 1000));
|
||||
title = format(csv_keys.chartTitle[metric][1], [new Date(start).iso(), end.iso()]);
|
||||
}
|
||||
newConfig.title = {
|
||||
text: title
|
||||
};
|
||||
|
||||
if (chart && chart.destroy) chart.destroy();
|
||||
chart = new Highcharts.Chart(newConfig);
|
||||
|
||||
chartRange = chart.xAxis[0].getExtremes();
|
||||
|
||||
$doc.bind('zoomout', function() {
|
||||
chart.xAxis[0].setExtremes(chartRange.min, chartRange.max);
|
||||
$btnZoom.addClass('inactive').click(_pd);
|
||||
});
|
||||
|
||||
$chart.removeClass('loading');
|
||||
});
|
||||
})();
|
|
@@ -1,178 +0,0 @@
|
|||
(function () {
|
||||
// 'use strict';
|
||||
var $doc = z.doc,
|
||||
$chart = $('#head-chart'),
|
||||
baseConfig = {
|
||||
chart: {
|
||||
renderTo: 'head-chart',
|
||||
type: 'column'
|
||||
},
|
||||
credits: { enabled: false },
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
xAxis: {
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
tickmarkPlacement: 'on'
|
||||
},
|
||||
yAxis: {
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
labels: {
|
||||
formatter: function() {
|
||||
return Highcharts.numberFormat(this.value, 0);
|
||||
}
|
||||
},
|
||||
min: 0,
|
||||
minPadding: 0.05
|
||||
},
|
||||
legend: {
|
||||
enabled: false
|
||||
},
|
||||
plotOptions: {
|
||||
column: {
|
||||
colorByPoint: true,
|
||||
shadow: false
|
||||
}
|
||||
},
|
||||
tooltip: { },
|
||||
series: [{
|
||||
data: []
|
||||
}]
|
||||
},
|
||||
chart;
|
||||
|
||||
// Determines unit used for a given metric.
|
||||
var xMetricTypes = z.StatsManager.nonDateMetrics;
|
||||
var yMetricTypes = {
|
||||
'currency_revenue' : 'currency',
|
||||
'currency_sales' : 'sales',
|
||||
'currency_refunds' : 'refunds',
|
||||
'source_revenue' : 'currency',
|
||||
'source_sales' : 'sales',
|
||||
'source_refunds' : 'refunds'
|
||||
};
|
||||
|
||||
function showNoDataOverlay() {
|
||||
$chart.parent().addClass('nodata');
|
||||
$chart.parent().removeClass('loading');
|
||||
if (chart && chart.destroy) chart.destroy();
|
||||
}
|
||||
|
||||
$doc.bind('changeview', function() {
|
||||
$chart.parent().removeClass('nodata');
|
||||
$chart.addClass('loading');
|
||||
});
|
||||
|
||||
$doc.bind('dataready', function(e, obj) {
|
||||
var view = obj.view,
|
||||
metric = view.metric,
|
||||
data = obj.data,
|
||||
t, row, i, field, val;
|
||||
z.data = data;
|
||||
|
||||
if (!(metric in z.StatsManager.nonDateMetrics)) {
|
||||
return;
|
||||
}
|
||||
// Allows reuse of non-in-app code.
|
||||
metric = metric.replace('_inapp', '');
|
||||
|
||||
// Disable irrelevant links and controls.
|
||||
$('.group a, .range a').addClass('inactive').bind('click', false);
|
||||
|
||||
if (data.empty || obj.data.length == 0) {
|
||||
showNoDataOverlay();
|
||||
$chart.removeClass('loading');
|
||||
return;
|
||||
}
|
||||
|
||||
// Populate chart with categories (xAxis values) and data points.
|
||||
var categories = [],
|
||||
dataPoints = [];
|
||||
_.each(data, function(datum) {
|
||||
if (datum.count > 0) {
|
||||
categories.push(datum[xMetricTypes[metric]]);
|
||||
dataPoints.push(datum.count);
|
||||
}
|
||||
});
|
||||
baseConfig.xAxis.categories = categories;
|
||||
baseConfig.series[0].data = dataPoints;
|
||||
|
||||
// Set minimum max value for yAxis to prevent duplicate yAxis values.
|
||||
var max = 0;
|
||||
_.each(data, function(datum) {
|
||||
if (datum.count > max) {
|
||||
max = datum.count;
|
||||
}
|
||||
});
|
||||
// Chart has minimum 5 ticks so set max to 5 to avoid pigeonholing.
|
||||
if (max < 5) {
|
||||
baseConfig.yAxis.max = 5;
|
||||
}
|
||||
|
||||
// Generate the tooltip function for this chart.
|
||||
// both x and y axis can be displayed differently.
|
||||
baseConfig.tooltip.formatter = (function(){
|
||||
var xFormatter,
|
||||
yFormatter;
|
||||
|
||||
function currencyFormatter(currency) { return format(gettext('by currency {0}'), currency); }
|
||||
function sourceFormatter(source) { return format(gettext('by source {0}'), source); }
|
||||
function moneyFormatter(n) { return '$' + Highcharts.numberFormat(n, 2); }
|
||||
function salesFormatter(n) { return format(gettext('{0} sales'), Highcharts.numberFormat(n, 0)); }
|
||||
function refundsFormatter(n) { return format(gettext('{0} refunds'), Highcharts.numberFormat(n, 0)); }
|
||||
|
||||
// Determine y-axis formatter.
|
||||
switch (xMetricTypes[metric]) {
|
||||
case 'currency':
|
||||
baseConfig.xAxis.title.text = gettext('Currency');
|
||||
xFormatter = currencyFormatter;
|
||||
break;
|
||||
case 'source':
|
||||
baseConfig.xAxis.title.text = gettext('Source');
|
||||
xFormatter = sourceFormatter;
|
||||
break;
|
||||
}
|
||||
|
||||
// Determine y-axis formatter.
|
||||
switch (yMetricTypes[metric]) {
|
||||
case 'currency':
|
||||
baseConfig.yAxis.title.text = gettext('Revenue');
|
||||
yFormatter = moneyFormatter;
|
||||
break;
|
||||
case 'sales':
|
||||
baseConfig.yAxis.title.text = gettext('Sales');
|
||||
yFormatter = salesFormatter;
|
||||
break;
|
||||
case 'refunds':
|
||||
baseConfig.yAxis.title.text = gettext('Refunds');
|
||||
yFormatter = refundsFormatter;
|
||||
break;
|
||||
}
|
||||
|
||||
return function() {
|
||||
var ret = '<b>' + z.StatsManager.getPrettyName(metric, 'count') + '</b><br>' +
|
||||
'<p>' + xFormatter(this.x) + '</p>' + '<br>' +
|
||||
'<p>' + yFormatter(this.y) + '</p>';
|
||||
return ret;
|
||||
};
|
||||
})();
|
||||
|
||||
// Set up the new chart's configuration.
|
||||
var newConfig = $.extend(baseConfig, {});
|
||||
|
||||
// Generate chart title.
|
||||
var title;
|
||||
title = format(csv_keys.chartTitle[metric][1], []);
|
||||
newConfig.title = {
|
||||
text: title
|
||||
};
|
||||
|
||||
if (chart && chart.destroy) chart.destroy();
|
||||
chart = new Highcharts.Chart(newConfig);
|
||||
$chart.removeClass('loading');
|
||||
});
|
||||
})();
|
|
@@ -1,105 +0,0 @@
|
|||
(function (){
|
||||
"use strict";
|
||||
|
||||
var $rangeSelector = $(".criteria.range ul"),
|
||||
$customRangeForm = $("div.custom.criteria"),
|
||||
$groupSelector = $(".criteria.group ul"),
|
||||
minDate = Date.iso($('.primary').attr('data-min-date')),
|
||||
msDay = 24 * 60 * 60 * 1000; // One day in milliseconds.
|
||||
|
||||
$.datepicker.setDefaults({showAnim: ''});
|
||||
var $customModal = $("#custom-criteria").modal("#custom-date-range",
|
||||
{ width: 520,
|
||||
hideme: true});
|
||||
var $startPicker = $("#start-date-picker").datepicker({
|
||||
maxDate: 0,
|
||||
minDate: minDate,
|
||||
dateFormat: 'yy-mm-dd',
|
||||
onSelect: function(dateText) {
|
||||
$("#date-range-start").val(dateText);
|
||||
}
|
||||
});
|
||||
var $endPicker = $("#end-date-picker").datepicker({
|
||||
maxDate: 0,
|
||||
minDate: minDate,
|
||||
dateFormat: 'yy-mm-dd',
|
||||
onSelect: function(dateText) {
|
||||
$("#date-range-end").val(dateText);
|
||||
}
|
||||
});
|
||||
|
||||
$rangeSelector.click(function(e) {
|
||||
var $target = $(e.target).parent();
|
||||
var newRange = $target.attr("data-range");
|
||||
if (newRange && newRange != "custom") {
|
||||
$target.trigger('changeview', {range: newRange});
|
||||
}
|
||||
e.preventDefault();
|
||||
});
|
||||
|
||||
$groupSelector.delegate('a', 'click', function(e) {
|
||||
var $target = $(this).parent(),
|
||||
newGroup = $target.attr("data-group");
|
||||
|
||||
$(this).trigger('changeview', { group: newGroup });
|
||||
e.preventDefault();
|
||||
});
|
||||
|
||||
// set controls when `changeview` is detected.
|
||||
z.doc.bind('changeview', function(e, newState) {
|
||||
if (!newState) return;
|
||||
function populateCustomRange() {
|
||||
var nRange = normalizeRange(newState.range),
|
||||
startStr = nRange.start.iso(),
|
||||
endStr = nRange.end.iso();
|
||||
|
||||
// Trim nRange.end by one day if custom range.
|
||||
if (newState.range.custom) {
|
||||
nRange.end = new Date(nRange.end.getTime() - msDay);
|
||||
endStr = nRange.end.iso();
|
||||
}
|
||||
|
||||
$("#date-range-start").val(startStr);
|
||||
$startPicker.datepicker("setDate", startStr);
|
||||
$("#date-range-end").val(endStr);
|
||||
$endPicker.datepicker("setDate", endStr);
|
||||
}
|
||||
if (newState.range) {
|
||||
if (!newState.range.custom) {
|
||||
var newRange = newState.range,
|
||||
$rangeEl = $('li[data-range="' + newRange + '"]');
|
||||
if ($rangeEl.length) {
|
||||
$rangeSelector.children("li.selected")
|
||||
.removeClass("selected");
|
||||
$rangeEl.addClass("selected");
|
||||
} else {
|
||||
$rangeSelector.children("li.selected")
|
||||
.removeClass("selected");
|
||||
$('li[data-range="custom"]').addClass("selected");
|
||||
}
|
||||
} else {
|
||||
$rangeSelector.children("li.selected").removeClass("selected");
|
||||
$('[data-range="custom"]').addClass("selected");
|
||||
}
|
||||
populateCustomRange();
|
||||
}
|
||||
if (newState.group) {
|
||||
$groupSelector.children('.selected').removeClass('selected');
|
||||
$('li[data-group="' + newState.group + '"]').addClass('selected');
|
||||
}
|
||||
});
|
||||
|
||||
$("#chart-zoomout").click(_pd);
|
||||
|
||||
$("#date-range-form").submit(_pd(function(e) {
|
||||
var start = Date.iso($("#date-range-start").val()),
|
||||
end = Date.iso($("#date-range-end").val()),
|
||||
newRange = {
|
||||
custom: true,
|
||||
start: Date.iso(start),
|
||||
end: Date.iso(end)
|
||||
};
|
||||
$rangeSelector.trigger('changeview', {range: newRange});
|
||||
$customModal.hider();
|
||||
}));
|
||||
})();
|
|
@@ -1,234 +0,0 @@
|
|||
var csv_keys = {
|
||||
installs: {
|
||||
'count': gettext('Installs')
|
||||
},
|
||||
usage: {
|
||||
'count': gettext('Daily Users')
|
||||
},
|
||||
revenue: {
|
||||
'count': gettext('Amount Earned')
|
||||
},
|
||||
sales: {
|
||||
'count': gettext('Units Sold')
|
||||
},
|
||||
refunds: {
|
||||
'count': gettext('Units Refunded')
|
||||
},
|
||||
currency_revenue: {
|
||||
'count': gettext('Amount Earned')
|
||||
},
|
||||
currency_sales: {
|
||||
'count': gettext('Units Sold')
|
||||
},
|
||||
currency_refunds: {
|
||||
'count': gettext('Units Refunded')
|
||||
},
|
||||
source_revenue: {
|
||||
'count': gettext('Amount Earned')
|
||||
},
|
||||
source_sales: {
|
||||
'count': gettext('Units Sold')
|
||||
},
|
||||
source_refunds: {
|
||||
'count': gettext('Units Refunded')
|
||||
},
|
||||
apps_count_new: {
|
||||
'count': gettext('Apps Added')
|
||||
},
|
||||
apps_count_installed: {
|
||||
'count': gettext('Apps Installed')
|
||||
},
|
||||
apps_review_count_new: {
|
||||
'count': gettext('Reviews')
|
||||
},
|
||||
mmo_user_count_new: {
|
||||
'count': gettext('New Users')
|
||||
},
|
||||
mmo_user_count_total: {
|
||||
'count': gettext('Total Users')
|
||||
},
|
||||
mmo_total_visitors: {
|
||||
'count': gettext('Total Visitors')
|
||||
},
|
||||
my_apps: {
|
||||
'count': gettext('App Installs')
|
||||
},
|
||||
sources: {
|
||||
'null' : gettext('Unknown'),
|
||||
'api' : gettext('Add-ons Manager'),
|
||||
'search' : gettext('Search Results'),
|
||||
'homepagepromo' : gettext('Homepage Promo'),
|
||||
'hp-btn-promo' : gettext('Homepage Promo'),
|
||||
'hp-dl-promo' : gettext('Homepage Promo'),
|
||||
'hp-hc-featured' : gettext('Homepage Featured'),
|
||||
'hp-dl-featured' : gettext('Homepage Featured'),
|
||||
'hp-hc-upandcoming' : gettext('Homepage Up and Coming'),
|
||||
'hp-dl-upandcoming' : gettext('Homepage Up and Coming'),
|
||||
'hp-dl-mostpopular' : gettext('Homepage Most Popular'),
|
||||
'dp-btn-primary' : gettext('Detail Page'),
|
||||
'dp-btn-version' : gettext('Detail Page (bottom)'),
|
||||
'addondetail' : gettext('Detail Page'),
|
||||
'addon-detail-version' : gettext('Detail Page (bottom)'),
|
||||
'dp-btn-devchannel' : gettext('Detail Page (Development Channel)'),
|
||||
'oftenusedwith' : gettext('Often Used With'),
|
||||
'dp-hc-oftenusedwith' : gettext('Often Used With'),
|
||||
'dp-dl-oftenusedwith' : gettext('Often Used With'),
|
||||
'dp-hc-othersby' : gettext('Others By Author'),
|
||||
'dp-dl-othersby' : gettext('Others By Author'),
|
||||
'dp-hc-dependencies' : gettext('Dependencies'),
|
||||
'dp-dl-dependencies' : gettext('Dependencies'),
|
||||
'dp-hc-upsell' : gettext('Upsell'),
|
||||
'dp-dl-upsell' : gettext('Upsell'),
|
||||
|
||||
'sharingapi' : gettext('Sharing'),
|
||||
'category' : gettext('Category Pages'),
|
||||
'collection' : gettext('Collections'),
|
||||
'cb-hc-featured' : gettext('Category Landing Featured Carousel'),
|
||||
// Duplicate of line 75.
|
||||
//'cb-dl-featured' : gettext('Category Landing Featured Carousel'),
|
||||
'cb-hc-toprated' : gettext('Category Landing Top Rated'),
|
||||
'cb-dl-toprated' : gettext('Category Landing Top Rated'),
|
||||
'cb-hc-mostpopular' : gettext('Category Landing Most Popular'),
|
||||
'cb-dl-mostpopular' : gettext('Category Landing Most Popular'),
|
||||
'cb-hc-recentlyadded' : gettext('Category Landing Recently Added'),
|
||||
'cb-dl-recentlyadded' : gettext('Category Landing Recently Added'),
|
||||
'cb-btn-featured' : gettext('Browse Listing Featured Sort'),
|
||||
'cb-dl-featured' : gettext('Browse Listing Featured Sort'),
|
||||
'cb-btn-users' : gettext('Browse Listing Users Sort'),
|
||||
'cb-dl-users' : gettext('Browse Listing Users Sort'),
|
||||
'cb-btn-rating' : gettext('Browse Listing Rating Sort'),
|
||||
'cb-dl-rating' : gettext('Browse Listing Rating Sort'),
|
||||
'cb-btn-created' : gettext('Browse Listing Created Sort'),
|
||||
'cb-dl-created' : gettext('Browse Listing Created Sort'),
|
||||
'cb-btn-name' : gettext('Browse Listing Name Sort'),
|
||||
'cb-dl-name' : gettext('Browse Listing Name Sort'),
|
||||
'cb-btn-popular' : gettext('Browse Listing Popular Sort'),
|
||||
'cb-dl-popular' : gettext('Browse Listing Popular Sort'),
|
||||
'cb-btn-updated' : gettext('Browse Listing Updated Sort'),
|
||||
'cb-dl-updated' : gettext('Browse Listing Updated Sort'),
|
||||
'cb-btn-hotness' : gettext('Browse Listing Up and Coming Sort'),
|
||||
'cb-dl-hotness' : gettext('Browse Listing Up and Coming Sort'),
|
||||
|
||||
'mkt-browse' : gettext('Marketplace Browse Page'),
|
||||
'mkt-browse-featured' : gettext('Marketplace Browse Page, Featured'),
|
||||
'mkt-category' : gettext('Marketplace Category Page'),
|
||||
'mkt-category-featured' : gettext('Marketplace Category Page, Featured'),
|
||||
'mkt-detail' : gettext('Marketplace Detail Page'),
|
||||
'mkt-detail-upsell' : gettext('Marketplace Detail Page, Upsell'),
|
||||
'mkt-home' : gettext('Marketplace Home Page'),
|
||||
'mkt-search' : gettext('Marketplace Search Results'),
|
||||
'mkt-search-featured' : gettext('Marketplace Search Results, Featured')
|
||||
},
|
||||
contributions: {
|
||||
'count': gettext('Number of Contributions'),
|
||||
'total': gettext('Total Amount Contributed'),
|
||||
'average': gettext('Average Contribution')
|
||||
},
|
||||
overview: {
|
||||
'downloads' : gettext('Downloads'),
|
||||
'updates' : gettext('Daily Users')
|
||||
},
|
||||
app_overview: {
|
||||
'installs': gettext('Installs'),
|
||||
'usage': gettext('Usage'),
|
||||
'sales': gettext('Units Sold')
|
||||
},
|
||||
apps : {
|
||||
'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}' : gettext('Firefox'),
|
||||
'{86c18b42-e466-45a9-ae7a-9b95ba6f5640}' : gettext('Mozilla'),
|
||||
'{3550f703-e582-4d05-9a08-453d09bdfdc6}' : gettext('Thunderbird'),
|
||||
'{718e30fb-e89b-41dd-9da7-e25a45638b28}' : gettext('Sunbird'),
|
||||
'{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}' : gettext('SeaMonkey'),
|
||||
'{a23983c0-fd0e-11dc-95ff-0800200c9a66}' : gettext('Fennec')
|
||||
},
|
||||
chartTitle: {
|
||||
'installs' : [
|
||||
// L10n: {0} is an integer.
|
||||
gettext('Installs, last {0} days'),
|
||||
// L10n: both {0} and {1} are dates in YYYY-MM-DD format.
|
||||
gettext('Installs from {0} to {1}')
|
||||
],
|
||||
'usage' : [
|
||||
gettext('Daily Users, last {0} days'),
|
||||
gettext('Daily Users from {0} to {1}')
|
||||
],
|
||||
'sales' : [
|
||||
gettext('Units Sold, last {0} days'),
|
||||
gettext('Units Sold from {0} to {1}')
|
||||
],
|
||||
'revenue' : [
|
||||
gettext('Amount Earned, last {0} days'),
|
||||
gettext('Amount Earned {0} to {1}')
|
||||
],
|
||||
'refunds' : [
|
||||
gettext('Units Refunded, last {0} days'),
|
||||
gettext('Units Refunded from {0} to {1}')
|
||||
],
|
||||
'currency_sales' : [
|
||||
gettext('Total Units Sold by Currency'),
|
||||
gettext('Total Units Sold by Currency')
|
||||
],
|
||||
'currency_revenue' : [
|
||||
gettext('Total Amount Earned by Currency'),
|
||||
gettext('Total Amount Earned by Currency')
|
||||
],
|
||||
'currency_refunds' : [
|
||||
gettext('Total Units Refunded by Currency'),
|
||||
gettext('Total Units Refunded by Currency')
|
||||
],
|
||||
'source_sales' : [
|
||||
gettext('Total Units Sold by Source'),
|
||||
gettext('Total Units Sold by Source')
|
||||
],
|
||||
'source_revenue' : [
|
||||
gettext('Total Amount Earned by Source'),
|
||||
gettext('Total Amount Earned by Source')
|
||||
],
|
||||
'source_refunds' : [
|
||||
gettext('Total Units Refunded by Source'),
|
||||
gettext('Total Units Refunded by Source')
|
||||
],
|
||||
'apps_count_new': [
|
||||
gettext('Apps Added'),
|
||||
gettext('Apps Added')
|
||||
],
|
||||
'apps_count_installed': [
|
||||
gettext('Apps Installed'),
|
||||
gettext('Apps Installed')
|
||||
],
|
||||
'apps_review_count_new': [
|
||||
gettext('Reviews'),
|
||||
gettext('Reviews')
|
||||
],
|
||||
'mmo_user_count_total': [
|
||||
gettext('Total Users'),
|
||||
gettext('Total Users')
|
||||
],
|
||||
'mmo_user_count_new': [
|
||||
gettext('New Users'),
|
||||
gettext('New Users')
|
||||
],
|
||||
'mmo_total_visitors': [
|
||||
gettext('Total Visitors'),
|
||||
gettext('Total Visitors')
|
||||
],
|
||||
'my_apps': [
|
||||
gettext('Total Installs'),
|
||||
gettext('Total Installs')
|
||||
]
|
||||
},
|
||||
aggregateLabel: {
|
||||
'downloads' : [
|
||||
// L10n: {0} and {1} are integers.
|
||||
gettext('<b>{0}</b> in last {1} days'),
|
||||
// L10n: {0} is an integer and {1} and {2} are dates in YYYY-MM-DD format.
|
||||
gettext('<b>{0}</b> from {1} to {2}')
|
||||
],
|
||||
'usage' : [
|
||||
// L10n: {0} and {1} are integers.
|
||||
gettext('<b>{0}</b> average in last {1} days'),
|
||||
// L10n: {0} is an integer and {1} and {2} are dates in YYYY-MM-DD format.
|
||||
gettext('<b>{0}</b> from {1} to {2}')
|
||||
]
|
||||
}
|
||||
};
|
|
@@ -1,113 +0,0 @@
|
|||
// date management helpers
|
||||
|
||||
(function() {
|
||||
// utility
|
||||
function pad2(n) {
|
||||
var str = n.toString();
|
||||
return ('0' + str).substr(-2);
|
||||
}
|
||||
var intervalRegex = /(-?\d+)\s*(\w)/,
|
||||
// ISO date format is used for internal representations.
|
||||
dateRegex = /(\d{4})[^\d]?(\d{2})[^\d]?(\d{2})/;
|
||||
|
||||
_.extend(Date.prototype, {
|
||||
forward : function(by, unit) {
|
||||
if (typeof by == 'string') {
|
||||
var match = intervalRegex.exec(by);
|
||||
by = +match[1];
|
||||
unit = match[2];
|
||||
}
|
||||
unit = unit || 'd';
|
||||
switch (unit[0]) {
|
||||
case 'h':
|
||||
this.setHours(this.getHours()+by);
|
||||
break;
|
||||
case 'd':
|
||||
this.setDate(this.getDate()+by);
|
||||
break;
|
||||
case 'w':
|
||||
this.setDate(this.getDate()+by*7);
|
||||
break;
|
||||
case 'm':
|
||||
this.setMonth(this.getMonth()+by);
|
||||
break;
|
||||
case 'y':
|
||||
this.setFullYear(this.getFullYear()+by);
|
||||
break;
|
||||
}
|
||||
return this;
|
||||
},
|
||||
backward : function(by, unit) {
|
||||
if (typeof by == 'string') {
|
||||
var match = intervalRegex.exec(by);
|
||||
by = +match[1];
|
||||
unit = match[2];
|
||||
}
|
||||
return this.forward(-by, unit);
|
||||
},
|
||||
pretty : function(del) {
|
||||
del = del || '';
|
||||
return [this.getFullYear(), pad2(this.getMonth()+1), pad2(this.getDate())].join(del);
|
||||
},
|
||||
iso : function() {
|
||||
return this.pretty('-');
|
||||
},
|
||||
isAfter : function(d) {
|
||||
return this.getTime() > d.getTime();
|
||||
},
|
||||
isBefore : function(d) {
|
||||
return this.getTime() < d.getTime();
|
||||
},
|
||||
latter : function(d) {
|
||||
return this.isAfter(d) ? this : d;
|
||||
},
|
||||
former : function(d) {
|
||||
return this.isBefore(d) ? this : d;
|
||||
},
|
||||
clone : function() {
|
||||
return new Date(this.getTime());
|
||||
}
|
||||
});
|
||||
_.extend(Date, {
|
||||
ago : function(s) {
|
||||
return (new Date()).backward(s);
|
||||
},
|
||||
iso : function(s) {
|
||||
if (s instanceof Date) return s;
|
||||
var d = dateRegex.exec(s);
|
||||
if (d) {
|
||||
return new Date(d[1],d[2]-1,d[3]);
|
||||
}
|
||||
}
|
||||
});
|
||||
_.extend(String, {
|
||||
max : function(a,b) {
|
||||
return a > b ? a : b;
|
||||
},
|
||||
min : function(a,b) {
|
||||
return a < b ? a : b;
|
||||
}
|
||||
});
|
||||
})();
|
||||
|
||||
function forEachISODate(range, step, data, iterator, context) {
|
||||
var d = range.start.clone();
|
||||
for (d; d.isBefore(range.end); d.forward(step)) {
|
||||
var ds = d.iso();
|
||||
iterator.call(context, data[ds], d, ds);
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeRange(range) {
|
||||
var ret = {};
|
||||
if (typeof range == "string") {
|
||||
ret.start = Date.ago(range);
|
||||
ret.end = (new Date());
|
||||
} else if (typeof range == "object") {
|
||||
ret.start = Date.iso(range.start);
|
||||
ret.end = Date.iso(range.end);
|
||||
} else {
|
||||
throw "Invalid range values found.";
|
||||
}
|
||||
return ret;
|
||||
}
|
|
@@ -1,125 +0,0 @@
|
|||
// Web Worker Pool
|
||||
// size is the max number of arguments
|
||||
function WorkerPool(size) {
|
||||
var workers = 0,
|
||||
jobs = [];
|
||||
|
||||
// url: the url of the worker's js
|
||||
// msg: the initial message to pass to the worker
|
||||
// cb : the callback to recieve messages from postMessage.
|
||||
// return true from cb to dismiss the worker and advance the queue.
|
||||
// ctx: the context for cb.apply
|
||||
this.queueJob = function(url, msg, cb, ctx) {
|
||||
var job = {
|
||||
"url": url,
|
||||
"msg": msg,
|
||||
"cb" : cb,
|
||||
"ctx": ctx
|
||||
};
|
||||
jobs.push(job);
|
||||
if (workers < size) nextJob();
|
||||
};
|
||||
|
||||
function nextJob() {
|
||||
if (jobs.length) {
|
||||
(function() {
|
||||
var job = jobs.shift(),
|
||||
worker = new Worker(job.url);
|
||||
workers++;
|
||||
worker.addEventListener('message', function(e) {
|
||||
if (job.cb.call(job.ctx, e.data, worker)) {
|
||||
worker.terminate();
|
||||
delete worker;
|
||||
workers--;
|
||||
nextJob();
|
||||
};
|
||||
}, false);
|
||||
worker.postMessage(job.msg);
|
||||
})();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Simple Asynchronous Cache
|
||||
// miss: a function that computes the value for the given key.
|
||||
// Takes two parameters:
|
||||
// * key: the key passed to AsyncCache.get()
|
||||
// * set: a callback that sets the value for key
|
||||
// hash: an optional function that generates a hash string for a given key.
|
||||
// Takes one parameter:
|
||||
// * key
|
||||
function AsyncCache(miss, hash) {
|
||||
var cache = {},
|
||||
self = this;
|
||||
|
||||
hash = hash || function(key) {
|
||||
return key.toString();
|
||||
};
|
||||
|
||||
// key: the key to lookup in the cache
|
||||
// cb : the method to call with the value
|
||||
// Takes one parameter:
|
||||
// val: the value in the cache for key
|
||||
// ctx: context for cb.call
|
||||
this.get = function(key, cb, ctx) {
|
||||
var k = hash(key);
|
||||
if (k in cache) {
|
||||
cb.call(ctx, cache[k]);
|
||||
} else {
|
||||
miss.call(ctx, key, function(val) {
|
||||
self.set(key, val);
|
||||
self.get(key, cb, ctx);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// sets value for key in cache
|
||||
this.set = function(key, val) {
|
||||
cache[hash(key)] = val;
|
||||
};
|
||||
}
|
||||
|
||||
function hashObj(o) {
|
||||
var hash = [];
|
||||
for (var i in o) {
|
||||
if (o.hasOwnProperty(i)) {
|
||||
hash.push(o[i].toString());
|
||||
}
|
||||
}
|
||||
return hash.join('_');
|
||||
}
|
||||
|
||||
/* cfg takes:
|
||||
* start: the initial value
|
||||
* end: the (non-inclusive) max value
|
||||
* step: value to iterate by
|
||||
* chunk-size: how many iterations before setTimeout
|
||||
* inner: function to perform each iteration
|
||||
* callback: function to perform when finished
|
||||
* ctx: context from which to run all functions
|
||||
*/
|
||||
function chunkfor(cfg) {
|
||||
var position = cfg.start;
|
||||
|
||||
function nextchunk() {
|
||||
if (position < cfg.end) {
|
||||
|
||||
for (var iterator = position;
|
||||
iterator < position+(cfg.chunk_size*cfg.step) && iterator < cfg.end;
|
||||
iterator += cfg.step) {
|
||||
|
||||
cfg.inner.call(cfg.ctx, iterator);
|
||||
}
|
||||
|
||||
position += cfg.chunk_size * cfg.step;
|
||||
|
||||
setTimeout( function () {
|
||||
nextchunk.call(this);
|
||||
}, 0);
|
||||
|
||||
} else {
|
||||
cfg.callback.call(cfg.ctx);
|
||||
}
|
||||
}
|
||||
nextchunk();
|
||||
}
|
|
@ -1,667 +0,0 @@
|
|||
function dbg() {
|
||||
window.console.log.apply(window.console, arguments);
|
||||
}
|
||||
|
||||
z.hasPushState = (typeof history.replaceState === 'function');
|
||||
|
||||
z.StatsManager = (function() {
|
||||
'use strict';
|
||||
|
||||
// The version of the stats localStorage we are using.
|
||||
// If you increment this number, you cache-bust everyone!
|
||||
var STATS_VERSION = '2011-12-12';
|
||||
var PRECISION = 2;
|
||||
var storage = z.Storage('stats'),
|
||||
storageCache = z.SessionStorage('statscache'),
|
||||
dataStore = {},
|
||||
currentView = {},
|
||||
siteEvents = [],
|
||||
addonId = parseInt($('.primary').attr('data-addon_id'), 10),
|
||||
primary = $('.primary'),
|
||||
inapp = primary.attr('data-inapp'),
|
||||
baseURL = primary.attr('data-base_url'),
|
||||
pendingFetches = 0,
|
||||
siteEventsEnabled = true,
|
||||
writeInterval = false,
|
||||
lookup = {},
|
||||
msDay = 24 * 60 * 60 * 1000; // One day in milliseconds.
|
||||
|
||||
// NaN is a poor choice for a storage key
|
||||
if (isNaN(addonId)) addonId = 'globalstats';
|
||||
|
||||
// It's a bummer, but we need to know which metrics have breakdown fields.
|
||||
// check by saying `if (metric in breakdownMetrics)`
|
||||
var breakdownMetrics = {
|
||||
'apps': true,
|
||||
'locales': true,
|
||||
'os': true,
|
||||
'sources': true,
|
||||
'versions': true,
|
||||
'statuses': true,
|
||||
'overview': true,
|
||||
'site': true
|
||||
};
|
||||
|
||||
var currencyMetrics = {
|
||||
'revenue': true,
|
||||
'currency_revenue': true,
|
||||
'source_revenue': true,
|
||||
'revenue_inapp': true,
|
||||
'currency_revenue_inapp': true,
|
||||
'source_revenue_inapp': true,
|
||||
'contributions': true
|
||||
};
|
||||
|
||||
// For non-date metrics, determine which key is breakdown field.
|
||||
var nonDateMetrics = {
|
||||
'currency_revenue': 'currency',
|
||||
'currency_sales': 'currency',
|
||||
'currency_refunds': 'currency',
|
||||
'currency_revenue_inapp': 'currency',
|
||||
'currency_sales_inapp': 'currency',
|
||||
'currency_refunds_inapp': 'currency',
|
||||
'source_revenue': 'source',
|
||||
'source_sales': 'source',
|
||||
'source_refunds': 'source',
|
||||
'source_revenue_inapp': 'source',
|
||||
'source_sales_inapp': 'source',
|
||||
'source_refunds_inapp': 'source'
|
||||
};
|
||||
|
||||
// is a metric an average or a sum?
|
||||
var metricTypes = {
|
||||
'usage' : 'mean',
|
||||
'apps' : 'mean',
|
||||
'locales' : 'mean',
|
||||
'os' : 'mean',
|
||||
'versions' : 'mean',
|
||||
'statuses' : 'mean',
|
||||
'downloads' : 'sum',
|
||||
'sources' : 'sum',
|
||||
'contributions' : 'sum'
|
||||
};
|
||||
|
||||
// Initialize from localStorage when dom is ready.
|
||||
function init() {
|
||||
dbg('looking for local data');
|
||||
if (verifyLocalStorage()) {
|
||||
var cacheObject = storageCache.get(addonId + inapp);
|
||||
if (cacheObject) {
|
||||
dbg('found local data, loading...');
|
||||
cacheObject = JSON.parse(cacheObject);
|
||||
if (cacheObject) {
|
||||
dataStore = cacheObject;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
$(init);
|
||||
|
||||
// These functions deal with our localStorage cache.
|
||||
|
||||
function writeLocalStorage() {
|
||||
dbg('saving local data');
|
||||
try {
|
||||
storageCache.set(addonId + inapp, JSON.stringify(dataStore));
|
||||
storage.set('version', STATS_VERSION);
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
dbg('saved local data');
|
||||
}
|
||||
|
||||
function clearLocalStorage() {
|
||||
storageCache.remove(addonId + inapp);
|
||||
storage.remove('version');
|
||||
dbg('cleared local data');
|
||||
}
|
||||
|
||||
function verifyLocalStorage() {
|
||||
if (storage.get('version') == STATS_VERSION) {
|
||||
return true;
|
||||
} else {
|
||||
dbg('wrong offline data version');
|
||||
clearLocalStorage();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
document.onbeforeunload = writeLocalStorage;
|
||||
|
||||
|
||||
// Runs when 'changeview' event is detected.
|
||||
function processView(e, newView) {
|
||||
// Update our internal view state.
|
||||
currentView = $.extend(currentView, newView);
|
||||
|
||||
// On custom ranges request a range greater by 1 day. (bug 737910)
|
||||
if (currentView.range.custom && typeof currentView.range.end == 'object') {
|
||||
currentView.range.end = new Date(currentView.range.end.getTime() + msDay);
|
||||
}
|
||||
|
||||
// Fetch the data from the server or storage, and notify other components.
|
||||
$.when(getDataRange(currentView), getSiteEvents(currentView))
|
||||
.then(function(data, events) {
|
||||
setTimeout(function() {
|
||||
z.doc.trigger('dataready', {
|
||||
'view' : currentView,
|
||||
'fields': getAvailableFields(currentView),
|
||||
'data' : data,
|
||||
'events': events
|
||||
});
|
||||
}, 0);
|
||||
});
|
||||
}
|
||||
z.doc.bind('changeview', processView);
|
||||
|
||||
|
||||
// Retrieves a list of site-wide events that may impact statistics data.
|
||||
function getSiteEvents(view) {
|
||||
if (!siteEventsEnabled) return [];
|
||||
var range = normalizeRange(view.range),
|
||||
urlStart = Highcharts.dateFormat('%Y%m%d', range.start),
|
||||
urlEnd = Highcharts.dateFormat('%Y%m%d', range.end),
|
||||
url = format('/en-US/statistics/events-{0}-{1}.json', urlStart, urlEnd),
|
||||
$def = $.Deferred();
|
||||
$.getJSON(url)
|
||||
.done(function(data) {
|
||||
$def.resolve(data);
|
||||
})
|
||||
.fail(function() {
|
||||
$def.resolve([]);
|
||||
});
|
||||
return $def;
|
||||
}
|
||||
|
||||
|
||||
function annotateData(data, events) {
|
||||
var i, ev, sd, ed;
|
||||
for (i=0; i < events.length; i++) {
|
||||
ev = events[i];
|
||||
if (ev.end) {
|
||||
sd = Date.iso(ev.start);
|
||||
ed = Date.iso(ev.end);
|
||||
forEachISODate({start: sd, end: ed}, '1 day', data, function(row) {
|
||||
if (row) {
|
||||
row.event = ev;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
if (data[ev.start]) {
|
||||
data[ev.start].event = ev;
|
||||
}
|
||||
}
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
|
||||
// Returns a list of field names for a given data set.
|
||||
function getAvailableFields(view) {
|
||||
var metric = view.metric,
|
||||
range = normalizeRange(view.range),
|
||||
start = range.start,
|
||||
end = range.end,
|
||||
ds,
|
||||
row,
|
||||
numRows = 0,
|
||||
fields = {};
|
||||
|
||||
// Non-breakdown metrics only have one field.
|
||||
if (metric == 'contributions') return ['count', 'total', 'average'];
|
||||
if (!(metric in breakdownMetrics)) return ['count'];
|
||||
|
||||
ds = dataStore[metric];
|
||||
if (!ds) throw 'Expected metric with valid data!';
|
||||
|
||||
// Locate all unique fields.
|
||||
forEachISODate(range, '1 day', ds, function(row) {
|
||||
if (row) {
|
||||
if (metric == 'apps') {
|
||||
row = collapseVersions(row, PRECISION);
|
||||
}
|
||||
if (metric == 'sources') {
|
||||
row = collapseSources(row);
|
||||
}
|
||||
_.each(row.data, function(v, k) {
|
||||
fields[k] = fields[k] ? fields[k] + v : v;
|
||||
});
|
||||
_.extend(fields, row.data);
|
||||
}
|
||||
}, this);
|
||||
|
||||
// sort the fields, make them proper field identifiers, and return.
|
||||
return _.map(
|
||||
_.sortBy(
|
||||
_.keys(fields),
|
||||
function (f) {
|
||||
return -fields[f];
|
||||
}
|
||||
),
|
||||
function(f) {
|
||||
return 'data|' + f;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
// getDataRange: ensures we have all the data from the server we need,
|
||||
// and queues up requests to the server if the requested data is outside
|
||||
// the range currently stored locally. Once all server requests return,
|
||||
// we move on.
|
||||
function getDataRange(view) {
|
||||
var range = normalizeRange(view.range),
|
||||
metric = view.metric,
|
||||
ds = dataStore[metric],
|
||||
reqs = [],
|
||||
isAppsChart = metric == 'my_apps',
|
||||
$def = $.Deferred();
|
||||
|
||||
function finished() {
|
||||
var ds = dataStore[metric],
|
||||
ret = {}, row, firstIndex;
|
||||
|
||||
// Temporary array to process multi-line charts.
|
||||
var mret = [];
|
||||
|
||||
// Return if metric isn't datetime-based.
|
||||
if (metric in nonDateMetrics) {
|
||||
if (ds.length === 0) {
|
||||
ds.empty = true;
|
||||
}
|
||||
$def.resolve(ds);
|
||||
} else if (ds) {
|
||||
if (isAppsChart) {
|
||||
var myData;
|
||||
for (var i = 0; i < ds.length; i++) {
|
||||
(function(myApp) {
|
||||
ret = {};
|
||||
myData = myApp.data;
|
||||
ret['name'] = myApp.name;
|
||||
forEachISODate(range, '1 day', myData, function(row, date) {
|
||||
if (row) {
|
||||
if (!firstIndex) {
|
||||
firstIndex = range.start;
|
||||
}
|
||||
ret[date.iso()] = row;
|
||||
}
|
||||
}, this);
|
||||
mret.push(ret);
|
||||
})(ds[i]);
|
||||
}
|
||||
} else {
|
||||
forEachISODate(range, '1 day', ds, function(row, date) {
|
||||
var d = date.iso();
|
||||
if (row) {
|
||||
if (!firstIndex) {
|
||||
firstIndex = range.start;
|
||||
}
|
||||
if (metric == 'apps') {
|
||||
row = collapseVersions(row, PRECISION);
|
||||
}
|
||||
if (metric == 'sources') {
|
||||
row = collapseSources(row);
|
||||
}
|
||||
ret[d] = row;
|
||||
}
|
||||
}, this);
|
||||
}
|
||||
if (_.isEmpty(ret)) {
|
||||
ret.empty = true;
|
||||
} else {
|
||||
if (isAppsChart) {
|
||||
ret = {firstIndex: firstIndex};
|
||||
for (i = 0; i < mret.length; i++) {
|
||||
mret[i] = groupData(mret[i], view);
|
||||
}
|
||||
ret.stats = mret;
|
||||
ret.metric = metric;
|
||||
} else {
|
||||
ret.firstIndex = firstIndex;
|
||||
ret = groupData(ret, view);
|
||||
ret.metric = metric;
|
||||
}
|
||||
}
|
||||
$def.resolve(ret);
|
||||
} else {
|
||||
$def.fail({empty: true});
|
||||
}
|
||||
}
|
||||
|
||||
if (ds) {
|
||||
dbg('range', range.start.iso(), range.end.iso());
|
||||
if (ds.maxdate < range.end.iso()) {
|
||||
reqs.push(fetchData(metric, Date.iso(ds.maxdate), range.end));
|
||||
}
|
||||
if (ds.mindate > range.start.iso()) {
|
||||
reqs.push(fetchData(metric, range.start, Date.iso(ds.mindate)));
|
||||
}
|
||||
} else {
|
||||
reqs.push(fetchData(metric, range.start, range.end));
|
||||
}
|
||||
|
||||
$.when.apply(null, reqs).then(finished);
|
||||
return $def;
|
||||
}
|
||||
|
||||
|
||||
// Aggregate data based on view's `group` setting.
|
||||
function groupData(data, view) {
|
||||
var metric = view.metric,
|
||||
range = normalizeRange(view.range),
|
||||
group = view.group || 'day',
|
||||
groupedData = {};
|
||||
|
||||
|
||||
// If grouping doesn't fit into custom date range, force group to day.
|
||||
var dayMsecs = 24 * 3600 * 1000;
|
||||
var date_range_days = (range.end.getTime() - range.start.getTime()) / dayMsecs;
|
||||
if ((group == 'week' && date_range_days <= 8) ||
|
||||
(group == 'month' && date_range_days <= 31)) {
|
||||
view.group = 'day';
|
||||
group = 'day';
|
||||
}
|
||||
|
||||
console.log('grouping found: ', group);
|
||||
|
||||
// if grouping is by day, do nothing.
|
||||
if (group == 'day') return data;
|
||||
var groupKey = false,
|
||||
groupVal = false,
|
||||
groupCount = 0,
|
||||
d, row, firstIndex;
|
||||
|
||||
if (group == 'all') {
|
||||
groupKey = firstIndex = range.start.iso();
|
||||
groupCount = 0;
|
||||
groupVal = {
|
||||
date: groupKey,
|
||||
count: 0,
|
||||
data: {},
|
||||
empty: true
|
||||
};
|
||||
if (metric == 'contributions') {
|
||||
_.extend(groupVal, {
|
||||
average: 0,
|
||||
total: 0
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function performAggregation() {
|
||||
// we drop the some days of data from the result set
|
||||
// if they are not a complete grouping.
|
||||
if (groupKey && groupVal && !groupVal.empty) {
|
||||
// average `count` for mean metrics
|
||||
if (metricTypes[metric] == 'mean') {
|
||||
groupVal.count /= groupCount;
|
||||
}
|
||||
if (!firstIndex) firstIndex = groupKey;
|
||||
// overview gets special treatment. Only average ADUs.
|
||||
if (metric == 'overview') {
|
||||
groupVal.data.updates /= groupCount;
|
||||
} else if (metric == 'contributions') {
|
||||
groupVal.average /= groupCount;
|
||||
} else if (metric in breakdownMetrics) {
|
||||
// average for mean metrics.
|
||||
_.each(groupVal.data, function(val, field) {
|
||||
if (metricTypes[metric] == 'mean') {
|
||||
groupVal.data[field] /= groupCount;
|
||||
}
|
||||
});
|
||||
}
|
||||
groupedData[groupKey] = groupVal;
|
||||
}
|
||||
}
|
||||
|
||||
// big loop!
|
||||
forEachISODate(range, '1 day', data, function(row, d) {
|
||||
// Here's where grouping points are caluculated.
|
||||
if ((group == 'week' && d.getDay() === 0) ||
|
||||
(group == 'month' && d.getDate() == 1)) {
|
||||
|
||||
performAggregation();
|
||||
// set the new group date to the current iteration.
|
||||
groupKey = d.iso();
|
||||
// reset our aggregates.
|
||||
groupCount = 0;
|
||||
groupVal = {
|
||||
date: groupKey,
|
||||
count: 0,
|
||||
data: {},
|
||||
empty: true
|
||||
};
|
||||
if (metric == 'contributions') {
|
||||
_.extend(groupVal, {
|
||||
average: 0,
|
||||
total: 0
|
||||
});
|
||||
}
|
||||
}
|
||||
// add the current row to our aggregates.
|
||||
if (row && groupVal) {
|
||||
groupVal.empty = false;
|
||||
groupVal.count += row.count;
|
||||
if (metric == 'contributions') {
|
||||
groupVal.total += parseFloat(row.total);
|
||||
groupVal.average += parseFloat(row.average);
|
||||
}
|
||||
if (metric in breakdownMetrics) {
|
||||
_.each(row.data, function(val, field) {
|
||||
if (!groupVal.data[field]) {
|
||||
groupVal.data[field] = 0;
|
||||
}
|
||||
groupVal.data[field] += val;
|
||||
});
|
||||
}
|
||||
}
|
||||
groupCount++;
|
||||
}, this);
|
||||
if (group == 'all') performAggregation();
|
||||
groupedData.empty = _.isEmpty(groupedData);
|
||||
groupedData.firstIndex = firstIndex;
|
||||
return groupedData;
|
||||
}
|
||||
|
||||
|
||||
// The beef. Negotiates with the server for data.
|
||||
function fetchData(metric, start, end) {
|
||||
var seriesStart = start,
|
||||
seriesEnd = end,
|
||||
$def = $.Deferred();
|
||||
|
||||
var seriesURLStart = Highcharts.dateFormat('%Y%m%d', seriesStart),
|
||||
seriesURLEnd = Highcharts.dateFormat('%Y%m%d', seriesEnd),
|
||||
seriesURL = baseURL + ([metric,'day',seriesURLStart,seriesURLEnd]).join('-') + '.json';
|
||||
|
||||
$.ajax({ url: seriesURL,
|
||||
dataType: 'text',
|
||||
success: fetchHandler,
|
||||
error: errorHandler });
|
||||
|
||||
function errorHandler() {
|
||||
$def.fail();
|
||||
}
|
||||
|
||||
function fetchHandler(raw_data, status, xhr) {
|
||||
var maxdate = '1970-01-01',
|
||||
mindate = (new Date()).iso();
|
||||
|
||||
if (xhr.status == 200) {
|
||||
|
||||
if (!dataStore[metric]) {
|
||||
dataStore[metric] = {
|
||||
mindate : (new Date()).iso(),
|
||||
maxdate : '1970-01-01'
|
||||
};
|
||||
}
|
||||
|
||||
var ds = dataStore[metric],
|
||||
innerData = {},
|
||||
data = JSON.parse(raw_data);
|
||||
|
||||
// Uncomment the raw_data below to have some testing data
|
||||
// when working on the front-end.
|
||||
// USE EITHER THIS OR THE SEGMENT BELOW. NOT BOTH.
|
||||
//raw_data = [{"date": "2012-11-28", "count": 69, "data": {}}, {"date": "2012-11-27", "count": 69, "data": {}}, {"date": "2012-11-26", "count": 65, "data": {}}, {"date": "2012-11-25", "count": 65, "data": {}}, {"date": "2012-11-24", "count": 64, "data": {}}, {"date": "2012-11-23", "count": 64, "data": {}}, {"date": "2012-11-22", "count": 63, "data": {}}, {"date": "2012-11-21", "count": 63, "data": {}}, {"date": "2012-11-20", "count": 61, "data": {}}, {"date": "2012-11-19", "count": 55, "data": {}}, {"date": "2012-11-18", "count": 50, "data": {}}, {"date": "2012-11-17", "count": 47, "data": {}}, {"date": "2012-11-16", "count": 43, "data": {}}, {"date": "2012-11-15", "count": 39, "data": {}}, {"date": "2012-11-05", "count": 30, "data": {}}, {"date": "2012-11-04", "count": 29, "data": {}}, {"date": "2012-11-03", "count": 29, "data": {}}, {"date": "2012-11-02", "count": 29, "data": {}}, {"date": "2012-11-01", "count": 29, "data": {}}];
|
||||
|
||||
// Set to `true` for testing chart data on a multi-line graph.
|
||||
if (false) {
|
||||
raw_data = [];
|
||||
raw_data.push({'name': 'cute app', 'data': [{"date": "2012-11-25", "count": 65, "data": {}}, {"date": "2012-11-24", "count": 64.0, "data": {}}, {"date": "2012-11-23", "count": 64, "data": {}}, {"date": "2012-11-22", "count": 63, "data": {}}, {"date": "2012-11-21", "count": 63, "data": {}}, {"date": "2012-11-20", "count": 61, "data": {}}, {"date": "2012-11-19", "count": 55, "data": {}}, {"date": "2012-11-18", "count": 50, "data": {}}, {"date": "2012-11-17", "count": 47, "data": {}}, {"date": "2012-11-16", "count": 43, "data": {}}, {"date": "2012-11-15", "count": 39, "data": {}}, {"date": "2012-11-05", "count": 30, "data": {}}, {"date": "2012-11-04", "count": 29, "data": {}}, {"date": "2012-11-03", "count": 29, "data": {}}, {"date": "2012-11-02", "count": 29, "data": {}}, {"date": "2012-11-01", "count": 29, "data": {}}, {"date": "2012-10-29", "count": 20, "data": {}}]});
|
||||
raw_data.push({'name': 'sexy app', 'data': [{"date": "2012-11-25", "count": 77, "data": {}}, {"date": "2012-11-24", "count": 77, "data": {}}, {"date": "2012-11-23", "count": 77, "data": {}}, {"date": "2012-11-22", "count": 77, "data": {}}, {"date": "2012-11-21", "count": 77, "data": {}}, {"date": "2012-11-20", "count": 77, "data": {}}, {"date": "2012-11-19", "count": 77, "data": {}}, {"date": "2012-11-18", "count": 77, "data": {}}, {"date": "2012-11-17", "count": 77, "data": {}}, {"date": "2012-11-16", "count": 33, "data": {}}, {"date": "2012-11-15", "count": 44, "data": {}}, {"date": "2012-11-05", "count": 36, "data": {}}, {"date": "2012-11-04", "count": 33, "data": {}}, {"date": "2012-11-03", "count": 19, "data": {}}, {"date": "2012-11-02", "count": 49, "data": {}}, {"date": "2012-11-01", "count": 19, "data": {}}, {"date": "2012-10-29", "count": 30, "data": {}}]});
|
||||
data = raw_data;
|
||||
}
|
||||
|
||||
// Some charts like multi-line won't be stored by a `datekey`.
|
||||
if (metric in nonDateMetrics || metric == 'my_apps') {
|
||||
dataStore[metric] = data;
|
||||
ds = dataStore[metric];
|
||||
if (metric == 'my_apps') {
|
||||
var i = 0, j = 0, datekey;
|
||||
for (i = 0; i < data.length; i++) {
|
||||
innerData = {};
|
||||
for (j = 0; j < data[i].data.length; j++) {
|
||||
datekey = data[i].data[j].date;
|
||||
maxdate = String.max(datekey, maxdate);
|
||||
mindate = String.min(datekey, mindate);
|
||||
innerData[datekey] = data[i].data[j];
|
||||
}
|
||||
ds[i].data = innerData;
|
||||
ds[i].maxdate = String.max(maxdate, ds[i].maxdate || maxdate);
|
||||
ds[i].mindate = String.min(mindate, ds[i].mindate || mindate);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
var i, datekey;
|
||||
for (i = 0; i < data.length; i++) {
|
||||
datekey = data[i].date;
|
||||
maxdate = String.max(datekey, maxdate);
|
||||
mindate = String.min(datekey, mindate);
|
||||
ds[datekey] = data[i];
|
||||
}
|
||||
ds.maxdate = String.max(maxdate, ds.maxdate);
|
||||
ds.mindate = String.min(mindate, ds.mindate);
|
||||
}
|
||||
|
||||
clearTimeout(writeInterval);
|
||||
writeInterval = setTimeout(writeLocalStorage, 1000);
|
||||
$def.resolve();
|
||||
|
||||
} else if (xhr.status == 202) { //Handle a successful fetch but with no response
|
||||
|
||||
var retry_delay = 30000;
|
||||
|
||||
if (xhr.getResponseHeader('Retry-After')) {
|
||||
retry_delay = parseInt(xhr.getResponseHeader('Retry-After'), 10) * 1000;
|
||||
}
|
||||
|
||||
setTimeout(function () {
|
||||
fetchData(metric, start, end, callback);
|
||||
}, retry_delay);
|
||||
|
||||
}
|
||||
}
|
||||
return $def;
|
||||
}
|
||||
|
||||
|
||||
function collapseSources(row) {
|
||||
var out = {
|
||||
count : row.count,
|
||||
date : row.date,
|
||||
end : row.end
|
||||
},
|
||||
data = row.data,
|
||||
pretty, key,
|
||||
ret = {};
|
||||
|
||||
_.each(data, function(val, source) {
|
||||
pretty = $.trim(getPrettyName('sources', source));
|
||||
if (!lookup[pretty]) {
|
||||
lookup[pretty] = source;
|
||||
}
|
||||
key = lookup[pretty];
|
||||
if (!ret[key]) ret[key] = 0;
|
||||
ret[key] += parseFloat(val);
|
||||
});
|
||||
out.data = ret;
|
||||
return out;
|
||||
}
|
||||
|
||||
|
||||
// Rounds application version strings to a given precision.
|
||||
// Passing `0` will truncate versions entirely.
|
||||
function collapseVersions(row, precision) {
|
||||
var out = {
|
||||
count : row.count,
|
||||
date : row.date,
|
||||
end : row.end
|
||||
},
|
||||
apps = row.data,
|
||||
key,
|
||||
ret = {};
|
||||
|
||||
_.each(apps, function(set, app) {
|
||||
_.each(set, function(val, ver) {
|
||||
key = app + '_' + ver.split('.').slice(0,precision).join('.');
|
||||
if (!ret[key]) {
|
||||
ret[key] = 0;
|
||||
}
|
||||
ret[key] += parseFloat(val);
|
||||
});
|
||||
});
|
||||
out.data = ret;
|
||||
return out;
|
||||
}
|
||||
|
||||
|
||||
// Takes a data row and a field identifier and returns the value.
|
||||
function getField(row, field) {
|
||||
var parts = field.split('|'),
|
||||
val = row;
|
||||
|
||||
// give up if the row is falsy.
|
||||
if (!val) {
|
||||
return null;
|
||||
}
|
||||
// drill into the row object for a nested key.
|
||||
// `data|api` means row['data']['api']
|
||||
for (var i = 0; i < parts.length; i++) {
|
||||
val = val[parts[i]];
|
||||
if (!_.isNumber(val) && !_.isObject(val)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return val;
|
||||
}
|
||||
|
||||
|
||||
function getPrettyName(metric, field) {
|
||||
var parts = field.split('_'),
|
||||
key = parts[0];
|
||||
parts = parts.slice(1);
|
||||
|
||||
metric = metric.replace('_inapp', '');
|
||||
if (metric in csv_keys) {
|
||||
if (key in csv_keys[metric]) {
|
||||
return csv_keys[metric][key] + ' ' + parts.join(' ');
|
||||
}
|
||||
}
|
||||
return field;
|
||||
}
|
||||
|
||||
// Expose some functionality to the z.StatsManager api.
|
||||
return {
|
||||
'getDataRange' : getDataRange,
|
||||
'fetchData' : fetchData,
|
||||
'dataStore' : dataStore,
|
||||
'getPrettyName' : getPrettyName,
|
||||
'getField' : getField,
|
||||
'clearLocalStorage' : clearLocalStorage,
|
||||
'getAvailableFields': getAvailableFields,
|
||||
'currencyMetrics' : currencyMetrics,
|
||||
'nonDateMetrics' : nonDateMetrics,
|
||||
'getCurrentView' : function() { return currentView; }
|
||||
};
|
||||
})();
|
|
@ -1,53 +0,0 @@
|
|||
$(function() {
|
||||
if ($('.primary').attr('data-report') != 'overview') return;
|
||||
|
||||
// set up topcharts (defined in topchart.js)
|
||||
$('.toplist').topChart();
|
||||
|
||||
z.doc.bind("changeview", function(e, view) {
|
||||
$('.two-up').addClass('loading');
|
||||
}).bind("dataready", function(e, data) {
|
||||
// Save some requests by waiting until the graph data is ready.
|
||||
var view = _.extend({}, data.view, {group: 'all'}),
|
||||
range = normalizeRange(view.range);
|
||||
|
||||
// get aggregates for Daily Users and Downloads for the given time range.
|
||||
$.when(z.StatsManager.getDataRange(view)).then(function(data) {
|
||||
if (data.empty) {
|
||||
$("#downloads-in-range, #users-in-range").text(gettext('No data available.'));
|
||||
} else {
|
||||
// make all that data pretty.
|
||||
var aggregateRow = data[data.firstIndex].data,
|
||||
totalDownloads = Highcharts.numberFormat(aggregateRow.downloads, 0),
|
||||
totalUsers = Highcharts.numberFormat(aggregateRow.updates, 0),
|
||||
startString = range.start.iso(),
|
||||
endString = range.end.iso(),
|
||||
downloadFormat,
|
||||
userFormat;
|
||||
if (typeof view.range == 'string') {
|
||||
downloadFormat = csv_keys.aggregateLabel.downloads[0],
|
||||
userFormat = csv_keys.aggregateLabel.usage[0];
|
||||
$("#downloads-in-range").html(format(downloadFormat,
|
||||
totalDownloads,
|
||||
parseInt(view.range, 10)));
|
||||
$("#users-in-range").html(format(userFormat,
|
||||
totalUsers,
|
||||
parseInt(view.range, 10)));
|
||||
} else {
|
||||
downloadFormat = csv_keys.aggregateLabel.downloads[1],
|
||||
userFormat = csv_keys.aggregateLabel.usage[1];
|
||||
$("#downloads-in-range").html(format(downloadFormat,
|
||||
totalDownloads,
|
||||
startString,
|
||||
endString));
|
||||
$("#users-in-range").html(format(userFormat,
|
||||
totalUsers,
|
||||
startString,
|
||||
endString));
|
||||
}
|
||||
|
||||
}
|
||||
$('.two-up').removeClass('loading');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,96 +0,0 @@
|
|||
(function() {
|
||||
|
||||
$(function() {
|
||||
"use strict";
|
||||
|
||||
// Modify the URL when the page state changes, if the browser supports pushState.
|
||||
if (z.capabilities.replaceState) {
|
||||
z.doc.bind('changeview', function(e, view) {
|
||||
var queryParams = {},
|
||||
range = view.range;
|
||||
if (range) {
|
||||
if (typeof range == 'string') {
|
||||
queryParams.last = range.split(/\s+/)[0];
|
||||
} else if (typeof range == 'object') {
|
||||
// queryParams.start = z.date.date_string(new Date(range.start), '');
|
||||
// queryParams.end = z.date.date_string(new Date(range.end), '');
|
||||
}
|
||||
}
|
||||
queryParams = $.param(queryParams);
|
||||
if (queryParams) {
|
||||
history.replaceState(view, document.title, '?' + queryParams);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Set up initial default view.
|
||||
var initView = {
|
||||
metric: $('.primary').attr('data-report'),
|
||||
range: $('.primary').attr('data-range') || '30 days',
|
||||
group: 'day'
|
||||
};
|
||||
|
||||
// Set side nav active state.
|
||||
(function() {
|
||||
var sel = '#side-nav li.' + initView.metric;
|
||||
sel += ', #side-nav li[data-report=' + initView.metric + ']';
|
||||
|
||||
$(sel).addClass('active');
|
||||
})();
|
||||
|
||||
// Restore any session view information from sessionStorage.
|
||||
if (z.capabilities.localStorage && sessionStorage.getItem('stats_view')) {
|
||||
var ssView = JSON.parse(sessionStorage.getItem('stats_view'));
|
||||
initView.range = ssView.range || initView.range;
|
||||
initView.group = ssView.group || initView.group;
|
||||
}
|
||||
|
||||
// In-app drop-down selector.
|
||||
var dropdown = $('.product');
|
||||
dropdown.change(function() {
|
||||
window.location = $('.product option:selected').val();
|
||||
});
|
||||
|
||||
// Update sessionStorage with our current view state.
|
||||
(function() {
|
||||
if (!z.capabilities.localStorage) return;
|
||||
var ssView = _.clone(initView);
|
||||
z.doc.bind('changeview', function(e, newView) {
|
||||
_.extend(ssView, newView);
|
||||
sessionStorage.setItem('stats_view', JSON.stringify({
|
||||
'range': ssView.range,
|
||||
'group': ssView.group
|
||||
}));
|
||||
});
|
||||
})();
|
||||
|
||||
// Update the "Export as CSV" link when the view changes.
|
||||
(function() {
|
||||
var view = {},
|
||||
baseURL = $('.primary').attr('data-base_url');
|
||||
z.doc.bind('changeview', function(e, newView) {
|
||||
_.extend(view, newView);
|
||||
var metric = view.metric,
|
||||
range = normalizeRange(view.range),
|
||||
url = baseURL + ([metric,'day',range.start.pretty(''),range.end.pretty('')]).join('-');
|
||||
$('#export_data_csv').attr('href', url + '.csv');
|
||||
$('#export_data_json').attr('href', url + '.json');
|
||||
});
|
||||
})();
|
||||
|
||||
// set up notes modal.
|
||||
$('#stats-note').modal('#stats-note-link', { width: 520 });
|
||||
|
||||
// set up stats exception modal.
|
||||
var $exceptionModal = $('#exception-note').modal('', { width: 250 });
|
||||
z.doc.bind('explain-exception', function() {
|
||||
$exceptionModal.render();
|
||||
});
|
||||
|
||||
$('.csv-table').csvTable();
|
||||
|
||||
// Trigger the initial data load.
|
||||
z.doc.trigger('changeview', initView);
|
||||
});
|
||||
|
||||
})();
|
|
@ -1,167 +0,0 @@
|
|||
(function($) {
|
||||
"use strict";
|
||||
|
||||
$.fn.csvTable = function(cfg) {
|
||||
$(this).each(function() {
|
||||
var $self = $(this),
|
||||
$table = $self.find('table'),
|
||||
$thead = $self.find('thead'),
|
||||
$paginator = $self.find('.paginator'),
|
||||
pageSize = 14,
|
||||
pages = {},
|
||||
metric = $('.primary').attr('data-report'),
|
||||
nonDateMetrics = z.StatsManager.nonDateMetrics,
|
||||
currentPage;
|
||||
|
||||
$(document).ready(function() {
|
||||
// My apps and multi-line charts won't get a table for now.
|
||||
if (metric != 'my_apps') {
|
||||
init();
|
||||
$('.tabular.csv-table footer').show();
|
||||
} else {
|
||||
$('.tabular.csv-table footer').hide();
|
||||
}
|
||||
});
|
||||
|
||||
function init() {
|
||||
gotoPage(0);
|
||||
$paginator.delegate('.next', 'click', _pd(function() {
|
||||
if ($(this).hasClass('disabled')) return;
|
||||
gotoPage(currentPage+1);
|
||||
}));
|
||||
$paginator.delegate('.prev', 'click', _pd(function() {
|
||||
if ($(this).hasClass('disabled')) return;
|
||||
gotoPage(currentPage-1);
|
||||
}));
|
||||
}
|
||||
|
||||
function gotoPage(page) {
|
||||
if (page < 0) {
|
||||
page = 0;
|
||||
}
|
||||
$paginator.find('.prev').toggleClass('disabled', page === 0);
|
||||
if (pages[page]) {
|
||||
showPage(page);
|
||||
} else {
|
||||
$self.parent().addClass('loading');
|
||||
$.when(getPage(page))
|
||||
.then(function() {
|
||||
showPage(page);
|
||||
$self.parent().removeClass('loading');
|
||||
getPage(page+1);
|
||||
getPage(page-1);
|
||||
});
|
||||
}
|
||||
// nonDateMetrics don't need pages (yet).
|
||||
if (metric in nonDateMetrics) {
|
||||
$paginator.find('.next').toggleClass('disabled');
|
||||
}
|
||||
}
|
||||
|
||||
function showPage(page) {
|
||||
var p = pages[page];
|
||||
if (p) {
|
||||
$table.find('tbody').hide();
|
||||
p.el.show();
|
||||
$thead.empty().html(p.head);
|
||||
}
|
||||
currentPage = page;
|
||||
}
|
||||
|
||||
function getPage(page) {
|
||||
if (pages[page] || page < 0) return;
|
||||
var $def = $.Deferred(),
|
||||
range = {
|
||||
end : Date.ago(pageSize * page + 'days'),
|
||||
start : Date.ago(pageSize * page + pageSize + 'days')
|
||||
},
|
||||
view = {
|
||||
metric : metric,
|
||||
group : 'day',
|
||||
range : range
|
||||
};
|
||||
$.when(z.StatsManager.getDataRange(view))
|
||||
.then(function(data) {
|
||||
var fields = z.StatsManager.getAvailableFields(view),
|
||||
currencyMetrics = z.StatsManager.currencyMetrics,
|
||||
newBody = '<tbody>',
|
||||
newHead = gettext('Date'),
|
||||
newPage = {},
|
||||
row;
|
||||
|
||||
// Handle headers other than 'Date'.
|
||||
switch(nonDateMetrics[metric]) {
|
||||
case 'currency':
|
||||
newHead = gettext('Currency');
|
||||
break;
|
||||
case 'source':
|
||||
newHead = gettext('Source');
|
||||
break;
|
||||
}
|
||||
newHead = '<tr><th>' + newHead + '</th>';
|
||||
|
||||
_.each(fields, function(f) {
|
||||
var id = f.split('|').pop(),
|
||||
prettyName = z.StatsManager.getPrettyName(metric, id);
|
||||
newHead += format('<th title="{0}">', prettyName);
|
||||
newHead += prettyName;
|
||||
newHead += '</th>';
|
||||
});
|
||||
|
||||
// Manually create a table for nonDateMetrics with
|
||||
// breakdown field on left and data on right.
|
||||
if (metric in nonDateMetrics) {
|
||||
_.each(data, function(datum) {
|
||||
newBody += '<tr>';
|
||||
newBody += '<th>' + gettext(datum[nonDateMetrics[metric]]) + '</th>';
|
||||
|
||||
// Insert data (supports multiple fields).
|
||||
_.each(fields, function(f) {
|
||||
newBody += '<td>';
|
||||
if (metric in currencyMetrics) {
|
||||
newBody += '$' + Highcharts.numberFormat(z.StatsManager.getField(datum, f), 2);
|
||||
} else {
|
||||
newBody += Highcharts.numberFormat(z.StatsManager.getField(datum, f), 0);
|
||||
}
|
||||
newBody += '</td>';
|
||||
});
|
||||
newBody += '</tr>';
|
||||
});
|
||||
}
|
||||
// Manually create a table for date-related metrics with
|
||||
// date on left and data on right.
|
||||
else {
|
||||
var d = range.end.clone().backward('1 day'),
|
||||
lastRowDate = range.start.clone().backward('1 day');
|
||||
for (; lastRowDate.isBefore(d); d.backward('1 day')) {
|
||||
row = data[d.iso()] || {};
|
||||
newBody += '<tr>';
|
||||
newBody += '<th>' + Highcharts.dateFormat('%a, %b %e, %Y', Date.iso(d)) + "</th>";
|
||||
|
||||
// Insert data (supports multiple fields).
|
||||
_.each(fields, function(f) {
|
||||
newBody += '<td>';
|
||||
if (metric in currencyMetrics) {
|
||||
newBody += '$' + Highcharts.numberFormat(z.StatsManager.getField(row, f), 2);
|
||||
} else {
|
||||
newBody += Highcharts.numberFormat(z.StatsManager.getField(row, f), 0);
|
||||
}
|
||||
newBody += '</td>';
|
||||
});
|
||||
newBody += '</tr>';
|
||||
}
|
||||
}
|
||||
|
||||
newBody += '</tbody>';
|
||||
newPage.el = $(newBody);
|
||||
newPage.head = newHead;
|
||||
$table.append(newPage.el);
|
||||
pages[page] = newPage;
|
||||
|
||||
$def.resolve();
|
||||
});
|
||||
return $def;
|
||||
}
|
||||
});
|
||||
};
|
||||
})(jQuery);
|
|
@ -1,128 +0,0 @@
|
|||
(function($) {
|
||||
// "use strict";
|
||||
var baseConfig = {
|
||||
chart: {
|
||||
backgroundColor: null
|
||||
},
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
plotArea: {
|
||||
shadow: null,
|
||||
borderWidth: null
|
||||
},
|
||||
tooltip: {
|
||||
enabled: false
|
||||
},
|
||||
plotOptions: {
|
||||
pie: {
|
||||
allowPointSelect: false,
|
||||
dataLabels: {
|
||||
enabled: false,
|
||||
color: '#333'
|
||||
},
|
||||
animation: false,
|
||||
size:190
|
||||
}
|
||||
},
|
||||
credits: {enabled:false},
|
||||
legend: {
|
||||
enabled:false
|
||||
},
|
||||
series: [{
|
||||
type: 'pie'
|
||||
}]
|
||||
};
|
||||
|
||||
$.fn.topChart = function(cfg) {
|
||||
$(this).each(function() {
|
||||
var $self = $(this),
|
||||
$doc = z.doc,
|
||||
$chart = $self.find('.piechart'),
|
||||
hChart,
|
||||
$table = $self.find('table'),
|
||||
metric = $table.attr('data-metric'),
|
||||
view = {
|
||||
'metric': metric,
|
||||
'group' : 'all'
|
||||
};
|
||||
|
||||
// reload the data when the view's range is modified.
|
||||
$doc.bind('changeview', function(e, newView) {
|
||||
// we only want to respond to changes in range.
|
||||
if (!newView.range) return;
|
||||
$self.addClass('loading');
|
||||
$self.removeClass('nodata');
|
||||
_.extend(view, {'range' : normalizeRange(newView.range)});
|
||||
$.when(z.StatsManager.getDataRange(view))
|
||||
.then(function(data) {
|
||||
generateRankedList(data, render);
|
||||
});
|
||||
});
|
||||
|
||||
// We take the data (aggregated to one row)
|
||||
function generateRankedList(data, done) {
|
||||
if (data.empty) {
|
||||
$self.removeClass('loading');
|
||||
$self.addClass('nodata');
|
||||
if (hChart && hChart.destroy) hChart.destroy();
|
||||
$table.html('');
|
||||
return;
|
||||
}
|
||||
var totalValue = 0,
|
||||
otherValue = 0;
|
||||
data = data[data.firstIndex].data;
|
||||
if (_.isEmpty(data)) return;
|
||||
// Sum all fields.
|
||||
_.each(data, function(val) {
|
||||
totalValue += val;
|
||||
});
|
||||
// Convert all fields to percentages and prettify names.
|
||||
var rankedList = _.map(data, function(val, key) {
|
||||
var field = key.split("|").slice(-1)[0];
|
||||
return [z.StatsManager.getPrettyName(metric, field),
|
||||
val, val/totalValue*100];
|
||||
});
|
||||
// Sort by value.
|
||||
rankedList = _.sortBy(rankedList, function(a) {
|
||||
return -a[1];
|
||||
});
|
||||
// Calculate the 'Other' percentage
|
||||
for (var i=5; i<rankedList.length; i++) {
|
||||
otherValue += rankedList[i][1];
|
||||
}
|
||||
// Take the top 5 values and append an 'Other' row.
|
||||
rankedList = rankedList.slice(0,5);
|
||||
rankedList.push([gettext('Other'), otherValue, otherValue/totalValue*100]);
|
||||
// Move on with our lives.
|
||||
done(rankedList);
|
||||
}
|
||||
|
||||
var tableRow = template("<tr><td>{0}</td><td>{1}</td><td>({2}%)</td></tr>");
|
||||
|
||||
function render(data) {
|
||||
var newBody = "<tbody>";
|
||||
_.each(data, function(row) {
|
||||
var pct = Math.round(row[2]);
|
||||
num = Highcharts.numberFormat(row[1], 0);
|
||||
if (pct < 1) pct = "<1";
|
||||
newBody += tableRow([row[0], num, pct]);
|
||||
});
|
||||
newBody += "</tbody>";
|
||||
$table.html(newBody);
|
||||
|
||||
// set up chart.
|
||||
var newConfig = _.clone(baseConfig),
|
||||
row;
|
||||
newConfig.chart.renderTo = $chart[0];
|
||||
newConfig.series[0].data = _.map(data, function(r) { return r.slice(0,2); });
|
||||
hChart = new Highcharts.Chart(newConfig);
|
||||
for (i = 0; i < data.length; i++) {
|
||||
row = $table.find('tr').eq(i);
|
||||
row.children().eq(0).append($("<b class='seriesdot' style='background:" + hChart.series[0].data[i].color + "'> </b>"));
|
||||
}
|
||||
$self.removeClass('loading');
|
||||
}
|
||||
});
|
||||
};
|
||||
})(jQuery);
|
|
@ -123,11 +123,6 @@ CSS = {
|
|||
'css/devreg/buttons.styl',
|
||||
'css/devreg/in-app-payments.styl',
|
||||
),
|
||||
'mkt/stats': (
|
||||
'css/devreg/legacy-paginator.styl',
|
||||
'css/devreg/jquery-ui/jquery-ui-1.10.1.custom.css',
|
||||
'css/devreg/stats.styl',
|
||||
),
|
||||
'mkt/lookup': (
|
||||
'css/devreg/manifest.styl',
|
||||
'css/devreg/lookup-tool.styl',
|
||||
|
@ -296,22 +291,6 @@ JS = {
|
|||
'js/common/formsets.js',
|
||||
'js/devreg/reviewers_init.js',
|
||||
),
|
||||
'mkt/stats': (
|
||||
'js/zamboni/storage.js',
|
||||
'js/mkt/modal.js',
|
||||
'js/lib/highcharts.src.js',
|
||||
'js/mkt/stats/csv_keys.js',
|
||||
'js/mkt/stats/helpers.js',
|
||||
'js/mkt/stats/dateutils.js',
|
||||
'js/mkt/stats/manager.js',
|
||||
'js/mkt/stats/controls.js',
|
||||
'js/mkt/stats/overview.js',
|
||||
'js/mkt/stats/topchart.js',
|
||||
'js/mkt/stats/chart.js',
|
||||
'js/mkt/stats/table.js',
|
||||
'js/mkt/stats/chart_column.js',
|
||||
'js/mkt/stats/stats.js',
|
||||
),
|
||||
'mkt/in-app-payments': (
|
||||
'js/lib/jquery-1.9.1.js',
|
||||
'js/mkt/inapp_payments.js',
|
||||
|
|
|
@ -4,7 +4,7 @@ from django.http import HttpResponse
|
|||
from mkt.purchase.urls import app_purchase_patterns
|
||||
from mkt.ratings.urls import review_patterns
|
||||
from mkt.receipts.urls import app_receipt_patterns
|
||||
from mkt.stats.urls import app_stats_patterns
|
||||
|
||||
from . import views
|
||||
|
||||
|
||||
|
@ -20,9 +20,6 @@ urlpatterns = patterns('',
|
|||
('^purchase/', include(app_purchase_patterns)),
|
||||
('^purchase/', include(app_receipt_patterns)),
|
||||
|
||||
# Statistics.
|
||||
('^statistics/', include(app_stats_patterns)),
|
||||
|
||||
# Ratings.
|
||||
('^reviews/', include(review_patterns)),
|
||||
|
||||
|
|
|
@ -14,7 +14,6 @@ from mkt.developers.api_payments import (
|
|||
PaymentDebugViewSet, PaymentViewSet, UpsellViewSet)
|
||||
from mkt.developers.decorators import use_apps
|
||||
from mkt.receipts.urls import test_patterns
|
||||
from mkt.stats.urls import all_apps_stats_patterns
|
||||
|
||||
from . import views
|
||||
from . import views_payments
|
||||
|
@ -163,7 +162,6 @@ urlpatterns = decorate(write, patterns('',
|
|||
url('docs/(?P<doc_name>[-_\w]+)/(?P<doc_page>[-_\w]+)',
|
||||
views.docs, name='mkt.developers.docs'),
|
||||
|
||||
url('^statistics/', include(all_apps_stats_patterns)),
|
||||
url('^transactions/', views.transactions,
|
||||
name='mkt.developers.transactions'),
|
||||
|
||||
|
|
|
@ -1,45 +0,0 @@
|
|||
import datetime
|
||||
|
||||
from django.core.management import call_command
|
||||
|
||||
import commonware.log
|
||||
import cronjobs
|
||||
import pyes
|
||||
|
||||
from stats.models import Contribution
|
||||
from lib.es.utils import raise_if_reindex_in_progress
|
||||
from mkt.webapps.models import Installed
|
||||
|
||||
cron_log = commonware.log.getLogger('mkt.cron')
|
||||
|
||||
|
||||
@cronjobs.register
|
||||
def index_latest_mkt_stats(index=None, aliased=True):
|
||||
raise_if_reindex_in_progress('mkt')
|
||||
yesterday = datetime.date.today() - datetime.timedelta(days=1)
|
||||
|
||||
try:
|
||||
latest = Contribution.search(index).order_by('-date').values_dict()
|
||||
latest_contribution = latest and latest[0]['date'] or yesterday
|
||||
except pyes.exceptions.SearchPhaseExecutionException:
|
||||
latest_contribution = yesterday
|
||||
|
||||
try:
|
||||
latest = Installed.search(index).order_by('-date').values_dict()
|
||||
latest_install = latest and latest[0]['date'] or yesterday
|
||||
except pyes.exceptions.SearchPhaseExecutionException:
|
||||
latest_install = yesterday
|
||||
|
||||
latest = min(latest_contribution, latest_install)
|
||||
|
||||
fmt = lambda d: d.strftime('%Y-%m-%d')
|
||||
date_range = '%s:%s' % (fmt(latest), fmt(datetime.date.today()))
|
||||
cron_log.info('index_mkt_stats --date=%s' % date_range)
|
||||
call_command('index_mkt_stats', addons=None, date=date_range, index=index,
|
||||
aliased=True)
|
||||
|
||||
|
||||
@cronjobs.register
|
||||
def index_mkt_stats(index=None, aliased=True):
|
||||
cron_log.info('index_mkt_stats')
|
||||
call_command('index_mkt_stats', addons=None, date=None)
|
|
@ -1,32 +0,0 @@
|
|||
from django.utils.http import urlquote
|
||||
|
||||
from jingo import register
|
||||
import jinja2
|
||||
|
||||
from access import acl
|
||||
|
||||
|
||||
@register.function
|
||||
@jinja2.contextfunction
|
||||
def check_contrib_stats_perms(context, addon):
|
||||
request = context['request']
|
||||
if addon.has_author(request.amo_user) or acl.action_allowed(request,
|
||||
'RevenueStats', 'View'):
|
||||
return True
|
||||
|
||||
|
||||
@register.function
|
||||
@jinja2.contextfunction
|
||||
def stats_url(context, action, metric=None):
|
||||
"""
|
||||
Simplifies the templates a bit, no need to pass in addon into
|
||||
parameters as it is inferred from the context and it makes the function
|
||||
call shorter.
|
||||
"""
|
||||
addon = context['addon']
|
||||
return addon.get_stats_url()
|
||||
|
||||
|
||||
@register.function
|
||||
def url_quote(url):
|
||||
return urlquote(url)
|
|
@ -1,136 +0,0 @@
|
|||
import logging
|
||||
from datetime import date, timedelta
|
||||
from optparse import make_option
|
||||
|
||||
from django.core.exceptions import FieldError
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Max, Min
|
||||
|
||||
from celery.task.sets import TaskSet
|
||||
|
||||
from amo.utils import chunked
|
||||
from stats.models import Contribution
|
||||
from mkt.stats import tasks
|
||||
from mkt.webapps.models import Webapp
|
||||
|
||||
log = logging.getLogger('z.stats')
|
||||
|
||||
# Number of days of stats to process in one chunk if we're indexing everything.
|
||||
STEP = 5
|
||||
HELP = """\
|
||||
Start tasks to index stats. Without constraints, everything will be
|
||||
processed.
|
||||
|
||||
|
||||
To limit the add-ons:
|
||||
|
||||
`--addons=1865,2848,..,1843`
|
||||
|
||||
To limit the date range:
|
||||
|
||||
`--date=2011-08-15` or `--date=2011-08-15:2011-08-22`
|
||||
"""
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--addons',
|
||||
help='Add-on ids to process. Use commas to separate '
|
||||
'multiple ids.'),
|
||||
make_option('--date',
|
||||
help='The date or date range to process. Use the format '
|
||||
'YYYY-MM-DD for a single date or '
|
||||
'YYYY-MM-DD:YYYY-MM-DD to index a range of dates '
|
||||
'(inclusive).'),
|
||||
make_option('--fixup', action='store_true',
|
||||
help='Find and index rows we missed.'),
|
||||
make_option('--index',
|
||||
help='The name of the index to use.'),
|
||||
)
|
||||
help = HELP
|
||||
|
||||
def handle(self, *args, **kw):
|
||||
if kw.get('fixup'):
|
||||
fixup()
|
||||
|
||||
from mkt.webapps.models import Installed
|
||||
addons, dates, index = kw['addons'], kw['date'], kw.get('index')
|
||||
|
||||
queries = [
|
||||
(Webapp.objects, tasks.index_finance_total,
|
||||
{'date': 'created', 'index': index}),
|
||||
(Webapp.objects, tasks.index_finance_total_by_src,
|
||||
{'date': 'created', 'index': index}),
|
||||
(Webapp.objects, tasks.index_finance_total_by_currency,
|
||||
{'date': 'created', 'index': index}),
|
||||
(Contribution.objects, tasks.index_finance_daily,
|
||||
{'date': 'created', 'index': index}),
|
||||
(Installed.objects, tasks.index_installed_daily,
|
||||
{'date': 'created', 'index': index}),
|
||||
]
|
||||
|
||||
for qs, task, fields in queries:
|
||||
date_field = fields['date']
|
||||
|
||||
qs = qs.order_by('-%s' % date_field).values_list('id', flat=True)
|
||||
if addons:
|
||||
pks = [int(a.strip()) for a in addons.split(',')]
|
||||
try:
|
||||
qs = qs.filter(config__addon__in=pks)
|
||||
except FieldError:
|
||||
try:
|
||||
qs = qs.filter(addon__in=pks)
|
||||
except FieldError:
|
||||
qs = qs.filter(id__in=pks)
|
||||
|
||||
if dates:
|
||||
if ':' in dates:
|
||||
qs = qs.filter(**{'%s__range' % date_field:
|
||||
dates.split(':')})
|
||||
else:
|
||||
qs = qs.filter({date_field: dates})
|
||||
|
||||
if not (dates or addons):
|
||||
# We're loading the whole world. Do it in stages so we get most
|
||||
# recent stats first and don't do huge queries.
|
||||
limits = (qs.model.objects.filter(**{'%s__isnull' %
|
||||
date_field: False})
|
||||
.extra(where=['%s <> "0000-00-00"' % date_field])
|
||||
.aggregate(min=Min(date_field), max=Max(date_field)))
|
||||
# If there isn't any data at all, skip over.
|
||||
if not (limits['max'] or limits['min']):
|
||||
continue
|
||||
|
||||
num_days = (limits['max'] - limits['min']).days
|
||||
today = date.today()
|
||||
for start in range(0, num_days, STEP):
|
||||
stop = start + STEP
|
||||
date_range = (today - timedelta(days=stop),
|
||||
today - timedelta(days=start))
|
||||
create_tasks(task, list(qs.filter(**{
|
||||
'%s__range' % date_field:
|
||||
date_range})))
|
||||
else:
|
||||
create_tasks(task, list(qs))
|
||||
|
||||
|
||||
def create_tasks(task, qs):
|
||||
ts = [task.subtask(args=[chunk]) for chunk in chunked(qs, 50)]
|
||||
TaskSet(ts).apply_async()
|
||||
|
||||
|
||||
def fixup():
|
||||
# TODO: fixup for contributions
|
||||
queries = []
|
||||
|
||||
for model, task in queries:
|
||||
all_addons = model.objects.distinct().values_list('addon', flat=True)
|
||||
for addon in all_addons:
|
||||
qs = model.objects.filter(addon=addon)
|
||||
search = model.search().filter(addon=addon)
|
||||
if qs.count() != search.count():
|
||||
all_ids = list(qs.values_list('id', flat=True))
|
||||
search_ids = list(search.values()[:5000])
|
||||
ids = set(all_ids) - set(search_ids)
|
||||
log.info('Missing %s rows for %s.' % (len(ids), addon))
|
||||
create_tasks(task, list(ids))
|
|
@ -1,154 +0,0 @@
|
|||
from django.db.models import Count, Q, Sum
|
||||
|
||||
import amo
|
||||
import amo.search
|
||||
from amo.utils import create_es_index_if_missing
|
||||
from stats.models import Contribution
|
||||
|
||||
from mkt.webapps.models import Installed
|
||||
|
||||
|
||||
def get_finance_total(qs, addon, field=None, **kwargs):
|
||||
"""
|
||||
sales/revenue/refunds per app overall
|
||||
field -- breakdown field name contained by kwargs
|
||||
"""
|
||||
q = Q()
|
||||
if field:
|
||||
kwargs_copy = {field: kwargs[field]}
|
||||
q = handle_kwargs(q, field, kwargs)
|
||||
|
||||
revenue = (qs.values('addon').filter(q, refund=None, **kwargs).
|
||||
annotate(revenue=Sum('price_tier__price')))
|
||||
sales = (qs.values('addon').filter(q, refund=None, **kwargs).
|
||||
annotate(sales=Count('id')))
|
||||
refunds = (qs.filter(q, refund__isnull=False, **kwargs).
|
||||
values('addon').annotate(refunds=Count('id')))
|
||||
document = {
|
||||
'addon': addon,
|
||||
'count': sales[0]['sales'] if sales.count() else 0,
|
||||
'revenue': revenue[0]['revenue'] if revenue.count() else 0,
|
||||
'refunds': refunds[0]['refunds'] if refunds.count() else 0,
|
||||
}
|
||||
if field:
|
||||
# Edge case, handle None values.
|
||||
if kwargs_copy[field] is None:
|
||||
kwargs_copy[field] = ''
|
||||
document[field] = kwargs_copy[field]
|
||||
|
||||
# Non-USD-normalized revenue, calculated from currency's amount rather
|
||||
# than price tier.
|
||||
if field == 'currency':
|
||||
document['revenue_non_normalized'] = (qs.values('addon')
|
||||
.filter(q, refund=None, **kwargs)
|
||||
.annotate(revenue=Sum('amount'))
|
||||
[0]['revenue'] if revenue.count() else 0)
|
||||
|
||||
return document
|
||||
|
||||
|
||||
def get_finance_daily(contribution):
|
||||
"""
|
||||
sales per day
|
||||
revenue per day
|
||||
refunds per day
|
||||
"""
|
||||
addon_id = contribution['addon']
|
||||
date = contribution['created'].date()
|
||||
return {
|
||||
'date': date,
|
||||
'addon': addon_id,
|
||||
'count': Contribution.objects.filter(
|
||||
addon__id=addon_id,
|
||||
refund=None,
|
||||
created__year=date.year,
|
||||
created__month=date.month,
|
||||
created__day=date.day).count() or 0,
|
||||
# TODO: non-USD-normalized revenue (daily_by_currency)?
|
||||
'revenue': Contribution.objects.filter(
|
||||
addon__id=addon_id,
|
||||
refund=None,
|
||||
type=amo.CONTRIB_PURCHASE,
|
||||
created__year=date.year,
|
||||
created__month=date.month,
|
||||
created__day=date.day)
|
||||
.aggregate(revenue=Sum('price_tier__price'))['revenue']
|
||||
or 0,
|
||||
'refunds': Contribution.objects.filter(
|
||||
addon__id=addon_id,
|
||||
refund__isnull=False,
|
||||
created__year=date.year,
|
||||
created__month=date.month,
|
||||
created__day=date.day).count() or 0,
|
||||
}
|
||||
|
||||
|
||||
def get_installed_daily(installed):
|
||||
"""
|
||||
installs per day
|
||||
"""
|
||||
addon_id = installed['addon']
|
||||
date = installed['created'].date()
|
||||
return {
|
||||
'date': date,
|
||||
'addon': addon_id,
|
||||
'count': Installed.objects.filter(
|
||||
addon__id=addon_id,
|
||||
created__year=date.year,
|
||||
created__month=date.month,
|
||||
created__day=date.day).count()
|
||||
}
|
||||
|
||||
|
||||
def setup_mkt_indexes(index=None, aliased=True):
|
||||
"""
|
||||
Define explicit ES mappings for models. If a field is not explicitly
|
||||
defined and a field is inserted, ES will dynamically guess the type and
|
||||
insert it, in a schemaless manner.
|
||||
"""
|
||||
es = amo.search.get_es()
|
||||
for model in [Contribution]:
|
||||
index_ = index or model._get_index()
|
||||
index_ = create_es_index_if_missing(index_, aliased=aliased)
|
||||
|
||||
mapping = {
|
||||
'properties': {
|
||||
'id': {'type': 'long'},
|
||||
'date': {'format': 'dateOptionalTime',
|
||||
'type': 'date'},
|
||||
'count': {'type': 'long'},
|
||||
'revenue': {'type': 'double'},
|
||||
|
||||
# Try to tell ES not to 'analyze' the field to querying with
|
||||
# hyphens and lowercase letters.
|
||||
'currency': {'type': 'string',
|
||||
'index': 'not_analyzed'},
|
||||
'source': {'type': 'string',
|
||||
'index': 'not_analyzed'},
|
||||
'inapp': {'type': 'string',
|
||||
'index': 'not_analyzed'}
|
||||
}
|
||||
}
|
||||
|
||||
es.put_mapping(model._meta.db_table, mapping, index_)
|
||||
|
||||
|
||||
def handle_kwargs(q, field, kwargs, join_field=None):
|
||||
"""
|
||||
Processes kwargs to combine '' and None values and make it ready for
|
||||
filters. Returns Q object to use in filter.
|
||||
"""
|
||||
if join_field:
|
||||
join_field += field
|
||||
kwargs[join_field] = kwargs[field]
|
||||
|
||||
# Have '' and None have the same meaning.
|
||||
if not kwargs[field]:
|
||||
q = Q(**{field + '__in': ['', None]})
|
||||
del(kwargs[field])
|
||||
|
||||
# We are using the join field to filter so get rid of the plain one.
|
||||
if join_field and field in kwargs:
|
||||
del(kwargs[field])
|
||||
|
||||
return q
|
|
@@ -1,233 +0,0 @@
from collections import defaultdict
import copy

import commonware.log
from celeryutils import task

import amo.search
from . import search
from lib.es.utils import get_indices
from stats.models import Contribution

log = commonware.log.getLogger('z.task')


@task
def index_finance_total(addons, **kw):
    """
    Aggregates financial stats from all of the contributions for a given app.
    """
    index = kw.get('index', Contribution._get_index())
    es = amo.search.get_es()
    log.info('Indexing total financial stats for %s apps.' %
             len(addons))

    for addon in addons:
        # Get all contributions for the given add-on.
        qs = Contribution.objects.filter(addon=addon, uuid=None)
        if not qs.exists():
            continue
        try:
            key = ord_word('tot' + str(addon))
            data = search.get_finance_total(qs, addon)
            for index in get_indices(index):
                if not already_indexed(Contribution, data, index):
                    Contribution.index(data, bulk=True, id=key, index=index)
            es.flush_bulk(forced=True)
        except Exception, exc:
            index_finance_total.retry(args=[addons], exc=exc, **kw)
            raise


@task
def index_finance_total_by_src(addons, **kw):
    """
    Bug 758059
    Total finance stats, source breakdown.
    """
    index = kw.get('index', Contribution._get_index())
    es = amo.search.get_es()
    log.info('Indexing total financial stats by source for %s apps.' %
             len(addons))

    for addon in addons:
        # Get all contributions for the given add-on.
        qs = Contribution.objects.filter(addon=addon, uuid=None)
        if not qs.exists():
            continue

        # Get the list of distinct sources.
        sources = set(qs.values_list('source', flat=True))

        for source in sources:
            try:
                key = ord_word('src' + str(addon) + str(source))
                data = search.get_finance_total(qs, addon, 'source',
                                                source=source)
                for index in get_indices(index):
                    if not already_indexed(Contribution, data, index):
                        Contribution.index(data, bulk=True, id=key,
                                           index=index)
                es.flush_bulk(forced=True)
            except Exception, exc:
                index_finance_total_by_src.retry(args=[addons], exc=exc, **kw)
                raise


@task
def index_finance_total_by_currency(addons, **kw):
    """
    Bug 757581
    Total finance stats, currency breakdown.
    """
    index = kw.get('index', Contribution._get_index())
    es = amo.search.get_es()
    log.info('Indexing total financial stats by currency for %s apps.' %
             len(addons))

    for addon in addons:
        # Get all contributions for the given add-on.
        qs = Contribution.objects.filter(addon=addon, uuid=None)
        if not qs.exists():
            continue

        # Get the list of distinct currencies.
        currencies = set(qs.values_list('currency', flat=True))

        for currency in currencies:
            try:
                key = ord_word('cur' + str(addon) + currency.lower())
                data = search.get_finance_total(
                    qs, addon, 'currency', currency=currency)
                for index in get_indices(index):
                    if not already_indexed(Contribution, data, index):
                        Contribution.index(data, bulk=True, id=key,
                                           index=index)
                es.flush_bulk(forced=True)
            except Exception, exc:
                index_finance_total_by_currency.retry(args=[addons], exc=exc, **kw)
                raise


@task
def index_finance_daily(ids, **kw):
    """
    Bug 748015
    Takes a list of Contribution ids and uses their addon and date fields to
    index stats for that day.

    Contribution stats are indexed by unique addon-date pair. A nested
    dictionary is used so that duplicate contributions with the same
    addon/date pair are not indexed twice: each addon is stored as a
    top-level key whose value is a dict, and each date is stored as a
    second-level key in that add-on's dict. To check whether an addon-date
    pair has already been indexed, we look up dict[addon][date]. This speeds
    up batch processing.

    ids -- ids of apps.stats.Contribution objects
    """
    index = kw.get('index', Contribution._get_index())
    es = amo.search.get_es()

    # Get contributions.
    qs = (Contribution.objects.filter(id__in=ids)
          .order_by('created').values('addon', 'created'))
    log.info('[%s] Indexing %s contributions for daily stats.' %
             (qs[0]['created'], len(ids)))

    addons_dates = defaultdict(lambda: defaultdict(dict))
    for contribution in qs:
        addon = contribution['addon']
        date = contribution['created'].strftime('%Y%m%d')

        try:
            # Date for this add-on not yet processed: index it and mark the
            # addon-date pair as seen.
            if date not in addons_dates[addon]:
                key = ord_word('fin' + str(addon) + str(date))
                data = search.get_finance_daily(contribution)
                for index in get_indices(index):
                    if not already_indexed(Contribution, data, index):
                        Contribution.index(data, bulk=True, id=key,
                                           index=index)
                addons_dates[addon][date] = 0
            es.flush_bulk(forced=True)
        except Exception, exc:
            index_finance_daily.retry(args=[ids], exc=exc, **kw)
            raise


@task
def index_installed_daily(ids, **kw):
    """
    Takes a list of Installed ids and uses their addon and date fields to
    index stats for that day.
    ids -- ids of mkt.webapps.Installed objects
    """
    from mkt.webapps.models import Installed
    index = kw.get('index', Installed._get_index())
    es = amo.search.get_es()
    # Get the Installed records.
    qs = (Installed.objects.filter(id__in=set(ids)).
          order_by('-created').values('addon', 'created'))
    log.info('[%s] Indexing %s installed counts for daily stats.' %
             (qs[0]['created'], len(qs)))

    addons_dates = defaultdict(lambda: defaultdict(dict))
    for installed in qs:
        addon = installed['addon']
        date = installed['created'].strftime('%Y%m%d')

        try:
            if date not in addons_dates[addon]:
                key = ord_word('ins' + str(addon) + str(date))
                data = search.get_installed_daily(installed)
                for index in get_indices(index):
                    if not already_indexed(Installed, data, index):
                        Installed.index(data, bulk=True, id=key,
                                        index=index)
                addons_dates[addon][date] = 0
            es.flush_bulk(forced=True)
        except Exception, exc:
            index_installed_daily.retry(args=[ids], exc=exc, **kw)
            raise


def ord_word(word):
    """
    Convert an alphanumeric string to its ASCII values, used for ES keys.
    """
    return ''.join([str(ord(letter)) for letter in word])


def already_indexed(model, data, index=None):
    """
    Bug 759924
    Checks that data is not being indexed twice.
    """
    # Handle the weird 'have to query in lower-case for ES' thing.
    for k, v in data.iteritems():
        try:
            data[k] = v.lower()
        except AttributeError:
            continue

    # Cast any datetimes to date.
    if 'date' in data:
        try:
            data['date'] = data['date'].date()
        except AttributeError:
            pass

    filter_data = copy.deepcopy(data)

    # Search floating point number with string (bug 770037 fix attempt #100).
    if 'revenue' in filter_data:
        try:
            filter_data['revenue'] = str(filter_data['revenue'])
        except AttributeError:
            pass

    # XXX shouldn't we return True here?
    return list(model.search(index).filter(**filter_data)
                .values_dict(data.keys()[0]))
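
A hand-checked illustration (not taken from the removed module or its tests) of the ES document-id scheme used by the tasks above: ord_word concatenates the ASCII code of every character, so the same prefix plus object id always yields the same id; already_indexed then returns a (possibly empty) list of matching documents, which callers treat as a boolean.

# 't' -> 116, 'o' -> 111, 't' -> 116, '4' -> 52, '2' -> 50
assert ord_word('tot' + str(42)) == '1161111165250'
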
@@ -1,25 +0,0 @@
|
|||
<nav id="side-nav" class="report-menu" role="navigation">
|
||||
<ul>
|
||||
<li data-report="overview" data-layout="overview">
|
||||
<a href="{{ url('mkt.stats.overview', addon.app_slug) }}">
|
||||
{{ _('Overview') }}
|
||||
</a>
|
||||
</li>
|
||||
{# TODO: Sales stats should only show up for premium apps. #}
|
||||
<li data-report="sales">
|
||||
<a href="{{ url('mkt.stats.sales', addon.app_slug) }}">
|
||||
{{ _('Sales') }}
|
||||
</a>
|
||||
</li>
|
||||
<li data-report="installs">
|
||||
<a href="{{ url('mkt.stats.installs', addon.app_slug) }}">
|
||||
{{ _('Installs') }}
|
||||
</a>
|
||||
</li>
|
||||
<li data-report="usage">
|
||||
<a href="{{ url('mkt.stats.usage', addon.app_slug) }}">
|
||||
{{ _('Usage') }}
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
|
@@ -1,47 +0,0 @@
|
|||
{% set urls = [
|
||||
(stats_url('overview'), _('Overview')),
|
||||
(stats_url('installs'), _('Installs')),
|
||||
] %}
|
||||
|
||||
{% set sales_filters = [
|
||||
('revenue', stats_url('revenue'), _('Amount Earned')),
|
||||
('sales', stats_url('sales'), _('Units Sold')),
|
||||
('refunds', stats_url('refunds'), _('Units Refunded'))
|
||||
] %}
|
||||
|
||||
<section class="secondary">
|
||||
<div class="report-menu" id="side-nav">
|
||||
<ul>
|
||||
{% for link, title in urls %}
|
||||
<li{% if url_quote(request.path) == link %} class="selected active"{% endif %}>
|
||||
<a href="{{ link }}">{{ title }}</a></li>
|
||||
{% endfor %}
|
||||
|
||||
{% if (addon.is_premium() and check_contrib_stats_perms(addon)) %}
|
||||
|
||||
{% for action, link, title in sales_filters %}
|
||||
{% set match_links = [
|
||||
link, stats_url(action, metric='currency'),
|
||||
stats_url(action, metric='source'),
|
||||
] %}
|
||||
|
||||
{# If a breakdown view is selected, the parent view is still active. #}
|
||||
<li{% if url_quote(request.path) in match_links %} class="selected active"{% endif %}>
|
||||
<a href="{{ link }}">{{ title }}</a></li>
|
||||
|
||||
{% set breakdown_links = [
|
||||
(stats_url(action, metric='currency'), _('by Currency')),
|
||||
(stats_url(action, metric='source'), _('by Source')),
|
||||
] %}
|
||||
{% for link, title in breakdown_links %}
|
||||
<li class="secondary-nav {{ 'selected active' if url_quote(request.path) == link }}">
|
||||
<a href="{{ link }}">{{ title }}</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
</ul>
|
||||
</div>
|
||||
</section>
|
|
@@ -1,12 +0,0 @@
|
|||
{% extends 'appstats/stats.html' %}
|
||||
|
||||
{% block csvtitle %}
|
||||
<h2>
|
||||
{{ title }}
|
||||
<span class="export-data">
|
||||
{{ _('Export') }}:
|
||||
<a id="export_data_csv" href="">CSV</a> ·
|
||||
<a id="export_data_json" href="">JSON</a>
|
||||
</span>
|
||||
</h2>
|
||||
{% endblock %}
|
|
@@ -1,66 +0,0 @@
|
|||
{% extends "appstats/stats.html" %}
|
||||
|
||||
{% block csvtable %}{% endblock %}
|
||||
|
||||
{% block stats %}
|
||||
<section class="island two-up">
|
||||
<div>
|
||||
<a href="downloads/">
|
||||
{{ _('<b>{0}</b> Installs')|
|
||||
f(addon.total_downloads|numberfmt)|safe }}
|
||||
</a>
|
||||
<small id="downloads-in-range">{{ _('Loading...') }}</small>
|
||||
</div>
|
||||
<div>
|
||||
<a href="usage/">
|
||||
{{ _('<b>{0}</b> Average Daily Users')|
|
||||
f(addon.average_daily_users|numberfmt)|safe }}
|
||||
</a>
|
||||
<small id="users-in-range">{{ _('Loading...') }}</small>
|
||||
</div>
|
||||
</section>
|
||||
<div class="toplists hidden">
|
||||
<div class="toplist">
|
||||
<div class="island statbox">
|
||||
<h2>{{ _('Top Apps') }}</h2>
|
||||
<div class="piechart"></div>
|
||||
<table data-metric="apps">
|
||||
</table>
|
||||
<a class="more" href="usage/applications/">
|
||||
{{ _('See more apps…') }}
|
||||
</a>
|
||||
<div class="no-data-overlay">
|
||||
<p>{{ _('No data available.') }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="toplist">
|
||||
<div class="island statbox">
|
||||
<h2>{{ _('Top Languages') }}</h2>
|
||||
<div class="piechart"></div>
|
||||
<table data-metric="locales">
|
||||
</table>
|
||||
<a class="more" href="usage/languages/">
|
||||
{{ _('See more languages…') }}
|
||||
</a>
|
||||
<div class="no-data-overlay">
|
||||
<p>{{ _('No data available.') }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="toplist">
|
||||
<div class="island statbox">
|
||||
<h2>{{ _('Top Devices') }}</h2>
|
||||
<div class="piechart"></div>
|
||||
<table data-metric="os">
|
||||
</table>
|
||||
<a class="more" href="usage/os/">
|
||||
{{ _('See more devices…') }}
|
||||
</a>
|
||||
<div class="no-data-overlay">
|
||||
<p>{{ _('No data available.') }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Units Refunded by Currency') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Amount Earned by Currency') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Sales by Currency') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Installs by Date') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Units Refunded by Date') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Amount Earned by Date') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Units Sold by Date') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Units Refunded by Source') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Amount Earned by Source') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Units Sold by Source') %}
|
|
@@ -1,3 +0,0 @@
|
|||
{% extends 'appstats/report.html' %}
|
||||
|
||||
{% set title = _('Usage by Date') %}
|
|
@@ -1,16 +0,0 @@
|
|||
{% extends 'developers/base_impala.html' %}
|
||||
|
||||
{% block bodyclass %}developer-hub{% endblock %}
|
||||
|
||||
{% block title %}
|
||||
{# L10n: {0} is the app name #}
|
||||
{{ _('{0} · Statistics Dashboard')|f(addon.name) }}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
<div class="notification-box error">
|
||||
<h2>Statistics pages are under construction. Check back soon.</h2>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
|
@@ -1,49 +0,0 @@
|
|||
<div class="criteria island">
|
||||
<ul>
|
||||
<li>
|
||||
<a id="chart-zoomout" class="inactive" href="#">
|
||||
{{ _('reset zoom') }}
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="criteria range island">
|
||||
<ul>
|
||||
<li>{{ _('For last:') }}</li>
|
||||
<li data-range="7 days"
|
||||
{% if range == '7' %}class="selected"{% endif %}>
|
||||
<a class="days-7" href="#">{{ _('7 days') }}</a></li>
|
||||
<li data-range="30 days"
|
||||
{% if range == '30' %}class="selected"{% endif %}>
|
||||
<a class="days-30" href="#">{{ _('30 days') }}</a></li>
|
||||
<li data-range="90 days"
|
||||
{% if range == '90' %}class="selected"{% endif %}>
|
||||
<a href="#">{{ _('90 days') }}</a></li>
|
||||
<li data-range="365 days"
|
||||
{% if range == '365' %}class="selected"{% endif %}>
|
||||
<a href="#">{{ _('365 days') }}</a></li>
|
||||
<li data-range="custom"
|
||||
{% if range == 'custom' %}class="selected"{% endif %}>
|
||||
<a id="custom-date-range" href="#">{{ _('Custom Range') }}</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="criteria group island">
|
||||
<ul>
|
||||
<li>{{ _('Group by:') }}</li>
|
||||
<li data-group="day">
|
||||
<a class="group-day" href="#">{{ _('Day') }}</a>
|
||||
</li>
|
||||
{# not working at the moment
|
||||
<li data-group="week">
|
||||
<a class="group-week" href="#">{{ _('Week') }}</a>
|
||||
</li>
|
||||
<li data-group="month">
|
||||
<a class="group-month" href="#">{{ _('Month') }}</a>
|
||||
</li>
|
||||
#}
|
||||
</ul>
|
||||
</div>
|
||||
|
|
@@ -1,15 +0,0 @@
|
|||
{% set urls = [
|
||||
(url('mkt.stats.my_apps_overview'), _('Overview')),
|
||||
(url('mkt.stats.my_apps_installs'), _('Installs')),
|
||||
] %}
|
||||
|
||||
<section class="secondary">
|
||||
<div class="report-menu" id="side-nav">
|
||||
<ul>
|
||||
{% for link, title in urls %}
|
||||
<li{% if url_quote(request.path) == link %} class="selected active"{% endif %}>
|
||||
<a href="{{ link }}">{{ title }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
</section>
|
|
@@ -1,13 +0,0 @@
|
|||
{% extends 'devstats/stats.html' %}
|
||||
|
||||
{% block csvtitle %}
|
||||
<h2>
|
||||
{{ title }}
|
||||
<span class="export-data">
|
||||
{{ _('Export') }}:
|
||||
<a id="export_data_csv" href="">CSV</a> ·
|
||||
<a id="export_data_json" href="">JSON</a>
|
||||
</span>
|
||||
</h2>
|
||||
{% endblock %}
|
||||
|
|
@@ -1 +0,0 @@
|
|||
{% extends 'devstats/report.html' %}
|
|
@@ -1,92 +0,0 @@
|
|||
{% extends 'developers/base_impala.html' %}
|
||||
|
||||
{% set range = view.range %}
|
||||
|
||||
{% block bodyclass %}developer-hub statistics{% endblock %}
|
||||
|
||||
{% block extrahead %}
|
||||
{{ css('mkt/stats') }}
|
||||
<link rel="stylesheet"
|
||||
href="{{ media('css/zamboni/jquery-ui/custom-1.7.2.css') }}">
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<section id="stats">
|
||||
<header class="c">
|
||||
<hgroup class="c">
|
||||
<h1 class="addon">
|
||||
{{ _('Apps Statistics Dashboard') }}
|
||||
</h1>
|
||||
</hgroup>
|
||||
|
||||
{% include 'devstats/includes/criteria.html' %}
|
||||
|
||||
</header>
|
||||
|
||||
{% include 'devstats/includes/stats_nav.html' %}
|
||||
|
||||
<div id="lm" class="loadmessage">
|
||||
<span>{{ _('Loading the latest data…') }}</span>
|
||||
</div>
|
||||
{# Initial stats will be `installs` only. Uncomment when others are done.
|
||||
<div class="secondary">
|
||||
{{ report_menu(request, report, obj=addon) }}
|
||||
{% block stats_note_link %}{% endblock %}
|
||||
<pre id="dbgout"></pre>
|
||||
</div>
|
||||
#}
|
||||
<div class="primary statistics"
|
||||
data-report="{{ report }}"
|
||||
{% if view.last %}
|
||||
data-range="{{ view.last }}"
|
||||
{% endif %}
|
||||
{% if view.start and view.end %}
|
||||
data-range="custom"
|
||||
data-start_date="{{ view.start }}"
|
||||
data-end_date="{{ view.end }}"
|
||||
{% endif %}
|
||||
data-base_url="{{ stats_base_url }}">
|
||||
<div class="island chart">
|
||||
<div id="head-chart">
|
||||
</div>
|
||||
<div class="no-data-overlay">
|
||||
<p>{{ _('No data available.') }}</p>
|
||||
</div>
|
||||
</div>
|
||||
{% block stats %}
|
||||
{% endblock %}
|
||||
|
||||
{# Dev stats will not show a paginated table - just the export links. #}
|
||||
{% block csvtable %}
|
||||
<div class="stats-export">
|
||||
{% block csvtitle %}{% endblock %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
<div class="hidden">
|
||||
<div id="fieldMenuPopup" class="popup">
|
||||
<form id="fieldMenu">
|
||||
<ul id="fieldList">
|
||||
</ul>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="popup-container">
|
||||
<div class="modal" id="stats-note">
|
||||
<a class="close">{{ _('close') }}</a>
|
||||
{% block stats_note %}{% endblock %}
|
||||
</div>
|
||||
{% include 'stats/popup.html' %}
|
||||
</div>
|
||||
</section>
|
||||
{% endblock %}
|
||||
|
||||
{% block js %}
|
||||
<!--[if IE]>
|
||||
<script
|
||||
src="{{ media('js/lib/excanvas.compiled.js') }}">
|
||||
</script>
|
||||
<![endif]-->
|
||||
{{ js('mkt/stats') }}
|
||||
{% endblock %}
|
|
@@ -1,46 +0,0 @@
|
|||
<div class="criteria island">
|
||||
<ul>
|
||||
<li>
|
||||
<a id="chart-zoomout" class="inactive" href="#">
|
||||
{{ _('reset zoom') }}
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="criteria range island">
|
||||
<ul>
|
||||
<li>{{ _('For last:') }}</li>
|
||||
<li data-range="7 days"
|
||||
{% if range == '7' %}class="selected"{% endif %}>
|
||||
<a class="days-7" href="#">{{ _('7 days') }}</a></li>
|
||||
<li data-range="30 days"
|
||||
{% if range == '30' %}class="selected"{% endif %}>
|
||||
<a class="days-30" href="#">{{ _('30 days') }}</a></li>
|
||||
<li data-range="90 days"
|
||||
{% if range == '90' %}class="selected"{% endif %}>
|
||||
<a href="#">{{ _('90 days') }}</a></li>
|
||||
<li data-range="365 days"
|
||||
{% if range == '365' %}class="selected"{% endif %}>
|
||||
<a href="#">{{ _('365 days') }}</a></li>
|
||||
<li data-range="custom"
|
||||
{% if range == 'custom' %}class="selected"{% endif %}>
|
||||
<a id="custom-date-range" href="#">{{ _('Custom Range') }}</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="criteria group island">
|
||||
<ul>
|
||||
<li>{{ _('Group by:') }}</li>
|
||||
<li data-group="day">
|
||||
<a class="group-day" href="#">{{ _('Day') }}</a>
|
||||
</li>
|
||||
<li data-group="week">
|
||||
<a class="group-week" href="#">{{ _('Week') }}</a>
|
||||
</li>
|
||||
<li data-group="month">
|
||||
<a class="group-month" href="#">{{ _('Month') }}</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
|
@@ -1,19 +0,0 @@
|
|||
{% set urls = [
|
||||
(url('mkt.stats.apps_count_new'), _('Apps added')),
|
||||
(url('mkt.stats.apps_count_installed'), _('Apps installed')),
|
||||
(url('mkt.stats.apps_review_count_new'), _('Reviews')),
|
||||
(url('mkt.stats.mmo_user_count_new'), _('New users')),
|
||||
(url('mkt.stats.mmo_user_count_total'), _('Total users')),
|
||||
(url('mkt.stats.mmo_total_visitors'), _('Total visitors')),
|
||||
] %}
|
||||
|
||||
<section class="secondary">
|
||||
<div class="report-menu" id="side-nav">
|
||||
<ul>
|
||||
{% for link, title in urls %}
|
||||
<li{% if url_quote(request.path) == link %} class="selected active"{% endif %}>
|
||||
<a href="{{ link }}">{{ title }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
</section>
|
|
@@ -1,109 +0,0 @@
|
|||
{% extends 'developers/base_impala.html' %}
|
||||
|
||||
{% set range = view.range %}
|
||||
|
||||
{% block bodyclass %}developer-hub statistics{% endblock %}
|
||||
|
||||
{% block extrahead %}
|
||||
{{ css('mkt/stats') }}
|
||||
<link rel="stylesheet"
|
||||
href="{{ media('css/zamboni/jquery-ui/custom-1.7.2.css') }}">
|
||||
{% endblock %}
|
||||
|
||||
{% block title %}
|
||||
{# L10n: {0} is the app name #}
|
||||
{{ _('Marketplace Statistics Dashboard') }}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<section id="stats">
|
||||
<header class="c">
|
||||
<hgroup class="c">
|
||||
<h1>
|
||||
{{ _('Marketplace Statistics Dashboard') }}
|
||||
</h1>
|
||||
</hgroup>
|
||||
|
||||
{% include 'includes/criteria.html' %}
|
||||
|
||||
</header>
|
||||
|
||||
{% include 'sitestats/includes/stats_nav.html' %}
|
||||
|
||||
<div id="lm" class="loadmessage">
|
||||
<span>{{ _('Loading the latest data…') }}</span>
|
||||
</div>
|
||||
<div class="primary statistics"
|
||||
data-report="{{ report }}"
|
||||
{% if view.last %}
|
||||
data-range="{{ view.last }}"
|
||||
{% endif %}
|
||||
{% if view.start and view.end %}
|
||||
data-range="custom"
|
||||
data-start_date="{{ view.start }}"
|
||||
data-end_date="{{ view.end }}"
|
||||
{% endif %}
|
||||
data-base_url="{{ stats_base_url }}">
|
||||
<div class="island chart">
|
||||
<div id="head-chart">
|
||||
</div>
|
||||
<div class="no-data-overlay">
|
||||
<p>{{ _('No data available.') }}</p>
|
||||
</div>
|
||||
</div>
|
||||
{% block stats %}
|
||||
{% endblock %}
|
||||
{% block csvtable %}
|
||||
<div class="island">
|
||||
{% block csvtitle %}{% endblock %}
|
||||
<div class="tabular csv-table">
|
||||
<div class="table-box">
|
||||
<table>
|
||||
<thead>
|
||||
</thead>
|
||||
</table>
|
||||
</div>
|
||||
<footer>
|
||||
<nav class="paginator c" role="navigation">
|
||||
<p class="range">
|
||||
</p>
|
||||
<p class="rel">
|
||||
<a href="#"
|
||||
class="button prev disabled">
|
||||
◂ {{ _('Previous') }}</a>
|
||||
<a href="#"
|
||||
class="button next">
|
||||
{{ _('Next') }} ▸</a>
|
||||
</p>
|
||||
</nav>
|
||||
</footer>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
<div class="hidden">
|
||||
<div id="fieldMenuPopup" class="popup">
|
||||
<form id="fieldMenu">
|
||||
<ul id="fieldList">
|
||||
</ul>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="popup-container">
|
||||
<div class="modal" id="stats-note">
|
||||
<a class="close">{{ _('close') }}</a>
|
||||
{% block stats_note %}{% endblock %}
|
||||
</div>
|
||||
{% include 'stats/popup.html' %}
|
||||
</div>
|
||||
</section>
|
||||
{% endblock %}
|
||||
|
||||
{% block js %}
|
||||
<!--[if IE]>
|
||||
<script
|
||||
src="{{ media('js/lib/excanvas.compiled.js') }}">
|
||||
</script>
|
||||
<![endif]-->
|
||||
{{ js('mkt/stats') }}
|
||||
{% endblock %}
|
|
@@ -1,452 +0,0 @@
|
|||
import csv
|
||||
import datetime
|
||||
import json
|
||||
import random
|
||||
from decimal import Decimal
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
import mock
|
||||
from nose import SkipTest
|
||||
from nose.tools import eq_
|
||||
from test_utils import RequestFactory
|
||||
|
||||
import amo
|
||||
import amo.tests
|
||||
from access.models import Group, GroupUser
|
||||
from addons.models import Addon, AddonUser
|
||||
from amo.urlresolvers import reverse
|
||||
from apps.stats.models import GlobalStat
|
||||
from market.models import Price
|
||||
from mkt.site.fixtures import fixture
|
||||
from mkt.stats import search, tasks, views
|
||||
from mkt.stats.views import (FINANCE_SERIES, get_series_column, get_series_line,
|
||||
pad_missing_stats)
|
||||
from mkt.webapps.models import Installed
|
||||
from stats.models import Contribution
|
||||
from users.models import UserProfile
|
||||
|
||||
|
||||
class StatsTest(amo.tests.ESTestCase):
|
||||
fixtures = fixture('user_999')
|
||||
|
||||
def setUp(self):
|
||||
super(StatsTest, self).setUp()
|
||||
self.create_switch('monolith-stats')
|
||||
self.user = UserProfile.objects.get(username='regularuser')
|
||||
self.public_app = amo.tests.app_factory(name='public',
|
||||
app_slug='pub', type=1, status=4, public_stats=True)
|
||||
self.private_app = amo.tests.app_factory(name='private',
|
||||
app_slug='priv', type=1, status=4, public_stats=False)
|
||||
self.url_args = {'start': '20090601', 'end': '20090930',
|
||||
'app_slug': self.private_app.app_slug}
|
||||
|
||||
def login_as_visitor(self):
|
||||
self.login(self.user)
|
||||
|
||||
def get_view_response(self, view, **kwargs):
|
||||
view_args = self.url_args.copy()
|
||||
head = kwargs.pop('head', False)
|
||||
view_args.update(kwargs)
|
||||
url = reverse(view, kwargs=view_args)
|
||||
if head:
|
||||
return self.client.head(url, follow=True)
|
||||
return self.client.get(url, follow=True)
|
||||
|
||||
def views_gen(self, **kwargs):
|
||||
# common set of views
|
||||
for series in views.SERIES:
|
||||
if series == 'my_apps':
|
||||
# skip my_apps, as it has different routes
|
||||
continue
|
||||
for group in views.SERIES_GROUPS:
|
||||
view = 'mkt.stats.%s_series' % series
|
||||
args = kwargs.copy()
|
||||
args['group'] = group
|
||||
yield (view, args)
|
||||
|
||||
def public_views_gen(self, **kwargs):
|
||||
# all views are potentially public, except for contributions
|
||||
for view, args in self.views_gen(**kwargs):
|
||||
if not view in ['mkt.stats.%s_series' % series for series in
|
||||
FINANCE_SERIES]:
|
||||
yield (view, args)
|
||||
|
||||
def private_views_gen(self, **kwargs):
|
||||
# only contributions views are always private
|
||||
for view, args in self.views_gen(**kwargs):
|
||||
if view in ['mkt.stats.%s_series' % series for series in
|
||||
FINANCE_SERIES]:
|
||||
yield (view, args)
|
||||
|
||||
|
||||
class TestStatsPermissions(StatsTest):
|
||||
"""Tests to make sure all restricted data remains restricted."""
|
||||
|
||||
@amo.tests.mock_es # We're checking only headers, not content.
|
||||
def _check_it(self, views, status):
|
||||
for view, kwargs in views:
|
||||
response = self.get_view_response(view, head=True, **kwargs)
|
||||
eq_(response.status_code, status,
|
||||
'unexpected http status for %s. got %s. expected %s' % (
|
||||
view, response.status_code, status))
|
||||
|
||||
def test_private_app_no_groups(self):
|
||||
# Logged in but no groups
|
||||
self.login_as_visitor()
|
||||
self._check_it(self.private_views_gen(format='json'), 403)
|
||||
|
||||
@mock.patch.object(settings, 'MONOLITH_SERVER', 'http://0.0.0.0:0')
|
||||
@mock.patch('monolith.client.Client')
|
||||
def test_private_app_stats_group(self, mocked_client):
|
||||
# Logged in with stats group.
|
||||
group = Group.objects.create(name='Stats', rules='Stats:View')
|
||||
GroupUser.objects.create(user=self.user, group=group)
|
||||
self.login_as_visitor()
|
||||
|
||||
self._check_it(self.public_views_gen(format='json'), 200)
|
||||
self._check_it(self.private_views_gen(format='json'), 403)
|
||||
|
||||
@mock.patch.object(settings, 'MONOLITH_SERVER', 'http://0.0.0.0:0')
|
||||
@mock.patch('monolith.client.Client')
|
||||
def test_private_app_contrib_stats_group(self, mocked_client):
|
||||
# Logged in with stats and contrib stats group.
|
||||
group1 = Group.objects.create(name='Stats', rules='Stats:View')
|
||||
GroupUser.objects.create(user=self.user, group=group1)
|
||||
group2 = Group.objects.create(name='Revenue Stats',
|
||||
rules='RevenueStats:View')
|
||||
GroupUser.objects.create(user=self.user, group=group2)
|
||||
self.login_as_visitor()
|
||||
|
||||
self._check_it(self.public_views_gen(format='json'), 200)
|
||||
self._check_it(self.private_views_gen(format='json'), 200)
|
||||
|
||||
def test_private_app_anonymous(self):
|
||||
# Not logged in
|
||||
self.client.logout()
|
||||
self._check_it(self.private_views_gen(format='json'), 403)
|
||||
|
||||
@mock.patch.object(settings, 'MONOLITH_SERVER', 'http://0.0.0.0:0')
|
||||
@mock.patch('monolith.client.Client')
|
||||
def test_public_app_no_groups(self, mocked_client):
|
||||
# Logged in but no groups
|
||||
self.login_as_visitor()
|
||||
self._check_it(self.public_views_gen(
|
||||
app_slug=self.public_app.app_slug, format='json'), 200)
|
||||
self._check_it(self.private_views_gen(
|
||||
app_slug=self.public_app.app_slug, format='json'), 403)
|
||||
|
||||
@mock.patch.object(settings, 'MONOLITH_SERVER', 'http://0.0.0.0:0')
|
||||
@mock.patch('monolith.client.Client')
|
||||
def test_public_app_stats_group(self, mocked_client):
|
||||
# Logged in with stats group.
|
||||
group = Group.objects.create(name='Stats', rules='Stats:View')
|
||||
GroupUser.objects.create(user=self.user, group=group)
|
||||
self.login_as_visitor()
|
||||
|
||||
self._check_it(self.public_views_gen(
|
||||
app_slug=self.public_app.app_slug, format='json'), 200)
|
||||
self._check_it(self.private_views_gen(
|
||||
app_slug=self.public_app.app_slug, format='json'), 403)
|
||||
|
||||
@mock.patch.object(settings, 'MONOLITH_SERVER', 'http://0.0.0.0:0')
|
||||
@mock.patch('monolith.client.Client')
|
||||
def test_public_app_contrib_stats_group(self, mocked_client):
|
||||
# Logged in with stats and contrib stats group.
|
||||
group1 = Group.objects.create(name='Stats', rules='Stats:View')
|
||||
GroupUser.objects.create(user=self.user, group=group1)
|
||||
group2 = Group.objects.create(name='Revenue Stats',
|
||||
rules='RevenueStats:View')
|
||||
GroupUser.objects.create(user=self.user, group=group2)
|
||||
self.login_as_visitor()
|
||||
|
||||
self._check_it(self.public_views_gen(
|
||||
app_slug=self.public_app.app_slug, format='json'), 200)
|
||||
self._check_it(self.private_views_gen(
|
||||
app_slug=self.public_app.app_slug, format='json'), 200)
|
||||
|
||||
@mock.patch.object(settings, 'MONOLITH_SERVER', 'http://0.0.0.0:0')
|
||||
@mock.patch('monolith.client.Client')
|
||||
def test_public_app_anonymous(self, mocked_client):
|
||||
# Not logged in
|
||||
self.client.logout()
|
||||
self._check_it(self.public_views_gen(app_slug=self.public_app.app_slug,
|
||||
format='json'), 200)
|
||||
self._check_it(self.private_views_gen(
|
||||
app_slug=self.public_app.app_slug, format='json'), 403)
|
||||
|
||||
|
||||
class TestMyApps(StatsTest):
|
||||
|
||||
def setUp(self):
|
||||
super(TestMyApps, self).setUp()
|
||||
self.req = RequestFactory().get('/')
|
||||
self.req.amo_user = self.user
|
||||
AddonUser.objects.create(addon=self.public_app, user=self.user)
|
||||
Installed.objects.create(addon=self.public_app, user=self.user)
|
||||
|
||||
def test_anonymous(self):
|
||||
del self.req.amo_user
|
||||
eq_(views._my_apps(self.req), [])
|
||||
|
||||
def test_some(self):
|
||||
eq_(views._my_apps(self.req), [self.public_app])
|
||||
|
||||
def test_deleted(self):
|
||||
self.public_app.update(status=amo.STATUS_DELETED)
|
||||
eq_(views._my_apps(self.req), [])
|
||||
|
||||
|
||||
class TestInstalled(amo.tests.ESTestCase):
|
||||
test_es = True
|
||||
fixtures = fixture('user_admin', 'group_admin', 'user_admin_group',
|
||||
'user_999', 'webapp_337141')
|
||||
|
||||
def setUp(self):
|
||||
self.today = datetime.date.today()
|
||||
self.webapp = Addon.objects.get(pk=337141)
|
||||
self.user = UserProfile.objects.get(pk=999)
|
||||
self.client.login(username='admin@mozilla.com', password='password')
|
||||
self.in_ = Installed.objects.create(addon=self.webapp, user=self.user)
|
||||
installed = {'addon': self.in_.addon.id, 'created': self.in_.created}
|
||||
Installed.index(search.get_installed_daily(installed),
|
||||
id=self.in_.pk)
|
||||
self.refresh('users_install')
|
||||
|
||||
def get_url(self, start, end, fmt='json'):
|
||||
return reverse('mkt.stats.installs_series',
|
||||
args=[self.webapp.app_slug, 'day',
|
||||
start.strftime('%Y%m%d'),
|
||||
end.strftime('%Y%m%d'), fmt])
|
||||
|
||||
def get_multiple_url(self, start, end, fmt='json'):
|
||||
return reverse('mkt.stats.my_apps_series',
|
||||
args=['day',
|
||||
start.strftime('%Y%m%d'),
|
||||
end.strftime('%Y%m%d'), fmt])
|
||||
|
||||
def test_installed(self):
|
||||
res = self.client.get(self.get_url(self.today, self.today))
|
||||
data = json.loads(res.content)
|
||||
eq_(data[0]['count'], 1)
|
||||
|
||||
def test_installed_anon(self):
|
||||
self.client.logout()
|
||||
res = self.client.get(self.get_url(self.today, self.today))
|
||||
eq_(res.status_code, 403)
|
||||
|
||||
def test_installed_anon_public(self):
|
||||
self.client.logout()
|
||||
self.webapp.update(public_stats=True)
|
||||
res = self.client.get(self.get_url(self.today, self.today))
|
||||
eq_(res.status_code, 200)
|
||||
|
||||
def setup_multiple(self):
|
||||
self.client.login(username=self.user.email, password='password')
|
||||
AddonUser.objects.create(addon=self.webapp, user=self.user)
|
||||
|
||||
def test_multiple_json(self):
|
||||
self.setup_multiple()
|
||||
res = self.client.get(self.get_multiple_url(self.today, self.today))
|
||||
eq_(json.loads(res.content)[0]['name'], self.webapp.name)
|
||||
|
||||
def test_multiple_csv(self):
|
||||
self.setup_multiple()
|
||||
res = self.client.get(self.get_multiple_url(self.today, self.today,
|
||||
fmt='csv'))
|
||||
rows = list(csv.reader(res.content.split('\n')))
|
||||
eq_(rows[5][0], str(self.webapp.name))
|
||||
|
||||
def test_anonymous(self):
|
||||
self.client.logout()
|
||||
res = self.client.get(reverse('mkt.stats.my_apps_overview'))
|
||||
self.assertLoginRequired(res)
|
||||
|
||||
|
||||
class TestGetSeriesLine(amo.tests.ESTestCase):
|
||||
fixtures = fixture('user_999')
|
||||
|
||||
def setUp(self):
|
||||
# Create apps and contributions to index.
|
||||
self.app = amo.tests.app_factory()
|
||||
user = UserProfile.objects.get(username='regularuser')
|
||||
price_tier = Price.objects.create(price='0.99')
|
||||
|
||||
# Create a sale for each day in the expected range.
|
||||
self.expected_days = (1, 2, 3, 4, 5)
|
||||
for day in self.expected_days:
|
||||
# Create different amounts of contribs for each day.
|
||||
for x in range(0, day):
|
||||
c = Contribution.objects.create(addon_id=self.app.pk,
|
||||
user=user,
|
||||
amount='0.99',
|
||||
price_tier=price_tier,
|
||||
type=amo.CONTRIB_PURCHASE)
|
||||
c.update(created=datetime.datetime(2012, 5, day, 0, 0, 0))
|
||||
tasks.index_finance_daily(Contribution.objects.all())
|
||||
self.refresh(timesleep=1)
|
||||
|
||||
def test_basic(self):
|
||||
"""
|
||||
Check a sale (count) is found for each day in the expected range.
|
||||
"""
|
||||
d_range = (datetime.date(2012, 05, 01), datetime.date(2012, 05, 15))
|
||||
stats = list(get_series_line(Contribution, 'day', addon=self.app.pk,
|
||||
date__range=d_range))
|
||||
dates_with_sales = [c['date'] for c in stats if c['count'] > 0]
|
||||
days = [d.day for d in dates_with_sales]
|
||||
for day in self.expected_days:
|
||||
eq_(day in days, True)
|
||||
|
||||
def test_desc_order(self):
|
||||
"""
|
||||
Check the returned data is in descending order by date.
|
||||
"""
|
||||
d_range = (datetime.date(2012, 05, 01), datetime.date(2012, 05, 15))
|
||||
stats = list(get_series_line(Contribution, 'day', addon=self.app.pk,
|
||||
date__range=d_range))
|
||||
eq_(stats, sorted(stats, key=lambda x: x['date'], reverse=True))
|
||||
|
||||
def test_revenue(self):
|
||||
"""
|
||||
Check each day's revenue is correct.
|
||||
"""
|
||||
d_range = (datetime.date(2012, 05, 01), datetime.date(2012, 05, 05))
|
||||
stats = list(get_series_line(Contribution, 'day',
|
||||
primary_field='revenue',
|
||||
addon=self.app.pk,
|
||||
date__range=d_range))
|
||||
|
||||
for stat, day in zip(stats, sorted(self.expected_days, reverse=True)):
|
||||
expected_revenue = day * .99
|
||||
eq_(round(stat['count'], 2), round(expected_revenue, 2))
|
||||
|
||||
|
||||
class TestGetSeriesColumn(amo.tests.ESTestCase):
|
||||
fixtures = fixture('user_999')
|
||||
|
||||
def setUp(self):
|
||||
super(TestGetSeriesColumn, self).setUp()
|
||||
# Create apps and contributions to index.
|
||||
self.app = amo.tests.app_factory()
|
||||
self.user = UserProfile.objects.get(username='regularuser')
|
||||
price_tier = Price.objects.create(price='0.99')
|
||||
|
||||
# Create some revenue for several different currencies.
|
||||
self.expected = [
|
||||
{'currency': 'CAD', 'count': 0},
|
||||
{'currency': 'EUR', 'count': 0},
|
||||
{'currency': 'USD', 'count': 0}
|
||||
]
|
||||
for expected in self.expected:
|
||||
for x in range(random.randint(1, 4)):
|
||||
# Amount doesn't matter for this stat since based off of price
|
||||
# tier (USD normalized).
|
||||
Contribution.objects.create(addon_id=self.app.pk,
|
||||
user=self.user,
|
||||
amount=random.randint(0, 10),
|
||||
currency=expected['currency'],
|
||||
price_tier=price_tier)
|
||||
expected['count'] += Decimal(price_tier.price)
|
||||
expected['count'] = int(expected['count'])
|
||||
tasks.index_finance_total_by_currency([self.app.pk])
|
||||
self.refresh(timesleep=1)
|
||||
|
||||
def test_basic_revenue(self):
|
||||
stats = list(get_series_column(Contribution, addon=self.app.pk,
|
||||
primary_field='revenue',
|
||||
category_field='currency'))
|
||||
|
||||
for stat in stats:
|
||||
stat['currency'] = stat['currency'].upper()
|
||||
stat['count'] = int(stat['count'])
|
||||
stats = sorted(stats, key=lambda stat: stat['currency'])
|
||||
eq_(stats, self.expected)
|
||||
|
||||
def test_desc_order(self):
|
||||
stats = list(get_series_column(Contribution, addon=self.app.pk,
|
||||
primary_field='revenue',
|
||||
category_field='currency'))
|
||||
for stat in stats:
|
||||
stat['count'] = int(stat['count'])
|
||||
eq_(stats, sorted(stats, key=lambda stat: stat['count'], reverse=True))
|
||||
|
||||
|
||||
class TestPadMissingStats(amo.tests.ESTestCase):
|
||||
|
||||
def test_basic(self):
|
||||
days = [datetime.date(2012, 4, 29), datetime.date(2012, 5, 1),
|
||||
datetime.date(2012, 5, 3), datetime.date(2012, 5, 5)]
|
||||
expected_days = [datetime.date(2012, 4, 30), datetime.date(2012, 5, 2),
|
||||
datetime.date(2012, 5, 4)]
|
||||
|
||||
dummies = pad_missing_stats(days, 'day')
|
||||
days = [dummy['date'].date() for dummy in dummies]
|
||||
for day in expected_days:
|
||||
eq_(day in days, True)
|
||||
|
||||
def test_with_date_range(self):
|
||||
date_range = (datetime.date(2012, 5, 1), datetime.date(2012, 5, 5))
|
||||
|
||||
days = [datetime.date(2012, 5, 3)]
|
||||
expected_days = [datetime.date(2012, 5, 2), datetime.date(2012, 5, 4)]
|
||||
|
||||
dummies = pad_missing_stats(days, 'day', date_range=date_range)
|
||||
days = [dummy['date'].date() for dummy in dummies]
|
||||
for day in expected_days:
|
||||
eq_(day in days, True)
|
||||
|
||||
def test_with_fields(self):
|
||||
fields = ['test_field', 'fest_tield']
|
||||
|
||||
days = [datetime.date(2012, 5, 1), datetime.date(2012, 5, 3)]
|
||||
dummies = pad_missing_stats(days, 'day', fields=fields)
|
||||
for dummy in dummies:
|
||||
for field in fields:
|
||||
eq_(field in dummy, True)
|
||||
|
||||
def test_group_week(self):
|
||||
days = [datetime.date(2012, 5, 1), datetime.date(2012, 5, 15)]
|
||||
expected_days = [datetime.date(2012, 5, 8)]
|
||||
|
||||
dummies = pad_missing_stats(days, 'week')
|
||||
days = [dummy['date'].date() for dummy in dummies]
|
||||
for day in expected_days:
|
||||
eq_(day in days, True)
|
||||
|
||||
def test_group_month(self):
|
||||
days = [datetime.date(2012, 5, 1), datetime.date(2012, 7, 1)]
|
||||
expected_days = [datetime.date(2012, 6, 1)]
|
||||
|
||||
dummies = pad_missing_stats(days, 'month')
|
||||
days = [dummy['date'].date() for dummy in dummies]
|
||||
for day in expected_days:
|
||||
eq_(day in days, True)
|
||||
|
||||
|
||||
class TestOverall(amo.tests.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.keys = ['apps_count_new', 'apps_count_installed',
|
||||
'apps_review_count_new']
|
||||
|
||||
def test_url(self):
|
||||
raise SkipTest('Disabling for new stats, later to be removed.')
|
||||
self.assert3xx(self.client.get(reverse('mkt.stats.overall')),
|
||||
reverse('mkt.stats.apps_count_new'))
|
||||
|
||||
def get_url(self, name):
|
||||
return (reverse('mkt.stats.%s' % name) +
|
||||
'/%s-day-20090601-20090630.json' % name)
|
||||
|
||||
def test_stats(self):
|
||||
raise SkipTest('Disabling for new stats, later to be removed.')
|
||||
for stat in self.keys:
|
||||
GlobalStat.objects.create(name=stat, count=1,
|
||||
date=datetime.date(2009, 06, 12))
|
||||
|
||||
for stat in self.keys:
|
||||
res = self.client.get(self.get_url(stat))
|
||||
content = json.loads(res.content)
|
||||
eq_(content[0]['date'], '2009-06-12')
|
||||
eq_(content[0]['count'], 1)
|
|
@@ -1,65 +0,0 @@
|
|||
from datetime import datetime
|
||||
|
||||
from nose.tools import eq_
|
||||
|
||||
import amo.tests
|
||||
from mkt.constants import apps
|
||||
from mkt.site.fixtures import fixture
|
||||
from mkt.stats import search
|
||||
from mkt.webapps.models import Installed
|
||||
from users.models import UserProfile
|
||||
|
||||
|
||||
class InstalledTests(amo.tests.TestCase):
|
||||
fixtures = fixture('user_999')
|
||||
|
||||
def setUp(self):
|
||||
self.user = UserProfile.objects.get(username='regularuser')
|
||||
self.first_app = amo.tests.app_factory(name='public',
|
||||
app_slug='pub', type=1, status=4, public_stats=True)
|
||||
self.second_app = amo.tests.app_factory(name='private',
|
||||
app_slug='priv', type=1, status=4, public_stats=False)
|
||||
|
||||
def test_no_installs(self):
|
||||
data = {'created': datetime.now(),
|
||||
'addon': self.first_app.id}
|
||||
result = search.get_installed_daily(data)
|
||||
eq_(result['date'], data['created'].date())
|
||||
eq_(result['addon'], data['addon'])
|
||||
eq_(result['count'], 0)
|
||||
|
||||
def test_only_one_app(self):
|
||||
Installed.objects.create(addon=self.first_app, user=self.user,
|
||||
install_type=apps.INSTALL_TYPE_USER)
|
||||
data = {'created': datetime.now(),
|
||||
'addon': self.first_app.id}
|
||||
result = search.get_installed_daily(data)
|
||||
eq_(result['date'], data['created'].date())
|
||||
eq_(result['addon'], data['addon'])
|
||||
eq_(result['count'], 1)
|
||||
|
||||
def test_multiple_installs(self):
|
||||
# Due to the unique together we use different install types to deal
|
||||
# with that constraint.
|
||||
Installed.objects.create(addon=self.first_app, user=self.user,
|
||||
install_type=apps.INSTALL_TYPE_USER)
|
||||
Installed.objects.create(addon=self.first_app, user=self.user,
|
||||
install_type=apps.INSTALL_TYPE_DEVELOPER)
|
||||
data = {'created': datetime.now(),
|
||||
'addon': self.first_app.id}
|
||||
result = search.get_installed_daily(data)
|
||||
eq_(result['date'], data['created'].date())
|
||||
eq_(result['addon'], data['addon'])
|
||||
eq_(result['count'], 2)
|
||||
|
||||
def test_two_apps(self):
|
||||
Installed.objects.create(addon=self.first_app, user=self.user,
|
||||
install_type=apps.INSTALL_TYPE_USER)
|
||||
Installed.objects.create(addon=self.second_app, user=self.user,
|
||||
install_type=apps.INSTALL_TYPE_USER)
|
||||
data = {'created': datetime.now(),
|
||||
'addon': self.first_app.id}
|
||||
result = search.get_installed_daily(data)
|
||||
eq_(result['date'], data['created'].date())
|
||||
eq_(result['addon'], data['addon'])
|
||||
eq_(result['count'], 1)
|
|
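
The tests above pin down the behaviour of search.get_installed_daily without showing it; a hedged reconstruction consistent with those tests (the real implementation lived in mkt/stats/search.py and may have differed) would look roughly like this:

from mkt.webapps.models import Installed


def get_installed_daily(installed):
    # `installed` is a dict with 'addon' and 'created' keys, as passed in by
    # index_installed_daily.
    date = installed['created'].date()
    return {
        'date': date,
        'addon': installed['addon'],
        # Count every Installed row for this app created on the same day.
        'count': Installed.objects.filter(
            addon=installed['addon'],
            created__year=date.year,
            created__month=date.month,
            created__day=date.day).count(),
    }
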
@@ -1,255 +0,0 @@
|
|||
import datetime
|
||||
from decimal import Decimal
|
||||
import random
|
||||
|
||||
from nose import SkipTest
|
||||
from nose.tools import eq_
|
||||
|
||||
import amo
|
||||
import amo.tests
|
||||
from market.models import Refund, Price
|
||||
from mkt.stats import tasks
|
||||
from stats.models import Contribution
|
||||
from users.models import UserProfile
|
||||
|
||||
|
||||
class BaseTaskTest(amo.tests.ESTestCase):
|
||||
fixtures = ['base/users']
|
||||
|
||||
def baseSetUp(self):
|
||||
self.app = amo.tests.app_factory()
|
||||
self.usd_price = '0.99'
|
||||
self.price_tier = Price.objects.create(price=self.usd_price)
|
||||
self.user = UserProfile.objects.get(username='regularuser')
|
||||
|
||||
def create_refund(self, contribution):
|
||||
Refund.objects.create(contribution=contribution,
|
||||
status=amo.REFUND_APPROVED, user=self.user)
|
||||
|
||||
class TestIndexFinanceTotal(BaseTaskTest):
|
||||
|
||||
def setUp(self):
|
||||
self.baseSetUp()
|
||||
|
||||
self.expected = {'revenue': 0, 'count': 5, 'refunds': 2}
|
||||
for x in range(self.expected['count']):
|
||||
c = Contribution.objects.create(
|
||||
user=self.user,
|
||||
addon_id=self.app.pk,
|
||||
type=amo.CONTRIB_PURCHASE,
|
||||
amount=str(random.randint(0, 10) + .99),
|
||||
price_tier=self.price_tier)
|
||||
self.expected['revenue'] += Decimal(self.usd_price)
|
||||
|
||||
# Create 2 refunds.
|
||||
if x % 2 == 1:
|
||||
self.create_refund(c)
|
||||
self.expected['revenue'] -= Decimal(self.usd_price)
|
||||
self.expected['count'] -= 1
|
||||
self.refresh()
|
||||
|
||||
def test_index(self):
|
||||
tasks.index_finance_total([self.app.pk])
|
||||
self.refresh(timesleep=1)
|
||||
|
||||
document = Contribution.search().filter(addon=self.app.pk
|
||||
).values_dict('revenue', 'count', 'refunds')[0]
|
||||
|
||||
document = {'count': document['count'],
|
||||
'revenue': int(document['revenue']),
|
||||
'refunds': document['refunds']}
|
||||
self.expected['revenue'] = int(self.expected['revenue'])
|
||||
|
||||
eq_(document, self.expected)
|
||||
|
||||
|
||||
class TestIndexFinanceTotalBySrc(BaseTaskTest):
|
||||
|
||||
def setUp(self):
|
||||
self.baseSetUp()
|
||||
|
||||
self.sources = ['mkt-home', 'front-search', 'featured']
|
||||
self.expected = {
|
||||
'mkt-home': {'revenue': 0, 'count': 2, 'refunds': 1},
|
||||
'front-search': {'revenue': 0, 'count': 3, 'refunds': 1},
|
||||
'featured': {'revenue': 0, 'count': 4, 'refunds': 1}
|
||||
}
|
||||
for source in self.sources:
|
||||
# Create sales.
|
||||
for x in range(self.expected[source]['count']):
|
||||
c = Contribution.objects.create(
|
||||
user=self.user,
|
||||
addon_id=self.app.pk, source=source,
|
||||
type=amo.CONTRIB_PURCHASE,
|
||||
amount=str(random.randint(0, 10) + .99),
|
||||
price_tier=self.price_tier)
|
||||
self.expected[source]['revenue'] += Decimal(self.usd_price)
|
||||
|
||||
self.create_refund(c)
|
||||
self.expected[source]['revenue'] -= Decimal(self.usd_price)
|
||||
self.expected[source]['count'] -= 1
|
||||
self.refresh()
|
||||
|
||||
def test_index(self):
|
||||
tasks.index_finance_total_by_src([self.app.pk])
|
||||
self.refresh(timesleep=1)
|
||||
|
||||
# Grab document for each source breakdown and compare.
|
||||
for source in self.sources:
|
||||
# For some reason, query fails if uppercase letter in filter.
|
||||
document = (Contribution.search().filter(addon=self.app.pk,
|
||||
source=source.lower()).values_dict('source', 'revenue',
|
||||
'count', 'refunds')[0])
|
||||
document = {'count': document['count'],
|
||||
'revenue': int(document['revenue']),
|
||||
'refunds': document['refunds']}
|
||||
self.expected[source]['revenue'] = (
|
||||
int(self.expected[source]['revenue'])
|
||||
)
|
||||
eq_(document, self.expected[source])
|
||||
|
||||
|
||||
class TestIndexFinanceTotalByCurrency(BaseTaskTest):
|
||||
|
||||
def setUp(self):
|
||||
self.baseSetUp()
|
||||
|
||||
self.currencies = ['CAD', 'USD', 'EUR']
|
||||
self.expected = {
|
||||
'CAD': {'revenue': 0, 'count': 3, 'refunds': 1,
|
||||
'revenue_non_normalized': 0},
|
||||
'USD': {'revenue': 0, 'count': 4, 'refunds': 1,
|
||||
'revenue_non_normalized': 0},
|
||||
'EUR': {'revenue': 0, 'count': 2, 'refunds': 1,
|
||||
'revenue_non_normalized': 0}
|
||||
}
|
||||
for currency in self.currencies:
|
||||
# Create sales.
|
||||
for x in range(self.expected[currency]['count']):
|
||||
amount = str(random.randint(0, 10))
|
||||
c = Contribution.objects.create(addon_id=self.app.pk,
|
||||
user=self.user,
|
||||
type=amo.CONTRIB_PURCHASE,
|
||||
currency=currency,
|
||||
amount=amount,
|
||||
price_tier=self.price_tier)
|
||||
self.expected[currency]['revenue'] += Decimal(self.usd_price)
|
||||
self.expected[currency]['revenue_non_normalized'] += (
|
||||
Decimal(amount))
|
||||
|
||||
self.create_refund(c)
|
||||
self.expected[currency]['revenue'] -= Decimal(self.usd_price)
|
||||
self.expected[currency]['revenue_non_normalized'] -= (
|
||||
Decimal(amount))
|
||||
self.expected[currency]['count'] -= 1
|
||||
self.refresh()
|
||||
|
||||
def test_index(self):
|
||||
tasks.index_finance_total_by_currency([self.app.pk])
|
||||
self.refresh(timesleep=1)
|
||||
raise SkipTest('Test is unreliable and causes intermittent failures.')
|
||||
|
||||
# Grab document for each source breakdown and compare.
|
||||
for currency in self.currencies:
|
||||
# For some reason, query fails if uppercase letter in filter.
|
||||
document = (Contribution.search().filter(addon=self.app.pk,
|
||||
currency=currency.lower()).values_dict('currency',
|
||||
'revenue', 'count', 'refunds',
|
||||
'revenue_non_normalized')[0])
|
||||
document = {
|
||||
'count': document['count'],
|
||||
'revenue': int(document['revenue']),
|
||||
'refunds': document['refunds'],
|
||||
'revenue_non_normalized':
|
||||
int(document['revenue_non_normalized'])}
|
||||
self.expected[currency]['revenue'] = (
|
||||
int(self.expected[currency]['revenue'])
|
||||
)
|
||||
self.expected[currency]['revenue_non_normalized'] = (
|
||||
int(self.expected[currency]['revenue_non_normalized'])
|
||||
)
|
||||
eq_(document, self.expected[currency])
|
||||
|
||||
|
||||
class TestIndexFinanceDaily(BaseTaskTest):
|
||||
|
||||
def setUp(self):
|
||||
self.baseSetUp()
|
||||
|
||||
self.ids = []
|
||||
self.expected = {'date': datetime.datetime.today(),
|
||||
'revenue': 0, 'count': 5, 'refunds': 2}
|
||||
for x in range(self.expected['count']):
|
||||
c = Contribution.objects.create(addon_id=self.app.pk,
|
||||
user=self.user,
|
||||
type=amo.CONTRIB_PURCHASE,
|
||||
amount=str(random.randint(0, 10) + .99),
|
||||
price_tier=self.price_tier)
|
||||
self.expected['revenue'] += Decimal(self.usd_price)
|
||||
self.ids.append(c.id)
|
||||
|
||||
# Create 2 refunds.
|
||||
if x % 2 == 1:
|
||||
c.uuid = 123
|
||||
c.save()
|
||||
self.create_refund(c)
|
||||
self.expected['revenue'] -= Decimal(self.usd_price)
|
||||
self.expected['count'] -= 1
|
||||
|
||||
def test_index(self):
|
||||
tasks.index_finance_daily.delay(self.ids)
|
||||
self.refresh(timesleep=1)
|
||||
|
||||
document = Contribution.search().filter(addon=self.app.pk
|
||||
).values_dict('date', 'revenue', 'count', 'refunds')[0]
|
||||
|
||||
date = document['date']
|
||||
ex_date = self.expected['date']
|
||||
eq_((date.year, date.month, date.day),
|
||||
(ex_date.year, ex_date.month, ex_date.day))
|
||||
|
||||
document = {'count': document['count'],
|
||||
'revenue': int(document['revenue']),
|
||||
'refunds': document['refunds']}
|
||||
del(self.expected['date'])
|
||||
|
||||
self.expected['revenue'] = int(self.expected['revenue'])
|
||||
eq_(document, self.expected)
|
||||
|
||||
|
||||
class TestAlreadyIndexed(BaseTaskTest):
|
||||
|
||||
def setUp(self):
|
||||
self.baseSetUp()
|
||||
|
||||
today = datetime.datetime.today()
|
||||
date = datetime.datetime(today.year, today.month, today.day)
|
||||
|
||||
self.ids = []
|
||||
self.expected = {'addon': self.app.pk,
|
||||
'date': date,
|
||||
'revenue': Decimal('0'), 'count': 3,
|
||||
'refunds': 1}
|
||||
|
||||
for x in range(self.expected['count']):
|
||||
c = Contribution.objects.create(addon_id=self.app.pk,
|
||||
user=self.user,
|
||||
type=amo.CONTRIB_PURCHASE,
|
||||
amount=str(random.randint(0, 10)),
|
||||
price_tier=self.price_tier)
|
||||
self.refresh(timesleep=1)
|
||||
self.ids.append(c.id)
|
||||
self.expected['revenue'] += Decimal(self.usd_price)
|
||||
|
||||
c.update(uuid=123)
|
||||
self.create_refund(c)
|
||||
self.expected['revenue'] -= Decimal(self.usd_price)
|
||||
self.expected['count'] -= 1
|
||||
|
||||
self.expected['revenue'] = self.expected['revenue']
|
||||
|
||||
def test_basic(self):
|
||||
eq_(tasks.already_indexed(Contribution, self.expected), [])
|
||||
tasks.index_finance_daily.delay(self.ids)
|
||||
self.refresh(timesleep=1)
|
||||
eq_(tasks.already_indexed(Contribution, self.expected) != [], True)
|
|
@@ -1,453 +0,0 @@
|
|||
import csv
|
||||
import datetime
|
||||
import json
|
||||
import random
|
||||
from decimal import Decimal
|
||||
|
||||
import mock
|
||||
from nose import SkipTest
|
||||
from nose.tools import eq_
|
||||
from test_utils import RequestFactory
|
||||
|
||||
import amo
|
||||
import amo.tests
|
||||
from access.models import Group, GroupUser
|
||||
from addons.models import Addon, AddonUser
|
||||
from amo.urlresolvers import reverse
|
||||
from apps.stats.models import GlobalStat
|
||||
from market.models import Price
|
||||
from mkt.site.fixtures import fixture
|
||||
from mkt.stats import search, tasks, views
|
||||
from mkt.stats.views import (FINANCE_SERIES, get_series_column, get_series_line,
|
||||
pad_missing_stats)
|
||||
from mkt.webapps.models import Installed
|
||||
from stats.models import Contribution
|
||||
from users.models import UserProfile
|
||||
|
||||
|
||||
class StatsTest(amo.tests.ESTestCase):
|
||||
fixtures = fixture('user_999')
|
||||
|
||||
def setUp(self):
|
||||
super(StatsTest, self).setUp()
|
||||
self.user = UserProfile.objects.get(username='regularuser')
|
||||
|
||||
self.public_app = amo.tests.app_factory(name='public',
|
||||
app_slug='pub', type=1, status=4, public_stats=True)
|
||||
self.private_app = amo.tests.app_factory(name='private',
|
||||
app_slug='priv', type=1, status=4, public_stats=False)
|
||||
self.url_args = {'start': '20090601', 'end': '20090930',
|
||||
'app_slug': self.private_app.app_slug}
|
||||
|
||||
def login_as_visitor(self):
|
||||
self.client.login(username='regular@mozilla.com', password='password')
|
||||
|
||||
def get_view_response(self, view, **kwargs):
|
||||
view_args = self.url_args.copy()
|
||||
head = kwargs.pop('head', False)
|
||||
view_args.update(kwargs)
|
||||
url = reverse(view, kwargs=view_args)
|
||||
if head:
|
||||
return self.client.head(url, follow=True)
|
||||
return self.client.get(url, follow=True)
|
||||
|
||||
def views_gen(self, **kwargs):
|
||||
# common set of views
|
||||
for series in views.SERIES:
|
||||
if series == 'my_apps':
|
||||
# skip my_apps, as it has different routes
|
||||
continue
|
||||
for group in views.SERIES_GROUPS:
|
||||
view = 'mkt.stats.%s_series' % series
|
||||
args = kwargs.copy()
|
||||
args['group'] = group
|
||||
yield (view, args)
|
||||
|
||||
def public_views_gen(self, **kwargs):
|
||||
# all views are potentially public, except for contributions
|
||||
for view, args in self.views_gen(**kwargs):
|
||||
if not view in ['mkt.stats.%s_series' % series for series in
|
||||
FINANCE_SERIES]:
|
||||
yield (view, args)
|
||||
|
||||
def private_views_gen(self, **kwargs):
|
||||
# only contributions views are always private
|
||||
for view, args in self.views_gen(**kwargs):
|
||||
if view in ['mkt.stats.%s_series' % series for series in
|
||||
FINANCE_SERIES]:
|
||||
yield (view, args)


class TestStatsPermissions(StatsTest):
    """Tests to make sure all restricted data remains restricted."""

    @amo.tests.mock_es  # We're checking only headers, not content.
    def _check_it(self, views, status):
        for view, kwargs in views:
            response = self.get_view_response(view, head=True, **kwargs)
            eq_(response.status_code, status,
                'unexpected http status for %s. got %s. expected %s' % (
                    view, response.status_code, status))

    def test_private_app_no_groups(self):
        # Logged in but no groups.
        self.login_as_visitor()
        self._check_it(self.private_views_gen(format='json'), 403)

    def test_private_app_stats_group(self):
        # Logged in with the stats group.
        group = Group.objects.create(name='Stats', rules='Stats:View')
        GroupUser.objects.create(user=self.user, group=group)
        self.login_as_visitor()

        self._check_it(self.public_views_gen(format='json'), 200)
        self._check_it(self.private_views_gen(format='json'), 403)

    def test_private_app_contrib_stats_group(self):
        # Logged in with the stats and contrib stats groups.
        group1 = Group.objects.create(name='Stats', rules='Stats:View')
        GroupUser.objects.create(user=self.user, group=group1)
        group2 = Group.objects.create(name='Revenue Stats',
                                      rules='RevenueStats:View')
        GroupUser.objects.create(user=self.user, group=group2)
        self.login_as_visitor()

        self._check_it(self.public_views_gen(format='json'), 200)
        self._check_it(self.private_views_gen(format='json'), 200)

    def test_private_app_anonymous(self):
        # Not logged in.
        self.client.logout()
        self._check_it(self.private_views_gen(format='json'), 403)

    def test_public_app_no_groups(self):
        # Logged in but no groups.
        self.login_as_visitor()
        self._check_it(self.public_views_gen(
            app_slug=self.public_app.app_slug, format='json'), 200)
        self._check_it(self.private_views_gen(
            app_slug=self.public_app.app_slug, format='json'), 403)

    def test_public_app_stats_group(self):
        # Logged in with the stats group.
        group = Group.objects.create(name='Stats', rules='Stats:View')
        GroupUser.objects.create(user=self.user, group=group)
        self.login_as_visitor()

        self._check_it(self.public_views_gen(
            app_slug=self.public_app.app_slug, format='json'), 200)
        self._check_it(self.private_views_gen(
            app_slug=self.public_app.app_slug, format='json'), 403)

    def test_public_app_contrib_stats_group(self):
        # Logged in with the stats and contrib stats groups.
        group1 = Group.objects.create(name='Stats', rules='Stats:View')
        GroupUser.objects.create(user=self.user, group=group1)
        group2 = Group.objects.create(name='Revenue Stats',
                                      rules='RevenueStats:View')
        GroupUser.objects.create(user=self.user, group=group2)
        self.login_as_visitor()

        self._check_it(self.public_views_gen(
            app_slug=self.public_app.app_slug, format='json'), 200)
        self._check_it(self.private_views_gen(
            app_slug=self.public_app.app_slug, format='json'), 200)

    def test_public_app_anonymous(self):
        # Not logged in.
        self.client.logout()
        self._check_it(self.public_views_gen(
            app_slug=self.public_app.app_slug, format='json'), 200)
        self._check_it(self.private_views_gen(
            app_slug=self.public_app.app_slug, format='json'), 403)


class TestMyApps(StatsTest):

    def setUp(self):
        super(TestMyApps, self).setUp()
        self.req = RequestFactory().get('/')
        self.req.amo_user = self.user
        AddonUser.objects.create(addon=self.public_app, user=self.user)
        Installed.objects.create(addon=self.public_app, user=self.user)

    def test_anonymous(self):
        del self.req.amo_user
        eq_(views._my_apps(self.req), [])

    def test_some(self):
        eq_(views._my_apps(self.req), [self.public_app])

    def test_deleted(self):
        self.public_app.update(status=amo.STATUS_DELETED)
        eq_(views._my_apps(self.req), [])


class TestInstalled(amo.tests.ESTestCase):
    test_es = True
    fixtures = fixture('user_admin', 'group_admin', 'user_admin_group',
                       'user_999', 'webapp_337141')

    def setUp(self):
        self.today = datetime.date.today()
        self.webapp = Addon.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=999)
        self.client.login(username='admin@mozilla.com', password='password')
        self.in_ = Installed.objects.create(addon=self.webapp, user=self.user)
        installed = {'addon': self.in_.addon.id, 'created': self.in_.created}
        Installed.index(search.get_installed_daily(installed),
                        id=self.in_.pk)
        self.refresh('users_install')

    def get_url(self, start, end, fmt='json'):
        return reverse('mkt.stats.installs_series',
                       args=[self.webapp.app_slug, 'day',
                             start.strftime('%Y%m%d'),
                             end.strftime('%Y%m%d'), fmt])

    def get_multiple_url(self, start, end, fmt='json'):
        return reverse('mkt.stats.my_apps_series',
                       args=['day',
                             start.strftime('%Y%m%d'),
                             end.strftime('%Y%m%d'), fmt])

    def test_installed(self):
        res = self.client.get(self.get_url(self.today, self.today))
        data = json.loads(res.content)
        eq_(data[0]['count'], 1)

    def test_installed_anon(self):
        self.client.logout()
        res = self.client.get(self.get_url(self.today, self.today))
        eq_(res.status_code, 403)

    def test_installed_anon_public(self):
        self.client.logout()
        # Avoid re-indexing the app when changing its public_stats value.
        with mock.patch('mkt.webapps.tasks.index_webapps.delay'):
            self.webapp.update(public_stats=True)
        res = self.client.get(self.get_url(self.today, self.today))
        eq_(res.status_code, 200)

    def setup_multiple(self):
        self.client.login(username=self.user.email, password='password')
        AddonUser.objects.create(addon=self.webapp, user=self.user)

    def test_multiple_json(self):
        self.setup_multiple()
        res = self.client.get(self.get_multiple_url(self.today, self.today))
        eq_(json.loads(res.content)[0]['name'], self.webapp.name)

    def test_multiple_csv(self):
        self.setup_multiple()
        res = self.client.get(self.get_multiple_url(self.today, self.today,
                                                    fmt='csv'))
        rows = list(csv.reader(res.content.split('\n')))
        eq_(rows[5][0], str(self.webapp.name))

    def test_anonymous(self):
        self.client.logout()
        res = self.client.get(reverse('mkt.stats.my_apps_overview'))
        self.assertLoginRequired(res)


class TestGetSeriesLine(amo.tests.ESTestCase):
    fixtures = fixture('user_999')

    def setUp(self):
        # Create apps and contributions to index.
        self.app = amo.tests.app_factory()
        user = UserProfile.objects.get(username='regularuser')
        price_tier = Price.objects.create(price='0.99')

        # Create a sale for each day in the expected range.
        self.expected_days = (1, 2, 3, 4, 5)
        for day in self.expected_days:
            # Create different amounts of contribs for each day.
            for x in range(0, day):
                c = Contribution.objects.create(addon_id=self.app.pk,
                                                user=user,
                                                amount='0.99',
                                                price_tier=price_tier,
                                                type=amo.CONTRIB_PURCHASE)
                c.update(created=datetime.datetime(2012, 5, day, 0, 0, 0))
        tasks.index_finance_daily(Contribution.objects.all())
        self.refresh(timesleep=1)

    def test_basic(self):
        """
        Check a sale (count) is found for each day in the expected range.
        """
        d_range = (datetime.date(2012, 5, 1), datetime.date(2012, 5, 15))
        stats = list(get_series_line(Contribution, 'day', addon=self.app.pk,
                                     date__range=d_range))
        dates_with_sales = [c['date'] for c in stats if c['count'] > 0]
        days = [d.day for d in dates_with_sales]
        for day in self.expected_days:
            eq_(day in days, True)

    def test_desc_order(self):
        """
        Check the returned data is in descending order by date.
        """
        d_range = (datetime.date(2012, 5, 1), datetime.date(2012, 5, 15))
        stats = list(get_series_line(Contribution, 'day', addon=self.app.pk,
                                     date__range=d_range))
        eq_(stats, sorted(stats, key=lambda x: x['date'], reverse=True))

    def test_revenue(self):
        """
        Check each day's revenue is correct.
        """
        d_range = (datetime.date(2012, 5, 1), datetime.date(2012, 5, 5))
        stats = list(get_series_line(Contribution, 'day',
                                     primary_field='revenue',
                                     addon=self.app.pk,
                                     date__range=d_range))

        for stat, day in zip(stats, sorted(self.expected_days, reverse=True)):
            expected_revenue = day * .99
            eq_(round(stat['count'], 2), round(expected_revenue, 2))


class TestGetSeriesColumn(amo.tests.ESTestCase):
    fixtures = fixture('user_999')

    def setUp(self):
        super(TestGetSeriesColumn, self).setUp()
        # Create apps and contributions to index.
        self.app = amo.tests.app_factory()
        self.user = UserProfile.objects.get(username='regularuser')
        price_tier = Price.objects.create(price='0.99')

        # Create some revenue for several different currencies.
        self.expected = [
            {'currency': 'CAD', 'count': 0},
            {'currency': 'EUR', 'count': 0},
            {'currency': 'USD', 'count': 0}
        ]
        for expected in self.expected:
            for x in range(random.randint(1, 4)):
                # The amount doesn't matter for this stat since it is based
                # off of the price tier (USD normalized).
                Contribution.objects.create(addon_id=self.app.pk,
                                            user=self.user,
                                            amount=random.randint(0, 10),
                                            currency=expected['currency'],
                                            price_tier=price_tier)
                expected['count'] += Decimal(price_tier.price)
            expected['count'] = int(expected['count'])
        tasks.index_finance_total_by_currency([self.app.pk])
        self.refresh(timesleep=1)

    def test_basic_revenue(self):
        stats = list(get_series_column(Contribution, addon=self.app.pk,
                                       primary_field='revenue',
                                       category_field='currency'))

        for stat in stats:
            stat['currency'] = stat['currency'].upper()
            stat['count'] = int(stat['count'])
        stats = sorted(stats, key=lambda stat: stat['currency'])
        eq_(stats, self.expected)

    def test_desc_order(self):
        stats = list(get_series_column(Contribution, addon=self.app.pk,
                                       primary_field='revenue',
                                       category_field='currency'))
        for stat in stats:
            stat['count'] = int(stat['count'])
        eq_(stats, sorted(stats, key=lambda stat: stat['count'], reverse=True))


class TestPadMissingStats(amo.tests.ESTestCase):

    def test_basic(self):
        days = [datetime.date(2012, 4, 29), datetime.date(2012, 5, 1),
                datetime.date(2012, 5, 3), datetime.date(2012, 5, 5)]
        expected_days = [datetime.date(2012, 4, 30), datetime.date(2012, 5, 2),
                         datetime.date(2012, 5, 4)]

        dummies = pad_missing_stats(days, 'day')
        days = [dummy['date'].date() for dummy in dummies]
        for day in expected_days:
            eq_(day in days, True)

    def test_with_date_range(self):
        date_range = (datetime.date(2012, 5, 1), datetime.date(2012, 5, 5))

        days = [datetime.date(2012, 5, 3)]
        expected_days = [datetime.date(2012, 5, 2), datetime.date(2012, 5, 4)]

        dummies = pad_missing_stats(days, 'day', date_range=date_range)
        days = [dummy['date'].date() for dummy in dummies]
        for day in expected_days:
            eq_(day in days, True)

    def test_with_fields(self):
        fields = ['test_field', 'fest_tield']

        days = [datetime.date(2012, 5, 1), datetime.date(2012, 5, 3)]
        dummies = pad_missing_stats(days, 'day', fields=fields)
        for dummy in dummies:
            for field in fields:
                eq_(field in dummy, True)

    def test_group_week(self):
        days = [datetime.date(2012, 5, 1), datetime.date(2012, 5, 15)]
        expected_days = [datetime.date(2012, 5, 8)]

        dummies = pad_missing_stats(days, 'week')
        days = [dummy['date'].date() for dummy in dummies]
        for day in expected_days:
            eq_(day in days, True)

    def test_group_month(self):
        days = [datetime.date(2012, 5, 1), datetime.date(2012, 7, 1)]
        expected_days = [datetime.date(2012, 6, 1)]

        dummies = pad_missing_stats(days, 'month')
        days = [dummy['date'].date() for dummy in dummies]
        for day in expected_days:
            eq_(day in days, True)


class TestOverall(amo.tests.TestCase):
    fixtures = fixture('user_999')

    def setUp(self):
        self.keys = ['apps_count_new', 'apps_count_installed',
                     'apps_review_count_new']

    def test_url(self):
        raise SkipTest('Disabling for new stats, later to be removed.')
        self.assert3xx(self.client.get(reverse('mkt.stats.overall')),
                       reverse('mkt.stats.apps_count_new'))

    def get_url(self, name):
        return (reverse('mkt.stats.%s' % name) +
                '/%s-day-20090601-20090630.json' % name)

    def test_stats(self):
        raise SkipTest('Disabling for new stats, later to be removed.')
        for stat in self.keys:
            GlobalStat.objects.create(name=stat, count=1,
                                      date=datetime.date(2009, 6, 12))

        for stat in self.keys:
            res = self.client.get(self.get_url(stat))
            content = json.loads(res.content)
            eq_(content[0]['date'], '2009-06-12')
            eq_(content[0]['count'], 1)

    def test_stats_view_perm(self):
        raise SkipTest('Disabling for new stats, later to be removed.')
        assert self.client.login(username='regular@mozilla.com',
                                 password='password')
        res = self.client.get(reverse('mkt.stats.apps_count_new'))
        eq_(res.status_code, 403)

        self.grant_permission(
            UserProfile.objects.get(username='regularuser'), 'Stats:View')
        res = self.client.get(reverse('mkt.stats.apps_count_new'))
        eq_(res.status_code, 200)

@@ -1,14 +1,6 @@
from django.conf.urls import patterns, url
from django.shortcuts import redirect

from . import api, views

from stats.urls import series_re


# Time series URLs following this pattern:
# /app/{app_slug}/statistics/{series}-{group}-{start}-{end}.{format}
series = dict((type, '%s-%s' % (type, series_re)) for type in views.SERIES)
from . import api


stats_api_patterns = patterns('',

@@ -28,125 +20,3 @@ txn_api_patterns = patterns('',
        api.TransactionAPI.as_view(),
        name='transaction_api'),
)


def sales_stats_report_urls(category=''):
    """
    urlpatterns helper builder for views.stats_report urls
    """
    url_patterns = []
    sales_metrics = ['revenue', 'sales', 'refunds']

    category_prefix = ''
    category_suffix = ''
    if category:
        category_prefix = category + '_'
        category_suffix = category + '/'

    for metric in sales_metrics:
        full_category = '%s%s' % (category_prefix, metric)

        # URL defaults revenue to root, don't explicitly put in url.
        if metric == 'revenue':
            metric = ''

        url_patterns += patterns('',
            url('^sales/%s%s$' % (category_suffix, metric),
                views.stats_report,
                name='mkt.stats.%s' % full_category,
                kwargs={'report': full_category})
        )
    return url_patterns


def sales_series_urls(category=''):
    """
    urlpatterns helper builder for views.*_series urls
    """
    url_patterns = []
    sales_metrics = ['revenue', 'sales', 'refunds']

    inapp_suffix = ''

    # Distinguish between line and column series.
    view = views.finance_line_series
    category_prefix = ''
    if category:
        view = views.finance_column_series
        category_prefix = category + '_'

    for metric in sales_metrics:
        full_category = '%s%s%s' % (category_prefix, metric, inapp_suffix)

        kwargs = {}
        if metric != 'sales':
            # Defaults to sales so does not need primary_field arg.
            kwargs['primary_field'] = metric
        if category:
            kwargs['category_field'] = category

        url_re = series[full_category]

        url_patterns += patterns('',
            url(url_re,
                view,
                name='mkt.stats.%s' % full_category + '_series',
                kwargs=kwargs)
        )
    return url_patterns
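
# For example, sales_series_urls(category='currency') hooks up
# views.finance_column_series under names such as
# 'mkt.stats.currency_revenue_series' and 'mkt.stats.currency_sales_series',
# while the no-category call below uses views.finance_line_series and names
# such as 'mkt.stats.revenue_series'.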


app_stats_patterns = patterns('',
    # Overview (not implemented).
    url('^$', views.stats_report, name='mkt.stats.overview',
        kwargs={'report': 'installs'}),
    # kwargs={'report': 'app_overview'}.

    # Installs.
    url('^installs/$', views.stats_report, name='mkt.stats.installs',
        kwargs={'report': 'installs'}),
    url(series['installs'], views.installs_series,
        name='mkt.stats.installs_series'),

    # Usage (not implemented).
    url('^usage/$', views.stats_report, name='mkt.stats.usage',
        kwargs={'report': 'usage'}),
    url(series['usage'], views.usage_series,
        name='mkt.stats.usage_series'),
)

app_stats_patterns += sales_stats_report_urls(category='currency')
app_stats_patterns += sales_series_urls(category='currency')
app_stats_patterns += sales_stats_report_urls(category='source')
app_stats_patterns += sales_series_urls(category='source')

app_stats_patterns += sales_stats_report_urls()
app_stats_patterns += sales_series_urls()

# Overall site statistics.
app_site_patterns = patterns('',
    url('^$', lambda r: redirect('mkt.stats.apps_count_new', permanent=False),
        name='mkt.stats.overall')
)

keys = ['apps_count_new', 'apps_count_installed', 'apps_review_count_new',
        'mmo_user_count_total', 'mmo_user_count_new', 'mmo_total_visitors']

urls = []
for key in keys:
    urls.append(url('^%s/$' % key, views.overall,
                    name='mkt.stats.%s' % key, kwargs={'report': key}))

app_site_patterns += patterns('', *urls)

all_apps_stats_patterns = patterns('',
    # Landing pages.
    url('^$', views.my_apps_report, name='mkt.stats.my_apps_overview',
        kwargs={'report': 'installs'}),
    url('^installs/$', views.my_apps_report, name='mkt.stats.my_apps_installs',
        kwargs={'report': 'installs'}),

    # Data URL.
    url(series['my_apps'], views.my_apps_series,
        name='mkt.stats.my_apps_series'),
)

@@ -1,471 +0,0 @@
import datetime
from datetime import date, timedelta
from dateutil.relativedelta import relativedelta
import logging

from django.core.exceptions import PermissionDenied
from django.shortcuts import redirect

import jingo
from waffle.decorators import waffle_switch
import waffle

from access import acl
import amo
from amo.decorators import json_view, login_required, permission_required
from amo.urlresolvers import reverse
from lib.metrics import get_monolith_client
from mkt.webapps.decorators import app_view, app_view_factory
from mkt.webapps.models import Installed, Webapp
from stats.models import Contribution, UpdateCount
from stats.views import (check_series_params_or_404, daterange,
                         get_report_view, render_csv, render_json)


logger = logging.getLogger('z.mkt.stats.views')
FINANCE_SERIES = (
    'sales', 'refunds', 'revenue',
    'currency_revenue', 'currency_sales', 'currency_refunds',
    'source_revenue', 'source_sales', 'source_refunds',
)
SERIES = FINANCE_SERIES + ('installs', 'usage', 'my_apps')
SERIES_GROUPS = ('day', 'week', 'month')
SERIES_GROUPS_DATE = ('date', 'week', 'month')
SERIES_FORMATS = ('json', 'csv')


@app_view_factory(Webapp.objects.all)
def stats_report(request, addon, report, category_field=None):
    """
    Stats page. Passes context variables into the template, which is read by
    the JS to build a URL. The URL calls a *_series view, which determines the
    necessary arguments for get_series_*. get_series_* queries ES for the
    data, which is later formatted into .json or .csv and made available to
    the JS.
    """
    if (addon.status is not amo.STATUS_PUBLIC and
        not check_stats_permission(request, addon, for_contributions=True,
                                   no_raise=True)):
        return redirect(addon.get_detail_url())
    check_stats_permission(request, addon)

    template_name = 'appstats/reports/%s.html' % report
    stats_base_url = reverse('mkt.stats.overview', args=[addon.app_slug])
    view = get_report_view(request)

    return jingo.render(request, template_name, {
        'addon': addon,
        'report': report,
        'view': view,
        'stats_base_url': stats_base_url,
    })


@login_required
@waffle_switch('developer-stats')
def my_apps_report(request, report):
    """
    A report for a developer, showing multiple apps.
    """
    view = get_report_view(request)
    template_name = 'devstats/reports/%s.html' % report
    return jingo.render(request, template_name, {
        'view': view,
        'report': 'my_apps',
    })


def get_series_line(model, group, primary_field=None, extra_fields=None,
                    extra_values=None, **filters):
    """
    Get a generator of dicts for the stats model given by the filters, made
    to fit into Highchart's datetime line graph.

    primary_field takes a field name that can be referenced by the key 'count'
    extra_fields takes a list of fields that can be found in the index
                 on top of date and count and can be seen in the output
    extra_values is a dict of constant values added to each row
    """
    if not extra_fields:
        extra_fields = []

    extra_values = extra_values or {}

    if waffle.switch_is_active('monolith-stats'):
        keys = {Installed: 'app_installs',
                UpdateCount: 'updatecount_XXX',
                Contribution: 'contribution_XXX'}

        # Getting data from the monolith server.
        client = get_monolith_client()

        field = keys[model]
        start, end = filters['date__range']

        if group == 'date':
            group = 'day'

        try:
            for result in client(field, start, end, interval=group,
                                 addon_id=filters['addon']):
                res = {'count': result['count']}
                for extra_field in extra_fields:
                    res[extra_field] = result[extra_field]
                date_ = date(*result['date'].timetuple()[:3])
                res['end'] = res['date'] = date_
                res.update(extra_values)
                yield res
        except ValueError as e:
            if len(e.args) > 0:
                logger.error(e.args[0])

    else:
        # Pull data out of ES.
        data = list((model.search().order_by('-date').filter(**filters)
            .values_dict('date', 'count', primary_field, *extra_fields))[:365])

        # Pad empty data with dummy dicts.
        days = [datum['date'].date() for datum in data]
        fields = []
        if primary_field:
            fields.append(primary_field)
        if extra_fields:
            fields += extra_fields
        data += pad_missing_stats(days, group, filters.get('date__range'),
                                  fields)

        # Sort in descending order.
        data = sorted(data, key=lambda document: document['date'],
                      reverse=True)

        # Generate a dictionary with options from each ES document.
        for val in data:
            # Convert the datetimes to a date.
            date_ = date(*val['date'].timetuple()[:3])
            if primary_field and primary_field != 'count':
                rv = dict(count=val[primary_field], date=date_, end=date_)
            else:
                rv = dict(count=val['count'], date=date_, end=date_)
            for extra_field in extra_fields:
                rv[extra_field] = val[extra_field]
            rv.update(extra_values)
            yield rv
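
# Illustrative shape of the rows yielded above: installs_series below calls
# get_series_line(Installed, group, addon=addon.id, date__range=date_range)
# and each yielded dict looks roughly like
#     {'date': date(2013, 1, 2), 'end': date(2013, 1, 2), 'count': 3}
# plus any requested extra_fields/extra_values.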


def get_series_column(model, primary_field=None, category_field=None,
                      **filters):
    """
    Get a generator of dicts for the stats model given by the filters, made
    to fit into Highchart's column graph.

    primary_field -- field name that is converted into generic key 'count'.
    category_field -- the breakdown field for x-axis (e.g. currency, source),
                      is a Highcharts term where categories are the xAxis
                      values.
    """
    categories = list(set(model.objects.filter(**filters).values_list(
        category_field, flat=True)))

    # Set up ES query.
    if 'config__addon' in filters:
        filters['addon'] = filters['config__addon']
        del(filters['config__addon'])

    data = []
    for category in categories:
        # Have to query ES in lower-case.
        try:
            category = category.lower()
        except AttributeError:
            pass

        filters[category_field] = category
        if primary_field:
            data += list((model.search().filter(**filters)
                               .values_dict(category_field, 'count',
                                            primary_field)))
        else:
            data += list((model.search().filter(**filters)
                               .values_dict(category_field, 'count')))
        del(filters[category_field])

    # Sort descending.
    if primary_field:
        data = sorted(data, key=lambda datum: datum.get(primary_field),
                      reverse=True)
    else:
        data = sorted(data, key=lambda datum: datum['count'], reverse=True)

    # Generate dictionary.
    for val in data:
        if primary_field:
            rv = dict(count=val[primary_field])
        else:
            rv = dict(count=val['count'])
        if category_field:
            rv[category_field] = val[category_field]
            # Represent empty strings as 'N/A' in the frontend.
            if not rv[category_field]:
                rv[category_field] = 'N/A'
        yield rv
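
# Each yielded dict is one column, roughly {'count': 10, 'currency': 'usd'}
# for a currency breakdown; categories are queried lower-cased, and
# finance_column_series below re-capitalizes currencies before rendering.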


# TODO: complex JS logic similar to apps/stats, real stats data.
@app_view
def overview_series(request, addon, group, start, end, format):
    """
    Combines installs_series and usage_series into one payload.
    """
    date_range = check_series_params_or_404(group, start, end, format)
    check_stats_permission(request, addon)

    series = get_series_line(Installed, group, addon=addon.id,
                             date__range=date_range)

    if format == 'csv':
        return render_csv(request, addon, series, ['date', 'count'])
    elif format == 'json':
        return render_json(request, addon, series)


@app_view
def installs_series(request, addon, group, start, end, format):
    """
    Generate install counts grouped by ``group`` in ``format``.
    """
    date_range = check_series_params_or_404(group, start, end, format)
    check_stats_permission(request, addon)

    series = get_series_line(Installed, group, addon=addon.id,
                             date__range=date_range)

    if format == 'csv':
        return render_csv(request, addon, series, ['date', 'count'])
    elif format == 'json':
        return render_json(request, addon, series)


def _my_apps(request):
    """
    Find the apps you are allowed to see stats for, by getting all apps
    and then filtering down.
    """
    filtered = []
    if not getattr(request, 'amo_user', None):
        return filtered

    addon_users = (request.amo_user.addonuser_set
                   .filter(addon__type=amo.ADDON_WEBAPP)
                   .exclude(addon__status=amo.STATUS_DELETED))
    for addon_user in addon_users:
        if check_stats_permission(request, addon_user.addon, no_raise=True):
            filtered.append(addon_user.addon)
    return filtered


def my_apps_series(request, group, start, end, format):
    """
    Install counts for multiple apps. This is a temporary hack that will
    probably live forever.
    """
    date_range = check_series_params_or_404(group, start, end, format)
    apps = _my_apps(request)
    series = []
    for app in apps:
        # The app name is going to be appended in slightly different ways
        # depending upon the data format.
        if format == 'csv':
            series = get_series_line(Installed, group, addon=app.id,
                                     date__range=date_range,
                                     extra_values={'name': (app.name)})
        elif format == 'json':
            data = get_series_line(Installed, group, addon=app.id,
                                   date__range=date_range)
            series.append({'name': str(app.name), 'data': list(data)})

    if format == 'csv':
        return render_csv(request, apps, series, ['name', 'date', 'count'])
    elif format == 'json':
        return render_json(request, apps, series)


# TODO: real data.
@app_view
def usage_series(request, addon, group, start, end, format):
    date_range = check_series_params_or_404(group, start, end, format)
    check_stats_permission(request, addon)

    series = get_series_line(UpdateCount, group, addon=addon.id,
                             date__range=date_range)

    if format == 'csv':
        return render_csv(request, addon, series, ['date', 'count'])
    elif format == 'json':
        return render_json(request, addon, series)


@app_view
def finance_line_series(request, addon, group, start, end, format,
                        primary_field=None):
    """
    Date-based contribution series.
    primary_field -- revenue/count/refunds
    """
    date_range = check_series_params_or_404(group, start, end, format)
    check_stats_permission(request, addon, for_contributions=True)

    series = get_series_line(Contribution, group,
                             primary_field=primary_field, addon=addon.id,
                             date__range=date_range)

    if format == 'csv':
        return render_csv(request, addon, series, ['date', 'count'])
    elif format == 'json':
        return render_json(request, addon, series)


@app_view
def finance_column_series(request, addon, group, start, end, format,
                          primary_field=None, category_field=None):
    """
    Non-date-based contribution series, column graph.
    primary_field -- revenue/count/refunds
    category_field -- breakdown field, currency/source
    """
    check_stats_permission(request, addon, for_contributions=True)

    series = get_series_column(Contribution, primary_field=primary_field,
                               category_field=category_field, addon=addon.id)

    # Since we're currently storing everything in lower-case in ES,
    # re-capitalize the currency.
    if category_field == 'currency':
        series = list(series)
        for datum in series:
            datum['currency'] = datum['currency'].upper()

    if format == 'csv':
        return render_csv(request, addon, series, [category_field, 'count'])
    elif format == 'json':
        return render_json(request, addon, series)


def check_stats_permission(request, addon, for_contributions=False,
                           no_raise=False):
    """
    Check if the user is allowed to view stats for ``addon``.

    no_raise -- if enabled, the function returns True or False; otherwise it
                raises PermissionDenied if the user is not allowed.
    """
    # If public, non-contributions: everybody can view.
    if addon.public_stats and not for_contributions:
        return True

    # Everything else requires an authenticated user.
    if not request.user.is_authenticated():
        if no_raise:
            return False
        raise PermissionDenied

    if not for_contributions:
        # Only authors and Stats Viewers allowed.
        if (addon.has_author(request.amo_user) or
            acl.action_allowed(request, 'Stats', 'View')):
            return True

    else:  # For contribution stats.
        # Only authors and Contribution Stats Viewers.
        if (addon.has_author(request.amo_user) or
            acl.action_allowed(request, 'RevenueStats', 'View')):
            return True

    if no_raise:
        return False
    raise PermissionDenied
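
# In short: apps with public_stats are world-readable except for contribution
# data; otherwise the requester must be an author of the app, or hold
# 'Stats:View' (regular stats) / 'RevenueStats:View' (contribution stats).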


def pad_missing_stats(days, group, date_range=None, fields=None):
    """
    Bug 758480: return dummy dicts with values of 0 to pad missing dates.

    days -- list of datetime dates that have returned data
    group -- grouping by day, week, or month
    date_range -- optional, to extend the padding to fill a date range
    fields -- fields to insert into the dummy dict with values of 0
    """
    if not fields:
        fields = []

    # Add 0s for missing daily stats (so the frontend represents empty stats
    # as 0).
    days = sorted(set(days))

    # Make sure the whole date range is padded so data doesn't just start at
    # the first data point returned from ES.
    if date_range:
        start, end = date_range
        if start not in days:
            days.insert(0, start)
        if end not in days:
            days.append(end)

    if group == 'day':
        max_delta = timedelta(1)
        group_delta = relativedelta(days=1)
    elif group == 'week':
        max_delta = timedelta(7)
        group_delta = relativedelta(weeks=1)
    elif group == 'month':
        max_delta = timedelta(31)
        group_delta = relativedelta(months=1)

    dummy_dicts = []
    for day in enumerate(days):
        # Find missing dates between two dates in the list of days.
        try:
            # Pad based on the group (e.g. don't insert days in a week view).
            if days[day[0] + 1] - day[1] > max_delta:
                dummy_date = day[1] + group_delta
                dummy_dict = {
                    'date': datetime.datetime.combine(dummy_date,
                                                      datetime.time(0, 0)),
                    'count': 0
                }

                for field in fields:
                    dummy_dict[field] = 0

                # Insert the dummy day into the currently iterated list to
                # find more empty spots.
                days.insert(day[0] + 1, dummy_date)
                dummy_dicts.append(dummy_dict)
        except IndexError:
            break
    return dummy_dicts
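
# Worked example (see TestPadMissingStats): pad_missing_stats(
#     [date(2012, 5, 1), date(2012, 5, 3)], 'day')
# returns a single dummy row for 2012-05-02 with 'count': 0, so charts render
# a zero instead of a gap.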


@json_view
def fake_app_stats(request, addon, group, start, end, format):
    from time import strftime
    from math import sin, floor
    start, end = check_series_params_or_404(group, start, end, format)
    faked = []
    val = 0
    for single_date in daterange(start, end):
        isodate = strftime("%Y-%m-%d", single_date.timetuple())
        faked.append({
            'date': isodate,
            'count': floor(200 + 50 * sin(val + 1)),
            'data': {
                'installs': floor(200 + 50 * sin(2 * val + 2)),
                'usage': floor(200 + 50 * sin(3 * val + 3)),
                # 'device': floor(200 + 50 * sin(5 * val + 5)),
            }})
        val += .01
    return faked


@permission_required('Stats', 'View')
def overall(request, report):
    view = get_report_view(request)
    return jingo.render(request, 'sitestats/stats.html', {'report': report,
                                                          'view': view})

mkt/urls.py
@@ -18,7 +18,6 @@ from mkt.developers.views import login
from mkt.operators.urls import url_patterns as operator_patterns
from mkt.purchase.urls import webpay_services_patterns
from mkt.reviewers.urls import url_patterns as reviewer_url_patterns
from mkt.stats.urls import app_site_patterns


admin.autodiscover()
@@ -112,20 +111,6 @@ urlpatterns = patterns('',
    ('', include('mkt.commonplace.urls')),
    ('', include('mkt.site.urls')),

    # Site events data.
    url('^statistics/events-(?P<start>\d{8})-(?P<end>\d{8}).json$',
        'stats.views.site_events', name='amo.site_events'),

    # Catch marketplace specific statistics urls.
    url('^statistics/', include(app_site_patterns)),

    # Let the rest of the URLs fall through.
    url('^statistics/', include('stats.urls')),

    # Disable currently not working statistics.
    # Fall through for any URLs not matched above to the stats dashboard.
    url('^statistics/', lambda r: redirect('/'), name='statistics.dashboard'),

    # Services.
    ('', include('amo.urls')),
)

@@ -67,7 +67,6 @@ HOME=/tmp
35 6 * * * %(z_cron)s update_global_totals
40 6 * * * %(z_cron)s update_addon_average_daily_users
30 7 * * * %(z_cron)s index_latest_stats
35 7 * * * %(z_cron)s index_latest_mkt_stats --settings=settings_local_mkt
45 7 * * * %(z_cron)s update_addons_collections_downloads
50 7 * * * %(z_cron)s update_daily_theme_user_counts