Python 3 fixes for the `es` job (#10483)

Fixes #10420

* Python fixes for the 'es' job
* Remove some legacy api 1.2 cruft, test legacy_api with 1.5 by default
At this point clients using the legacy api are old, clients using the
legacy api < 1.5 are *super* old and not worth supporting.
* Dynamically ignore ResourceWarning under Python 3 for now (see the sketch after this list)
* Make stats csv column ordering stable
* Move 'es' job to the python3working section
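
The two less obvious fixes above, the runtime-only ResourceWarning filter and the stable stats CSV column ordering, boil down to the pattern below. This is a minimal illustrative sketch targeting Python 3, not the olympia code itself: write_stable_csv and the sample row are made up for the example, while the version-guarded filterwarnings call and the sorted(fields) idea mirror what the diff does in the pytest fixture and in csv_fields().

    import csv
    import io
    import sys
    import warnings

    # ResourceWarning is a Python 3 builtin only, so the filter has to be
    # installed at runtime behind a version check instead of at import time;
    # the real fixture uses `if six.PY3:` plus a `# noqa` to keep flake8
    # quiet under Python 2.
    if sys.version_info[0] >= 3:
        warnings.filterwarnings('ignore', category=ResourceWarning)  # noqa

    def write_stable_csv(rows, stream):
        """Illustrative helper: field names gathered from dicts come back in
        arbitrary order, so sorting them keeps the CSV header deterministic,
        which is what makes the stats CSV tests stable."""
        fields = set()
        for row in rows:
            fields.update(row)
        writer = csv.DictWriter(stream, sorted(fields), restval=0)
        writer.writeheader()
        writer.writerows(rows)

    buf = io.StringIO()
    write_stable_csv([{'date': '2009-06-01', 'api': 2, 'search': 3}], buf)
    # The header is always "api,date,search", whatever the dict iteration order.
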
Mathieu Pillard 2019-01-23 06:01:09 +01:00 committed by Christopher Grebs
Parent 8f8acb0aea
Commit 467b2d9235
11 changed files with 273 additions and 245 deletions

View file

@ -23,9 +23,9 @@ jobs:
- { stage: python3working, python: 3.6, env: TOXENV=codestyle}
- { stage: python3working, python: 3.6, env: TOXENV=docs }
- { stage: python3working, python: 3.6, env: TOXENV=assets }
- { stage: python3working, python: 3.6, env: TOXENV=es }
- { stage: python3working, python: 3.6, env: TOXENV=addons }
- { stage: python3working, python: 3.6, env: TOXENV=users-and-ratings }
- { stage: python3, python: 3.6, env: TOXENV=es }
- { stage: python3, python: 3.6, env: TOXENV=devhub }
- { stage: python3, python: 3.6, env: TOXENV=reviewers-and-zadmin }
- { stage: python3, python: 3.6, env: TOXENV=amo-locales-and-signing }

View file

@ -6,9 +6,11 @@ on module-level, they should instead be added to hooks or fixtures directly.
"""
import os
import uuid
import warnings
import pytest
import responses
import six
@pytest.fixture(autouse=True)
@ -136,6 +138,11 @@ def test_pre_setup(request, tmpdir, settings):
from waffle.utils import get_cache as waffle_get_cache
from waffle import models as waffle_models
# Ignore ResourceWarning for now. It's a Python 3 thing so it's done
# dynamically here.
if six.PY3:
warnings.filterwarnings('ignore', category=ResourceWarning) # noqa
# Clear all cache-instances. They'll be re-initialized by Django
# This will make sure that our random `KEY_PREFIX` is applied
# appropriately.

View file

@ -87,8 +87,8 @@ class TestESWithoutMakingQueries(TestCase):
# Query:
# {'bool': {'must': [{'term': {'type': 1}},
# {'range': {'status': {'gte': 1}}}, ]}}
assert query.keys() == ['bool']
assert query['bool'].keys() == ['must']
assert list(query.keys()) == ['bool']
assert list(query['bool'].keys()) == ['must']
assert {'term': {'type': 1}} in query['bool']['must']
assert {'range': {'status': {'gte': 1}}} in query['bool']['must']
@ -99,8 +99,8 @@ class TestESWithoutMakingQueries(TestCase):
# Query:
# {'bool': {'must': [{'fuzzy': {'status': fuzz}},
# {'term': {'type': 1}}, ]}})
assert query.keys() == ['bool']
assert query['bool'].keys() == ['must']
assert list(query.keys()) == ['bool']
assert list(query['bool'].keys()) == ['must']
assert {'term': {'type': 1}} in query['bool']['must']
assert {'fuzzy': {'status': fuzz}} in query['bool']['must']
@ -291,7 +291,7 @@ class TestES(ESTestCaseWithAddons):
def test_empty_values_dict_result(self):
qs = Addon.search().values_dict()
assert qs[0].keys() == ['id']
assert list(qs[0].keys()) == ['id']
def test_object_result(self):
qs = Addon.search().filter(id=self._addons[0].id)[:1]

View file

@ -6,6 +6,7 @@ from textwrap import dedent
from django.conf import settings
from django.test.client import Client
from django.utils import translation
from django.utils.encoding import force_text
import jinja2
import pytest
@ -34,7 +35,7 @@ from olympia.tags.models import AddonTag, Tag
pytestmark = pytest.mark.django_db
def api_url(x, app='firefox', lang='en-US', version=1.2):
def api_url(x, app='firefox', lang='en-US', version=1.5):
return '/%s/%s/api/%s/%s' % (lang, app, version, x)
@ -121,10 +122,10 @@ class ControlCharacterTest(TestCase):
fixtures = ('base/addon_3615',)
def test(self):
a = Addon.objects.get(pk=3615)
addon = Addon.objects.get(pk=3615)
char = chr(12)
a.name = "I %sove You" % char
a.save()
addon.name = "I %sove You" % char
addon.save()
response = make_call('addon/3615')
self.assertNotContains(response, char)
@ -134,20 +135,20 @@ class StripHTMLTest(TestCase):
def test(self):
"""For API < 1.5 we remove HTML."""
a = Addon.objects.get(pk=3615)
a.eula = '<i>free</i> stock tips'
a.summary = '<i>xxx video</i>s'
a.description = 'FFFF<b>UUUU</b>'
a.save()
addon = Addon.objects.get(pk=3615)
addon.eula = '<i>free</i> stock tips'
addon.summary = '<i>xxx video</i>s'
addon.description = 'FFFF<b>UUUU</b>'
addon.save()
r = make_call('addon/3615', version=1.5)
doc = pq(r.content)
response = make_call('addon/3615', version=1.5)
doc = pq(response.content)
assert doc('eula').html() == '<i>free</i> stock tips'
assert doc('summary').html() == '&lt;i&gt;xxx video&lt;/i&gt;s'
assert doc('description').html() == 'FFFF<b>UUUU</b>'
r = make_call('addon/3615')
doc = pq(r.content)
response = make_call('addon/3615', version=1.2)
doc = pq(response.content)
assert doc('eula').html() == 'free stock tips'
assert doc('summary').html() == 'xxx videos'
assert doc('description').html() == 'FFFFUUUU'
@ -289,7 +290,7 @@ class APITest(TestCase):
addon = Addon.objects.get(id=3615)
response = self.client.get(
'/en-US/firefox/api/%.1f/addon/3615?format=json' % 1.2)
data = json.loads(response.content)
data = json.loads(force_text(response.content))
assert data['name'] == six.text_type(addon.name)
assert data['type'] == 'extension'
assert data['guid'] == addon.guid
@ -322,7 +323,7 @@ class APITest(TestCase):
Persona.objects.create(persona_id=3, addon=addon)
response = self.client.get(
'/en-US/firefox/api/%.1f/addon/3615?format=json' % 1.2)
data = json.loads(response.content)
data = json.loads(force_text(response.content))
assert data['id'] == 3615
# `id` should be `addon_id`, not `persona_id`
assert data['theme']['id'] == '3615'
@ -606,8 +607,8 @@ class ListTest(TestCase):
def test_json(self):
"""Verify that we get some json."""
r = make_call('list/by_adu?format=json', version=1.5)
assert json.loads(r.content)
response = make_call('list/by_adu?format=json', version=1.5)
assert json.loads(force_text(response.content))
def test_unicode(self):
make_call(u'list/featured/all/10/Linux/3.7a2prexec\xb6\u0153\xec\xb2')
@ -736,13 +737,13 @@ class TestGuidSearch(TestCase):
def setUp(self):
super(TestGuidSearch, self).setUp()
addon = Addon.objects.get(id=3615)
c = CompatOverride.objects.create(guid=addon.guid)
compat_override = CompatOverride.objects.create(guid=addon.guid)
app = list(addon.compatible_apps.keys())[0]
CompatOverrideRange.objects.create(compat=c, app=app.id)
CompatOverrideRange.objects.create(compat=compat_override, app=app.id)
def test_success(self):
r = make_call(self.good)
dom = pq(r.content)
response = make_call(self.good)
dom = pq(response.content)
assert set(['3615', '6113']) == (
set([a.attrib['id'] for a in dom('addon')]))
@ -765,46 +766,46 @@ class TestGuidSearch(TestCase):
def test_api_caching_app(self):
response = make_call(self.good)
assert 'en-US/firefox/addon/None/reviews/?src=api' in response.content
assert 'en-US/android/addon/None/reviews/' not in response.content
assert b'en-US/firefox/addon/None/reviews/?src=api' in response.content
assert b'en-US/android/addon/None/reviews/' not in response.content
response = make_call(self.good, app='android')
assert 'en-US/android/addon/None/reviews/?src=api' in response.content
assert 'en-US/firefox/addon/None/reviews/' not in response.content
assert b'en-US/android/addon/None/reviews/?src=api' in response.content
assert b'en-US/firefox/addon/None/reviews/' not in response.content
def test_xss(self):
addon_factory(guid='test@xss', name='<script>alert("test");</script>')
r = make_call('search/guid:test@xss')
assert '<script>alert' not in r.content
assert '&lt;script&gt;alert' in r.content
response = make_call('search/guid:test@xss')
assert b'<script>alert' not in response.content
assert b'&lt;script&gt;alert' in response.content
def test_block_inactive(self):
Addon.objects.filter(id=6113).update(disabled_by_user=True)
r = make_call(self.good)
response = make_call(self.good)
assert set(['3615']) == (
set([a.attrib['id'] for a in pq(r.content)('addon')]))
set([a.attrib['id'] for a in pq(response.content)('addon')]))
def test_block_nonpublic(self):
Addon.objects.filter(id=6113).update(status=amo.STATUS_NOMINATED)
r = make_call(self.good)
response = make_call(self.good)
assert set(['3615']) == (
set([a.attrib['id'] for a in pq(r.content)('addon')]))
set([a.attrib['id'] for a in pq(response.content)('addon')]))
def test_empty(self):
"""
Bug: https://bugzilla.mozilla.org/show_bug.cgi?id=607044
guid:foo, should search for just 'foo' and not empty guids.
"""
r = make_call('search/guid:koberger,')
doc = pq(r.content)
response = make_call('search/guid:koberger,')
doc = pq(response.content)
# No addons should exist with guid koberger and the , should not
# indicate that we are searching for null guid.
assert len(doc('addon')) == 0
def test_addon_compatibility(self):
addon = Addon.objects.get(id=3615)
r = make_call('search/guid:%s' % addon.guid)
dom = pq(r.content, parser='xml')
response = make_call('search/guid:%s' % addon.guid)
dom = pq(response.content, parser='xml')
assert len(dom('addon_compatibility')) == 1
assert dom('addon_compatibility')[0].attrib['id'] == '3615'
assert dom('addon_compatibility')[0].attrib['hosted'] == 'true'
@ -817,22 +818,22 @@ class TestGuidSearch(TestCase):
amo.FIREFOX.guid)
def test_addon_compatibility_not_hosted(self):
c = CompatOverride.objects.create(guid='yeah', name='ok')
CompatOverrideRange.objects.create(app=1, compat=c,
compat_override = CompatOverride.objects.create(guid='yeah', name='ok')
CompatOverrideRange.objects.create(app=1, compat=compat_override,
min_version='1', max_version='2',
min_app_version='3',
max_app_version='4')
r = make_call('search/guid:%s' % c.guid)
dom = pq(r.content, parser='xml')
response = make_call('search/guid:%s' % compat_override.guid)
dom = pq(response.content, parser='xml')
assert len(dom('addon_compatibility')) == 1
assert dom('addon_compatibility')[0].attrib['hosted'] == 'false'
assert 'id' not in dom('addon_compatibility')[0].attrib
assert dom('addon_compatibility guid').text() == c.guid
assert dom('addon_compatibility > name').text() == c.name
assert dom('addon_compatibility guid').text() == compat_override.guid
assert dom('addon_compatibility > name').text() == compat_override.name
cr = c.compat_ranges[0]
cr = compat_override.compat_ranges[0]
assert dom('version_range')[0].attrib['type'] == cr.override_type()
assert dom('version_range > min_version').text() == cr.min_version
assert dom('version_range > max_version').text() == cr.max_version
@ -855,11 +856,11 @@ class SearchTest(ESTestCase):
super(SearchTest, self).setUp()
self.addons = Addon.objects.filter(status=amo.STATUS_PUBLIC,
disabled_by_user=False)
t = Tag.objects.create(tag_text='ballin')
a = Addon.objects.get(pk=3615)
AddonTag.objects.create(tag=t, addon=a)
tag = Tag.objects.create(tag_text='ballin')
addon = Addon.objects.get(pk=3615)
AddonTag.objects.create(tag=tag, addon=addon)
[addon.save() for addon in self.addons]
[a.save() for a in self.addons]
self.refresh()
self.url = ('/en-US/firefox/api/%(api_version)s/search/%(query)s/'
@ -874,14 +875,6 @@ class SearchTest(ESTestCase):
'compat_mode': 'strict',
}
def test_double_escaping(self):
"""
For API < 1.5 we use double escaping in search.
"""
resp = make_call('search/%25E6%2596%25B0%25E5%2590%258C%25E6%2596%'
'2587%25E5%25A0%2582/all/10/WINNT/3.6', version=1.2)
self.assertContains(resp, '<addon id="6113">')
def test_zero_results(self):
"""
Tests that the search API correctly gives us zero results found.
@ -935,7 +928,7 @@ class SearchTest(ESTestCase):
"""
response = self.client.get(
"/en-US/firefox/api/1.2/search/delicious/all/1")
assert response.content.count("<addon id") == 1
assert force_text(response.content).count("<addon id") == 1
def test_total_results(self):
"""
@ -1179,17 +1172,17 @@ class SearchTest(ESTestCase):
response = self.client.get(
'/en-US/firefox/api/%.1f/search_suggestions/?q=delicious' %
legacy_api.CURRENT_VERSION)
data = json.loads(response.content)['suggestions'][0]
a = Addon.objects.get(pk=3615)
assert data['id'] == str(a.pk)
assert data['name'] == a.name
assert data['rating'] == a.average_rating
data = json.loads(force_text(response.content))['suggestions'][0]
addon = Addon.objects.get(pk=3615)
assert data['id'] == str(addon.pk)
assert data['name'] == addon.name
assert data['rating'] == addon.average_rating
def test_no_category_suggestions(self):
response = self.client.get(
'/en-US/firefox/api/%.1f/search_suggestions/?q=Feed' %
legacy_api.CURRENT_VERSION)
assert json.loads(response.content)['suggestions'] == []
assert json.loads(force_text(response.content))['suggestions'] == []
def test_suggestions_throttle(self):
self.create_sample('autosuggest-throttle')
@ -1251,10 +1244,10 @@ class LanguagePacksTest(UploadTest):
@patch('olympia.addons.models.Addon.get_localepicker')
def test_localepicker(self, get_localepicker):
get_localepicker.return_value = six.text_type('title=اختر لغة', 'utf8')
get_localepicker.return_value = u'title=اختر لغة'
self.addon.update(type=amo.ADDON_LPAPP, status=amo.STATUS_PUBLIC)
res = self.client.get(self.url)
self.assertContains(res, dedent("""
self.assertContains(res, dedent(u"""
<strings><![CDATA[
title=اختر لغة
]]></strings>"""))

View file

@ -2,7 +2,6 @@ import hashlib
import itertools
import json
import random
import urllib
from datetime import date, timedelta
@ -17,6 +16,8 @@ from django.utils.translation import get_language, ugettext, ugettext_lazy as _
import six
import waffle
from six.moves.urllib_parse import quote
import olympia.core.logger
from olympia import amo, legacy_api
@ -279,6 +280,7 @@ class AddonDetailView(APIView):
def guid_search(request, api_version, guids):
lang = request.LANG
app_id = request.APP.id
api_version = float(api_version)
def guid_search_cache_key(guid):
key = 'guid_search:%s:%s:%s:%s' % (api_version, lang, app_id, guid)
@ -310,8 +312,8 @@ def guid_search(request, api_version, guids):
addons_xml[key] = addon_xml
if dirty_keys:
cache.set_many(dict((k, v) for k, v in six.iteritems(addons_xml)
if k in dirty_keys))
cache.set_many({k: v for k, v in six.iteritems(addons_xml)
if k in dirty_keys})
compat = (CompatOverride.objects.filter(guid__in=guids)
.transform(CompatOverride.transformer))
@ -369,14 +371,6 @@ class SearchView(APIView):
# Filter by ALL types, which is really all types except for apps.
filters['type__in'] = list(amo.ADDON_SEARCH_TYPES)
if self.version < 1.5:
# Fix doubly encoded query strings.
try:
query = urllib.unquote(query.encode('ascii'))
except UnicodeEncodeError:
# This fails if the string is already UTF-8.
pass
qs = (
Addon.search()
.filter(**filters)
@ -493,7 +487,7 @@ def redirect_view(request, url):
Redirect all requests that come here to an API call with a view parameter.
"""
dest = '/api/%.1f/%s' % (legacy_api.CURRENT_VERSION,
urllib.quote(url.encode('utf-8')))
quote(url.encode('utf-8')))
dest = get_url_prefix().fix(dest)
return HttpResponsePermanentRedirect(dest)

View file

@ -3,7 +3,7 @@ import datetime
import json
from django.conf import settings
from django.utils.encoding import smart_text
from django.utils.encoding import force_text
import mock
import pytest
@ -640,7 +640,7 @@ class TestDeletedThemeLookup(TestCase):
response = self.client.get(reverse('reviewers.themes.deleted'))
assert response.status_code == 200
assert (self.deleted.name.localized_string in
smart_text(response.content))
force_text(response.content))
def test_perm(self):
# Personas:Review allow access to deleted themes as well.
@ -666,7 +666,8 @@ class TestThemeSearch(amo.tests.ESTestCase):
reverse('reviewers.themes.search'),
user=UserProfile.objects.get(username='persona_reviewer'))
request.GET = get_query
return json.loads(themes_search(request).content)['objects']
return json.loads(
force_text(themes_search(request).content))['objects']
def test_pending(self):
assert self.search('theme')[0]['id'] == self.addon.id

View file

@ -2,6 +2,7 @@
import json
from django.conf import settings
from django.utils.encoding import force_text
from olympia import amo
from olympia.amo.tests import (
@ -31,7 +32,7 @@ class TestRankingScenarios(ESTestCase):
params['q'] = query
response = self.client.get(url, params)
assert response.status_code == 200
data = json.loads(response.content)
data = json.loads(force_text(response.content))
assert data['count']
results = data['results']

View file

@ -3,6 +3,7 @@ import json
from django.http import QueryDict
from django.test.client import RequestFactory
from django.utils.encoding import force_text
import mock
import pytest
@ -207,7 +208,6 @@ class TestESSearch(SearchBase):
self.assert3xx(response, self.url + '?sort=rating', status_code=301)
def test_legacy_redirects_to_non_ascii(self):
# see http://sentry.dmz.phx1.mozilla.com/addons/group/2186/
url = '/ga-IE/firefox/tag/%E5%95%86%E5%93%81%E6%90%9C%E7%B4%A2'
from_ = ('?sort=updated&lver=1.0&advancedsearch=1'
'&tag=dearbhair&cat=4%2C84')
@ -895,7 +895,7 @@ class TestAjaxSearch(ESTestCaseWithAddons):
addons = []
response = self.client.get(url + '?' + params)
assert response.status_code == 200
data = json.loads(response.content)
data = json.loads(force_text(response.content))
assert len(data) == len(addons)
for got, expected in zip(
@ -1005,7 +1005,7 @@ class TestSearchSuggestions(TestAjaxSearch):
apps = []
response = self.client.get(self.url + '?' + params)
assert response.status_code == 200
data = json.loads(response.content)
data = json.loads(force_text(response.content))
data = sorted(data, key=lambda x: x['id'])
apps = sorted(apps, key=lambda x: x.id)

View file

@ -1,7 +1,6 @@
from django import http
from django.db.models import Q
from django.db.transaction import non_atomic_requests
from django.utils.encoding import force_bytes
from django.utils.translation import ugettext
from django.views.decorators.vary import vary_on_headers
@ -491,7 +490,7 @@ def version_sidebar(request, form_data, aggregations):
def platform_sidebar(request, form_data):
qplatform = form_data.get('platform')
app_platforms = request.APP.platforms.values()
app_platforms = list(request.APP.platforms.values())
ALL = app_platforms.pop(0)
# The default is to show "All Systems."
@ -522,7 +521,7 @@ def tag_sidebar(request, form_data, aggregations):
def fix_search_query(query, extra_params=None):
rv = {force_bytes(k): v for k, v in query.items()}
rv = query.dict()
changed = False
# Change old keys to new names.
keys = {
@ -545,10 +544,12 @@ def fix_search_query(query, extra_params=None):
'sortby': 'sort',
},
'platform': {
str(p.id): p.shortname
six.text_type(p.id): p.shortname
for p in amo.PLATFORMS.values()
},
'atype': {k: str(v) for k, v in amo.ADDON_SEARCH_SLUGS.items()},
'atype': {
k: six.text_type(v) for k, v in amo.ADDON_SEARCH_SLUGS.items()
},
}
if extra_params:
params.update(extra_params)

View file

@ -5,8 +5,10 @@ import json
from django.http import Http404
from django.test.client import RequestFactory
from django.utils.encoding import force_text
import mock
import six
from pyquery import PyQuery as pq
@ -153,6 +155,26 @@ class TestListedAddons(StatsTest):
self._check_it(self.public_views_gen(format='json'), 200)
if six.PY2:
# In Python 2, the csv module deals with bytes all the way. This is
# taken from https://docs.python.org/2/library/csv.html#csv-examples to
# deal with utf-8, allowing our tests to be a bit saner - they can pass
# unicode strings instead of bytes transparently.
def utf_8_encoder(unicode_csv_data):
for line in unicode_csv_data:
yield line.encode('utf-8')
def CSVDictReaderClass(data, **kwargs):
"""csv.DictReader that deals with unicode data for Python 2."""
csv_reader = csv.DictReader(utf_8_encoder(data), **kwargs)
for row in csv_reader:
yield {
six.text_type(key, 'utf-8'): six.text_type(value, 'utf-8')
for key, value in row.iteritems()}
else:
CSVDictReaderClass = csv.DictReader
class ESStatsTest(StatsTest, amo.tests.ESTestCase):
"""Test class with some ES setup."""
@ -171,13 +193,15 @@ class ESStatsTest(StatsTest, amo.tests.ESTestCase):
self.refresh('stats')
def csv_eq(self, response, expected):
content = csv.DictReader(
content = force_text(response.content)
content_csv = CSVDictReaderClass(
# Drop lines that are comments.
filter(lambda row: row[0] != '#', response.content.splitlines()))
expected = csv.DictReader(
filter(lambda row: row[0] != '#', content.splitlines()))
expected = force_text(expected)
expected_csv = CSVDictReaderClass(
# Strip any extra spaces from the expected content.
line.strip() for line in expected.splitlines())
assert tuple(content) == tuple(expected)
assert tuple(content_csv) == tuple(expected_csv)
class TestSeriesSecurity(StatsTest):
@ -257,24 +281,24 @@ class TestCSVs(ESStatsTest):
group='month', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,search,api
2009-09-03,10,3,2
2009-08-03,10,3,2
2009-07-03,10,3,2
2009-06-28,10,3,2
2009-06-20,10,3,2
2009-06-12,10,3,2
2009-06-07,10,3,2
2009-06-01,10,3,2""")
self.csv_eq(response, """date,count,api,search
2009-09-03,10,2,3
2009-08-03,10,2,3
2009-07-03,10,2,3
2009-06-28,10,2,3
2009-06-20,10,2,3
2009-06-12,10,2,3
2009-06-07,10,2,3
2009-06-01,10,2,3""")
def test_os_series(self):
response = self.get_view_response('stats.os_series',
group='month', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,Windows,Linux
2009-06-02,1500,500,400
2009-06-01,1000,400,300""")
self.csv_eq(response, """date,count,Linux,Windows
2009-06-02,1500,400,500
2009-06-01,1000,300,400""")
def test_locales_series(self):
response = self.get_view_response('stats.locales_series',
@ -283,9 +307,7 @@ class TestCSVs(ESStatsTest):
assert response.status_code == 200
self.csv_eq(
response,
"""date,count,English (US) (en-us),"""
"""\xce\x95\xce\xbb\xce\xbb\xce\xb7\xce\xbd\xce\xb9\xce\xba"""
"""\xce\xac (el)
u"""date,count,English (US) (en-us),Ελληνικά (el)
2009-06-02,1500,300,400
2009-06-01,1000,300,400""")
@ -294,18 +316,18 @@ class TestCSVs(ESStatsTest):
group='month', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,userEnabled,userDisabled
2009-06-02,1500,1370,130
2009-06-01,1000,950,50""")
self.csv_eq(response, """date,count,userDisabled,userEnabled
2009-06-02,1500,130,1370
2009-06-01,1000,50,950""")
def test_versions_series(self):
response = self.get_view_response('stats.versions_series',
group='month', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,2.0,1.0
2009-06-02,1500,950,550
2009-06-01,1000,800,200""")
self.csv_eq(response, """date,count,1.0,2.0
2009-06-02,1500,550,950
2009-06-01,1000,200,800""")
def test_apps_series(self):
response = self.get_view_response('stats.apps_series',
@ -321,13 +343,14 @@ class TestCSVs(ESStatsTest):
self.url_args = {'start': '20200101', 'end': '20200130', 'addon_id': 4}
response = self.get_view_response('stats.versions_series', head=True,
group='day', format='csv')
assert response.status_code == 200
assert set(response['cache-control'].split(', ')) == (
{'max-age=0', 'no-cache', 'no-store', 'must-revalidate'})
self.url_args = {'start': '20200101', 'end': '20200130', 'addon_id': 4}
response = self.get_view_response('stats.versions_series', head=True,
group='day', format='json')
assert response.status_code == 200
assert set(response['cache-control'].split(', ')) == (
{'max-age=0', 'no-cache', 'no-store', 'must-revalidate'})
@ -384,10 +407,10 @@ class TestResponses(ESStatsTest):
for url_args in [self.url_args, self.url_args_theme]:
self.url_args = url_args
r = self.get_view_response('stats.usage_series', group='day',
format='json')
assert r.status_code == 200
self.assertListEqual(json.loads(r.content), [
response = self.get_view_response(
'stats.usage_series', group='day', format='json')
assert response.status_code == 200
self.assertListEqual(json.loads(force_text(response.content)), [
{'count': 1500, 'date': '2009-06-02', 'end': '2009-06-02'},
{'count': 1000, 'date': '2009-06-01', 'end': '2009-06-01'},
])
@ -396,19 +419,19 @@ class TestResponses(ESStatsTest):
for url_args in [self.url_args, self.url_args_theme]:
self.url_args = url_args
r = self.get_view_response('stats.usage_series', group='day',
format='csv')
assert r.status_code == 200
self.csv_eq(r,
response = self.get_view_response(
'stats.usage_series', group='day', format='csv')
assert response.status_code == 200
self.csv_eq(response,
"""date,count
2009-06-02,1500
2009-06-01,1000""")
def test_usage_by_app_json(self):
r = self.get_view_response('stats.apps_series', group='day',
format='json')
assert r.status_code == 200
self.assertListEqual(json.loads(r.content), [
response = self.get_view_response(
'stats.apps_series', group='day', format='json')
assert response.status_code == 200
self.assertListEqual(json.loads(force_text(response.content)), [
{
"data": {
"{ec8030f7-c20a-464f-9b0e-13a3a9e97384}": {"4.0": 1500}
@ -428,18 +451,18 @@ class TestResponses(ESStatsTest):
])
def test_usage_by_app_csv(self):
r = self.get_view_response('stats.apps_series', group='day',
format='csv')
assert r.status_code == 200
self.csv_eq(r, """date,count,Firefox 4.0
2009-06-02,1500,1500
2009-06-01,1000,1000""")
response = self.get_view_response(
'stats.apps_series', group='day', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,Firefox 4.0
2009-06-02,1500,1500
2009-06-01,1000,1000""")
def test_usage_by_locale_json(self):
r = self.get_view_response('stats.locales_series', group='day',
format='json')
assert r.status_code == 200
self.assertListEqual(json.loads(r.content), [
response = self.get_view_response(
'stats.locales_series', group='day', format='json')
assert response.status_code == 200
self.assertListEqual(json.loads(force_text(response.content)), [
{
"count": 1500,
"date": "2009-06-02",
@ -461,18 +484,18 @@ class TestResponses(ESStatsTest):
])
def test_usage_by_locale_csv(self):
r = self.get_view_response('stats.locales_series', group='day',
format='csv')
assert r.status_code == 200
self.csv_eq(r, """date,count,English (US) (en-us),Ελληνικά (el)
2009-06-02,1500,300,400
2009-06-01,1000,300,400""")
response = self.get_view_response(
'stats.locales_series', group='day', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,English (US) (en-us),Ελληνικά (el)
2009-06-02,1500,300,400
2009-06-01,1000,300,400""")
def test_usage_by_os_json(self):
r = self.get_view_response('stats.os_series', group='day',
format='json')
assert r.status_code == 200
self.assertListEqual(json.loads(r.content), [
response = self.get_view_response(
'stats.os_series', group='day', format='json')
assert response.status_code == 200
self.assertListEqual(json.loads(force_text(response.content)), [
{
"count": 1500,
"date": "2009-06-02",
@ -494,15 +517,15 @@ class TestResponses(ESStatsTest):
])
def test_usage_by_os_csv(self):
r = self.get_view_response('stats.os_series', head=True, group='day',
format='csv')
assert r.status_code == 200
response = self.get_view_response(
'stats.os_series', head=True, group='day', format='csv')
assert response.status_code == 200
def test_usage_by_version_json(self):
r = self.get_view_response('stats.versions_series', group='day',
format='json')
assert r.status_code == 200
self.assertListEqual(json.loads(r.content), [
response = self.get_view_response(
'stats.versions_series', group='day', format='json')
assert response.status_code == 200
self.assertListEqual(json.loads(force_text(response.content)), [
{
"count": 1500,
"date": "2009-06-02",
@ -524,18 +547,18 @@ class TestResponses(ESStatsTest):
])
def test_usage_by_version_csv(self):
r = self.get_view_response('stats.versions_series', group='day',
format='csv')
assert r.status_code == 200
self.csv_eq(r, """date,count,2.0,1.0
2009-06-02,1500,950,550
2009-06-01,1000,800,200""")
response = self.get_view_response(
'stats.versions_series', group='day', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,1.0,2.0
2009-06-02,1500,550,950
2009-06-01,1000,200,800""")
def test_usage_by_status_json(self):
r = self.get_view_response('stats.statuses_series', group='day',
format='json')
assert r.status_code == 200
self.assertListEqual(json.loads(r.content), [
response = self.get_view_response(
'stats.statuses_series', group='day', format='json')
assert response.status_code == 200
self.assertListEqual(json.loads(force_text(response.content)), [
{
"count": 1500,
"date": "2009-06-02",
@ -557,17 +580,17 @@ class TestResponses(ESStatsTest):
])
def test_usage_by_status_csv(self):
r = self.get_view_response('stats.statuses_series', group='day',
format='csv')
assert r.status_code == 200
self.csv_eq(r, """date,count,userEnabled,userDisabled
2009-06-02,1500,1370,130
2009-06-01,1000,950,50""")
response = self.get_view_response(
'stats.statuses_series', group='day', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,userDisabled,userEnabled
2009-06-02,1500,130,1370
2009-06-01,1000,50,950""")
def test_overview(self):
r = self.get_view_response('stats.overview_series', group='day',
format='json')
assert r.status_code == 200
response = self.get_view_response(
'stats.overview_series', group='day', format='json')
assert response.status_code == 200
# These are the dates from the fixtures. The return value will have
# dates in between filled with zeroes.
expected_data = [
@ -590,7 +613,7 @@ class TestResponses(ESStatsTest):
{"date": "2009-06-01",
"data": {"downloads": 10, "updates": 1000}}
]
actual_data = json.loads(r.content)
actual_data = json.loads(force_text(response.content))
# Make sure they match up at the front and back.
assert actual_data[0]['date'] == expected_data[0]['date']
assert actual_data[-1]['date'] == expected_data[-1]['date']
@ -612,10 +635,10 @@ class TestResponses(ESStatsTest):
next_actual = next(actual)
def test_downloads_json(self):
r = self.get_view_response('stats.downloads_series', group='day',
format='json')
assert r.status_code == 200
self.assertListEqual(json.loads(r.content), [
response = self.get_view_response(
'stats.downloads_series', group='day', format='json')
assert response.status_code == 200
self.assertListEqual(json.loads(force_text(response.content)), [
{"count": 10, "date": "2009-09-03", "end": "2009-09-03"},
{"count": 10, "date": "2009-08-03", "end": "2009-08-03"},
{"count": 10, "date": "2009-07-03", "end": "2009-07-03"},
@ -627,24 +650,24 @@ class TestResponses(ESStatsTest):
])
def test_downloads_csv(self):
r = self.get_view_response('stats.downloads_series', group='day',
format='csv')
assert r.status_code == 200
self.csv_eq(r, """date,count
2009-09-03,10
2009-08-03,10
2009-07-03,10
2009-06-28,10
2009-06-20,10
2009-06-12,10
2009-06-07,10
2009-06-01,10""")
response = self.get_view_response(
'stats.downloads_series', group='day', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count
2009-09-03,10
2009-08-03,10
2009-07-03,10
2009-06-28,10
2009-06-20,10
2009-06-12,10
2009-06-07,10
2009-06-01,10""")
def test_downloads_sources_json(self):
r = self.get_view_response('stats.sources_series', group='day',
format='json')
assert r.status_code == 200
self.assertListEqual(json.loads(r.content), [
response = self.get_view_response(
'stats.sources_series', group='day', format='json')
assert response.status_code == 200
self.assertListEqual(json.loads(force_text(response.content)), [
{"count": 10,
"date": "2009-09-03",
"end": "2009-09-03",
@ -680,18 +703,18 @@ class TestResponses(ESStatsTest):
])
def test_downloads_sources_csv(self):
r = self.get_view_response('stats.sources_series', group='day',
format='csv')
assert r.status_code == 200
self.csv_eq(r, """date,count,search,api
2009-09-03,10,3,2
2009-08-03,10,3,2
2009-07-03,10,3,2
2009-06-28,10,3,2
2009-06-20,10,3,2
2009-06-12,10,3,2
2009-06-07,10,3,2
2009-06-01,10,3,2""")
response = self.get_view_response(
'stats.sources_series', group='day', format='csv')
assert response.status_code == 200
self.csv_eq(response, """date,count,api,search
2009-09-03,10,2,3
2009-08-03,10,2,3
2009-07-03,10,2,3
2009-06-28,10,2,3
2009-06-20,10,2,3
2009-06-12,10,2,3
2009-06-07,10,2,3
2009-06-01,10,2,3""")
# Test the SQL query by using known dates, for weeks and months etc.
@ -760,7 +783,7 @@ class TestSite(TestCase):
def test_json(self, _site_query):
_site_query.return_value = [[], []]
res = self.client.get(reverse('stats.site', args=['json', 'date']))
assert res._headers['content-type'][1].startswith('text/json')
assert res._headers['content-type'][1].startswith('application/json')
def tests_no_date(self, _site_query):
_site_query.return_value = ['.', '.']

View file

@ -12,6 +12,7 @@ from django.core.files.storage import get_storage_class
from django.db import connection
from django.db.transaction import non_atomic_requests
from django.utils.cache import add_never_cache_headers, patch_cache_control
from django.utils.encoding import force_text
import six
@ -75,7 +76,7 @@ def get_series(model, extra_field=None, source=None, **filters):
for val in qs[:365]:
# Convert the datetimes to a date.
date_ = parse(val['date']).date()
rv = dict(count=val['count'], date=date_, end=date_)
rv = {'count': val['count'], 'date': date_, 'end': date_}
if source:
rv['data'] = extract(val[source])
elif extra_field:
@ -96,7 +97,9 @@ def csv_fields(series):
fields.update(row['data'])
rv.append(row['data'])
row['data'].update(count=row['count'], date=row['date'])
return rv, fields
# Sort the fields before returning them - we don't care much about column
# ordering, but it helps make the tests stable.
return rv, sorted(fields)
def extract(dicts):
@ -397,6 +400,7 @@ _KEYS = {
'review_count_new': 'reviews_created',
'collection_count_new': 'collections_created',
}
_ALL_KEYS = list(_KEYS)
_CACHED_KEYS = sorted(_KEYS.values())
@ -415,8 +419,8 @@ def _site_query(period, start, end, field=None, request=None):
"AND name IN (%s) "
"GROUP BY %s(date), name "
"ORDER BY %s(date) DESC;"
% (', '.join(['%s' for key in _KEYS.keys()]), period, period))
cursor.execute(sql, [start, end] + _KEYS.keys())
% (', '.join(['%s' for key in _ALL_KEYS]), period, period))
cursor.execute(sql, [start, end] + _ALL_KEYS)
# Process the results into a format that is friendly for render_*.
default = {k: 0 for k in _CACHED_KEYS}
@ -429,7 +433,7 @@ def _site_query(period, start, end, field=None, request=None):
result[date_]['data'] = {}
result[date_]['data'][_KEYS[name]] = int(count)
return result.values(), _CACHED_KEYS
return list(result.values()), _CACHED_KEYS
@non_atomic_requests
@ -484,34 +488,39 @@ def fudge_headers(response, stats):
patch_cache_control(response, max_age=seven_days)
class UnicodeCSVDictWriter(csv.DictWriter):
"""A DictWriter that writes a unicode stream."""
if six.PY2:
class CSVDictWriterClass(csv.DictWriter):
"""A DictWriter that writes a unicode stream, because the python 2
csv module doesn't."""
def __init__(self, stream, fields, **kw):
# We have the csv module write into our buffer as bytes and then we
# dump the buffer to the real stream as unicode.
self.buffer = moves.cStringIO()
csv.DictWriter.__init__(self, self.buffer, fields, **kw)
self.stream = stream
def __init__(self, stream, fields, **kw):
# We have the csv module write into our buffer as bytes and then we
# dump the buffer to the real stream as unicode.
self.buffer = moves.cStringIO()
csv.DictWriter.__init__(self, self.buffer, fields, **kw)
self.stream = stream
def writeheader(self):
self.writerow(dict(zip(self.fieldnames, self.fieldnames)))
def writeheader(self):
self.writerow(dict(zip(self.fieldnames, self.fieldnames)))
def try_encode(self, obj):
return obj.encode('utf-8') if isinstance(obj, six.text_type) else obj
def try_encode(self, obj):
return obj.encode('utf-8') if isinstance(
obj, six.string_types) else obj
def writerow(self, rowdict):
row = self._dict_to_list(rowdict)
# Write to the buffer as ascii.
self.writer.writerow(map(self.try_encode, row))
# Dump the buffer to the real stream as utf-8.
self.stream.write(self.buffer.getvalue().decode('utf-8'))
# Clear the buffer.
self.buffer.truncate(0)
def writerow(self, rowdict):
row = self._dict_to_list(rowdict)
# Write to the buffer as ascii.
self.writer.writerow(map(self.try_encode, row))
# Dump the buffer to the real stream as utf-8.
self.stream.write(self.buffer.getvalue().decode('utf-8'))
# Clear the buffer.
self.buffer.truncate(0)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
else:
CSVDictWriterClass = csv.DictWriter
@allow_cross_site_request
@ -524,9 +533,8 @@ def render_csv(request, addon, stats, fields,
context = {'addon': addon, 'timestamp': ts, 'title': title,
'show_disclaimer': show_disclaimer}
response = render(request, 'stats/csv_header.txt', context)
writer = UnicodeCSVDictWriter(response, fields, restval=0,
extrasaction='ignore')
writer = CSVDictWriterClass(
response, fields, restval=0, extrasaction='ignore')
writer.writeheader()
writer.writerows(stats)
@ -539,9 +547,9 @@ def render_csv(request, addon, stats, fields,
@non_atomic_requests
def render_json(request, addon, stats):
"""Render a stats series in JSON."""
response = http.HttpResponse(content_type='text/json')
response = http.HttpResponse(content_type='application/json')
# Django's encoder supports date and datetime.
json.dump(stats, response, cls=AMOJSONEncoder)
fudge_headers(response, response.content != json.dumps([]))
fudge_headers(response, force_text(response.content) != json.dumps([]))
return response