Remove old update service (#20086)

* Remove old update service
* Remove more code/docs associated with services/
* Add some missing pytest.mark.django_db that the autouse fixture was hiding...
* Remove extra import
* Add a comment documenting what the update URL typically looks like

Parent 978b1718b6
Commit 3f83f99131
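The pytest.mark.django_db bullet refers to module-level tests that had been relying on the removed autouse conftest fixture (which requested the `db` fixture while pointing SERVICES_DATABASE at the test database). A minimal sketch of the pattern those tests follow now, using a hypothetical test function purely for illustration:

import pytest

from olympia.amo.tests import addon_factory


@pytest.mark.django_db  # must be explicit now that no autouse fixture pulls in `db`
def test_example_touches_the_database():
    # Hypothetical test: anything that hits the ORM has to opt into DB access.
    addon = addon_factory()
    assert addon.pk is not None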
@@ -168,8 +168,8 @@ setup-codestyle:
 
 .PHONY: lint
 lint: ## lint the code
-	black --check src/ services/ tests/
-	flake8 src/ services/ tests/
+	black --check src/ tests/
+	flake8 src/ tests/
 	$(shell npm $(NPM_ARGS) bin)/prettier --check '**'
 	curlylint src/
 
@@ -81,11 +81,6 @@ def mock_basket(settings):
         json={'status': 'ok', 'token': USER_TOKEN})
 
 
-@pytest.fixture(autouse=True)
-def update_services_db_name_to_follow_test_db_name(db, settings, request):
-    settings.SERVICES_DATABASE['NAME'] = settings.DATABASES['default']['NAME']
-
-
 def pytest_configure(config):
     import django
     # Forcefully call `django.setup`, pytest-django tries to be very lazy
@@ -5,10 +5,6 @@ merge_slashes off;
 server {
     listen 80 default;
 
-    location ~ ^/update-legacy/.* {
-        try_files $uri @update;
-    }
-
     location /code/storage/files/ {
         internal;
         # This matches where addons-server `docker-compose.yml` mounts
@@ -104,12 +100,6 @@ server {
 
     }
 
-    location @update {
-        uwsgi_pass versioncheck;
-        include uwsgi_params;
-        uwsgi_param Host $http_host;
-    }
-
     # Return 204 for CSP reports to save sending them
     # into Django.
     location /csp-report {
@@ -122,10 +112,6 @@ upstream web {
     server web:8001;
 }
 
-upstream versioncheck {
-    server web:8002;
-}
-
 upstream addons-frontend {
     # This port is set in the `docker-compose.yml` file of addons-server.
     server addons-frontend:7010;
@@ -15,18 +15,6 @@ stdout_logfile_backups=1
 stopsignal=KILL
 priority=500
 
-[program:versioncheck]
-command=uwsgi --ini /code/docker/uwsgi.versioncheck.ini
-directory=/code
-stopasgroup=true
-autostart=true
-redirect_stderr=true
-stdout_logfile=logs/docker-versioncheck.log
-stdout_logfile_maxbytes=10MB
-stdout_logfile_backups=1
-stopsignal=KILL
-priority=600
-
 [program:watcher]
 autostart=true
 autorestart=false
@@ -1,39 +0,0 @@
-# versioncheck (services/update) uwsgi config for local envs
-# Note that we don't use a single config and uwsgi routing, to mimic what we
-# do in production.
-[uwsgi]
-base = /code
-chdir = %(base)
-module = services.wsgi.versioncheck:application
-
-# process-related settings
-master = true
-# maximum number of worker processes
-processes = 2
-vaccum = true
-socket = :8002
-uid = olympia
-gid = olympia
-memory-report = true
-enable-threads = true
-
-# autoreload is not enabled to save ressources.
-
-max-requests = 1000
-
-# Load apps in workers and not only in master
-lazy-apps = true
-
-# Open log file after we dropped privileges so that the file is being owned
-# by olympia:olympia and has proper permissions to be readable outside
-# of docker
-logto2 = %(base)/logs/uwsgi-versioncheck.log
-
-# Limit log file size to 10MB
-log-maxsize = 1048576
-
-# And set the name for the previous log
-log-backupname = %(base)/logs/uwsgi-versioncheck.log.1
-
-# Set default settings as originally done by manage.py
-env = DJANGO_SETTINGS_MODULE=settings
@@ -16,7 +16,6 @@ Development
    vpn
    acl
    logging
-   services
    translations
    search
    docs
@@ -1,12 +0,0 @@
-.. _services:
-
-==========================
-Services
-==========================
-
-The services directory contain a special separate piece of code that deals with the update service Firefox calls to get updates about installed add-ons (though the ``extensions.update.background.url`` and ``extensions.update.background.url`` preferences).
-
-In dev/stage/prod, this would have its own domain, but locally, we re-use the same as
-the rest of addons-server, and our nginx configuration answers using the separate ``versioncheck`` wsgi service for all requests with a /update/ prefix as their path. This minimal configuration has autoreload disabled to stay as lean as possible, so to you'll need to manually restart the web docker service to see changes.
-
-A typical URL to test with would look like this: ``http://olympia.test/update/?reqVersion=1&id=addon@guid&version=0.1&appID={ec8030f7-c20a-464f-9b0e-13a3a9e97384}&appVersion=99.0``
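For reference, one way to exercise the endpoint described in the removed docs above from Python and inspect the JSON it returns; this is a local-testing sketch (it assumes the `requests` package and the local `olympia.test` host and placeholder GUID from the example URL), not part of the change itself:

import requests

params = {
    'reqVersion': '1',
    'id': 'addon@guid',  # placeholder GUID from the example URL
    'version': '0.1',
    'appID': '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',  # Firefox
    'appVersion': '99.0',
}
response = requests.get('http://olympia.test/update/', params=params)
# The service answers with JSON shaped like {'addons': {<guid>: {'updates': [...]}}}.
print(response.json()['addons']['addon@guid']['updates'])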
@ -122,48 +122,6 @@ Pillow==9.3.0 \
|
|||
PyJWT==2.6.0 \
|
||||
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
|
||||
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
|
||||
SQLAlchemy==1.4.44 \
|
||||
--hash=sha256:0be9b479c5806cece01f1581726573a8d6515f8404e082c375b922c45cfc2a7b \
|
||||
--hash=sha256:17aee7bfcef7bf0dea92f10e5dfdd67418dcf6fe0759f520e168b605855c003e \
|
||||
--hash=sha256:21f3df74a0ab39e1255e94613556e33c1dc3b454059fe0b365ec3bbb9ed82e4a \
|
||||
--hash=sha256:237067ba0ef45a518b64606e1807f7229969ad568288b110ed5f0ca714a3ed3a \
|
||||
--hash=sha256:2dda5f96719ae89b3ec0f1b79698d86eb9aecb1d54e990abb3fdd92c04b46a90 \
|
||||
--hash=sha256:393f51a09778e8984d735b59a810731394308b4038acdb1635397c2865dae2b6 \
|
||||
--hash=sha256:3ca21b35b714ce36f4b8d1ee8d15f149db8eb43a472cf71600bf18dae32286e7 \
|
||||
--hash=sha256:3cbdbed8cdcae0f83640a9c44fa02b45a6c61e149c58d45a63c9581aba62850f \
|
||||
--hash=sha256:3eba07f740488c3a125f17c092a81eeae24a6c7ec32ac9dbc52bf7afaf0c4f16 \
|
||||
--hash=sha256:3f68eab46649504eb95be36ca529aea16cd199f080726c28cbdbcbf23d20b2a2 \
|
||||
--hash=sha256:4c56e6899fa6e767e4be5d106941804a4201c5cb9620a409c0b80448ec70b656 \
|
||||
--hash=sha256:53f90a2374f60e703c94118d21533765412da8225ba98659de7dd7998641ab17 \
|
||||
--hash=sha256:595b185041a4dc5c685283ea98c2f67bbfa47bb28e4a4f5b27ebf40684e7a9f8 \
|
||||
--hash=sha256:65a0ad931944fcb0be12a8e0ac322dbd3ecf17c53f088bc10b6da8f0caac287b \
|
||||
--hash=sha256:68e0cd5d32a32c4395168d42f2fefbb03b817ead3a8f3704b8bd5697c0b26c24 \
|
||||
--hash=sha256:6a06c2506c41926d2769f7968759995f2505e31c5b5a0821e43ca5a3ddb0e8ae \
|
||||
--hash=sha256:6d7e1b28342b45f19e3dea7873a9479e4a57e15095a575afca902e517fb89652 \
|
||||
--hash=sha256:6f0ea4d7348feb5e5d0bf317aace92e28398fa9a6e38b7be9ec1f31aad4a8039 \
|
||||
--hash=sha256:7313e4acebb9ae88dbde14a8a177467a7625b7449306c03a3f9f309b30e163d0 \
|
||||
--hash=sha256:7cf7c7adbf4417e3f46fc5a2dbf8395a5a69698217337086888f79700a12e93a \
|
||||
--hash=sha256:80ead36fb1d676cc019586ffdc21c7e906ce4bf243fe4021e4973dae332b6038 \
|
||||
--hash=sha256:9470633395e5f24d6741b4c8a6e905bce405a28cf417bba4ccbaadf3dab0111d \
|
||||
--hash=sha256:94c0093678001f5d79f2dcbf3104c54d6c89e41ab50d619494c503a4d3f1aef2 \
|
||||
--hash=sha256:95f4f8d62589755b507218f2e3189475a4c1f5cc9db2aec772071a7dc6cd5726 \
|
||||
--hash=sha256:9c857676d810ca196be73c98eb839125d6fa849bfa3589be06201a6517f9961c \
|
||||
--hash=sha256:a22208c1982f1fe2ae82e5e4c3d4a6f2445a7a0d65fb7983a3d7cbbe3983f5a4 \
|
||||
--hash=sha256:ad5f966623905ee33694680dda1b735544c99c7638f216045d21546d3d8c6f5b \
|
||||
--hash=sha256:ae1ed1ebc407d2f66c6f0ec44ef7d56e3f455859df5494680e2cf89dad8e3ae0 \
|
||||
--hash=sha256:afd1ac99179d1864a68c06b31263a08ea25a49df94e272712eb2824ef151e294 \
|
||||
--hash=sha256:b6a337a2643a41476fb6262059b8740f4b9a2ec29bf00ffb18c18c080f6e0aed \
|
||||
--hash=sha256:b737fbeb2f78926d1f59964feb287bbbd050e7904766f87c8ce5cfb86e6d840c \
|
||||
--hash=sha256:c46322354c58d4dc039a2c982d28284330f8919f31206894281f4b595b9d8dbe \
|
||||
--hash=sha256:c7e3b9e01fdbe1ce3a165cc7e1ff52b24813ee79c6df6dee0d1e13888a97817e \
|
||||
--hash=sha256:c9aa372b295a36771cffc226b6517df3011a7d146ac22d19fa6a75f1cdf9d7e6 \
|
||||
--hash=sha256:d3b6d4588994da73567bb00af9d7224a16c8027865a8aab53ae9be83f9b7cbd1 \
|
||||
--hash=sha256:d3b9ac11f36ab9a726097fba7c7f6384f0129aedb017f1d4d1d4fce9052a1320 \
|
||||
--hash=sha256:d654870a66027af3a26df1372cf7f002e161c6768ebe4c9c6fdc0da331cb5173 \
|
||||
--hash=sha256:d8080bc51a775627865e0f1dbfc0040ff4ace685f187f6036837e1727ba2ed10 \
|
||||
--hash=sha256:da60b98b0f6f0df9fbf8b72d67d13b73aa8091923a48af79a951d4088530a239 \
|
||||
--hash=sha256:f5e8ed9cde48b76318ab989deeddc48f833d2a6a7b7c393c49b704f67dedf01d \
|
||||
--hash=sha256:f8e5443295b218b08bef8eb85d31b214d184b3690d99a33b7bd8e5591e2b0aa1
|
||||
# amqp is required by kombu
|
||||
amqp==5.1.1 \
|
||||
--hash=sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2 \
|
||||
|
|
|
@@ -1,5 +0,0 @@
-import os
-
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings_local')
-
-from django.conf import settings  # noqa
@@ -1,257 +0,0 @@
-import json
-import logging.config
-import os
-
-from django.utils.encoding import force_bytes
-from email.utils import formatdate
-import MySQLdb as mysql
-from urllib.parse import parse_qsl
-import sqlalchemy.pool as pool
-from time import time
-
-from services.settings import settings
-
-# This has to be imported after the settings so statsd knows where to log to.
-from django_statsd.clients import statsd
-
-from olympia.constants import applications, base
-from olympia.versions.compare import version_int
-import olympia.core.logger
-
-
-def get_connection():
-    db = settings.SERVICES_DATABASE
-    return mysql.connect(
-        host=db['HOST'],
-        user=db['USER'],
-        passwd=db['PASSWORD'],
-        db=db['NAME'],
-        charset=db['OPTIONS']['charset'],
-    )
-
-
-pool = pool.QueuePool(get_connection, max_overflow=10, pool_size=5, recycle=300)
-
-
-class Update:
-    def __init__(self, data, compat_mode='strict'):
-        self.connection, self.cursor = None, None
-        self.data = data.copy()
-        self.data['row'] = {}
-        self.version_int = 0
-        self.compat_mode = compat_mode
-        self.app = applications.APP_GUIDS.get(data.get('appID'))
-
-    def is_valid(self):
-        # If you accessing this from unit tests, then before calling
-        # is valid, you can assign your own cursor.
-        if not self.cursor:
-            self.connection = pool.connect()
-            self.cursor = self.connection.cursor()
-
-        data = self.data
-        # Version can be blank.
-        data['version'] = data.get('version', '')
-        for field in ['reqVersion', 'id', 'appID', 'appVersion']:
-            if field not in data:
-                return False
-
-        if not self.app:
-            return False
-
-        data['app_id'] = self.app.id
-
-        sql = """SELECT `id`, `status`, `guid` FROM `addons`
-                 WHERE `guid` = %(guid)s AND
-                       `inactive` = 0 AND
-                       `status` NOT IN (%(STATUS_DELETED)s, %(STATUS_DISABLED)s)
-                 LIMIT 1;"""
-        self.cursor.execute(
-            sql,
-            {
-                'guid': self.data['id'],
-                'STATUS_DELETED': base.STATUS_DELETED,
-                'STATUS_DISABLED': base.STATUS_DISABLED,
-            },
-        )
-        result = self.cursor.fetchone()
-        if result is None:
-            return False
-
-        data['id'], data['addon_status'], data['guid'] = result
-        data['version_int'] = version_int(data['appVersion'])
-        return True
-
-    def get_update(self):
-        data = self.data
-
-        data['STATUS_APPROVED'] = base.STATUS_APPROVED
-        data['CHANNEL_LISTED'] = base.CHANNEL_LISTED
-
-        sql = [
-            """
-            SELECT
-                `addons`.`guid` AS `guid`,
-                `addons`.`slug` AS `slug`,
-                `appmin`.`version` AS `min`,
-                `appmax`.`version` AS `max`,
-                `files`.`hash`,
-                `files`.`filename`,
-                `files`.`id` AS `file_id`,
-                `versions`.`id` AS `version_id`,
-                `files`.`strict_compatibility` AS `strict_compat`,
-                `versions`.`releasenotes`,
-                `versions`.`version` AS `version`
-            FROM `versions`
-            INNER JOIN `addons`
-                ON `addons`.`id` = `versions`.`addon_id`
-                AND `addons`.`id` = %(id)s
-            INNER JOIN `applications_versions`
-                ON `applications_versions`.`version_id` = `versions`.`id`
-            INNER JOIN `appversions` `appmin`
-                ON `appmin`.`id` = `applications_versions`.`min`
-                AND `appmin`.`application_id` = %(app_id)s
-            INNER JOIN `appversions` `appmax`
-                ON `appmax`.`id` = `applications_versions`.`max`
-                AND `appmax`.`application_id` = %(app_id)s
-            INNER JOIN `files`
-                ON `files`.`version_id` = `versions`.`id`
-            -- Find a reference to the user's current version, if it exists.
-            -- These should never be inner joins. We need results even if we
-            -- can't find the current version.
-            LEFT JOIN `versions` `curver`
-                ON `curver`.`addon_id` = `addons`.`id`
-                AND `curver`.`version` = %(version)s
-            LEFT JOIN `files` `curfile`
-                ON `curfile`.`version_id` = `curver`.`id`
-            WHERE
-                `versions`.`deleted` = 0
-                AND `versions`.`channel` = %(CHANNEL_LISTED)s
-                AND `files`.`status` = %(STATUS_APPROVED)s
-                AND `appmin`.`version_int` <= %(version_int)s
-            """
-        ]
-
-        if self.compat_mode == 'ignore':
-            pass  # no further SQL modification required.
-
-        elif self.compat_mode == 'normal':
-            # When file has strict_compatibility enabled, default to compatible
-            # is disabled.
-            sql.append(
-                """AND
-                CASE WHEN `files`.`strict_compatibility` = 1
-                THEN `appmax`.`version_int` >= %(version_int)s ELSE 1 END
-                """
-            )
-        else:  # Not defined or 'strict'.
-            sql.append('AND `appmax`.`version_int` >= %(version_int)s ')
-
-        sql.append('ORDER BY `versions`.`id` DESC LIMIT 1;')
-        self.cursor.execute(''.join(sql), data)
-        result = self.cursor.fetchone()
-
-        if result:
-            data['row'] = dict(
-                zip(
-                    [
-                        'guid',
-                        'slug',
-                        'min',
-                        'max',
-                        'hash',
-                        'filename',
-                        'file_id',
-                        'version_id',
-                        'strict_compat',
-                        'releasenotes',
-                        'version',
-                    ],
-                    list(result),
-                )
-            )
-            return True
-
-        return False
-
-    def get_output(self):
-        if self.is_valid():
-            if self.get_update():
-                contents = self.get_success_output()
-            else:
-                contents = self.get_no_updates_output()
-        else:
-            contents = self.get_error_output()
-        self.cursor.close()
-        if self.connection:
-            self.connection.close()
-        return json.dumps(contents)
-
-    def get_error_output(self):
-        return {}
-
-    def get_no_updates_output(self):
-        return {'addons': {self.data['guid']: {'updates': []}}}
-
-    def get_success_output(self):
-        data = self.data['row']
-        slug = data['slug']
-        version = data['version']
-        file_id = data['file_id']
-        filename = os.path.basename(data['filename'])
-        update = {
-            'version': data['version'],
-            # This is essentially re-implementing File.get_absolute_url()
-            # without needing django.
-            'update_link': (
-                f'{settings.SITE_URL}/{self.app.short}/'
-                f'downloads/file/{file_id}/{filename}'
-            ),
-            'applications': {'gecko': {'strict_min_version': data['min']}},
-        }
-        if data['strict_compat']:
-            update['applications']['gecko']['strict_max_version'] = data['max']
-        if data['hash']:
-            update['update_hash'] = data['hash']
-        if data['releasenotes']:
-            update['update_info_url'] = (
-                f'{settings.SITE_URL}/%APP_LOCALE%/'
-                f'{self.app.short}/addon/{slug}/versions/{version}/updateinfo/'
-            )
-        return {'addons': {self.data['guid']: {'updates': [update]}}}
-
-    def format_date(self, secs):
-        return '%s GMT' % formatdate(time() + secs)[:25]
-
-    def get_headers(self, length):
-        content_type = 'application/json'
-        return [
-            ('Content-Type', content_type),
-            ('Cache-Control', 'public, max-age=3600'),
-            ('Last-Modified', self.format_date(0)),
-            ('Expires', self.format_date(3600)),
-            ('Content-Length', str(length)),
-        ]
-
-
-def application(environ, start_response):
-    # Logging has to be configured before it can be used. In the django app
-    # this is done through settings.LOGGING but the update service is its own
-    # separate wsgi app.
-    logging.config.dictConfig(settings.LOGGING)
-
-    # Now we can get our logger instance.
-    log = olympia.core.logger.getLogger('z.services')
-
-    status = '200 OK'
-    with statsd.timer('services.update'):
-        data = dict(parse_qsl(environ['QUERY_STRING']))
-        compat_mode = data.pop('compatMode', 'strict')
-        try:
-            update = Update(data, compat_mode)
-            output = force_bytes(update.get_output())
-            start_response(status, update.get_headers(len(output)))
-        except Exception as e:
-            log.exception(e)
-            raise
-    return [output]
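To make the removed module above easier to follow, this is roughly the dictionary shape that its get_success_output() serialised before handing it to json.dumps(); the concrete values here are invented for illustration:

example_success_output = {
    'addons': {
        'addon@guid': {  # keyed by the GUID sent in the request
            'updates': [
                {
                    'version': '1.2.2',
                    'update_link': (
                        'http://olympia.test/firefox/downloads/file/12345/addon.xpi'
                    ),
                    'applications': {
                        'gecko': {
                            'strict_min_version': '2.0',
                            # 'strict_max_version' is included only when the
                            # file sets strict_compatibility.
                        }
                    },
                    # 'update_hash' and 'update_info_url' are included only when
                    # the file has a hash / the version has release notes.
                }
            ]
        }
    }
}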
@@ -1,10 +0,0 @@
-import os
-import site
-
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings_local')
-
-wsgidir = os.path.dirname(__file__)
-for path in ['../', '../..', '../../..', '../../apps']:
-    site.addsitedir(os.path.abspath(os.path.join(wsgidir, path)))
-
-from update import application  # noqa
@@ -70,6 +70,7 @@ class TestRepudiateActivityLogToken(TestCase):
         call_command('repudiate_token')
 
 
+@pytest.mark.django_db
 def test_backfill_ratinglog_command():
     user = user_factory()
     addon = addon_factory()
@@ -522,6 +522,7 @@ class TestFixLangpacksWithMaxVersionStar(TestCase):
         assert version.compatible_apps[app].max.version == '77.*'
 
 
+@pytest.mark.django_db
 def test_update_rating_aggregates():
    addon = addon_factory()
    Rating.objects.create(addon=addon, user=user_factory(), rating=4)
@@ -1,11 +1,11 @@
 import os
-import pytest
 import shutil
 import tempfile
 from unittest import mock
 
 from django.conf import settings
 
+import pytest
 from PIL import Image
 from waffle.testutils import override_switch
 
@@ -230,6 +230,7 @@ def test_update_addon_hotness():
     assert addon3.hotness == 123
 
 
+@pytest.mark.django_db
 def test_update_addon_weekly_downloads():
     addon = addon_factory(weekly_downloads=0)
     count = 123
@@ -242,6 +243,7 @@ def test_update_addon_weekly_downloads():
     assert addon.weekly_downloads == count
 
 
+@pytest.mark.django_db
 def test_update_addon_weekly_downloads_ignores_deleted_addons():
     guid = 'some@guid'
     deleted_addon = addon_factory(guid=guid)
@@ -258,6 +260,7 @@ def test_update_addon_weekly_downloads_ignores_deleted_addons():
     assert addon.weekly_downloads == count
 
 
+@pytest.mark.django_db
 def test_update_addon_weekly_downloads_skips_non_existent_addons():
     addon = addon_factory(weekly_downloads=0)
     count = 123
@@ -351,6 +354,7 @@ class TestResizeIcon(TestCase):
         self._uploader(resize_size, final_size)
 
 
+@pytest.mark.django_db
 @mock.patch('olympia.addons.tasks.index_addons.delay')
 def test_disable_addons(index_addons_mock):
     UserProfile.objects.create(pk=settings.TASK_USER_ID)
@@ -368,6 +372,7 @@ def test_disable_addons(index_addons_mock):
     index_addons_mock.assert_called_with([addon.id])
 
 
+@pytest.mark.django_db
 @mock.patch('olympia.addons.tasks.unindex_objects')
 @mock.patch('olympia.addons.tasks.index_objects')
 def test_index_addons(index_objects_mock, unindex_objects_mock):
@ -1,622 +0,0 @@
|
|||
import json
|
||||
from unittest import mock
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from email import utils
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import connection
|
||||
from django.test.testcases import TransactionTestCase
|
||||
|
||||
from services import update
|
||||
|
||||
from olympia import amo
|
||||
from olympia.addons.models import Addon
|
||||
from olympia.amo.tests import addon_factory, TestCase
|
||||
from olympia.applications.models import AppVersion
|
||||
from olympia.files.models import File
|
||||
from olympia.versions.models import ApplicationsVersions, Version
|
||||
|
||||
|
||||
class VersionCheckMixin:
|
||||
def get_update_instance(self, data):
|
||||
instance = update.Update(data)
|
||||
instance.cursor = connection.cursor()
|
||||
return instance
|
||||
|
||||
|
||||
class TestDataValidate(VersionCheckMixin, TestCase):
|
||||
fixtures = ['base/addon_3615', 'base/appversion']
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.data = {
|
||||
'id': '{2fa4ed95-0317-4c6a-a74c-5f3e3912c1f9}',
|
||||
'version': '2.0.58',
|
||||
'reqVersion': 1,
|
||||
'appID': '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
|
||||
'appVersion': '3.7a1pre',
|
||||
}
|
||||
|
||||
def test_app_version_fails(self):
|
||||
data = self.data.copy()
|
||||
del data['appID']
|
||||
instance = self.get_update_instance(data)
|
||||
assert not instance.is_valid()
|
||||
|
||||
def test_app_version_wrong(self):
|
||||
data = self.data.copy()
|
||||
data['appVersion'] = '67.7'
|
||||
instance = self.get_update_instance(data)
|
||||
# If you pass through the wrong version that's fine
|
||||
# you will just end up with no updates because your
|
||||
# version_int will be out.
|
||||
assert instance.is_valid()
|
||||
|
||||
def test_app_version(self):
|
||||
data = self.data.copy()
|
||||
instance = self.get_update_instance(data)
|
||||
assert instance.is_valid()
|
||||
assert instance.data['version_int'] == 3070000001000
|
||||
|
||||
def test_sql_injection(self):
|
||||
data = self.data.copy()
|
||||
data['id'] = "'"
|
||||
instance = self.get_update_instance(data)
|
||||
assert not instance.is_valid()
|
||||
|
||||
def test_inactive(self):
|
||||
addon = Addon.objects.get(pk=3615)
|
||||
addon.update(disabled_by_user=True)
|
||||
|
||||
instance = self.get_update_instance(self.data)
|
||||
assert not instance.is_valid()
|
||||
|
||||
def test_soft_deleted(self):
|
||||
addon = Addon.objects.get(pk=3615)
|
||||
addon.update(status=amo.STATUS_DELETED)
|
||||
|
||||
instance = self.get_update_instance(self.data)
|
||||
assert not instance.is_valid()
|
||||
|
||||
def test_disabled(self):
|
||||
addon = Addon.objects.get(pk=3615)
|
||||
addon.update(status=amo.STATUS_DISABLED)
|
||||
|
||||
instance = self.get_update_instance(self.data)
|
||||
assert not instance.is_valid()
|
||||
|
||||
def test_no_version(self):
|
||||
data = self.data.copy()
|
||||
del data['version']
|
||||
instance = self.get_update_instance(data)
|
||||
assert instance.is_valid()
|
||||
|
||||
def test_unlisted_addon(self):
|
||||
"""Add-ons with only unlisted versions are valid, they just don't
|
||||
receive any updates (See TestLookinstance.test_no_unlisted below)."""
|
||||
addon = Addon.objects.get(pk=3615)
|
||||
self.make_addon_unlisted(addon)
|
||||
|
||||
instance = self.get_update_instance(self.data)
|
||||
assert instance.is_valid()
|
||||
|
||||
|
||||
class TestLookup(VersionCheckMixin, TestCase):
|
||||
fixtures = ['addons/update', 'base/appversion']
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.addon = Addon.objects.get(id=1865)
|
||||
self.platform = None
|
||||
self.version_int = 3069900200100
|
||||
|
||||
self.app = amo.APP_IDS[1]
|
||||
self.version_1_0_2 = 66463
|
||||
self.version_1_1_3 = 90149
|
||||
self.version_1_2_0 = 105387
|
||||
self.version_1_2_1 = 112396
|
||||
self.version_1_2_2 = 115509
|
||||
|
||||
def get_update_instance(self, *args):
|
||||
data = {
|
||||
'id': self.addon.guid,
|
||||
'appID': args[2].guid,
|
||||
'appVersion': 1, # this is going to be overridden
|
||||
'appOS': args[3] if args[3] else '',
|
||||
'reqVersion': '',
|
||||
}
|
||||
# Allow version to be optional.
|
||||
if args[0]:
|
||||
data['version'] = args[0]
|
||||
instance = super().get_update_instance(data)
|
||||
assert instance.is_valid()
|
||||
instance.data['version_int'] = args[1]
|
||||
instance.get_update()
|
||||
return (
|
||||
instance.data['row'].get('version_id'),
|
||||
instance.data['row'].get('file_id'),
|
||||
)
|
||||
|
||||
def change_status(self, version, status):
|
||||
version = Version.objects.get(pk=version)
|
||||
file = version.file
|
||||
file.status = status
|
||||
file.save()
|
||||
return version
|
||||
|
||||
def change_version(self, version, name):
|
||||
Version.objects.get(pk=version).update(version=name)
|
||||
|
||||
def test_low_client(self):
|
||||
"""
|
||||
Version 3.0a1 of Firefox is 3000000001100 and version 1.0.2 of the
|
||||
add-on is returned.
|
||||
"""
|
||||
version, file = self.get_update_instance(
|
||||
'', '3000000001100', self.app, self.platform
|
||||
)
|
||||
assert version == self.version_1_0_2
|
||||
|
||||
def test_new_client(self):
|
||||
"""
|
||||
Version 3.0.12 of Firefox is 3069900200100 and version 1.2.2 of the
|
||||
add-on is returned.
|
||||
"""
|
||||
version, file = self.get_update_instance(
|
||||
'', self.version_int, self.app, self.platform
|
||||
)
|
||||
assert version == self.version_1_2_2
|
||||
|
||||
def test_min_client(self):
|
||||
"""
|
||||
Version 3.7a5pre of Firefox is 3070000005000 and version 1.1.3 of
|
||||
the add-on is returned, because all later ones are set to minimum
|
||||
version of 3.7a5.
|
||||
"""
|
||||
for version in Version.objects.filter(pk__gte=self.version_1_2_0):
|
||||
appversion = version.apps.all()[0]
|
||||
appversion.min = AppVersion.objects.get(pk=325) # 3.7a5
|
||||
appversion.save()
|
||||
|
||||
version, file = self.get_update_instance(
|
||||
'', '3070000005000', self.app, self.platform
|
||||
) # 3.7a5pre
|
||||
assert version == self.version_1_1_3
|
||||
|
||||
def test_new_client_ordering(self):
|
||||
"""
|
||||
Given the following:
|
||||
* Version 15 (1 day old), max application_version 3.6*
|
||||
* Version 12 (1 month old), max application_version 3.7a
|
||||
We want version 15, even though version 12 is for a higher version.
|
||||
This was found in https://bugzilla.mozilla.org/show_bug.cgi?id=615641.
|
||||
"""
|
||||
application_version = ApplicationsVersions.objects.get(pk=77550)
|
||||
application_version.max_id = 350
|
||||
application_version.save()
|
||||
|
||||
# Version 1.2.2 is now a lower max version.
|
||||
application_version = ApplicationsVersions.objects.get(pk=88490)
|
||||
application_version.max_id = 329
|
||||
application_version.save()
|
||||
|
||||
version, file = self.get_update_instance(
|
||||
'', self.version_int, self.app, self.platform
|
||||
)
|
||||
assert version == self.version_1_2_2
|
||||
|
||||
def test_public(self):
|
||||
"""
|
||||
If the addon status is public then you get a public version.
|
||||
"""
|
||||
self.change_status(self.version_1_2_2, amo.STATUS_AWAITING_REVIEW)
|
||||
self.addon.reload()
|
||||
assert self.addon.status == amo.STATUS_APPROVED
|
||||
version, file = self.get_update_instance(
|
||||
'1.2', self.version_int, self.app, self.platform
|
||||
)
|
||||
assert version == self.version_1_2_1
|
||||
|
||||
def test_no_unlisted(self):
|
||||
"""
|
||||
Unlisted versions are always ignored, never served as updates.
|
||||
"""
|
||||
Version.objects.get(pk=self.version_1_2_2).update(channel=amo.CHANNEL_UNLISTED)
|
||||
self.addon.reload()
|
||||
assert self.addon.status == amo.STATUS_APPROVED
|
||||
version, file = self.get_update_instance(
|
||||
'1.2', self.version_int, self.app, self.platform
|
||||
)
|
||||
assert version == self.version_1_2_1
|
||||
|
||||
def test_can_downgrade(self):
|
||||
"""
|
||||
Check that we can downgrade, if 1.2.0 gets admin disabled
|
||||
and the oldest public version is now 1.1.3.
|
||||
"""
|
||||
self.change_status(self.version_1_2_0, amo.STATUS_AWAITING_REVIEW)
|
||||
for v in Version.objects.filter(pk__gte=self.version_1_2_1):
|
||||
v.delete()
|
||||
version, file = self.get_update_instance(
|
||||
'1.2', self.version_int, self.app, self.platform
|
||||
)
|
||||
|
||||
assert version == self.version_1_1_3
|
||||
|
||||
def test_public_pending_exists(self):
|
||||
"""
|
||||
If the addon status is public and you are asking
|
||||
for a beta version we look up a version based on the
|
||||
file version at that point. In this case, because the
|
||||
file is pending, we are looking for something public.
|
||||
"""
|
||||
self.change_status(self.version_1_2_2, amo.STATUS_AWAITING_REVIEW)
|
||||
self.change_status(self.version_1_2_0, amo.STATUS_AWAITING_REVIEW)
|
||||
self.change_version(self.version_1_2_0, '1.2beta')
|
||||
|
||||
version, file = self.get_update_instance(
|
||||
'1.2', self.version_int, self.app, self.platform
|
||||
)
|
||||
|
||||
assert version == self.version_1_2_1
|
||||
|
||||
def test_not_public(self):
|
||||
"""
|
||||
If the addon status is not public, then the update only
|
||||
looks for files within that one version.
|
||||
"""
|
||||
self.change_status(self.version_1_2_2, amo.STATUS_NULL)
|
||||
self.addon.update(status=amo.STATUS_NULL)
|
||||
version, file = self.get_update_instance(
|
||||
'1.2.1', self.version_int, self.app, self.platform
|
||||
)
|
||||
assert version == self.version_1_2_1
|
||||
|
||||
def test_platform_ignore(self):
|
||||
"""Ignore platform passed by clients (all add-ons are now compatible
|
||||
with all platforms, only the app matters)"""
|
||||
version = Version.objects.get(pk=115509)
|
||||
version, file = self.get_update_instance(
|
||||
'1.2', self.version_int, self.app, 'Linux'
|
||||
)
|
||||
assert version == self.version_1_2_2
|
||||
|
||||
|
||||
class TestDefaultToCompat(VersionCheckMixin, TestCase):
|
||||
"""
|
||||
Test default to compatible with all the various combinations of input.
|
||||
"""
|
||||
|
||||
fixtures = ['addons/default-to-compat']
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.addon = Addon.objects.get(id=337203)
|
||||
self.platform = None
|
||||
self.app = amo.APP_IDS[1]
|
||||
self.app_version_int_4_0 = 4000000200100
|
||||
self.app_version_int_5_0 = 5000000200100
|
||||
self.app_version_int_6_0 = 6000000200100
|
||||
self.app_version_int_7_0 = 7000000200100
|
||||
self.app_version_int_8_0 = 8000000200100
|
||||
self.ver_1_0 = 1268881
|
||||
self.ver_1_1 = 1268882
|
||||
self.ver_1_2 = 1268883
|
||||
self.ver_1_3 = 1268884
|
||||
|
||||
self.expected = {
|
||||
'4.0-strict': self.ver_1_0,
|
||||
'4.0-normal': self.ver_1_0,
|
||||
'4.0-ignore': self.ver_1_0,
|
||||
'5.0-strict': self.ver_1_2,
|
||||
'5.0-normal': self.ver_1_2,
|
||||
'5.0-ignore': self.ver_1_2,
|
||||
'6.0-strict': self.ver_1_3,
|
||||
'6.0-normal': self.ver_1_3,
|
||||
'6.0-ignore': self.ver_1_3,
|
||||
'7.0-strict': self.ver_1_3,
|
||||
'7.0-normal': self.ver_1_3,
|
||||
'7.0-ignore': self.ver_1_3,
|
||||
'8.0-strict': None,
|
||||
'8.0-normal': self.ver_1_3,
|
||||
'8.0-ignore': self.ver_1_3,
|
||||
}
|
||||
|
||||
def update_files(self, **kw):
|
||||
for version in self.addon.versions.all():
|
||||
version.file.update(**kw)
|
||||
|
||||
def get_update_instance(self, **kw):
|
||||
instance = super().get_update_instance(
|
||||
{
|
||||
'reqVersion': 1,
|
||||
'id': self.addon.guid,
|
||||
'version': kw.get('item_version', '1.0'),
|
||||
'appID': self.app.guid,
|
||||
'appVersion': kw.get('app_version', '3.0'),
|
||||
}
|
||||
)
|
||||
assert instance.is_valid()
|
||||
instance.compat_mode = kw.get('compat_mode', 'strict')
|
||||
instance.get_update()
|
||||
return instance.data['row'].get('version_id')
|
||||
|
||||
def check(self, expected):
|
||||
"""
|
||||
Checks Firefox versions 4.0 to 8.0 in each compat mode and compares it
|
||||
to the expected version.
|
||||
"""
|
||||
versions = ['4.0', '5.0', '6.0', '7.0', '8.0']
|
||||
modes = ['strict', 'normal', 'ignore']
|
||||
|
||||
for version in versions:
|
||||
for mode in modes:
|
||||
assert (
|
||||
self.get_update_instance(app_version=version, compat_mode=mode)
|
||||
== expected['-'.join([version, mode])]
|
||||
)
|
||||
|
||||
def test_baseline(self):
|
||||
# Tests simple add-on (non-strict_compatibility).
|
||||
self.check(self.expected)
|
||||
|
||||
def test_strict_opt_in(self):
|
||||
# Tests add-on with opt-in strict compatibility
|
||||
self.update_files(strict_compatibility=True)
|
||||
self.expected.update(
|
||||
{
|
||||
'8.0-normal': None,
|
||||
}
|
||||
)
|
||||
self.check(self.expected)
|
||||
|
||||
|
||||
class TestResponse(VersionCheckMixin, TestCase):
|
||||
fixtures = ['base/addon_3615']
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.addon = Addon.objects.get(pk=3615)
|
||||
self.data = {
|
||||
'id': '{2fa4ed95-0317-4c6a-a74c-5f3e3912c1f9}',
|
||||
'version': '2.0.58',
|
||||
'reqVersion': 1,
|
||||
'appID': '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
|
||||
'appVersion': '3.7a1pre',
|
||||
}
|
||||
|
||||
def test_bad_guid(self):
|
||||
self.data['id'] = 'garbage'
|
||||
instance = self.get_update_instance(self.data)
|
||||
assert json.loads(instance.get_output()) == instance.get_error_output()
|
||||
|
||||
def test_good_version(self):
|
||||
instance = self.get_update_instance(self.data)
|
||||
instance.is_valid()
|
||||
instance.get_update()
|
||||
assert instance.data['row']['hash'].startswith('sha256:3808b13e')
|
||||
assert instance.data['row']['min'] == '2.0'
|
||||
assert instance.data['row']['max'] == '4.0'
|
||||
|
||||
def test_no_app_version(self):
|
||||
data = self.data.copy()
|
||||
data['appVersion'] = '1.4'
|
||||
instance = self.get_update_instance(data)
|
||||
instance.is_valid()
|
||||
assert not instance.get_update()
|
||||
|
||||
def test_low_app_version(self):
|
||||
data = self.data.copy()
|
||||
data['appVersion'] = '2.0'
|
||||
instance = self.get_update_instance(data)
|
||||
instance.is_valid()
|
||||
instance.get_update()
|
||||
assert instance.data['row']['hash'].startswith('sha256:3808b13e')
|
||||
assert instance.data['row']['min'] == '2.0'
|
||||
assert instance.data['row']['max'] == '4.0'
|
||||
|
||||
def test_content_type(self):
|
||||
instance = self.get_update_instance(self.data)
|
||||
('Content-Type', 'text/xml') in instance.get_headers(1)
|
||||
|
||||
def test_cache_control(self):
|
||||
instance = self.get_update_instance(self.data)
|
||||
('Cache-Control', 'public, max-age=3600') in instance.get_headers(1)
|
||||
|
||||
def test_length(self):
|
||||
instance = self.get_update_instance(self.data)
|
||||
('Cache-Length', '1') in instance.get_headers(1)
|
||||
|
||||
def test_expires(self):
|
||||
"""Check there are these headers and that expires is 3600 later."""
|
||||
# We aren't bother going to test the actual time in expires, that
|
||||
# way lies pain with broken tests later.
|
||||
instance = self.get_update_instance(self.data)
|
||||
headers = dict(instance.get_headers(1))
|
||||
last_modified = datetime(*utils.parsedate_tz(headers['Last-Modified'])[:7])
|
||||
expires = datetime(*utils.parsedate_tz(headers['Expires'])[:7])
|
||||
assert (expires - last_modified).seconds == 3600
|
||||
|
||||
def get_file_url(self):
|
||||
"""Return the file url with the hash as parameter."""
|
||||
return self.addon.current_version.file.get_absolute_url()
|
||||
|
||||
def test_url(self):
|
||||
instance = self.get_update_instance(self.data)
|
||||
content = instance.get_output()
|
||||
data = json.loads(content)
|
||||
guid = '{2fa4ed95-0317-4c6a-a74c-5f3e3912c1f9}'
|
||||
assert data['addons'][guid]['updates'][0]['update_link'] == self.get_file_url()
|
||||
|
||||
def test_url_local_recent(self):
|
||||
a_bit_ago = datetime.now() - timedelta(seconds=60)
|
||||
File.objects.get(pk=67442).update(datestatuschanged=a_bit_ago)
|
||||
instance = self.get_update_instance(self.data)
|
||||
content = instance.get_output()
|
||||
data = json.loads(content)
|
||||
guid = '{2fa4ed95-0317-4c6a-a74c-5f3e3912c1f9}'
|
||||
assert data['addons'][guid]['updates'][0]['update_link'] == self.get_file_url()
|
||||
|
||||
def test_hash(self):
|
||||
content = self.get_update_instance(self.data).get_output()
|
||||
data = json.loads(content)
|
||||
|
||||
file = File.objects.get(pk=67442)
|
||||
guid = '{2fa4ed95-0317-4c6a-a74c-5f3e3912c1f9}'
|
||||
assert data['addons'][guid]['updates'][0]['update_hash'] == file.hash
|
||||
|
||||
file = File.objects.get(pk=67442)
|
||||
file.hash = ''
|
||||
file.save()
|
||||
|
||||
content = self.get_update_instance(self.data).get_output()
|
||||
data = json.loads(content)
|
||||
assert 'update_hash' not in data['addons'][guid]['updates'][0]
|
||||
|
||||
def test_release_notes(self):
|
||||
content = self.get_update_instance(self.data).get_output()
|
||||
data = json.loads(content)
|
||||
guid = '{2fa4ed95-0317-4c6a-a74c-5f3e3912c1f9}'
|
||||
expected_url = (
|
||||
'http://testserver/%APP_LOCALE%/firefox/'
|
||||
'addon/a3615/versions/2.1.072/updateinfo/'
|
||||
)
|
||||
assert data['addons'][guid]['updates'][0]['update_info_url'] == expected_url
|
||||
|
||||
version = Version.objects.get(pk=81551)
|
||||
version.update(release_notes=None)
|
||||
|
||||
content = self.get_update_instance(self.data).get_output()
|
||||
data = json.loads(content)
|
||||
assert 'update_info_url' not in data['addons'][guid]['updates'][0]
|
||||
|
||||
def test_release_notes_android(self):
|
||||
# Quick & dirty way to make the add-on compatible with android and
|
||||
# force the update request to be from Android as well.
|
||||
AppVersion.objects.update(application=amo.ANDROID.id)
|
||||
self.data['appID'] = amo.ANDROID.guid
|
||||
|
||||
content = self.get_update_instance(self.data).get_output()
|
||||
data = json.loads(content)
|
||||
guid = '{2fa4ed95-0317-4c6a-a74c-5f3e3912c1f9}'
|
||||
expected_url = (
|
||||
'http://testserver/%APP_LOCALE%/android/'
|
||||
'addon/a3615/versions/2.1.072/updateinfo/'
|
||||
)
|
||||
assert data['addons'][guid]['updates'][0]['update_info_url'] == expected_url
|
||||
|
||||
def test_no_updates_at_all(self):
|
||||
self.addon.versions.all().delete()
|
||||
instance = self.get_update_instance(self.data)
|
||||
assert json.loads(instance.get_output()) == instance.get_no_updates_output()
|
||||
|
||||
def test_no_updates_my_fx(self):
|
||||
data = self.data.copy()
|
||||
data['appVersion'] = '5.0.1'
|
||||
instance = self.get_update_instance(data)
|
||||
assert json.loads(instance.get_output()) == instance.get_no_updates_output()
|
||||
|
||||
def test_application(self):
|
||||
# Basic test making sure application() is returning the output of
|
||||
# Update.get_output(). Have to mock Update(): otherwise, even though
|
||||
# we're setting SERVICES_DATABASE to point to the test database in
|
||||
# settings_test.py, we wouldn't see results because the data wouldn't
|
||||
# exist with the cursor the update service is using, which is different
|
||||
# from the one used by django tests.
|
||||
environ = {'QUERY_STRING': ''}
|
||||
self.start_response_call_count = 0
|
||||
|
||||
expected_headers = [('FakeHeader', 'FakeHeaderValue')]
|
||||
|
||||
expected_output = b'{"fake": "output"}'
|
||||
|
||||
def start_response_inspector(status, headers):
|
||||
self.start_response_call_count += 1
|
||||
assert status == '200 OK'
|
||||
assert headers == expected_headers
|
||||
|
||||
with mock.patch('services.update.Update') as UpdateMock:
|
||||
update_instance = UpdateMock.return_value
|
||||
update_instance.get_headers.return_value = expected_headers
|
||||
update_instance.get_output.return_value = expected_output
|
||||
output = update.application(environ, start_response_inspector)
|
||||
assert self.start_response_call_count == 1
|
||||
# Output is an array with a single string containing the body of the
|
||||
# response.
|
||||
assert output == [expected_output]
|
||||
|
||||
@mock.patch('services.update.logging.config.dictConfig')
|
||||
@mock.patch('services.update.Update')
|
||||
def test_exception_handling(self, UpdateMock, dictConfigMock):
|
||||
"""Test ensuring exceptions are raised and logged properly."""
|
||||
|
||||
class CustomException(Exception):
|
||||
pass
|
||||
|
||||
self.inspector_call_count = 0
|
||||
update_instance = UpdateMock.return_value
|
||||
update_instance.get_output.side_effect = CustomException('Boom!')
|
||||
|
||||
def inspector(status, headers):
|
||||
self.inspector_call_count += 1
|
||||
|
||||
with self.assertRaises(CustomException):
|
||||
with self.assertLogs(level='ERROR') as logs:
|
||||
update.application({'QUERY_STRING': ''}, inspector)
|
||||
assert self.inspector_call_count == 0
|
||||
assert len(logs.records) == 1
|
||||
assert logs.records[0].message == 'Boom!'
|
||||
assert logs.records[0].exc_info[1] == update_instance.get_output.side_effect
|
||||
|
||||
# Ensure we had set up logging correctly. We can't let the actual call
|
||||
# go through, it would override the loggers assertLogs() set up.
|
||||
assert dictConfigMock.call_count == 1
|
||||
assert dictConfigMock.call_args[0] == (settings.LOGGING,)
|
||||
|
||||
|
||||
# This test needs to be a TransactionTestCase because we want to test the
|
||||
# behavior of database cursor created by the update service. Since the data is
|
||||
# written by a different cursor, it needs to be committed for the update
|
||||
# service to see it (Other tests above that aren't explicitly mocking the
|
||||
# service and care about the output cheat and override the cursor to use
|
||||
# django's).
|
||||
class TestUpdateConnectionEncoding(TransactionTestCase):
|
||||
def setUp(self):
|
||||
self.addon = addon_factory()
|
||||
|
||||
def test_service_database_setting(self):
|
||||
expected_name = settings.DATABASES['default']['NAME']
|
||||
assert 'test' in expected_name
|
||||
assert settings.SERVICES_DATABASE['NAME'] == expected_name
|
||||
|
||||
connection = update.pool.connect()
|
||||
cursor = connection.cursor()
|
||||
cursor.execute('SELECT DATABASE();')
|
||||
assert cursor.fetchone()[0] == expected_name
|
||||
connection.close()
|
||||
|
||||
def test_connection_pool_encoding(self):
|
||||
connection = update.pool.connect()
|
||||
assert connection.connection.encoding == 'utf8'
|
||||
connection.close()
|
||||
|
||||
def test_unicode_data(self):
|
||||
# To trigger the error this test is trying to cover, we need 2 things:
|
||||
# - An update request that would be considered 'valid', i.e. the
|
||||
# necessary parameters are presentTestUpdateConnectionEncoding and
|
||||
# the add-on exists.
|
||||
# - A database cursor instantiated from the update service, not by
|
||||
# django tests.
|
||||
# Note that this test would hang before the fix to pass charset when
|
||||
# connecting in get_connection().
|
||||
data = {
|
||||
'id': self.addon.guid,
|
||||
'reqVersion': '2鎈',
|
||||
'appID': amo.FIREFOX.guid,
|
||||
'appVersion': '78.0',
|
||||
}
|
||||
instance = update.Update(data)
|
||||
output = instance.get_output()
|
||||
update_data = json.loads(output)
|
||||
assert update_data
|
|
@@ -494,6 +494,7 @@ def test_webext_version_stats():
     incr_mock.assert_called_with('prefix.for.logging.webext_version.12_34_56')
 
 
+@pytest.mark.django_db
 def test_validate_version_number_is_gt_latest_signed_listed_version():
     addon = addon_factory(version_kw={'version': '123.0'}, file_kw={'is_signed': True})
     # add an unlisted version, which should be ignored.
@@ -545,6 +546,7 @@ def test_validate_version_number_is_gt_latest_signed_listed_version():
     assert not validate_version_number_is_gt_latest_signed_listed_version(None, '123')
 
 
+@pytest.mark.django_db
 def test_validate_version_number_is_gt_latest_signed_listed_version_not_langpack():
     addon = addon_factory(version_kw={'version': '123.0'}, file_kw={'is_signed': True})
     assert validate_version_number_is_gt_latest_signed_listed_version(addon, '122') == (
@@ -7,6 +7,8 @@ from django.core.paginator import PageNotAnInteger
 from django.test import override_settings
 from django.urls import reverse
 
+import pytest
+
 from olympia import amo
 from olympia.addons.models import AddonCategory
 from olympia.amo.sitemap import (
@@ -172,6 +174,7 @@ def test_amo_sitemap():
     )
 
 
+@pytest.mark.django_db
 def test_categories_sitemap():
     # without any addons we should still generate a url for each category
     empty_cats = list(CategoriesSitemap().items())
@@ -222,6 +225,7 @@ def test_categories_sitemap():
     assert set(cats_limited) - set(empty_cats) == {extra_2}
 
 
+@pytest.mark.django_db
 def test_collection_sitemap(mozilla_user):
     collection_a = collection_factory(
         author=mozilla_user, modified=datetime(2020, 1, 1, 1, 1, 1)
@@ -586,6 +590,7 @@ class TestAccountSitemap(TestCase):
         ]
 
 
+@pytest.mark.django_db
 def test_tag_pages_sitemap():
     # without any addons we should still generate a url for each tag page
     empty_tag_pages = list(TagPagesSitemap().items())
@@ -624,6 +629,7 @@ def test_tag_pages_sitemap():
     assert set(tag_pages_limited) - set(empty_tag_pages) == {extra_2}
 
 
+@pytest.mark.django_db
 def test_get_sitemap_section_pages():
     addon_factory()
     addon_factory()
@@ -448,6 +448,7 @@ class TestRobots(TestCase):
         assert f'Disallow: {username_url}$' in content
 
 
+@pytest.mark.django_db
 def test_fake_fxa_authorization_correct_values_passed():
     with override_settings(DEBUG=True):  # USE_FAKE_FXA_AUTH is already True
         url = reverse('fake-fxa-authorization')
@@ -464,6 +465,7 @@ def test_fake_fxa_authorization_correct_values_passed():
     assert elm  # No value yet, should just be present.
 
 
+@pytest.mark.django_db
 def test_fake_fxa_authorization_deactivated():
     url = reverse('fake-fxa-authorization')
     with override_settings(DEBUG=False, USE_FAKE_FXA_AUTH=False):
@@ -582,6 +584,7 @@ def test_multipart_error():
     )
 
 
+@pytest.mark.django_db
 def test_client_info():
     response = Client().get(reverse('amo.client_info'))
     assert response.status_code == 403
@@ -44,8 +44,6 @@ DATABASES = {
     'replica': get_db_config('DATABASES_REPLICA_URL', atomic_requests=False),
 }
 
-SERVICES_DATABASE = get_db_config('SERVICES_DATABASE_URL')
-
 REPLICA_DATABASES = ['replica']
 
 # Celery
@@ -36,8 +36,6 @@ DATABASES = {
     'replica': get_db_config('DATABASES_REPLICA_URL', atomic_requests=False),
 }
 
-SERVICES_DATABASE = get_db_config('SERVICES_DATABASE_URL')
-
 REPLICA_DATABASES = ['replica']
 
 # Celery
@@ -41,8 +41,6 @@ DATABASES = {
     'replica': get_db_config('DATABASES_REPLICA_URL', atomic_requests=False),
 }
 
-SERVICES_DATABASE = get_db_config('SERVICES_DATABASE_URL')
-
 REPLICA_DATABASES = ['replica']
 
 # Celery
@@ -139,12 +139,6 @@ DATABASES = {
     'default': get_db_config('DATABASES_DEFAULT_URL'),
 }
 
-# A database to be used by the services scripts, which does not use Django.
-# Please note that this is not a full Django database connection
-# so the amount of values supported are limited. By default we are using
-# the same connection as 'default' but that changes in prod/dev/stage.
-SERVICES_DATABASE = get_db_config('DATABASES_DEFAULT_URL')
-
 DATABASE_ROUTERS = ('multidb.PinningReplicaRouter',)
 
 # Put the aliases for your slave databases in this list.
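For context, the setting removed here was consumed outside Django's ORM: the deleted services/update.py opened a raw MySQL connection from it. A condensed restatement of that helper (with the setting passed in as a parameter purely for illustration), showing which keys the limited SERVICES_DATABASE dict had to provide:

import MySQLdb as mysql


def get_connection(services_database):
    # services_database is the dict built by get_db_config(); only these keys
    # were read by the old update service.
    return mysql.connect(
        host=services_database['HOST'],
        user=services_database['USER'],
        passwd=services_database['PASSWORD'],
        db=services_database['NAME'],
        charset=services_database['OPTIONS']['charset'],
    )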
@@ -1,3 +1,5 @@
+import pytest
+
 from olympia import amo
 from olympia.amo.tests import addon_factory
 from olympia.constants.promoted import LINE, NOTABLE, NOT_PROMOTED
@@ -7,6 +9,7 @@ from olympia.zadmin.models import set_config
 from ..tasks import add_high_adu_extensions_to_notable, ADU_LIMIT_CONFIG_KEY
 
 
+@pytest.mark.django_db
 def test_add_high_adu_extensions_to_notable():
     # Arbitrary_adu_limit
     adu_limit = 1234
@@ -12,6 +12,7 @@ from olympia.reviewers.utils import ReviewAddon, ReviewFiles, ReviewHelper
 
 
 @mock.patch('olympia.reviewers.utils.sign_file', lambda f: None)
+@pytest.mark.django_db
 @pytest.mark.parametrize(
     'review_action,addon_status,file_status,review_class,review_type,'
     'final_addon_status,final_file_status',
@@ -18,6 +18,14 @@ COMPAT_MODE_STRICT = 'strict'
 COMPAT_MODE_NORMAL = 'normal'
 COMPAT_MODE_IGNORE = 'ignore'
 
+# Pref in Firefox: extensions.update.url
+# A typical request looks like:
+# https://versioncheck.addons.mozilla.org/update/VersionCheck.php?reqVersion=2
+# &id=uBlock0@raymondhill.net&version=1.45.2&maxAppVersion=*&status=userEnabled
+# &appID={ec8030f7-c20a-464f-9b0e-13a3a9e97384}&appVersion=109.0a1&appOS=Linux
+# &appABI=x86_64-gcc3&locale=en-US&currentAppVersion=109.0a1&updateType=97
+# &compatMode=normal
+
 
 class Updater:
     def __init__(self, data):
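As an illustration of how a request like the one documented in the added comment is consumed, a small standalone sketch using the same parse_qsl approach the deleted services/update.py relied on (query string trimmed from the documented example):

from urllib.parse import parse_qsl

query_string = (
    'reqVersion=2&id=uBlock0@raymondhill.net&version=1.45.2'
    '&appID={ec8030f7-c20a-464f-9b0e-13a3a9e97384}&appVersion=109.0a1'
    '&compatMode=normal'
)
data = dict(parse_qsl(query_string))
compat_mode = data.pop('compatMode', 'strict')  # same default the old service applied
assert compat_mode == 'normal'
assert data['id'] == 'uBlock0@raymondhill.net'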