import json
import logging
import os
import shutil
import socket
import sys
import traceback
import urllib2
import urlparse

from django.conf import settings
from django.core.management import call_command

from celeryutils import task
from tower import ugettext as _

import amo
from amo.decorators import write, set_modified_on
from amo.utils import slugify, resize_image
from addons.models import Addon
from applications.management.commands import dump_apps
from applications.models import Application, AppVersion
from devhub import perf
from files.models import FileUpload, File, FileValidation

from PIL import Image


log = logging.getLogger('z.devhub.task')


@task
@write
def validator(upload_id, **kw):
    if not settings.VALIDATE_ADDONS:
        return None
    log.info('VALIDATING: %s' % upload_id)
    upload = FileUpload.objects.get(pk=upload_id)
    try:
        result = run_validator(upload.path)
        upload.validation = result
        upload.save()  # We want to hit the custom save().
    except:
        # Store the error with the FileUpload job, then raise
        # it for normal logging.
        tb = traceback.format_exception(*sys.exc_info())
        upload.update(task_error=''.join(tb))
        raise
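
# Illustrative dispatch only (not part of the original module): with
# celery-style tasks this is typically queued from view code after the
# upload is saved; the FileUpload instance here is hypothetical.
#
#     validator.delay(upload.pk)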


@task
@write
def compatibility_check(upload_id, app_guid, appversion_str, **kw):
    if not settings.VALIDATE_ADDONS:
        return None
    log.info('COMPAT CHECK for upload %s / app %s version %s'
             % (upload_id, app_guid, appversion_str))
    upload = FileUpload.objects.get(pk=upload_id)
    app = Application.objects.get(guid=app_guid)
    appver = AppVersion.objects.get(application=app, version=appversion_str)
    try:
        result = run_validator(upload.path,
                               for_appversions={app_guid: [appversion_str]},
                               test_all_tiers=True,
                               # Ensure we only check compatibility
                               # against this one specific version:
                               overrides={'targetapp_minVersion':
                                          {app_guid: appversion_str},
                                          'targetapp_maxVersion':
                                          {app_guid: appversion_str}})
        upload.validation = result
        upload.compat_with_app = app
        upload.compat_with_appver = appver
        upload.save()  # We want to hit the custom save().
    except:
        # Store the error with the FileUpload job, then raise
        # it for normal logging.
        tb = traceback.format_exception(*sys.exc_info())
        upload.update(task_error=''.join(tb))
        raise
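
# Illustrative dispatch only; the GUID constant and version string are
# example values (amo.FIREFOX.guid is also referenced in run_validator's
# docstring below):
#
#     compatibility_check.delay(upload.pk, amo.FIREFOX.guid, '6.0.*')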


@task
@write
def file_validator(file_id, **kw):
    if not settings.VALIDATE_ADDONS:
        return None
    log.info('VALIDATING file: %s' % file_id)
    file = File.objects.get(pk=file_id)
    # Unlike upload validation, let the validator
    # raise an exception if there is one.
    result = run_validator(file.file_path)
    return FileValidation.from_json(file, result)
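
# Unlike validator() above, this returns the FileValidation row, so a
# synchronous call can also be useful in tests -- a sketch with a
# hypothetical File:
#
#     file_validator(some_file.pk)   # or file_validator.delay(some_file.pk)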


def run_validator(file_path, for_appversions=None, test_all_tiers=False,
                  overrides=None):
    """A pre-configured wrapper around the addon validator.

    *file_path*
        Path to the addon / extension file to validate.

    *for_appversions=None*
        An optional dict of application versions to validate this addon
        for. The key is an application GUID and its value is a list of
        versions.

    *test_all_tiers=False*
        When False (the default) the validator will not continue if it
        encounters fatal errors. When True, all tests in all tiers are run.
        See bug 615426 for discussion on this default.

    *overrides=None*
        Normally the validator gets info from install.rdf but there are a
        few things we need to override. See the validator for supported
        overrides. Example: {'targetapp_maxVersion': {'<app guid>': '<version>'}}

    To validate the addon for compatibility with Firefox 5 and 6,
    you'd pass in::

        for_appversions={amo.FIREFOX.guid: ['5.0.*', '6.0.*']}

    Not all application versions will have a set of registered
    compatibility tests.
    """
    from validator.validate import validate

    # TODO(Kumar) remove this when validator is fixed, see bug 620503
    from validator.testcases import scripting
    scripting.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY
    import validator.constants
    validator.constants.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY

    apps = dump_apps.Command.JSON_PATH
    if not os.path.exists(apps):
        call_command('dump_apps')

    return validate(file_path,
                    for_appversions=for_appversions,
                    format='json',
                    # When False, this flag tells the validator to stop
                    # testing after one tier fails.
                    determined=test_all_tiers,
                    approved_applications=apps,
                    spidermonkey=settings.SPIDERMONKEY,
                    overrides=overrides)
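
# A minimal usage sketch based on the docstring above (the path is
# hypothetical). The return value is a JSON string, so callers such as
# flag_binary() below parse it with json.loads():
#
#     result = run_validator('/tmp/some-addon.xpi',
#                            for_appversions={amo.FIREFOX.guid: ['6.0.*']})
#     data = json.loads(result)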


@task(rate_limit='4/m')
@write
def flag_binary(ids, **kw):
    log.info('[%s@%s] Flagging binary addons starting with id: %s...'
             % (len(ids), flag_binary.rate_limit, ids[0]))
    addons = Addon.objects.filter(pk__in=ids).no_transforms()

    for addon in addons:
        try:
            log.info('Validating addon with id: %s' % addon.pk)
            file = File.objects.filter(version__addon=addon).latest('created')
            result = run_validator(file.file_path)
            binary = (json.loads(result)['metadata']
                      .get('contains_binary_extension', False))
            log.info('Setting binary for addon with id: %s to %s'
                     % (addon.pk, binary))
            addon.update(binary=binary)
        except Exception, err:
            log.error('Failed to run validation on addon id: %s, %s'
                      % (addon.pk, err))
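
# Example invocation (the ids are hypothetical); the rate_limit of 4/m
# above keeps bulk re-validation from saturating the queue:
#
#     flag_binary.delay([1, 2, 3])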


@task
@set_modified_on
def resize_icon(src, dst, size, **kw):
    """Resizes addon icons."""
    log.info('[1@None] Resizing icon: %s' % dst)
    try:
        if isinstance(size, list):
            for s in size:
                resize_image(src, '%s-%s.png' % (dst, s), (s, s),
                             remove_src=False)
            os.remove(src)
        else:
            resize_image(src, dst, (size, size), remove_src=True)
        return True
    except Exception, e:
        log.error("Error saving addon icon: %s" % e)


@task
@set_modified_on
def resize_preview(src, instance, **kw):
    """Resizes preview images and stores the sizes on the preview."""
    thumb_dst, full_dst = instance.thumbnail_path, instance.image_path
    sizes = {}
    log.info('[1@None] Resizing preview and storing size: %s' % thumb_dst)
    try:
        sizes['thumbnail'] = resize_image(src, thumb_dst,
                                          amo.ADDON_PREVIEW_SIZES[0],
                                          remove_src=False)
        sizes['image'] = resize_image(src, full_dst,
                                      amo.ADDON_PREVIEW_SIZES[1],
                                      remove_src=False)
        instance.sizes = sizes
        instance.save()
        return True
    except Exception, e:
        log.error("Error saving preview: %s" % e)


@task
@write
def get_preview_sizes(ids, **kw):
    log.info('[%s@%s] Getting preview sizes for addons starting at id: %s...'
             % (len(ids), get_preview_sizes.rate_limit, ids[0]))
    addons = Addon.objects.filter(pk__in=ids).no_transforms()

    for addon in addons:
        previews = addon.previews.all()
        log.info('Found %s previews for: %s' % (previews.count(), addon.pk))
        for preview in previews:
            try:
                log.info('Getting size for preview: %s' % preview.pk)
                sizes = {
                    'thumbnail': Image.open(preview.thumbnail_path).size,
                    'image': Image.open(preview.image_path).size,
                }
                preview.update(sizes=sizes)
            except Exception, err:
                # Log the preview id, not the addon id, so the failing
                # record can be found.
                log.error('Failed to find size of preview: %s, error: %s'
                          % (preview.pk, err))


@task
@write
def convert_purified(ids, **kw):
    log.info('[%s@%s] Converting fields to purified starting at id: %s...'
             % (len(ids), convert_purified.rate_limit, ids[0]))
    fields = ['the_reason', 'the_future']
    for addon in Addon.objects.filter(pk__in=ids):
        flag = False
        for field in fields:
            value = getattr(addon, field)
            if value:
                value.clean()
                if value.localized_string_clean != value.localized_string:
                    flag = True
        if flag:
            log.info('Saving addon: %s to purify fields' % addon.pk)
            addon.save()


@task
def packager(data, feature_set, **kw):
    """Build an add-on based on input data."""
    log.info('[1@None] Packaging add-on')

    # "Lock" the file by putting .lock in its name.
    from devhub.views import packager_path
    xpi_path = packager_path('%s.lock' % data['uuid'])
    log.info('Saving package to: %s' % xpi_path)

    from packager.main import packager

    data['slug'] = slugify(data['name']).replace('-', '_')
    features = set(k for k, v in feature_set.items() if v)

    packager(data, xpi_path, features)

    # Unlock the file and make it available.
    try:
        shutil.move(xpi_path, packager_path(data['uuid']))
    except IOError:
        log.error('Error unlocking add-on: %s' % xpi_path)
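
# The .lock suffix keeps the in-progress build out of packager_path
# lookups; the shutil.move() above is the "unlock" that publishes it.
# Illustrative dispatch -- the data keys and feature names shown here are
# assumptions, not a documented schema:
#
#     packager.delay({'uuid': '...', 'name': 'My Add-on'},
#                    {'toolbar_button': True, 'sidebar': False})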


def failed_validation(*messages):
    """Return a validation result shaped like the add-on validator's output."""
    m = []
    for msg in messages:
        m.append({'type': 'error', 'message': msg, 'tier': 1})
    return json.dumps({'errors': 1, 'success': False, 'messages': m})
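
# For example, failed_validation('Oops') returns JSON equivalent to:
#
#     {"errors": 1, "success": false,
#      "messages": [{"type": "error", "message": "Oops", "tier": 1}]}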


def _fetch_manifest(url):
    try:
        response = urllib2.urlopen(url, timeout=5)
    except urllib2.HTTPError, e:
        raise Exception(_('%s responded with %s (%s).') % (url, e.code, e.msg))
    except urllib2.URLError, e:
        # Unpack the URLError to try and find a useful message.
        if isinstance(e.reason, socket.timeout):
            raise Exception(_('Connection to "%s" timed out.') % url)
        elif isinstance(e.reason, socket.gaierror):
            raise Exception(_('Could not contact host at "%s".') % url)
        else:
            raise Exception(str(e.reason))

    content_type = 'application/x-web-app-manifest+json'
    if not response.headers.get('Content-Type', '').startswith(content_type):
        if 'Content-Type' in response.headers:
            raise Exception(_('Your manifest must be served with the HTTP '
                              'header "Content-Type: %s". We saw "%s".')
                            % (content_type, response.headers['Content-Type']))
        else:
            raise Exception(_('Your manifest must be served with the HTTP '
                              'header "Content-Type: %s".') % content_type)

    # Read one extra byte. Reject if it's too big so we don't have issues
    # downloading huge files.
    content = response.read(settings.MAX_WEBAPP_UPLOAD_SIZE + 1)
    if len(content) > settings.MAX_WEBAPP_UPLOAD_SIZE:
        raise Exception(_('Your manifest must be less than %s bytes.')
                        % settings.MAX_WEBAPP_UPLOAD_SIZE)
    return content
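
# Sketch of a direct call (the URL is hypothetical); any failure surfaces
# as a plain Exception whose message is already translated for display:
#
#     content = _fetch_manifest('https://example.com/manifest.webapp')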


@task
def fetch_manifest(url, upload_pk=None, **kw):
    log.info(u'[1@None] Fetching manifest: %s.' % url)
    upload = FileUpload.objects.get(pk=upload_pk)

    try:
        content = _fetch_manifest(url)
    except Exception, e:
        # Drop a message in the validation slot and bail.
        upload.update(validation=failed_validation(e.message))
        return

    upload.add_file([content], url, len(content))
    # Send the upload to the validator.
    validator(upload.pk)
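
# Note that validator() is called synchronously here rather than queued
# with .delay(), so validation runs inside this same task. Illustrative
# dispatch:
#
#     fetch_manifest.delay('https://example.com/manifest.webapp',
#                          upload_pk=upload.pk)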


@task
def start_perf_test_for_file(file_id, os_name, app_name, **kw):
    log.info('[@%s] Starting perf tests for file %s on %s / %s'
             % (start_perf_test_for_file.rate_limit, file_id,
                os_name, app_name))
    file_ = File.objects.get(pk=file_id)
    # TODO(Kumar) store token to retrieve results later?
    perf.start_perf_test(file_, os_name, app_name)
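
# Illustrative dispatch (the os/app names are example values; the accepted
# values are defined by devhub.perf, not here):
#
#     start_perf_test_for_file.delay(file_.pk, 'linux', 'firefox')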