# -*- coding: utf8 -*-
import base64
from datetime import date
import json
import logging
import os
import path
import socket
import sys
import traceback
import urllib2
import uuid

from django.conf import settings
from django.core.management import call_command
from django.utils.http import urlencode

from celeryutils import task
from django_statsd.clients import statsd
from tower import ugettext as _

import amo
from amo.decorators import write, set_modified_on
from amo.utils import guard, resize_image, remove_icons
from addons.models import Addon
from applications.management.commands import dump_apps
from applications.models import Application, AppVersion
from devhub import perf
from files.models import FileUpload, File, FileValidation

from PIL import Image


log = logging.getLogger('z.devhub.task')


@task
@write
def validator(upload_id, **kw):
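    """Run the add-on validator on a FileUpload and store the JSON results.

    Exceptions raised while validating are recorded on the upload's
    task_error field.
    """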
    if not settings.VALIDATE_ADDONS:
        return None
    log.info('VALIDATING: %s' % upload_id)
    upload = FileUpload.objects.get(pk=upload_id)
    try:
        result = run_validator(upload.path)
        upload.validation = result
        upload.save()  # We want to hit the custom save().
    except:
        # Store the error with the FileUpload job, then raise
        # it for normal logging.
        tb = traceback.format_exception(*sys.exc_info())
        upload.update(task_error=''.join(tb))
        raise


@task
@write
def compatibility_check(upload_id, app_guid, appversion_str, **kw):
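    """Validate a FileUpload for compatibility with a single app/version.

    The results are stored on the upload along with the Application and
    AppVersion they were checked against.
    """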
    if not settings.VALIDATE_ADDONS:
        return None
    log.info('COMPAT CHECK for upload %s / app %s version %s'
             % (upload_id, app_guid, appversion_str))
    upload = FileUpload.objects.get(pk=upload_id)
    app = Application.objects.get(guid=app_guid)
    appver = AppVersion.objects.get(application=app, version=appversion_str)
    try:
        result = run_validator(upload.path,
                               for_appversions={app_guid: [appversion_str]},
                               test_all_tiers=True,
                               # Ensure we only check compatibility
                               # against this one specific version:
                               overrides={'targetapp_minVersion':
                                          {app_guid: appversion_str},
                                          'targetapp_maxVersion':
                                          {app_guid: appversion_str}})
        upload.validation = result
        upload.compat_with_app = app
        upload.compat_with_appver = appver
        upload.save()  # We want to hit the custom save().
    except:
        # Store the error with the FileUpload job, then raise
        # it for normal logging.
        tb = traceback.format_exception(*sys.exc_info())
        upload.update(task_error=''.join(tb))
        raise


@task
@write
def file_validator(file_id, **kw):
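    """Run the validator on a File and store a FileValidation result.

    Unlike validator(), exceptions are allowed to propagate.
    """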
    if not settings.VALIDATE_ADDONS:
        return None
    log.info('VALIDATING file: %s' % file_id)
    file = File.objects.get(pk=file_id)
    # Unlike upload validation, let the validator
    # raise an exception if there is one.
    result = run_validator(file.file_path)
    return FileValidation.from_json(file, result)


def run_validator(file_path, for_appversions=None, test_all_tiers=False,
                  overrides=None):
    """A pre-configured wrapper around the add-on validator.

    *file_path*
        Path to the add-on / extension file to validate.

    *for_appversions=None*
        An optional dict of application versions to validate this add-on
        for.  The key is an application GUID and its value is a list of
        versions.

    *test_all_tiers=False*
        When False (the default) the validator will not continue if it
        encounters fatal errors.  When True, all tests in all tiers are
        run.  See bug 615426 for discussion on this default.

    *overrides=None*
        Normally the validator gets info from install.rdf but there are a
        few things we need to override.  See the validator for supported
        overrides.  Example::

            {'targetapp_maxVersion': {'<app guid>': '<version>'}}

    To validate the add-on for compatibility with Firefox 5 and 6,
    you'd pass in::

        for_appversions={amo.FIREFOX.guid: ['5.0.*', '6.0.*']}

    Not all application versions will have a set of registered
    compatibility tests.
    """

    from validator.validate import validate

    # TODO(Kumar) remove this when validator is fixed, see bug 620503
    from validator.testcases import scripting
    scripting.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY
    import validator.constants
    validator.constants.SPIDERMONKEY_INSTALLATION = settings.SPIDERMONKEY

    apps = dump_apps.Command.JSON_PATH
    if not os.path.exists(apps):
        call_command('dump_apps')

    with statsd.timer('devhub.validator'):
        return validate(file_path,
                        for_appversions=for_appversions,
                        format='json',
                        # When False, this flag says to stop testing after
                        # one tier fails.
                        determined=test_all_tiers,
                        approved_applications=apps,
                        spidermonkey=settings.SPIDERMONKEY,
                        overrides=overrides,
                        timeout=settings.VALIDATOR_TIMEOUT)
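
# A quick sketch of typical use (the path here is hypothetical; with
# format='json' the validator returns a JSON string, which is how
# flag_binary below consumes it):
#
#     result = run_validator('/tmp/some-addon.xpi',
#                            for_appversions={amo.FIREFOX.guid: ['6.0.*']})
#     report = json.loads(result)
#     if not report['success']:
#         print report['errors']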


@task(rate_limit='4/m')
@write
def flag_binary(ids, **kw):
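    """Flag files as binary based on the validator's metadata output.

    With latest=True (the default) only each add-on's most recent
    non-disabled file is checked.
    """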
    log.info('[%s@%s] Flagging binary addons starting with id: %s...'
             % (len(ids), flag_binary.rate_limit, ids[0]))
    addons = Addon.objects.filter(pk__in=ids).no_transforms()

    latest = kw.pop('latest', True)

    for addon in addons:
        try:
            log.info('Validating addon with id: %s' % addon.pk)
            files = (File.objects.filter(version__addon=addon)
                         .exclude(status=amo.STATUS_DISABLED)
                         .order_by('-created'))
            if latest:
                files = [files[0]]
            for file in files:
                result = json.loads(run_validator(file.file_path))
                metadata = result['metadata']
                binary = (metadata.get('contains_binary_extension', False) or
                          metadata.get('contains_binary_content', False))
                binary_components = metadata.get('binary_components', False)
                log.info('Updating binary flags for addon with id=%s: '
                         'binary -> %s, binary_components -> %s' % (
                             addon.pk, binary, binary_components))
                file.update(binary=binary,
                            binary_components=binary_components)
        except Exception, err:
            log.error('Failed to run validation on addon id: %s, %s'
                      % (addon.pk, err))


@task
@set_modified_on
def resize_icon(src, dst, size, **kw):
    """Resizes addon icons."""
    log.info('[1@None] Resizing icon: %s' % dst)
    try:
        if isinstance(size, list):
            for s in size:
                resize_image(src, '%s-%s.png' % (dst, s), (s, s),
                             remove_src=False)
            os.remove(src)
        else:
            resize_image(src, dst, (size, size), remove_src=True)
        return True
    except Exception, e:
        log.error("Error saving addon icon: %s" % e)


@task
@set_modified_on
def resize_preview(src, instance, **kw):
    """Resizes preview images and stores the sizes on the preview."""
    thumb_dst, full_dst = instance.thumbnail_path, instance.image_path
    sizes = {}
    log.info('[1@None] Resizing preview and storing size: %s' % thumb_dst)
    try:
        sizes['thumbnail'] = resize_image(src, thumb_dst,
                                          amo.ADDON_PREVIEW_SIZES[0],
                                          remove_src=False)
        sizes['image'] = resize_image(src, full_dst,
                                      amo.ADDON_PREVIEW_SIZES[1],
                                      remove_src=False)
        instance.sizes = sizes
        instance.save()
        return True
    except Exception, e:
        log.error("Error saving preview: %s" % e)


@task
@write
def get_preview_sizes(ids, **kw):
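    """Record the pixel sizes of each add-on preview's images."""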
    log.info('[%s@%s] Getting preview sizes for addons starting at id: %s...'
             % (len(ids), get_preview_sizes.rate_limit, ids[0]))
    addons = Addon.objects.filter(pk__in=ids).no_transforms()

    for addon in addons:
        previews = addon.previews.all()
        log.info('Found %s previews for: %s' % (previews.count(), addon.pk))
        for preview in previews:
            try:
                log.info('Getting size for preview: %s' % preview.pk)
                sizes = {
                    'thumbnail': Image.open(preview.thumbnail_path).size,
                    'image': Image.open(preview.image_path).size,
                }
                preview.update(sizes=sizes)
            except Exception, err:
                log.error('Failed to find size of preview: %s, error: %s'
                          % (preview.pk, err))


@task
@write
def convert_purified(ids, **kw):
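    """Re-clean the_reason/the_future fields and save any add-on whose
    purified value changed."""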
    log.info('[%s@%s] Converting fields to purified starting at id: %s...'
             % (len(ids), convert_purified.rate_limit, ids[0]))
    fields = ['the_reason', 'the_future']
    for addon in Addon.objects.filter(pk__in=ids):
        flag = False
        for field in fields:
            value = getattr(addon, field)
            if value:
                value.clean()
                if value.localized_string_clean != value.localized_string:
                    flag = True
        if flag:
            log.info('Saving addon: %s to purify fields' % addon.pk)
            addon.save()


@task
def packager(data, feature_set, **kw):
    """Build an add-on based on input data."""
    log.info('[1@None] Packaging add-on')

    from devhub.views import packager_path
    dest = packager_path(data['slug'])

    with guard(u'devhub.packager.%s' % dest) as locked:
        if locked:
            log.error(u'Packaging in progress: %s' % dest)
            return

        with statsd.timer('devhub.packager'):
            from packager.main import packager
            log.info('Starting packaging: %s' % dest)
            features = set([k for k, v in feature_set.items() if v])
            try:
                packager(data, dest, features)
            except Exception, err:
                log.error(u'Failed to package add-on: %s' % err)
                raise
            if os.path.exists(dest):
                log.info(u'Package saved: %s' % dest)


def failed_validation(*messages):
    """Return a validation object that mimics the add-on validator's
    output."""
    m = []
    for msg in messages:
        m.append({'type': 'error', 'message': msg, 'tier': 1})

    return json.dumps({'errors': 1, 'success': False, 'messages': m})
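
# For example, failed_validation('Oops') serializes (key order may vary) to:
#
#     {"errors": 1, "success": false,
#      "messages": [{"type": "error", "message": "Oops", "tier": 1}]}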


def _fetch_content(url):
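    """Open a URL, re-raising network errors as user-friendly messages."""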
    try:
        return urllib2.urlopen(url, timeout=5)
    except urllib2.HTTPError, e:
        raise Exception(_('%s responded with %s (%s).') % (url, e.code, e.msg))
    except urllib2.URLError, e:
        # Unpack the URLError to try and find a useful message.
        if isinstance(e.reason, socket.timeout):
            raise Exception(_('Connection to "%s" timed out.') % url)
        elif isinstance(e.reason, socket.gaierror):
            raise Exception(_('Could not contact host at "%s".') % url)
        else:
            raise Exception(str(e.reason))


def check_content_type(response, content_type,
                       no_ct_message, wrong_ct_message):
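    """Raise an Exception unless the response's Content-Type header starts
    with content_type; the two message templates supply the error text."""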
    if not response.headers.get('Content-Type', '').startswith(content_type):
        if 'Content-Type' in response.headers:
            raise Exception(wrong_ct_message %
                            (content_type, response.headers['Content-Type']))
        else:
            raise Exception(no_ct_message % content_type)


def get_content_and_check_size(response, max_size, error_message):
    # Read one extra byte so we can tell whether the content exceeds
    # max_size without downloading a huge file in full.
    content = response.read(max_size + 1)
    if len(content) > max_size:
        raise Exception(error_message % max_size)
    return content


def save_icon(webapp, content):
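    """Write icon bytes to a temp file, resize them into place, and mark
    the webapp as having a PNG icon."""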
    tmp_path = path.path(settings.TMP_PATH) / 'icon'
    if not os.path.exists(tmp_path):
        os.makedirs(tmp_path)
    tmp_dst = tmp_path / uuid.uuid4().hex
    with open(tmp_dst, 'wb') as fd:
        fd.write(content)

    dirname = webapp.get_icon_dir()
    destination = os.path.join(dirname, '%s' % webapp.id)
    remove_icons(destination)
    resize_icon.delay(tmp_dst, destination, amo.ADDON_ICON_SIZES,
                      set_modified_on=[webapp])

    # Need to set the icon type so .get_icon_url() works.
    # Normally submit step 4 does it through AddonFormMedia,
    # but we want to beat them to the punch.
    # resize_icon outputs PNGs, so we know it's 'image/png'.
    webapp.icon_type = 'image/png'
    webapp.save()


@task
def fetch_icon(webapp, **kw):
    """Downloads a webapp icon from the location specified in the manifest.

    Returns without saving an icon if it could not be retrieved.
    """
    log.info(u'[1@None] Fetching icon for webapp %s.' % webapp.name)

    manifest = webapp.get_manifest_json()
    if 'icons' not in manifest:
        return
    biggest = max([int(size) for size in manifest['icons']])
    icon_url = manifest['icons'][str(biggest)]
    if icon_url.startswith('data:image'):
        image_string = icon_url.split('base64,')[1]
        content = base64.decodestring(image_string)
    else:
        try:
            response = _fetch_content(webapp.origin + icon_url)
        except Exception, e:
            log.error('Failed to fetch icon for webapp %s: %s'
                      % (webapp.pk, e.message))
            return

        size_error_message = _('Your icon must be less than %s bytes.')
        content = get_content_and_check_size(response,
                                             settings.MAX_ICON_UPLOAD_SIZE,
                                             size_error_message)

    save_icon(webapp, content)


@task
def fetch_manifest(url, upload_pk=None, **kw):
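    """Fetch a webapp manifest and hand it off to the validator.

    Content-Type and size problems are recorded as a failed validation
    on the FileUpload.
    """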
    log.info(u'[1@None] Fetching manifest: %s.' % url)
    upload = FileUpload.objects.get(pk=upload_pk)

    try:
        response = _fetch_content(url)

        no_ct_message = _('Your manifest must be served with the HTTP '
                          'header "Content-Type: %s".')
        wrong_ct_message = _('Your manifest must be served with the HTTP '
                             'header "Content-Type: %s". We saw "%s".')
        check_content_type(response, 'application/x-web-app-manifest+json',
                           no_ct_message, wrong_ct_message)

        size_error_message = _('Your manifest must be less than %s bytes.')
        content = get_content_and_check_size(response,
                                             settings.MAX_WEBAPP_UPLOAD_SIZE,
                                             size_error_message)
    except Exception, e:
        # Drop a message in the validation slot and bail.
        upload.update(validation=failed_validation(e.message))
        return

    upload.add_file([content], url, len(content))
    # Send the upload to the validator.
    validator(upload.pk)


@task
def start_perf_test_for_file(file_id, os_name, app_name, **kw):
    log.info('[@%s] Starting perf tests for file %s on %s / %s'
             % (start_perf_test_for_file.rate_limit, file_id,
                os_name, app_name))
    file_ = File.objects.get(pk=file_id)
    # TODO(Kumar) store token to retrieve results later?
    perf.start_perf_test(file_, os_name, app_name)


@task
def subscribe_to_responsys(campaign, address, format='html', source_url='',
                           lang='', country='', **kw):
    """
    Subscribe a user to a list in Responsys.  There should be two
    fields within the Responsys system named by the "campaign"
    parameter: <campaign>_FLG and <campaign>_DATE.
    """

    data = {
        'LANG_LOCALE': lang,
        'COUNTRY_': country,
        'SOURCE_URL': source_url,
        'EMAIL_ADDRESS_': address,
        'EMAIL_FORMAT_': 'H' if format == 'html' else 'T',
    }

    data['%s_FLG' % campaign] = 'Y'
    data['%s_DATE' % campaign] = date.today().strftime('%Y-%m-%d')
    data['_ri_'] = settings.RESPONSYS_ID

    try:
        res = urllib2.urlopen('http://awesomeness.mozilla.org/pub/rf',
                              data=urlencode(data))
        return res.code == 200
    except urllib2.URLError:
        return False
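
# A minimal usage sketch (the campaign name and address are hypothetical;
# Responsys would need ABOUT_ADDONS_FLG and ABOUT_ADDONS_DATE fields):
#
#     subscribe_to_responsys.delay('ABOUT_ADDONS', 'user@example.com',
#                                  format='text', lang='en-US', country='us')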