Add a task before validation that repacks add-on uploads (#10961)

This ensures we can't hide content from validation by abusing the differences
between the various zip-reading implementations in our stack, since we
re-create a zip file from what addons-server saw initially.

* Refactor repack_fileupload to avoid issues with Celery < 4.2: on_error isn't
  fired for task chains, so we need to be creative... this folds
  repack_fileupload() and validate_file_path() into a single task that is then
  decorated by @validation_task for error handling.
* Fix tests now that we're repackaging.
* Add tests and fix hash generation.
Parent: b9e30e21a6
Commit: 0fa369a6ee
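The motivation is that zip readers can legitimately disagree about ambiguous archives, so the only way to be sure the linter validates exactly what addons-server saw is to rebuild the archive ourselves. Below is a minimal, self-contained illustration of one such ambiguity (duplicate entry names); it is not part of this commit, it only shows the class of problem that repacking removes.

import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as z:
    # Two entries with the same name: Python warns but writes both.
    z.writestr('manifest.json', '{"name": "benign"}')
    z.writestr('manifest.json', '{"name": "malicious"}')

with zipfile.ZipFile(buf) as z:
    # namelist() shows both entries...
    print(z.namelist())             # ['manifest.json', 'manifest.json']
    # ...but read() resolves the name to the *last* entry, while another zip
    # implementation might pick the first. Repacking the extracted contents
    # yields a single, unambiguous entry.
    print(z.read('manifest.json'))  # b'{"name": "malicious"}'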
@@ -38,6 +38,7 @@ from olympia.lib.akismet.models import AkismetReport
 from olympia.files.models import File, FileUpload, FileValidation
 from olympia.files.utils import (
     NoManifestFound, parse_addon, SafeZip, UnsupportedFileType)
+from olympia.files.tasks import repack_fileupload
 from olympia.versions.models import Version
 from olympia.devhub import file_validation_annotations as annotations
 
@@ -140,7 +141,7 @@ def validation_task(fn):
     """Wrap a validation task so that it runs with the correct flags, then
     parse and annotate the results before returning."""
 
-    @task(bind=True, ignore_result=False,  # Required for groups/chains.
+    @task(bind=True, ignore_result=False,  # We want to pass the results down.
           soft_time_limit=settings.VALIDATOR_TIMEOUT)
     @wraps(fn)
     def wrapper(task, id_or_path, *args, **kwargs):
@@ -161,20 +162,41 @@ def validation_task(fn):
             return results
         except Exception as exc:
             log.exception('Unhandled error during validation: %r' % exc)
-            return deepcopy(amo.VALIDATOR_SKELETON_EXCEPTION_WEBEXT)
+            results = deepcopy(amo.VALIDATOR_SKELETON_EXCEPTION_WEBEXT)
+            return results
         finally:
-            # But we do want to return a result after that exception has
+            # But we do want to return the results after that exception has
            # been handled.
             task.ignore_result = False
     return wrapper
 
 
+@validation_task
+@use_primary_db
+def validate_upload(upload_pk, channel):
+    """
+    Repack and then validate a FileUpload.
+
+    This only exists to get repack_fileupload() and validate_file_path()
+    into a single task that is wrapped by @validation_task. We do this because
+    with Celery < 4.2, on_error is never executed for task chains, so we use
+    a task decorated by @validation_task which takes care of it for us. Once
+    we upgrade to Celery 4.2, we stop calling repack_fileupload() here and just
+    have Validator.task return a chain with those 2 tasks when it's dealing
+    with a FileUpload.
+    https://github.com/mozilla/addons-server/issues/9068#issuecomment-473255011
+    """
+    upload = FileUpload.objects.get(pk=upload_pk)
+    repack_fileupload(upload.pk)
+    return validate_file_path(upload.path, channel)
+
+
 def validate_file_path(path, channel):
     """Run the validator against a file at the given path, and return the
-    results.
+    results, which should be a json string.
 
-    Should only be called directly by Validator or `validate_file` task.
+    Should only be called directly by `validate_upload` or `validate_file`
+    tasks.
 
     Search plugins don't call the linter but get linted by
     `annotate_search_plugin_validation`.
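For reference, the Celery >= 4.2 shape the docstring alludes to would presumably look something like the sketch below. The helper name fileupload_validation_chain is hypothetical, and the sketch assumes validate_file_path would be registered as a task again once the chain approach is adopted; the chain itself is not part of this commit.

from celery import chain

from olympia.devhub.tasks import validate_file_path
from olympia.files.tasks import repack_fileupload


def fileupload_validation_chain(upload, channel, on_error):
    # Repack first, then validate the repacked file sitting at upload.path.
    workflow = chain(
        repack_fileupload.si(upload.pk),
        # Assumes validate_file_path is exposed as a task in that future.
        validate_file_path.si(upload.path, channel),
    )
    # With Celery >= 4.2 the error callback is propagated to every task in
    # the chain, so a single handler covers both steps.
    return workflow.on_error(on_error)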
@@ -223,14 +245,8 @@ def validate_file(file_id):
     try:
         return file_.validation.validation
     except FileValidation.DoesNotExist:
-        # Calling `validate_file_path` looks particularly nasty because of
-        # @validation_task wrapping a lot of it's functionalities and
-        # ins particular it's signature. @validation_task also returns the
-        # actual python-object, so we have to dump it as JSON for backwards
-        # compatibility (something we may want to change at a later point)
-        return json.dumps(validate_file_path(
-            id_or_path=file_.current_file_path,
-            channel=file_.version.channel))
+        return validate_file_path(
+            file_.current_file_path, file_.version.channel)
 
 
 @task
@@ -28,7 +28,7 @@ from olympia.api.models import SYMMETRIC_JWT_TYPE, APIKey
 from olympia.applications.models import AppVersion
 from olympia.constants.base import VALIDATOR_SKELETON_RESULTS
 from olympia.devhub import tasks, file_validation_annotations as annotations
-from olympia.files.models import File
+from olympia.files.models import File, FileValidation
 from olympia.files.utils import NoManifestFound
 from olympia.files.tests.test_models import UploadTest
 from olympia.versions.models import Version
@@ -493,37 +493,81 @@ class TestRunAddonsLinter(UploadTest, ValidatorTestCase):
         assert not result['warnings']
         assert not result['errors']
 
+    @mock.patch('olympia.devhub.tasks.repack_fileupload')
+    @mock.patch('olympia.devhub.tasks.parse_addon')
+    @mock.patch('olympia.devhub.tasks.run_addons_linter')
+    def test_upload_is_repacked_before_linter_runs(
+            self, run_addons_linter_mock, parse_addon_mock,
+            repack_fileupload_mock):
+        """When validating new uploads, we are repacking before handing the xpi
+        to the linter."""
+        run_addons_linter_mock.return_value = json.dumps(
+            {'errors': 0, 'warnings': 0, 'notices': 0}
+        )
+        parse_addon_mock.return_value = {
+            'is_webextension': True
+        }
+        upload = self.get_upload(
+            abspath=self.valid_path, with_validation=False)
+        assert not upload.valid
+        tasks.validate(upload, listed=True)
+        assert parse_addon_mock.called
+        assert repack_fileupload_mock.called
+        upload.reload()
+        assert upload.valid, upload.validation
+
+    @mock.patch('olympia.devhub.tasks.repack_fileupload')
+    @mock.patch('olympia.devhub.tasks.parse_addon')
+    @mock.patch('olympia.devhub.tasks.run_addons_linter')
+    def test_file_is_not_repacked_before_linter_runs(
+            self, run_addons_linter_mock, parse_addon_mock,
+            repack_fileupload_mock):
+        """When validating existing File instances, we are *not* repacking."""
+        run_addons_linter_mock.return_value = json.dumps(
+            {'errors': 0, 'warnings': 0, 'notices': 0}
+        )
+        parse_addon_mock.return_value = {
+            'is_webextension': True
+        }
+        addon = addon_factory()
+        file_ = addon.current_version.all_files[0]
+        assert not FileValidation.objects.filter(file=file_).exists()
+        tasks.validate(file_).get()
+        assert parse_addon_mock.called
+        assert not repack_fileupload_mock.called
+        assert FileValidation.objects.filter(file=file_).exists()
 
 
 class TestValidateFilePath(ValidatorTestCase):
 
     def test_success(self):
-        result = tasks.validate_file_path(
+        result = json.loads(tasks.validate_file_path(
             get_addon_file('valid_webextension.xpi'),
-            channel=amo.RELEASE_CHANNEL_LISTED)
+            channel=amo.RELEASE_CHANNEL_LISTED))
         assert result['success']
         assert not result['errors']
         assert not result['warnings']
 
     def test_fail_warning(self):
-        result = tasks.validate_file_path(
+        result = json.loads(tasks.validate_file_path(
             get_addon_file('valid_webextension_warning.xpi'),
-            channel=amo.RELEASE_CHANNEL_LISTED)
+            channel=amo.RELEASE_CHANNEL_LISTED))
         assert result['success']
         assert not result['errors']
         assert result['warnings']
 
     def test_fail_error(self):
-        result = tasks.validate_file_path(
+        result = json.loads(tasks.validate_file_path(
             get_addon_file('invalid_webextension_invalid_id.xpi'),
-            channel=amo.RELEASE_CHANNEL_LISTED)
+            channel=amo.RELEASE_CHANNEL_LISTED))
        assert not result['success']
         assert result['errors']
         assert not result['warnings']
 
     def test_returns_skeleton_for_search_plugin(self):
-        result = tasks.validate_file_path(
+        result = json.loads(tasks.validate_file_path(
             get_addon_file('searchgeek-20090701.xml'),
-            channel=amo.RELEASE_CHANNEL_LISTED)
+            channel=amo.RELEASE_CHANNEL_LISTED))
 
         expected = amo.VALIDATOR_SKELETON_RESULTS
         assert result == expected
@@ -46,7 +46,7 @@ class TestAddonsLinterListed(UploadTest, TestCase):
         self.validate_file = self.patch(
             'olympia.devhub.tasks.validate_file').si
         self.validate_upload = self.patch(
-            'olympia.devhub.tasks.validate_file_path').si
+            'olympia.devhub.tasks.validate_upload').si
 
     def patch(self, thing):
         """Patch the given "thing", and revert the patch on test teardown."""

@@ -69,7 +69,7 @@ class TestAddonsLinterListed(UploadTest, TestCase):
         # Make sure we run the correct validation task for the upload and we
         # set up an error handler.
         self.validate_upload.assert_called_once_with(
-            file_upload.path, channel=channel)
+            file_upload.pk, channel=channel)
         assert self.validate_upload.return_value.on_error.called
 
         # Make sure we run the correct save validation task.
@@ -445,8 +445,8 @@ class TestValidateFile(BaseUploadTest):
         assert addon.binary
 
     @mock.patch('olympia.devhub.tasks.validate_file_path')
-    def test_ending_tier_is_preserved(self, v):
-        v.return_value = json.dumps({
+    def test_ending_tier_is_preserved(self, validate_file_path_mock):
+        validate_file_path_mock.return_value = json.dumps({
             "errors": 0,
             "success": True,
             "warnings": 0,

@@ -470,8 +470,9 @@ class TestValidateFile(BaseUploadTest):
         assert data['validation']['ending_tier'] == 5
 
     @mock.patch('olympia.devhub.tasks.validate_file_path')
-    def test_validator_sets_binary_flag_for_content(self, v):
-        v.return_value = json.dumps({
+    def test_validator_sets_binary_flag_for_content(
+            self, validate_file_path_mock):
+        validate_file_path_mock.return_value = json.dumps({
             "errors": 0,
             "success": True,
             "warnings": 0,

@@ -496,8 +497,8 @@ class TestValidateFile(BaseUploadTest):
         assert addon.binary
 
     @mock.patch('olympia.devhub.tasks.validate_file_path')
-    def test_linkify_validation_messages(self, v):
-        v.return_value = json.dumps({
+    def test_linkify_validation_messages(self, validate_file_path_mock):
+        validate_file_path_mock.return_value = json.dumps({
             "errors": 0,
             "success": True,
             "warnings": 1,
@@ -1,3 +1,4 @@
+import copy
 import uuid
 
 from django.conf import settings

@@ -219,7 +220,7 @@ class Validator(object):
     Class which handles creating or fetching validation results for File
     and FileUpload instances.
 
-    It forwards the actual validation to `devhub.tasks:validate_file_path`
+    It forwards the actual validation to `devhub.tasks:validate_upload`
     and `devhub.tasks:validate_file` but implements shortcuts for
     legacy add-ons and search plugins to avoid running the linter.
     """

@@ -270,12 +271,13 @@ class Validator(object):
 
         # Fallback error handler to save a set of exception results, in case
         # anything unexpected happens during processing.
-        on_error = self.error_handler(save, file_, channel, is_mozilla_signed)
+        self.on_error = self.error_handler(
+            save, file_, channel, is_mozilla_signed)
 
         # When the validation jobs complete, pass the results to the
         # appropriate save task for the object type.
         self.task = (
-            validate_task.on_error(on_error) |
+            validate_task.on_error(self.on_error) |
             save.s(file_.pk, channel, is_mozilla_signed)
         )
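For readers less familiar with Celery canvas: the composition above chains the validation signature into the save task with the | operator and attaches a fallback with .on_error(), and storing the handler on self.on_error keeps it reachable outside the constructor (the tests above assert on_error.called on the validate_upload signature). A rough standalone sketch with dummy task names, not addons-server code:

from celery import Celery

app = Celery('sketch', broker='memory://')


@app.task(ignore_result=False)
def validate_task(pk):
    # Stand-in for the real validation task; returns a results dict.
    return {'errors': 0}


@app.task
def save_task(results, pk):
    # Stand-in for the task that persists validation results.
    return results


pk = 42
skeleton = {'errors': 1, 'messages': []}

# .si() builds an immutable signature, so the fallback save ignores whatever
# the failed task produced and stores the skeleton results instead.
on_error = save_task.si(skeleton, pk)

# If validate_task raises, on_error runs; otherwise its return value is fed
# into save_task by the chain created with |.
workflow = validate_task.si(pk).on_error(on_error) | save_task.s(pk)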
@@ -292,9 +294,8 @@ class Validator(object):
     @staticmethod
     def error_handler(save_task, file_, channel, is_mozilla_signed):
         """Return the task error handler."""
-        return save_task.si(
-            amo.VALIDATOR_SKELETON_EXCEPTION_WEBEXT, file_.pk, channel,
-            is_mozilla_signed)
+        results = copy.deepcopy(amo.VALIDATOR_SKELETON_EXCEPTION_WEBEXT)
+        return save_task.si(results, file_.pk, channel, is_mozilla_signed)
 
     @staticmethod
     def validate_file(file):
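The switch to copy.deepcopy() here matters because the skeleton results are a shared module-level constant and downstream code mutates the results dict in place. A generic illustration of the aliasing bug this avoids (plain dicts, not the real VALIDATOR_SKELETON_EXCEPTION_WEBEXT constant):

import copy

SKELETON = {'errors': 1, 'messages': [{'id': ['validator', 'unexpected']}]}


def annotate(results):
    # Downstream annotation appends to the nested list in place.
    results['messages'].append({'id': ['new', 'warning']})
    return results


aliased = annotate(SKELETON)           # mutates the shared constant!
assert len(SKELETON['messages']) == 2  # every later user now sees the extra message

fresh = annotate(copy.deepcopy(SKELETON))   # safe: only the copy is modified
assert len(SKELETON['messages']) == 2       # the constant is left alone this time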
@@ -306,7 +307,7 @@ class Validator(object):
         """Return a subtask to validate a FileUpload instance."""
         assert not upload.validation
 
-        return tasks.validate_file_path.si(upload.path, channel=channel)
+        return tasks.validate_upload.si(upload.pk, channel=channel)
 
 
 def add_dynamic_theme_tag(version):
@@ -1,11 +1,16 @@
 import os
+import shutil
+import tempfile
 
 from django.conf import settings
 
 import olympia.core.logger
 
 from olympia.amo.celery import task
 from olympia.amo.decorators import use_primary_db
-from olympia.files.models import File, WebextPermission
-from olympia.files.utils import parse_xpi
+from olympia.amo.storage_utils import move_stored_file
+from olympia.files.models import File, FileUpload, WebextPermission
+from olympia.files.utils import extract_zip, get_sha256, parse_xpi
 from olympia.users.models import UserProfile
 
@@ -38,3 +43,35 @@ def extract_webext_permissions(ids, **kw):
                 defaults={'permissions': permissions}, file=file_)
         except Exception as err:
             log.error('Failed to extract: %s, error: %s' % (file_.pk, err))
+
+
+@task
+@use_primary_db
+def repack_fileupload(upload_pk):
+    log.info('Starting task to repackage FileUpload %s', upload_pk)
+    upload = FileUpload.objects.get(pk=upload_pk)
+    # When a FileUpload is created and a file added to it, if it's a xpi/zip,
+    # it should be moved to upload.path, and it should have a .xpi extension,
+    # so we only need to care about that extension here.
+    # We don't trust upload.name: it's the original filename as used by the
+    # developer, so it could be something else.
+    if upload.path.endswith('.xpi'):
+        try:
+            tempdir = extract_zip(upload.path)
+        except Exception:
+            # Something bad happened, maybe we couldn't parse the zip file.
+            # This task should have an on_error attached when called by
+            # Validator(), so we can just raise and the developer will get a
+            # generic error message.
+            log.exception('Could not extract upload %s for repack.', upload_pk)
+            raise
+        log.info('Zip from upload %s extracted, repackaging', upload_pk)
+        file_ = tempfile.NamedTemporaryFile(suffix='.zip', delete=False)
+        shutil.make_archive(os.path.splitext(file_.name)[0], 'zip', tempdir)
+        with open(file_.name, 'rb') as f:
+            upload.hash = 'sha256:%s' % get_sha256(f)
+        log.info('Zip from upload %s repackaged, moving file back', upload_pk)
+        move_stored_file(file_.name, upload.path)
+        upload.save()
+    else:
+        log.info('Not repackaging upload %s, it is not a xpi file.', upload_pk)
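The hash bookkeeping above relies on get_sha256() from olympia.files.utils, which is assumed here to return the hex SHA-256 digest of the file object's contents. A rough equivalent using hashlib directly, for illustration only (the path argument is hypothetical):

import hashlib


def sha256_of_file(fileobj, chunk_size=64 * 1024):
    # Stream the file so large xpis don't need to fit in memory.
    digest = hashlib.sha256()
    for chunk in iter(lambda: fileobj.read(chunk_size), b''):
        digest.update(chunk)
    return digest.hexdigest()


def repacked_upload_hash(path):
    # path would be the repacked archive, e.g. the NamedTemporaryFile written
    # by shutil.make_archive() in the task above.
    with open(path, 'rb') as f:
        return 'sha256:%s' % sha256_of_file(f)

Because make_archive re-deflates every member, the repacked bytes (and therefore this hash) will generally differ from the original upload, which is why the tests below only check the 'sha256:' prefix and that the hash changed.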
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+import mock
+import zipfile
+
+from olympia.amo.tests import TestCase
+from olympia.files.tasks import repack_fileupload
+from olympia.files.tests.test_models import UploadTest
+
+
+class TestRepackFileUpload(UploadTest, TestCase):
+    @mock.patch('olympia.files.tasks.move_stored_file')
+    @mock.patch('olympia.files.tasks.get_sha256')
+    @mock.patch('olympia.files.tasks.shutil')
+    @mock.patch('olympia.files.tasks.extract_zip')
+    def test_not_repacking_non_xpi_files_with_mocks(
+            self, extract_zip_mock, shutil_mock, get_sha256_mock,
+            move_stored_file_mock):
+        """Test we're not repacking non-xpi files"""
+        upload = self.get_upload('search.xml')
+        old_hash = upload.hash
+        assert old_hash.startswith('sha256:')
+        repack_fileupload(upload.pk)
+        assert not extract_zip_mock.called
+        assert not shutil_mock.make_archive.called
+        assert not get_sha256_mock.called
+        assert not move_stored_file_mock.called
+        upload.reload()
+        assert upload.hash == old_hash  # Hasn't changed.
+
+    @mock.patch('olympia.files.tasks.move_stored_file')
+    @mock.patch('olympia.files.tasks.get_sha256')
+    @mock.patch('olympia.files.tasks.shutil')
+    @mock.patch('olympia.files.tasks.extract_zip')
+    def test_repacking_xpi_files_with_mocks(
+            self, extract_zip_mock, shutil_mock, get_sha256_mock,
+            move_stored_file_mock):
+        """Opposite of test_not_repacking_non_xpi_files() (using same mocks)"""
+        upload = self.get_upload('webextension.xpi')
+        get_sha256_mock.return_value = 'fakehashfrommock'
+        extract_zip_mock.return_value = '/tmp/faketempdir'
+        repack_fileupload(upload.pk)
+        assert extract_zip_mock.called
+        assert shutil_mock.make_archive.called
+        assert get_sha256_mock.called
+        assert move_stored_file_mock.called
+        upload.reload()
+        assert upload.hash == 'sha256:fakehashfrommock'
+
+    def test_repacking_xpi_files(self):
+        """Test that repack_fileupload() does repack xpi files (no mocks)"""
+        # Take an extension with a directory structure, so that we can test
+        # that structure is restored once the file has been moved.
+        upload = self.get_upload('unicode-filenames.xpi')
+        original_hash = upload.hash
+        repack_fileupload(upload.pk)
+        upload.reload()
+        assert upload.hash.startswith('sha256:')
+        assert upload.hash != original_hash
+
+        # Test zip contents
+        with zipfile.ZipFile(upload.path) as z:
+            contents = sorted(z.namelist())
+            assert contents == [
+                u'chrome.manifest',
+                u'chrome/',
+                u'chrome/content/',
+                u'chrome/content/ff-overlay.js',
+                u'chrome/content/ff-overlay.xul',
+                u'chrome/content/overlay.js',
+                u'chrome/locale/',
+                u'chrome/locale/en-US/',
+                u'chrome/locale/en-US/overlay.dtd',
+                u'chrome/locale/en-US/overlay.properties',
+                u'chrome/skin/',
+                u'chrome/skin/overlay.css',
+                u'install.rdf',
+                u'삮'
+            ]
+            # Spot-check an individual file.
+            info = z.getinfo('install.rdf')
+            assert info.file_size == 717
+            assert info.compress_size < info.file_size
+            assert info.compress_type == zipfile.ZIP_DEFLATED
@@ -1105,7 +1105,8 @@ CELERY_TASK_ROUTES = {
     'olympia.devhub.tasks.send_welcome_email': {'queue': 'devhub'},
     'olympia.devhub.tasks.submit_file': {'queue': 'devhub'},
     'olympia.devhub.tasks.validate_file': {'queue': 'devhub'},
-    'olympia.devhub.tasks.validate_file_path': {'queue': 'devhub'},
+    'olympia.devhub.tasks.validate_upload': {'queue': 'devhub'},
+    'olympia.files.tasks.repack_fileupload': {'queue': 'devhub'},
     'olympia.lib.akismet.tasks.akismet_comment_check': {'queue': 'devhub'},
 
     # Activity (goes to devhub queue).
@@ -849,9 +849,10 @@ class TestCheckVersion(BaseUploadVersionTestMixin, TestCase):
         assert response.status_code == 200
         file_ = qs.get()
 
-        filename = self.xpi_filepath('@upload-version', version_string)
-        assert response.data['files'][0]['hash'] == \
-            file_.generate_hash(filename=filename)
+        # We're repackaging, so we can't compare the hash to an existing value.
+        expected_hash = file_.generate_hash(filename=file_.file_path)
+        assert file_.hash == expected_hash
+        assert response.data['files'][0]['hash'] == expected_hash
 
     def test_has_failed_upload(self):
         addon = Addon.objects.get(guid=self.guid)