Bug 1172044: Part 2 - Allow editors to flag or unflag validator messages as ignorable.

This commit is contained in:
Kris Maglione 2015-08-02 20:14:53 -07:00
Родитель d23a3c01bd
Коммит 7b195421cd
16 изменённых файлов: 611 добавлений и 144 удалений

4
.gitignore поставляемый
Просмотреть файл

@ -4,7 +4,7 @@ settings_local_*.py
local_settings.py
shellng_local.py
*.py[co]
*.sw[po]
.*.sw?
.coverage
.vagrant
pip-log.txt
@ -33,6 +33,8 @@ media/external/*
vendor
.nose*
tmp/*
cache/*
dist/*
tags
vagrantconfig_local.yaml
vagrant/manifests/classes/custom.pp

Просмотреть файл

@ -198,7 +198,7 @@
"valid": true,
"file": 100456,
"notices": 1,
"validation": "{\"errors\": 1, \"detected_type\": \"extension\", \"success\": false, \"warnings\": 0, \"message_tree\": {\"testcases_targetapplication\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [], \"__infos\": 0, \"test_targetedapplications\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [], \"__infos\": 0, \"invalid_min_version\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [\"d67edb08018411e09b13c42c0301fe38\"], \"__infos\": 0}}}}, \"infos\": 0, \"messages\": [{\"uid\": \"d67edb08018411e09b13c42c0301fe38\", \"tier\": 1, \"id\": [\"testcases_targetapplication\", \"test_targetedapplications\", \"invalid_min_version\"], \"file\": \"install.rdf\", \"message\": \"The value of <em:id> is invalid.\", \"context\": [\"<em:description>...\", [\"<foo/>\"]], \"type\": \"error\", \"line\": 0, \"description\": [\"<iframe>\", \"Version \\\"3.0b3\\\" isn't compatible with {ec8030f7-c20a-464f-9b0e-13a3a9e97384}.\"], \"signing_help\": [\"<script>&amp;\"]}], \"rejected\": false}\n"
"validation": "{\"errors\": 1, \"detected_type\": \"extension\", \"success\": false, \"warnings\": 0, \"message_tree\": {\"testcases_targetapplication\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [], \"__infos\": 0, \"test_targetedapplications\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [], \"__infos\": 0, \"invalid_min_version\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [\"d67edb08018411e09b13c42c0301fe38\"], \"__infos\": 0}}}}, \"infos\": 0, \"messages\": [{\"uid\": \"d67edb08018411e09b13c42c0301fe38\", \"tier\": 1, \"id\": [\"testcases_targetapplication\", \"test_targetedapplications\", \"invalid_min_version\"], \"file\": \"install.rdf\", \"message\": \"The value of <em:id> is invalid.\", \"context\": [\"<em:description>...\", \"<foo/>\"], \"type\": \"error\", \"line\": 0, \"description\": [\"<iframe>\", \"Version \\\"3.0b3\\\" isn't compatible with {ec8030f7-c20a-464f-9b0e-13a3a9e97384}.\"], \"signing_help\": [\"<script>&amp;\"]}], \"rejected\": false}\n"
}
}
]

Просмотреть файл

@ -10,7 +10,7 @@
"user": null,
"valid": false,
"path": "/Users/kumar/dev/zamboni/tmp/addons/temp/db8366bd8ce04ed8a607e44809a63d52.xpi",
"validation": "{\"errors\": 1, \"detected_type\": \"extension\", \"success\": false, \"warnings\": 0, \"message_tree\": {\"testcases_targetapplication\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [], \"__infos\": 0, \"test_targetedapplications\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [], \"__infos\": 0, \"invalid_min_version\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [\"d67edb08018411e09b13c42c0301fe38\"], \"__infos\": 0}}}}, \"infos\": 0, \"messages\": [{\"uid\": \"d67edb08018411e09b13c42c0301fe38\", \"tier\": 1, \"id\": [\"testcases_targetapplication\", \"test_targetedapplications\", \"invalid_min_version\"], \"file\": \"install.rdf\", \"message\": \"The value of <em:id> is invalid.\", \"context\": [\"<em:description>...\", [\"<foo/>\"]], \"type\": \"error\", \"line\": 0, \"description\": [\"<iframe>\", \"Version \\\"3.0b3\\\" isn't compatible with {ec8030f7-c20a-464f-9b0e-13a3a9e97384}.\"], \"signing_help\": [\"<script>&amp;\"]}], \"rejected\": false}\n"
"validation": "{\"errors\": 1, \"detected_type\": \"extension\", \"success\": false, \"warnings\": 0, \"message_tree\": {\"testcases_targetapplication\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [], \"__infos\": 0, \"test_targetedapplications\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [], \"__infos\": 0, \"invalid_min_version\": {\"__warnings\": 0, \"__errors\": 1, \"__messages\": [\"d67edb08018411e09b13c42c0301fe38\"], \"__infos\": 0}}}}, \"infos\": 0, \"messages\": [{\"uid\": \"d67edb08018411e09b13c42c0301fe38\", \"tier\": 1, \"id\": [\"testcases_targetapplication\", \"test_targetedapplications\", \"invalid_min_version\"], \"file\": \"install.rdf\", \"message\": \"The value of <em:id> is invalid.\", \"context\": [\"<em:description>...\", \"<foo/>\"], \"type\": \"error\", \"line\": 0, \"description\": [\"<iframe>\", \"Version \\\"3.0b3\\\" isn't compatible with {ec8030f7-c20a-464f-9b0e-13a3a9e97384}.\"], \"signing_help\": [\"<script>&amp;\"]}], \"rejected\": false}\n"
}
}
]

Просмотреть файл

@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
import json
import os
import socket
@ -32,7 +33,7 @@ from translations.models import delete_translation, Translation
from translations.forms import TranslationFormMixin
from versions.models import (ApplicationsVersions, License,
VALID_SOURCE_EXTENSIONS, Version)
from . import tasks
from . import tasks, utils
paypal_log = commonware.log.getLogger('z.paypal')
@ -92,6 +93,25 @@ class DeleteForm(happyforms.Form):
raise forms.ValidationError(_('Password incorrect.'))
class AnnotateFileForm(happyforms.Form):
    """Form used by editors to flag (or unflag) a single validator
    message as an ignorable duplicate."""

    # The full validator message being annotated, as a JSON string.
    message = forms.CharField()
    # Whether duplicates of this message should be ignored.
    ignore_duplicates = forms.BooleanField(required=False)

    def clean_message(self):
        """Decode the submitted message from JSON, and reject any message
        for which no annotation key can be derived."""
        raw = self.cleaned_data['message']
        try:
            parsed = json.loads(raw)
        except ValueError:
            raise forms.ValidationError(_('Invalid JSON object'))

        # Only messages with a valid comparator key can be annotated.
        if utils.ValidationComparator.message_key(parsed) is None:
            raise forms.ValidationError(
                _('Message not eligible for annotation'))

        return parsed
class LicenseChoiceRadio(forms.widgets.RadioFieldRenderer):
def __iter__(self):

Просмотреть файл

@ -5,6 +5,7 @@ import os
import socket
import urllib2
from copy import deepcopy
from functools import wraps
from tempfile import NamedTemporaryFile
from django.conf import settings
@ -40,7 +41,6 @@ def validate(file_, listed=None):
# Import loop.
from .utils import ValidationAnnotator
return ValidationAnnotator(file_, listed=listed).task.delay()
@ -49,24 +49,38 @@ def validate(file_, listed=None):
validator.ValidationTimeout = SoftTimeLimitExceeded
validation_task = task(ignore_result=False, # Required for groups/chains.
soft_time_limit=settings.VALIDATOR_TIMEOUT)
def validation_task(fn):
    """Wrap a validation task so that it runs with the correct flags, then
    parse and annotate the results before returning.

    The wrapped function receives an identifier (or path) and the file's
    hash as its first two arguments, and must return the validator's
    results as a JSON string.
    """
    @task(ignore_result=False,  # Required for groups/chains.
          soft_time_limit=settings.VALIDATOR_TIMEOUT)
    @wraps(fn)
    def wrapper(id_, hash_, *args, **kw):
        # The task returns raw JSON; decode it before annotating.
        data = fn(id_, hash_, *args, **kw)
        result = json.loads(data)

        if hash_:
            # Only annotate when we know which file hash the results
            # belong to.
            # Import loop.
            from .utils import ValidationComparator
            ValidationComparator(result).annotate_results(hash_)

        return result
    return wrapper
@validation_task
def validate_file_path(path, listed, **kw):
def validate_file_path(path, hash_, listed, **kw):
"""Run the validator against a file at the given path, and return the
results.
Should only be called directly by ValidationAnnotator."""
result = run_validator(path, listed=listed)
# FIXME: Have validator return native Python datastructure.
return json.loads(result)
return run_validator(path, listed=listed)
@validation_task
def validate_file(file_id, **kw):
def validate_file(file_id, hash_, **kw):
"""Validate a File instance. If cached validation results exist, return
those, otherwise run the validator.
@ -74,12 +88,10 @@ def validate_file(file_id, **kw):
file_ = File.objects.get(pk=file_id)
try:
return json.loads(file_.validation.validation)
return file_.validation.validation
except FileValidation.DoesNotExist:
result = run_validator(file_.current_file_path,
listed=file_.version.addon.is_listed)
# FIXME: Have validator return native Python datastructure.
return json.loads(result)
return run_validator(file_.current_file_path,
listed=file_.version.addon.is_listed)
@task

Просмотреть файл

@ -35,6 +35,12 @@
data-file-url="{{ file_url }}"
data-file-id="{{ file.id }}"
{% endif %}
{% if annotate_url %}
data-annotate-url="{{ annotate_url }}"
{% endif %}
{% if validation_data %}
data-validation="{{ validation_data }}"
{% endif %}
data-validateurl="{{ validate_url }}">
{% if result_type == 'compat' %}
{% with app_trans=app_trans,

Просмотреть файл

@ -1,3 +1,4 @@
import json
import os.path
from copy import deepcopy
@ -10,7 +11,7 @@ import amo.tests
from addons.models import Addon
from devhub import utils
from devhub.tasks import annotate_validation_results
from files.models import File, FileUpload
from files.models import File, FileUpload, FileValidation, ValidationAnnotation
from versions.models import Version
@ -48,6 +49,14 @@ class TestValidationComparator(amo.tests.TestCase):
self.new_msg.update(merge_dicts(old, changes))
self.expected_msg.update(merge_dicts(self.new_msg, expected_changes))
if ('signing_severity' in self.expected_msg and
'ignore_duplicates' not in self.expected_msg):
# The annotator should add an ignore_duplicates key to all
# signing-related messages that don't have one.
if utils.ValidationComparator.message_key(self.expected_msg):
self.expected_msg['ignore_duplicates'] = (
utils.ValidationComparator.is_ignorable(self.expected_msg))
results = self.run_comparator(self.old_msg, self.new_msg.copy())
assert results['messages'] == [self.expected_msg]
@ -303,6 +312,102 @@ class TestValidationComparator(amo.tests.TestCase):
# Changes, fails.
self.compare(message, {'file': 'foo thing.js'}, {})
def test_json_deserialization(self):
    """Test that the JSON deserializer returns the expected hashable
    objects."""
    # Arrays (including empty and nested ones) must come back as tuples,
    # so the decoded values can be used as dict keys; scalars unchanged.
    assert (utils.json_decode('["foo", ["bar", "baz"], 12, null, '
                              '[], false]') ==
            ('foo', ('bar', 'baz'), 12, None, (), False))
def test_annotate_results(self):
    """Test that results are annotated as expected."""
    RESULTS = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
    RESULTS['messages'] = [
        {'id': ['foo', 'bar'],
         'context': ['foo', 'bar', 'baz'],
         'file': 'foo',
         'signing_severity': 'low'},
        {'id': ['a', 'b'],
         'context': ['c', 'd', 'e'],
         'file': 'f',
         'ignore_duplicates': False,
         'signing_severity': 'high'},
        {'id': ['z', 'y'],
         'context': ['x', 'w', 'v'],
         'file': 'u',
         'signing_severity': 'high'},
    ]
    HASH = 'xxx'

    def annotation(hash_, message, **kw):
        """Create a ValidationAnnotation object for the given file hash,
        and the key of the given message, with the given keywords."""
        key = utils.ValidationComparator.message_key(message)
        return ValidationAnnotation(
            file_hash=hash_, message_key=json.dumps(key), **kw)

    # Create two annotations for this file, and one for a message in this
    # file, but with the wrong hash.
    ValidationAnnotation.objects.bulk_create((
        annotation(HASH, RESULTS['messages'][0], ignore_duplicates=False),
        annotation(HASH, RESULTS['messages'][1], ignore_duplicates=True),
        annotation('zzz', RESULTS['messages'][2], ignore_duplicates=True),
    ))

    # Annotate a copy of the results.
    annotated = deepcopy(RESULTS)
    utils.ValidationComparator(annotated).annotate_results(HASH)

    # The two annotations for this file should be applied.
    # (Use `is` rather than `==` so a truthy non-bool can't slip through.)
    assert annotated['messages'][0]['ignore_duplicates'] is False
    assert annotated['messages'][1]['ignore_duplicates'] is True

    # The annotation for the wrong file should not be applied, and
    # `ignore_duplicates` should be set to the default for the message
    # severity (false).
    assert annotated['messages'][2]['ignore_duplicates'] is False
def test_is_ignorable(self):
    """Test that is_ignorable returns the correct value in all relevant
    circumstances."""
    MESSAGE = {'id': ['foo', 'bar', 'baz'],
               'message': 'Foo',
               'description': 'Foo',
               'context': ['foo', 'bar', 'baz'],
               'file': 'foo.js', 'line': 1}

    IGNORABLE_TYPES = ('notice', 'warning')
    OTHER_TYPES = ('error',)
    IGNORABLE_SEVERITIES = ('trivial', 'low')
    OTHER_SEVERITIES = ('medium', 'high')

    def check(**overrides):
        """Return whether the base message, with the given keyword
        overrides applied, is ignorable."""
        return utils.ValidationComparator.is_ignorable(
            merge_dicts(MESSAGE, overrides))

    # A message is ignorable exactly when both its type AND its signing
    # severity are in the ignorable sets; walk the full cross-product of
    # types and severities and check every combination against that rule.
    for type_ in IGNORABLE_TYPES + OTHER_TYPES:
        for severity in IGNORABLE_SEVERITIES + OTHER_SEVERITIES:
            expected = (type_ in IGNORABLE_TYPES and
                        severity in IGNORABLE_SEVERITIES)
            assert check(signing_severity=severity,
                         type=type_) == expected
class TestValidationAnnotatorBase(amo.tests.TestCase):
@ -349,17 +454,68 @@ class TestValidationAnnotatorBase(amo.tests.TestCase):
self.validate_upload = self.patch(
'devhub.tasks.validate_file_path').subtask
def tearDown(self):
for patcher in self.patchers:
patcher.stop()
def patch(self, thing):
"""Patch the given "thing", and revert the patch on test teardown."""
patcher = mock.patch(thing)
self.patchers.append(patcher)
self.addCleanup(patcher.stop)
return patcher.start()
def check_upload(self, file_, listed=None):
    """Check that our file upload is matched to the given file.

    `file_` may be None, meaning no previous file is expected to match.
    """
    # Create an annotator, make sure it matches the expected older file.
    va = utils.ValidationAnnotator(self.file_upload)
    assert va.prev_file == file_

    # Make sure we run the correct validation task for the matched file,
    # if there is a match.
    if file_:
        self.validate_file.assert_called_once_with([
            file_.pk, file_.original_hash])
    else:
        assert not self.validate_file.called

    # Make sure we run the correct validation task for the upload.
    self.validate_upload.assert_called_once_with(
        [self.file_upload.path, self.file_upload.hash, listed])

    # Make sure we run the correct save validation task, with a
    # fallback error handler.
    self.save_upload.assert_has_calls([
        mock.call([mock.ANY, self.file_upload.pk], {'annotate': False},
                  immutable=True),
        mock.call([self.file_upload.pk], link_error=mock.ANY)])
def check_file(self, file_new, file_old):
    """Check that the given new file is matched to the given old file.

    `file_old` may be None, meaning no previous file is expected to match.
    """
    # Create an annotator, make sure it matches the expected older file.
    va = utils.ValidationAnnotator(file_new)
    assert va.prev_file == file_old

    # We shouldn't be attempting to validate a bare upload.
    assert not self.validate_upload.called

    # Make sure we run the correct validation tasks for both files,
    # or only one validation task if there's no match.
    if file_old:
        self.validate_file.assert_has_calls([
            mock.call([file_new.pk, file_new.original_hash]),
            mock.call([file_old.pk, file_old.original_hash])])
    else:
        self.validate_file.assert_called_once_with([
            file_new.pk, file_new.original_hash])

    # Make sure we run the correct save validation task, with a
    # fallback error handler.
    # NOTE(review): these assertions use self.file_1_1 rather than
    # file_new — presumably every caller passes self.file_1_1 as the new
    # file; confirm before reusing this helper with another file.
    self.save_file.assert_has_calls([
        mock.call([mock.ANY, self.file_1_1.pk], {'annotate': False},
                  immutable=True),
        mock.call([self.file_1_1.pk], link_error=mock.ANY)])
class TestValidationAnnotatorUnlisted(TestValidationAnnotatorBase):
def setUp(self):
super(TestValidationAnnotatorUnlisted, self).setUp()
@ -368,58 +524,83 @@ class TestValidationAnnotatorUnlisted(TestValidationAnnotatorBase):
def test_find_fileupload_prev_version(self):
"""Test that the correct previous version is found for a new upload."""
va = utils.ValidationAnnotator(self.file_upload)
assert va.find_previous_version(self.xpi_version) == self.file
self.validate_file.assert_called_once_with([self.file.pk])
self.validate_upload.assert_called_once_with(
[self.file_upload.path, None])
self.save_upload.assert_called_with([self.file_upload.pk],
link_error=mock.ANY)
self.check_upload(self.file)
def test_find_file_prev_version(self):
"""Test that the correct previous version is found for a File."""
va = utils.ValidationAnnotator(self.file_1_1)
assert va.find_previous_version(self.xpi_version) == self.file
assert not self.validate_upload.called
self.validate_file.assert_has_calls([mock.call([self.file_1_1.pk]),
mock.call([self.file.pk])])
self.save_file.assert_called_with([self.file_1_1.pk],
link_error=mock.ANY)
self.check_file(self.file_1_1, self.file)
def test_find_future_fileupload_version(self):
"""Test that a future version will not be matched."""
self.version.update(version='1.2')
va = utils.ValidationAnnotator(self.file_upload)
assert va.find_previous_version(self.xpi_version) is None
assert not self.validate_file.called
self.validate_upload.assert_called_once_with(
[self.file_upload.path, None])
self.save_upload.asserted_once_with([self.file_upload.pk],
link_error=mock.ANY)
self.check_upload(None)
def test_find_future_file(self):
"""Test that a future version will not be matched."""
self.version.update(version='1.2')
va = utils.ValidationAnnotator(self.file_1_1)
assert va.find_previous_version(self.xpi_version) is None
self.check_file(self.file_1_1, None)
assert not self.validate_upload.called
self.validate_file.assert_called_once_with([self.file_1_1.pk])
def test_update_annotations(self):
"""Test that annotations are correctly copied from an old file to a new
one."""
self.save_file.assert_called_with([self.file_1_1.pk],
link_error=mock.ANY)
HASH_0 = 'xxx'
HASH_1 = 'yyy'
RESULTS = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
RESULTS['messages'] = [
{'id': ['foo'],
'context': ['foo'],
'file': 'foo'},
{'id': ['baz'],
'context': ['baz'],
'file': 'baz'},
]
self.file.update(original_hash=HASH_0)
self.file_1_1.update(original_hash=HASH_1)
# Attach the validation results to our previous version's file,
# and update the object's cached foreign key value.
self.file.validation = FileValidation.objects.create(
file=self.file_1_1, validation=json.dumps(RESULTS))
def annotation(hash_, key, **kw):
return ValidationAnnotation(file_hash=hash_, message_key=key, **kw)
def key(metasyntatic_variable):
"""Return an arbitrary, but valid, message key for the given
arbitrary string."""
return '[["{0}"], ["{0}"], "{0}", null, false]'.format(
metasyntatic_variable)
# Create two annotations which match the above messages, and
# one which does not.
ValidationAnnotation.objects.bulk_create((
annotation(HASH_0, key('foo'), ignore_duplicates=True),
annotation(HASH_0, key('bar'), ignore_duplicates=True),
annotation(HASH_0, key('baz'), ignore_duplicates=False),
))
# Create the annotator and make sure it links our target
# file to the previous version.
annotator = utils.ValidationAnnotator(self.file_1_1)
assert annotator.prev_file == self.file
annotator.update_annotations()
# The two annotations which match messages in the above
# validation results should be duplicated for this version.
# The third annotation should not.
assert (set(ValidationAnnotation.objects.filter(file_hash=HASH_1)
.values_list('message_key', 'ignore_duplicates')) ==
set(((key('foo'), True), (key('baz'), False))))
class TestValidationAnnotatorListed(TestValidationAnnotatorBase):
@ -429,30 +610,16 @@ class TestValidationAnnotatorListed(TestValidationAnnotatorBase):
full reviewed version."""
self.version_1_1.update(version='1.0.1')
self.file_1_1.update(status=amo.STATUS_PUBLIC)
va = utils.ValidationAnnotator(self.file_upload)
assert va.find_previous_version(self.xpi_version) == self.file_1_1
self.validate_file.assert_called_once_with([self.file_1_1.pk])
self.validate_upload.assert_called_once_with(
[self.file_upload.path, None])
self.save_upload.assert_called_with([self.file_upload.pk],
link_error=mock.ANY)
self.check_upload(self.file_1_1)
def test_full_to_unreviewed(self):
"""Test that a full reviewed version is not matched to an unreviewed
version."""
self.file_1_1.update(status=amo.STATUS_UNREVIEWED)
va = utils.ValidationAnnotator(self.file_upload)
assert va.find_previous_version(self.xpi_version) == self.file
self.validate_file.assert_called_once_with([self.file.pk])
self.validate_upload.assert_called_once_with(
[self.file_upload.path, None])
self.save_upload.assert_called_with([self.file_upload.pk],
link_error=mock.ANY)
self.check_upload(self.file)
# We can't prevent matching against prelim or beta versions
# until we change the file upload process to allow flagging
@ -464,30 +631,17 @@ class TestValidationAnnotatorListed(TestValidationAnnotatorBase):
self.file_1_1.update(status=amo.STATUS_PUBLIC)
va = utils.ValidationAnnotator(self.file_1_1)
assert va.find_previous_version(self.xpi_version) == self.file
self.validate_file.assert_has_calls([mock.call([self.file_1_1.pk]),
mock.call([self.file.pk])])
self.save_file.assert_called_with([self.file_1_1.pk],
link_error=mock.ANY)
self.check_file(self.file_1_1, self.file)
for status in amo.STATUS_UNREVIEWED, amo.STATUS_LITE, amo.STATUS_BETA:
self.validate_file.reset_mock()
self.save_file.reset_mock()
self.file.update(status=status)
va = utils.ValidationAnnotator(self.file_1_1)
assert va.find_previous_version(self.xpi_version) is None
self.validate_file.assert_called_once_with([self.file_1_1.pk])
self.save_file.assert_called_with([self.file_1_1.pk],
link_error=mock.ANY)
self.check_file(self.file_1_1, None)
class TestValidationAnnotatorBeta(TestValidationAnnotatorBase):
def setUp(self):
super(TestValidationAnnotatorBeta, self).setUp()
@ -501,10 +655,7 @@ class TestValidationAnnotatorBeta(TestValidationAnnotatorBase):
"""Test that a beta submission is matched to the latest approved
release version."""
va = utils.ValidationAnnotator(self.file_upload)
assert va.find_previous_version(self.xpi_version) == self.file
self.validate_file.assert_called_once_with([self.file.pk])
self.check_upload(self.file)
def test_match_beta_to_signed_beta(self):
"""Test that a beta submission is matched to a prior signed beta
@ -513,10 +664,7 @@ class TestValidationAnnotatorBeta(TestValidationAnnotatorBase):
self.file_1_1.update(status=amo.STATUS_BETA, is_signed=True)
self.version_1_1.update(version='1.1b0')
va = utils.ValidationAnnotator(self.file_upload)
assert va.find_previous_version(self.xpi_version) == self.file_1_1
self.validate_file.assert_called_once_with([self.file_1_1.pk])
self.check_upload(self.file_1_1)
def test_match_beta_to_unsigned_beta(self):
"""Test that a beta submission is not matched to a prior unsigned beta
@ -525,10 +673,7 @@ class TestValidationAnnotatorBeta(TestValidationAnnotatorBase):
self.file_1_1.update(status=amo.STATUS_BETA)
self.version_1_1.update(version='1.1b0')
va = utils.ValidationAnnotator(self.file_upload)
assert va.find_previous_version(self.xpi_version) == self.file
self.validate_file.assert_called_once_with([self.file.pk])
self.check_upload(self.file)
# This is technically in tasks at the moment, but may make more sense as a

Просмотреть файл

@ -1988,7 +1988,7 @@ class TestUpload(BaseUploadTest):
@mock.patch('validator.validate.validate')
def test_upload_unlisted_addon(self, validate_mock):
"""Unlisted addons are validated as "self hosted" addons."""
validate_mock.return_value = '{"errors": 0}'
validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS)
self.url = reverse('devhub.upload_unlisted')
self.post()
# Make sure it was called with listed=False.

Просмотреть файл

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import json
from copy import deepcopy
from django.core.files.storage import default_storage as storage
@ -17,7 +18,7 @@ from amo.urlresolvers import reverse
from applications.models import AppVersion
from devhub.tasks import compatibility_check
from files.helpers import copyfileobj
from files.models import File, FileUpload, FileValidation
from files.models import File, FileUpload, FileValidation, ValidationAnnotation
from files.tests.test_models import UploadTest as BaseUploadTest
from files.utils import parse_addon
from users.models import UserProfile
@ -43,7 +44,7 @@ class TestUploadValidation(BaseUploadTest):
eq_(msg['description'][0], '&lt;iframe&gt;')
eq_(msg['signing_help'][0], '&lt;script&gt;&amp;amp;')
eq_(msg['context'],
[u'<em:description>...', [u'<foo/>']])
[u'<em:description>...', u'<foo/>'])
def test_date_on_upload(self):
upload = FileUpload.objects.get(name='invalid-id-20101206.xpi')
@ -157,7 +158,7 @@ class TestFileValidation(amo.tests.TestCase):
eq_(msg['description'][0], '&lt;iframe&gt;')
eq_(msg['signing_help'][0], '&lt;script&gt;&amp;amp;')
eq_(msg['context'],
[u'<em:description>...', [u'<foo/>']])
[u'<em:description>...', u'<foo/>'])
@mock.patch('devhub.tasks.run_validator')
def test_json_results_post_not_cached(self, validate):
@ -180,13 +181,13 @@ class TestFileValidation(amo.tests.TestCase):
assert not validate.called
def test_json_results_get_cached(self):
"""Tests that GET requests return results when they've already been
"""Test that GET requests return results when they've already been
cached."""
assert self.file.has_been_validated
assert self.client.get(self.json_url).status_code == 200
def test_json_results_get_not_cached(self):
"""Tests that GET requests return a Method Not Allowed error when
"""Test that GET requests return a Method Not Allowed error when
results have not been cached."""
self.file.validation.delete()
@ -197,6 +198,95 @@ class TestFileValidation(amo.tests.TestCase):
assert self.client.get(self.json_url).status_code == 405
class TestFileAnnotation(amo.tests.TestCase):
    """Tests for the view which lets editors flag, or unflag, individual
    validator messages as ignorable duplicates."""

    fixtures = ['base/users', 'devhub/addon-validation-1']

    def setUp(self):
        super(TestFileAnnotation, self).setUp()

        # Log in as an editor before exercising the annotation views.
        assert self.client.login(username='editor@mozilla.com',
                                 password='password')

        # Three keyable messages: one low-severity message already flagged
        # as an ignorable duplicate, and two unflagged high-severity ones.
        self.RESULTS = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        self.RESULTS['messages'] = [
            {'id': ['foo', 'bar'],
             'context': ['foo', 'bar', 'baz'],
             'file': 'foo',
             'signing_severity': 'low',
             'ignore_duplicates': True,
             'message': '', 'description': []},
            {'id': ['a', 'b'],
             'context': ['c', 'd', 'e'],
             'file': 'f',
             'ignore_duplicates': False,
             'signing_severity': 'high',
             'message': '', 'description': []},
            {'id': ['z', 'y'],
             'context': ['x', 'w', 'v'],
             'file': 'u',
             'signing_severity': 'high',
             'ignore_duplicates': False,
             'message': '', 'description': []},
        ]

        # Make the results as close to the JSON loaded from the validator
        # as possible, so pytest reports a better diff when we fail.
        # At present, just changes all strings to unicode.
        self.RESULTS = json.loads(json.dumps(self.RESULTS))

        # Attach the results to a fixture file with a known hash, so that
        # annotations can be matched against it.
        self.file_validation = FileValidation.objects.get(pk=1)
        self.file_validation.update(validation=json.dumps(self.RESULTS))

        self.file = self.file_validation.file
        self.file.update(original_hash='xxx')

        # URL serving the (annotated) validation results.
        self.url = reverse('devhub.json_file_validation',
                           args=[self.file.version.addon.slug,
                                 self.file.pk])
        # URL accepting annotation POSTs.
        self.annotate_url = reverse('devhub.annotate_file_validation',
                                    args=[self.file.version.addon.slug,
                                          self.file.pk])

    def test_base_results(self):
        """Test that the base results are returned unchanged prior to
        annotation."""
        resp = self.client.get(self.url)
        assert json.loads(resp.content) == {u'validation': self.RESULTS,
                                            u'error': None}
        # No annotation rows should exist before any annotation POSTs.
        assert not ValidationAnnotation.objects.exists()

    def annotate_message(self, idx, ignore_duplicates):
        """Annotate a message in `self.RESULTS` and check that the result
        is correct."""
        self.client.post(self.annotate_url, {
            'message': json.dumps(self.RESULTS['messages'][idx]),
            'ignore_duplicates': ignore_duplicates})

        # The results view should now reflect the updated flag value.
        resp = self.client.get(self.url)
        self.RESULTS['messages'][idx]['ignore_duplicates'] = ignore_duplicates
        assert json.loads(resp.content) == {u'validation': self.RESULTS,
                                            u'error': None}

    def test_annotated_results(self):
        """Test that annotations result in modified results and the expected
        number of annotation objects."""
        # Re-annotating the same message keeps the count at one, so the
        # existing annotation is updated rather than duplicated.
        self.annotate_message(idx=1, ignore_duplicates=True)
        assert ValidationAnnotation.objects.count() == 1

        self.annotate_message(idx=1, ignore_duplicates=False)
        assert ValidationAnnotation.objects.count() == 1

        # Annotating a different message creates a second row.
        self.annotate_message(idx=2, ignore_duplicates=True)
        assert ValidationAnnotation.objects.count() == 2
class TestValidateAddon(amo.tests.TestCase):
fixtures = ['base/users']
@ -225,7 +315,7 @@ class TestValidateAddon(amo.tests.TestCase):
@mock.patch('validator.validate.validate')
def test_upload_listed_addon(self, validate_mock):
"""Listed addons are not validated as "self-hosted" addons."""
validate_mock.return_value = '{"errors": 0}'
validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS)
self.url = reverse('devhub.upload')
data = open(get_image_path('animated.png'), 'rb')
self.client.post(self.url, {'upload': data})
@ -237,7 +327,7 @@ class TestValidateAddon(amo.tests.TestCase):
@mock.patch('validator.validate.validate')
def test_upload_unlisted_addon(self, validate_mock):
"""Unlisted addons are validated as "self-hosted" addons."""
validate_mock.return_value = '{"errors": 0}'
validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS)
self.url = reverse('devhub.upload_unlisted')
data = open(get_image_path('animated.png'), 'rb')
self.client.post(self.url, {'upload': data})
@ -249,7 +339,7 @@ class TestValidateAddon(amo.tests.TestCase):
@mock.patch('validator.validate.validate')
def test_upload_sideload_addon(self, validate_mock):
"""Sideload addons are validated as "self-hosted" addons."""
validate_mock.return_value = '{"errors": 0}'
validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS)
self.url = reverse('devhub.upload_sideload')
data = open(get_image_path('animated.png'), 'rb')
self.client.post(self.url, {'upload': data})
@ -274,7 +364,8 @@ class TestUploadURLs(amo.tests.TestCase):
AddonUser.objects.create(addon=self.addon, user=user)
self.run_validator = self.patch('devhub.tasks.run_validator')
self.run_validator.return_value = '{"errors": 0}'
self.run_validator.return_value = json.dumps(
amo.VALIDATOR_SKELETON_RESULTS)
self.parse_addon = self.patch('devhub.utils.parse_addon')
self.parse_addon.return_value = {'guid': self.addon.guid,
'version': '1.0'}

Просмотреть файл

@ -90,6 +90,10 @@ detail_patterns = patterns(
views.json_file_validation,
name='devhub.json_file_validation'),
url('^file/(?P<file_id>[^/]+)/validation/annotate$',
views.annotate_file_validation,
name='devhub.annotate_file_validation'),
url('^validation-result/(?P<result_id>\d+)$',
views.bulk_compat_result,
name='devhub.bulk_compat_result'),
@ -131,7 +135,6 @@ redirect_patterns = patterns(
lambda r, id: redirect('devhub.addons.versions', id, permanent=True)),
('^versions/(\d+)',
lambda r, id: redirect('devhub.addons.versions', id, permanent=True)),
('^versions/validate/(\d+)', views.validator_redirect),
)

Просмотреть файл

@ -1,3 +1,6 @@
import json
from json.decoder import JSONArray
from django.conf import settings
from django.db.models import Q
from django.forms import ValidationError
@ -7,15 +10,16 @@ from tower import ugettext as _
import amo
from addons.models import Addon
from amo.decorators import write
from amo.urlresolvers import linkify_escape
from files.models import File, FileUpload
from files.models import File, FileUpload, ValidationAnnotation
from files.utils import parse_addon
from validator.constants import SIGNING_SEVERITIES
from validator.version import Version
from . import tasks
def process_validation(validation, is_compatibility=False):
def process_validation(validation, is_compatibility=False, file_hash=None):
"""Process validation results into the format expected by the web
frontend, including transforming certain fields into HTML, mangling
compatibility messages, and limiting the number of messages displayed."""
@ -31,6 +35,9 @@ def process_validation(validation, is_compatibility=False):
validation['ending_tier'] = max(msg.get('tier', -1)
for msg in validation['messages'])
if file_hash:
ValidationComparator(validation).annotate_results(file_hash)
limit_validation_results(validation)
htmlify_validation(validation)
@ -88,6 +95,7 @@ def limit_validation_results(validation):
'warnings so %s messages were truncated. '
'After addressing the visible messages, '
"you'll be able to see the others.") % leftover_count,
'description': [],
'compatibility_type': compat_type})
@ -172,17 +180,46 @@ class ValidationAnnotator(object):
self.task = chain(validate, save.subtask([file_.pk],
link_error=on_error))
@write
def update_annotations(self):
    """Update the annotations for our file with the annotations for any
    previous matching file, if it exists.

    Copies each annotation attached to the previous file's hash over to
    this file's hash, but only for messages which still appear in this
    file's validation results.
    """
    if not (self.file and self.prev_file):
        # We don't have two Files to work with. Nothing to do.
        return

    hash_ = self.file.original_hash
    if ValidationAnnotation.objects.filter(file_hash=hash_).exists():
        # We already have annotations for this file hash.
        # Don't add any more.
        return

    # Serialize the keys of every message in this file's results;
    # only annotations matching one of these keys are worth copying.
    comparator = ValidationComparator(
        json.loads(self.file.validation.validation))
    keys = [json.dumps(key) for key in comparator.messages.iterkeys()]

    annos = ValidationAnnotation.objects.filter(
        file_hash=self.prev_file.original_hash, message_key__in=keys)

    # Clone each matching annotation under the new file's hash.
    ValidationAnnotation.objects.bulk_create(
        ValidationAnnotation(file_hash=hash_, message_key=anno.message_key,
                             ignore_duplicates=anno.ignore_duplicates)
        for anno in annos)
@staticmethod
def validate_file(file):
"""Return a subtask to validate a File instance."""
return tasks.validate_file.subtask([file.pk])
return tasks.validate_file.subtask([file.pk, file.original_hash])
@staticmethod
def validate_upload(upload, is_listed):
"""Return a subtask to validate a FileUpload instance."""
assert not upload.validation
return tasks.validate_file_path.subtask([upload.path, is_listed])
return tasks.validate_file_path.subtask([upload.path, upload.hash,
is_listed])
def find_previous_version(self, version):
"""Find the most recent previous version of this add-on, prior to
@ -234,6 +271,24 @@ class ValidationAnnotator(object):
return file_
def JSONTuple(*args, **kw):
    """Parse a JSON array with the stock JSONArray parser, but return
    the result as a hashable tuple rather than a list, along with the
    character offset where parsing stopped."""
    values, end = JSONArray(*args, **kw)
    return tuple(values), end


class HashableJSONDecoder(json.JSONDecoder):
    """A JSON decoder which deserializes arrays as tuples rather than
    lists, so decoded values can be used as dict keys."""

    def __init__(self, *args, **kwargs):
        super(HashableJSONDecoder, self).__init__()
        # Swap in the tuple-producing array parser, then rebuild the
        # scanner so it picks up the new callback.
        self.parse_array = JSONTuple
        self.scan_once = json.scanner.py_make_scanner(self)


json_decode = HashableJSONDecoder().decode
class ValidationComparator(object):
"""Compares the validation results from an older version with a version
currently being checked, and annotates results based on which messages
@ -305,7 +360,8 @@ class ValidationComparator(object):
# The `ignore_duplicates` flag will be set by editors, to overrule
# the basic signing severity heuristic. If it's present, it takes
# precedence.
low_severity = message.get('signing_severity') in ('trivial', 'low')
low_severity = (message.get('type') != 'error' and
message.get('signing_severity') in ('trivial', 'low'))
return message.get('ignore_duplicates', low_severity)
def compare_results(self, validation):
@ -336,6 +392,8 @@ class ValidationComparator(object):
msg['ignored'] = self.is_ignorable(prev_msg)
if severity:
if 'ignore_duplicates' not in msg and self.message_key(msg):
msg['ignore_duplicates'] = self.is_ignorable(msg)
if msg.get('ignored'):
ignored_summary[severity] += 1
else:
@ -344,3 +402,22 @@ class ValidationComparator(object):
validation['signing_summary'] = signing_summary
validation['signing_ignored_summary'] = ignored_summary
return validation
def annotate_results(self, file_hash):
"""Annotate our `validation` result set with any stored annotations
for a file with `file_hash`."""
annotations = (ValidationAnnotation.objects
.filter(file_hash=file_hash,
ignore_duplicates__isnull=False)
.values_list('message_key', 'ignore_duplicates'))
for message_key, ignore_duplicates in annotations:
key = json_decode(message_key)
msg = self.messages.get(key)
if msg:
msg['ignore_duplicates'] = ignore_duplicates
for msg in self.messages.itervalues():
if 'ignore_duplicates' not in msg and 'signing_severity' in msg:
msg['ignore_duplicates'] = self.is_ignorable(msg)

Просмотреть файл

@ -43,9 +43,11 @@ from devhub import perf
from devhub.decorators import dev_required
from devhub.forms import CheckCompatibilityForm
from devhub.models import ActivityLog, BlogPost, RssKey, SubmitStep
from devhub.utils import ValidationAnnotator, process_validation
from devhub.utils import (ValidationAnnotator, ValidationComparator,
process_validation)
from editors.decorators import addons_reviewer_required
from editors.helpers import get_position, ReviewHelper
from files.models import File, FileUpload, FileValidation
from files.models import File, FileUpload, FileValidation, ValidationAnnotation
from files.utils import is_beta, parse_addon
from lib.crypto.packaged import sign_file
from search.views import BaseAjaxSearch
@ -689,24 +691,60 @@ def upload_detail_for_addon(request, addon_id, addon, uuid):
@dev_required(allow_editors=True)
def file_validation(request, addon_id, addon, file_id):
file = get_object_or_404(File, id=file_id)
file_ = get_object_or_404(File, id=file_id)
validate_url = reverse('devhub.json_file_validation',
args=[addon.slug, file.id])
args=[addon.slug, file_.id])
automated_signing = (addon.automated_signing or
file.status == amo.STATUS_BETA)
file_.status == amo.STATUS_BETA)
prev_file = ValidationAnnotator(file).prev_file
prev_file = ValidationAnnotator(file_).prev_file
if prev_file:
file_url = reverse('files.compare', args=[file.id, prev_file.id,
file_url = reverse('files.compare', args=[file_.id, prev_file.id,
'file', ''])
else:
file_url = reverse('files.list', args=[file.id, 'file', ''])
file_url = reverse('files.list', args=[file_.id, 'file', ''])
return render(request, 'devhub/validation.html',
dict(validate_url=validate_url, file_url=file_url, file=file,
filename=file.filename, timestamp=file.created,
automated_signing=automated_signing, addon=addon))
context = {'validate_url': validate_url, 'file_url': file_url,
'file': file_, 'filename': file_.filename,
'timestamp': file_.created, 'addon': addon,
'automated_signing': automated_signing}
if acl.check_addons_reviewer(request):
context['annotate_url'] = reverse('devhub.annotate_file_validation',
args=[addon.slug, file_id])
if file_.has_been_validated:
context['validation_data'] = json.dumps(
file_.validation.processed_validation)
return render(request, 'devhub/validation.html', context)
@post_required
@addons_reviewer_required
@json_view
def annotate_file_validation(request, addon_id, file_id):
    """Create or update the ValidationAnnotation for one validator
    message of the given file, keyed on the file's original hash."""
    file_ = get_object_or_404(File, pk=file_id)

    form = forms.AnnotateFileForm(request.POST)
    if not form.is_valid():
        return {'status': 'fail', 'errors': dict(form.errors.items())}

    key = ValidationComparator.message_key(form.cleaned_data['message'])
    values = {'ignore_duplicates': form.cleaned_data['ignore_duplicates']}

    annotation, created = ValidationAnnotation.objects.get_or_create(
        file_hash=file_.original_hash, message_key=json.dumps(key),
        defaults=values)
    if not created:
        # The row already existed; bring its flag up to date.
        annotation.update(**values)

    return {'status': 'ok'}
@dev_required(allow_editors=True)
@ -856,10 +894,15 @@ def upload_detail(request, uuid, format='html'):
return _compat_result(request, validate_url,
upload.compat_with_app,
upload.compat_with_appver)
return render(request, 'devhub/validation.html',
dict(validate_url=validate_url, filename=upload.name,
automated_signing=upload.automated_signing,
timestamp=upload.created))
context = {'validate_url': validate_url, 'filename': upload.name,
'automated_signing': upload.automated_signing,
'timestamp': upload.created}
if upload.validation:
context['validation_data'] = json.dumps(upload.processed_validation)
return render(request, 'devhub/validation.html', context)
class AddonDependencySearch(BaseAjaxSearch):
@ -1631,12 +1674,6 @@ def request_review(request, addon_id, addon, status):
return redirect(addon.get_dev_url('versions'))
# TODO(kumar): Remove when the editor tools are in zamboni.
def validator_redirect(request, version_id):
    """Permanently redirect an old validator URL to the add-on's
    versions page."""
    version = get_object_or_404(Version, id=version_id)
    return redirect('devhub.addons.versions', version.addon_id,
                    permanent=True)
@post_required
@addon_view
def admin(request, addon):

Просмотреть файл

@ -53,6 +53,9 @@ class File(amo.models.OnChangeMixin, amo.models.ModelBase):
filename = models.CharField(max_length=255, default='')
size = models.PositiveIntegerField(default=0) # In bytes.
hash = models.CharField(max_length=255, default='')
# The original hash of the file, before we sign it, or repackage it in
# any other way.
original_hash = models.CharField(max_length=255, default='')
# TODO: delete this column
codereview = models.BooleanField(default=False)
jetpack_version = models.CharField(max_length=10, null=True)
@ -169,6 +172,7 @@ class File(amo.models.OnChangeMixin, amo.models.ModelBase):
file_.status = amo.STATUS_LITE
file_.hash = file_.generate_hash(upload.path)
file_.original_hash = file_.hash
if upload.validation:
validation = json.loads(upload.validation)
@ -189,10 +193,14 @@ class File(amo.models.OnChangeMixin, amo.models.ModelBase):
if upload.validation:
# Import loop.
from devhub.tasks import annotate_validation_results
from devhub.utils import ValidationAnnotator
validation = annotate_validation_results(validation)
FileValidation.from_json(file_, validation)
# Copy annotations from any previously approved file.
ValidationAnnotator(file_).update_annotations()
return file_
@classmethod
@ -464,6 +472,13 @@ def cleanup_file(sender, instance, **kw):
% (filename, instance.pk))
storage.delete(filename)
if not (File.objects.filter(original_hash=instance.original_hash)
.exclude(pk=instance.pk).exists()):
# This is the last remaining file with this hash.
# Delete any validation annotations keyed to it.
ValidationAnnotation.objects.filter(
file_hash=instance.original_hash).delete()
@File.on_change
def check_file(old_attr, new_attr, instance, sender, **kw):
@ -587,7 +602,7 @@ class FileUpload(amo.models.ModelBase):
validation = json.loads(self.validation)
is_compatibility = self.compat_with_app is not None
return process_validation(validation, is_compatibility)
return process_validation(validation, is_compatibility, self.hash)
class FileValidation(amo.models.ModelBase):
@ -646,7 +661,17 @@ class FileValidation(amo.models.ModelBase):
"""Return processed validation results as expected by the frontend."""
# Import loop.
from devhub.utils import process_validation
return process_validation(json.loads(self.validation))
return process_validation(json.loads(self.validation),
file_hash=self.file.original_hash)
class ValidationAnnotation(amo.models.ModelBase):
    """An editor-made annotation on a single validator message, keyed to
    the original (pre-signing) hash of the file it was made against so
    it can be carried over to later uploads of matching files."""
    # Matches File.original_hash rather than a File foreign key, so the
    # annotation survives re-uploads of identical contents.
    file_hash = models.CharField(max_length=255, db_index=True)
    # JSON-encoded key identifying one message in the validation
    # results, as produced by ValidationComparator.message_key().
    message_key = models.CharField(max_length=1024)
    # True/False once an editor has explicitly flagged/unflagged the
    # message as ignorable in future updates; NULL when unset.
    ignore_duplicates = models.NullBooleanField(blank=True, null=True)

    class Meta:
        db_table = 'validation_annotations'
def nfd_str(u):

Просмотреть файл

@ -0,0 +1,13 @@
-- Record each file's hash as originally uploaded, before we sign or
-- repackage it; validation annotations are keyed against this value.
ALTER TABLE
`files`
ADD COLUMN
`original_hash` varchar(255) NOT NULL DEFAULT ''
;
-- Backfill existing rows: files already in the table have not been
-- re-hashed since upload, so their current hash is the original one.
UPDATE
`files`
SET
`original_hash` = `hash`
WHERE
`original_hash` = '' AND `hash` IS NOT NULL
;

Просмотреть файл

@ -0,0 +1,9 @@
-- Editor annotations on individual validator messages. Rows are keyed
-- by the file's original hash (not a file ID), so annotations can be
-- matched against future uploads of identical contents.
CREATE TABLE `validation_annotations` (
    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
    `created` datetime NOT NULL,
    `modified` datetime NOT NULL,
    -- Matches files.original_hash.
    `file_hash` varchar(255) NOT NULL,
    -- JSON-encoded key identifying one validator message.
    `message_key` varchar(1024) NOT NULL,
    -- NULL = unset; TRUE/FALSE = editor's explicit ignore decision.
    `ignore_duplicates` bool,
    KEY `file_hash` (`file_hash`)
);

Просмотреть файл

@ -156,8 +156,9 @@ function initValidator($doc) {
this.versionChangeLinks = null;
this.allCounts = {error: 0, warning: 0};
this.automatedSigning = suite.is('.automated-signing');
this.fileURL = suite.attr('data-file-url');
this.fileID = suite.attr('data-file-id');
this.annotateURL = suite.data('annotateUrl');
this.fileURL = suite.data('fileUrl');
this.fileID = suite.data('fileId');
if (this.automatedSigning) {
this.hideNonSigning = $('#signing-hide-unnecessary').prop('checked');
@ -254,6 +255,15 @@ function initValidator($doc) {
// them.
$('h5', msgDiv).html(msg.message); // Sanitized HTML value.
if (msg.ignore_duplicates != null && this.annotateURL) {
msgDiv.append($('<p>').append(
$('<label>', {
text: ' ' + gettext('Ignore this message in future updates')
}).prepend($('<input>', { type: 'checkbox', checked: msg.ignore_duplicates,
'class': 'ignore-duplicates-checkbox',
name: JSON.stringify(msg) }))));
}
// The validator returns the "description" and
// "signing_help" properties as either strings, or
// arrays of strings. We turn them all into arrays
@ -515,12 +525,29 @@ function initValidator($doc) {
$('.addon-validator-suite', $doc).bind('validate', function(e) {
var el = $(this),
url = el.attr('data-validateurl');
data = el.data();
if (data.annotateUrl) {
el.delegate('.ignore-duplicates-checkbox', 'change',
function(event) {
var $target = $(event.target);
$.ajax({type: 'POST',
url: data.annotateUrl,
data: { message: $target.attr('name'),
ignore_duplicates: $target.prop('checked') || undefined },
dataType: 'json'})
});
}
if (data.validation) {
buildResults(el, {validation: data.validation})
return;
}
$('.test-tier,.tier-results', el).addClass('ajax-loading');
$.ajax({type: 'POST',
url: url,
url: data.validateurl,
data: {},
success: function(data) {
if (data.validation == '') {