Adds support for running compatibility tests per app/version (bug 654285)

Kumar McMillan 2011-05-03 17:30:54 -05:00
Parent 4e5e812d04
Commit 12ae0db9dc
5 changed files with 52 additions and 24 deletions

View file

@@ -24,7 +24,7 @@ def validator(upload_id, **kw):
log.info('VALIDATING: %s' % upload_id)
upload = FileUpload.objects.get(pk=upload_id)
try:
result = _validator(upload.path)
result = run_validator(upload.path)
upload.validation = result
upload.save() # We want to hit the custom save().
except:
@@ -44,11 +44,29 @@ def file_validator(file_id, **kw):
file = File.objects.get(pk=file_id)
# Unlike upload validation, let the validator
# raise an exception if there is one.
result = _validator(file.file_path)
result = run_validator(file.file_path)
return FileValidation.from_json(file, result)
def _validator(file_path):
def run_validator(file_path, for_appversions=None):
"""A pre-configured wrapper around the addon validator.
*file_path*
Path to addon / extension file to validate.
*for_appversions=None*
An optional dict of application versions to validate this addon
for. The key is an application GUID and its value is a list of
versions.
To validate the addon for compatibility with Firefox 5 and 6,
you'd pass in::
for_appversions={amo.FIREFOX.guid: ['5.0.*', '6.0.*']}
Not all application versions will have a set of registered
compatibility tests.
"""
from validator.validate import validate
@@ -62,7 +80,9 @@ def _validator(file_path):
if not os.path.exists(apps):
call_command('dump_apps')
return validate(file_path, format='json',
return validate(file_path,
for_appversions=for_appversions,
format='json',
# This flag says to stop testing after one tier fails.
# bug 615426
determined=False,
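
A minimal sketch (not part of this diff) of how the renamed run_validator helper might be called with the for_appversions argument documented above. The file path is hypothetical, and the amo.FIREFOX.guid key and version strings simply mirror the docstring example.

import amo  # AMO constants module, as used elsewhere in this repo
from devhub.tasks import run_validator

# Validate a hypothetical add-on for compatibility with Firefox 5 and 6.
result_json = run_validator(
    '/tmp/my-addon.xpi',  # hypothetical path to an uploaded XPI
    for_appversions={amo.FIREFOX.guid: ['5.0.*', '6.0.*']})
# result_json is the validator's JSON output, the same value stored in
# upload.validation / FileValidation above.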

View file

@@ -92,19 +92,19 @@ class TestValidator(test_utils.TestCase):
def get_upload(self):
return FileUpload.objects.get(pk=self.upload.pk)
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_pass_validation(self, _mock):
_mock.return_value = '{"errors": 0}'
validator(self.upload.pk)
assert self.get_upload().valid
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_fail_validation(self, _mock):
_mock.return_value = '{"errors": 2}'
validator(self.upload.pk)
assert not self.get_upload().valid
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_validation_error(self, _mock):
_mock.side_effect = Exception
eq_(self.upload.task_error, None)
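
Because the helper was renamed from _validator to run_validator, every mock.patch target in these tests changes to the new dotted path. A minimal sketch of the patching pattern, assuming a hypothetical standalone test function and an illustrative JSON payload:

import json
import mock

@mock.patch('devhub.tasks.run_validator')
def test_pass_validation_sketch(run_validator_mock):
    # Stub the validator so the test never invokes the real amo-validator.
    run_validator_mock.return_value = json.dumps({'errors': 0})
    # ...call the task under test here, e.g. validator(upload_pk)...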

View file

@@ -2561,7 +2561,7 @@ class TestUploadDetail(BaseUploadTest):
eq_(suite.attr('data-validateurl'),
reverse('devhub.upload_detail', args=[upload.uuid, 'json']))
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_multi_app_addon_can_have_all_platforms(self, v):
v.return_value = json.dumps(self.validation_ok())
self.upload_file('mobile-2.9.10-fx+fn.xpi')
@@ -2572,7 +2572,7 @@ class TestUploadDetail(BaseUploadTest):
data = json.loads(r.content)
eq_(data['platforms_to_exclude'], [])
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_mobile_excludes_desktop_platforms(self, v):
v.return_value = json.dumps(self.validation_ok())
self.upload_file('mobile-0.1-fn.xpi')
@@ -2584,7 +2584,7 @@ class TestUploadDetail(BaseUploadTest):
eq_(sorted(data['platforms_to_exclude']),
sorted([str(p) for p in amo.DESKTOP_PLATFORMS]))
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_search_tool_excludes_all_platforms(self, v):
v.return_value = json.dumps(self.validation_ok())
self.upload_file('searchgeek-20090701.xml')
@@ -2596,7 +2596,7 @@ class TestUploadDetail(BaseUploadTest):
eq_(sorted(data['platforms_to_exclude']),
sorted([str(p) for p in amo.SUPPORTED_PLATFORMS]))
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_desktop_excludes_mobile(self, v):
v.return_value = json.dumps(self.validation_ok())
self.upload_file('desktop.xpi')
@@ -2608,7 +2608,7 @@ class TestUploadDetail(BaseUploadTest):
eq_(sorted(data['platforms_to_exclude']),
sorted([str(p) for p in amo.MOBILE_PLATFORMS]))
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_unparsable_xpi(self, v):
v.return_value = json.dumps(self.validation_ok())
self.upload_file('unopenable.xpi')
@@ -2766,7 +2766,7 @@ class TestValidateFile(BaseUploadTest):
msg = data['validation']['messages'][0]
eq_(msg['message'], 'The value of <em:id> is invalid.')
@mock.patch('devhub.tasks._validator')
@mock.patch('devhub.tasks.run_validator')
def test_validator_errors(self, v):
v.side_effect = ValueError('catastrophic failure in amo-validator')
r = self.client.post(reverse('devhub.json_file_validation',

View file

@@ -9,7 +9,7 @@ from django.db import connection
from celeryutils import task
from amo.decorators import write
from devhub.tasks import _validator
from devhub.tasks import run_validator
from zadmin.models import ValidationResult, ValidationJob
log = logging.getLogger('z.task')
@@ -31,14 +31,15 @@ def tally_job_results(job_id, **kw):
@write
def bulk_validate_file(result_id, **kw):
res = ValidationResult.objects.get(pk=result_id)
file_base = os.path.basename(res.file.file_path)
log.info('[1@None] Validating file %s (%s) for result_id %s'
% (res.file, file_base, res.id))
task_error = None
validation = None
try:
# TODO(Kumar) when supported, add for_appversions={'{guid}': [1,2]}
validation = _validator(res.file.file_path)
file_base = os.path.basename(res.file.file_path)
log.info('[1@None] Validating file %s (%s) for result_id %s'
% (res.file, file_base, res.id))
target = res.validation_job.target_version
ver = {target.application.guid: [target.version]}
validation = run_validator(res.file.file_path, for_appversions=ver)
except:
task_error = sys.exc_info()
log.error(task_error[1])
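
For clarity, the for_appversions dict built in bulk_validate_file maps the job's target application GUID to a one-element list holding the target version string. A rough sketch of its shape, with an illustrative Firefox GUID and the 3.7a3 version used in the tests below (the concrete values are examples, not taken from this commit):

# target is the validation job's target AppVersion row.
target = res.validation_job.target_version
ver = {target.application.guid: [target.version]}
# e.g. {'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}': ['3.7a3']}
validation = run_validator(res.file.file_path, for_appversions=ver)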

View file

@@ -157,11 +157,11 @@ class TestBulkValidation(BulkValidationTest):
class TestBulkValidationTask(BulkValidationTest):
def start_validation(self):
new_max = self.appversion('3.7a3')
self.new_max = self.appversion('3.7a3')
r = self.client.post(reverse('zadmin.start_validation'),
{'application': amo.FIREFOX.id,
'curr_max_version': self.curr_max.id,
'target_version': new_max.id,
'target_version': self.new_max.id,
'finish_email': 'fliggy@mozilla.com'},
follow=True)
eq_(r.status_code, 200)
@@ -185,9 +185,9 @@ class TestBulkValidationTask(BulkValidationTest):
eq_(res.validation_job.stats['failing'], 1)
eq_(res.validation_job.stats['errors'], 0)
@mock.patch('zadmin.tasks._validator')
def test_task_error(self, _validator):
_validator.side_effect = RuntimeError('validation error')
@mock.patch('zadmin.tasks.run_validator')
def test_task_error(self, run_validator):
run_validator.side_effect = RuntimeError('validation error')
self.start_validation()
res = ValidationResult.objects.get()
err = res.task_error.strip()
@@ -197,6 +197,13 @@ class TestBulkValidationTask(BulkValidationTest):
eq_(res.validation_job.stats['total'], 1)
eq_(res.validation_job.stats['errors'], 1)
@mock.patch('zadmin.tasks.run_validator')
def test_validate_for_appversions(self, run_validator):
self.start_validation()
assert run_validator.called
eq_(run_validator.call_args[1]['for_appversions'],
{amo.FIREFOX.guid: [self.new_max.version]})
def test_settings():
# Are you there, settings page?