Iterate through the add-ons and repack them after fixing the 'let scope bustage' (bug 1224686)
This commit is contained in:
Родитель
555c5dff55
Коммит
12cbafc79c
|
@ -0,0 +1,16 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
from files.tasks import fix_let_scope_bustage_in_addons
|
||||
|
||||
|
||||
class Command(BaseCommand):
    args = '<addon_id addon_id ...>'
    help = """Fix the "let scope bustage" (bug 1224686) for a list of add-ons.

Only the last version of each add-on will be fixed, and its version bumped."""

    def handle(self, *args, **options):
        """Validate the add-on ids and hand them to the fixing task.

        Raises CommandError if no id is given, or if an id is not an
        integer (the original code let the raw ValueError traceback
        escape to the command line).
        """
        if not args:
            raise CommandError('Please provide at least one add-on id to fix.')

        try:
            addon_ids = [int(addon_id) for addon_id in args]
        except ValueError:
            # Surface a friendly management-command error instead of a
            # raw ValueError traceback for a non-numeric id.
            raise CommandError('Add-on ids must be integers.')
        fix_let_scope_bustage_in_addons(addon_ids)
|
|
@ -1,7 +1,9 @@
|
|||
import mock
|
||||
import pytest
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import CommandError
|
||||
|
||||
import amo
|
||||
import amo.tests
|
||||
|
@ -211,3 +213,21 @@ def test_approve_addons_get_review_type(use_case):
|
|||
"""
|
||||
addon, file1, _, review_type = use_case
|
||||
assert approve_addons.get_review_type(file1) == review_type
|
||||
|
||||
|
||||
# fix_let_scope_bustage.
|
||||
|
||||
|
||||
def test_fix_let_scope_bustage_no_addon_id():
    """If no add-on id is provided, raise."""
    with pytest.raises(CommandError) as exc_info:
        call_command('fix_let_scope_bustage')
    # Compare against the string representation: membership testing on the
    # exception object itself only happens to work on python 2 (exceptions
    # iterate their args there) and breaks on python 3.
    assert 'Please provide at least one add-on id to fix.' in str(
        exc_info.value)
|
||||
|
||||
|
||||
@mock.patch('addons.management.commands.fix_let_scope_bustage.'
            'fix_let_scope_bustage_in_addons')
def test_fix_let_scope_bustage(mock_fixer):
    """The command should call the task with the list of add-on id provided."""
    ids = [1, 2, 3]
    call_command('fix_let_scope_bustage', *ids)
    mock_fixer.assert_called_once_with(ids)
|
||||
|
|
|
@ -8,6 +8,7 @@ import uuid
|
|||
from contextlib import contextmanager
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial, wraps
|
||||
from tempfile import NamedTemporaryFile
|
||||
from urlparse import parse_qs, urlparse, urlsplit, urlunsplit
|
||||
|
||||
from django import forms, test
|
||||
|
@ -841,6 +842,19 @@ def copy_file(source, dest, overwrite=False):
|
|||
os.unlink(dest)
|
||||
|
||||
|
||||
@contextmanager
def copy_file_to_temp(source):
    """Copy *source* to a fresh temporary file and yield that file's path.

    *source* is interpreted relative to the root folder (the one holding
    the settings file).  The temporary copy is removed when the context
    exits.
    """
    destination = get_temp_filename()
    # copy_file is itself a context manager that deletes the copy on exit.
    with copy_file(source, destination):
        yield destination
|
||||
|
||||
|
||||
# This sets up a module that we can patch dynamically with URLs.
|
||||
@override_settings(ROOT_URLCONF='amo.tests.dynamic_urls')
|
||||
class WithDynamicEndpoints(TestCase):
|
||||
|
@ -868,3 +882,9 @@ class WithDynamicEndpoints(TestCase):
|
|||
|
||||
    def _clean_up_dynamic_urls(self):
        # Reset the dynamically-patched URL module so one test's endpoints
        # don't leak into the next test.
        dynamic_urls.urlpatterns = None
|
||||
|
||||
|
||||
def get_temp_filename():
    """Get a unique, non existing, temporary filename."""
    # NamedTemporaryFile deletes the underlying file when closed, so once
    # we're done only the (now unused) unique name remains.
    temp = NamedTemporaryFile()
    try:
        return temp.name
    finally:
        temp.close()
|
||||
|
|
Двоичный файл не отображается.
|
@ -0,0 +1,3 @@
|
|||
let foo = 1;
|
||||
const bar = 2;
|
||||
var baz = 3;
|
|
@ -1,15 +1,22 @@
|
|||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
import spidermonkey
|
||||
from cache_nuggets.lib import Message
|
||||
from tower import ugettext as _
|
||||
|
||||
import amo
|
||||
from addons.models import Addon, AddonUser
|
||||
from amo.celery import task
|
||||
from files.utils import repack, update_version_number
|
||||
from lib.crypto.packaged import sign_file
|
||||
from versions.compare import version_int
|
||||
|
||||
|
||||
task_log = logging.getLogger('z.task')
|
||||
jp_log = logging.getLogger('z.jp.repack')
|
||||
log = logging.getLogger('z.task')
|
||||
|
||||
|
||||
@task
|
||||
|
@ -19,8 +26,8 @@ def extract_file(viewer, **kw):
|
|||
msg.delete()
|
||||
# This flag is so that we can signal when the extraction is completed.
|
||||
flag = Message(viewer._extraction_cache_key())
|
||||
task_log.debug('[1@%s] Unzipping %s for file viewer.' % (
|
||||
extract_file.rate_limit, viewer))
|
||||
log.debug('[1@%s] Unzipping %s for file viewer.' % (
|
||||
extract_file.rate_limit, viewer))
|
||||
|
||||
try:
|
||||
flag.save('extracting') # Set the flag to a truthy value.
|
||||
|
@ -31,8 +38,128 @@ def extract_file(viewer, **kw):
|
|||
(viewer, err))
|
||||
else:
|
||||
msg.save(_('There was an error accessing file %s.') % viewer)
|
||||
task_log.error('[1@%s] Error unzipping: %s' %
|
||||
(extract_file.rate_limit, err))
|
||||
log.error('[1@%s] Error unzipping: %s' %
|
||||
(extract_file.rate_limit, err))
|
||||
finally:
|
||||
# Always delete the flag so the file never gets into a bad state.
|
||||
flag.delete()
|
||||
|
||||
|
||||
def fix_let_scope_bustage(*files):
    """Needed for bug 1224444: rewrite all the top level `let` to `var`.

    Usage:
    >>> fix_let_scope_bustage(file1, file2, file3, ...)
    ('', '')  # First one is stdout, second is stderr.
    """
    script = os.path.join('scripts', 'rewrite.js')
    # Force version 180: 185 introduced breaking changes (using `function`
    # for generators was deprecated) that many current add-ons rely on.
    runner = spidermonkey.Spidermonkey(code='version(180)',
                                       script_file=script,
                                       script_args=files)
    return runner.communicate()
|
||||
|
||||
|
||||
def fix_let_scope_bustage_in_xpi(xpi_path):
    """Rewrite all the top level `let` to `var` in an XPI."""
    # repack() unpacks the xpi, yields the folder, then re-zips on exit,
    # so the fix has to run inside the `with` block.
    with repack(xpi_path) as folder:
        # We only want to fix javascript files.
        js_files = [
            os.path.join(root, name)
            for root, _dirs, names in os.walk(folder)
            for name in names
            if name.endswith('.js')
        ]
        fix_let_scope_bustage(*js_files)
|
||||
|
||||
|
||||
# Mail sent to the add-on owners once their add-on has been auto-repacked.
# Both templates take an {addon} placeholder; the message also needs
# {addon_url}.
MAIL_SUBJECT = u'Mozilla Add-ons: {addon} has been automatically fixed on AMO'
MAIL_MESSAGE = u"""
Your add-on, {addon}, has been automatically fixed for future versions of
Firefox (see
https://blog.mozilla.org/addons/2015/10/14/breaking-changes-let-const-firefox-nightly-44/).
The fixing process involved repackaging the add-on files and adding the string
'.1-let-fixed' to their version numbers. We only fixed the files for the
last uploaded version.
We recommend that you give them a try to make sure they don't have any
unexpected problems: {addon_url}

Future uploads will not be repackaged, so please make sure to integrate these
changes into your source code. The blog post linked above explains in detail
what changed and how it affects your code.

If you have any questions or comments on this, please reply to this email or
join #addons on irc.mozilla.org.

You're receiving this email because you have an add-on hosted on
https://addons.mozilla.org
"""
|
||||
|
||||
|
||||
@task
def fix_let_scope_bustage_in_addons(addon_ids):
    """Used to fix the "let scope bustage" (bug 1224686) in the last version
    of the provided add-ons.

    This is used in the 'fix_let_scope_bustage' management commands.

    It also bumps the version number of the file and the Version, so the
    Firefox extension update mechanism picks this new fixed version and
    installs it.
    """
    log.info(u'[{0}] Fixing addons.'.format(len(addon_ids)))

    # Addon pks that already got the notification mail (set: O(1) lookup).
    addons_emailed = set()
    for addon in Addon.objects.filter(id__in=addon_ids):
        # We only care about the latest added version for each add-on.
        version = addon.versions.first()
        log.info(u'Fixing addon {0}, version {1}'.format(addon, version))

        bumped_version_number = u'{0}.1-let-fixed'.format(version.version)
        for file_obj in version.files.all():
            if not os.path.isfile(file_obj.file_path):
                log.info(u'File {0} does not exist, skip'.format(file_obj.pk))
                continue
            # Save the original file, before bumping the version.
            backup_path = u'{0}.backup_let_fix'.format(file_obj.file_path)
            shutil.copy(file_obj.file_path, backup_path)
            try:
                # Apply the fix itself.
                fix_let_scope_bustage_in_xpi(file_obj.file_path)
            except Exception:
                # Not a bare "except:": that would also swallow
                # SystemExit/KeyboardInterrupt.
                log.error(u'Failed fixing file {0}'.format(file_obj.pk),
                          exc_info=True)
                # Revert the fix by restoring the backup.
                shutil.move(backup_path, file_obj.file_path)
                continue  # We move to the next file.
            # Need to bump the version (modify install.rdf or package.json)
            # before the file is signed.
            update_version_number(file_obj, bumped_version_number)
            if file_obj.is_signed:  # Only sign if it was already signed.
                if file_obj.status == amo.STATUS_PUBLIC:
                    server = settings.SIGNING_SERVER
                else:
                    server = settings.PRELIMINARY_SIGNING_SERVER
                sign_file(file_obj, server)
            # Now update the Version model.
            version.update(version=bumped_version_number,
                           version_int=version_int(bumped_version_number))
            addon = version.addon
            if addon.pk not in addons_emailed:
                # Send a mail to the owners/devs warning them we've
                # automatically fixed their addon.
                qs = (AddonUser.objects
                      .filter(role=amo.AUTHOR_ROLE_OWNER, addon=addon)
                      .exclude(user__email__isnull=True))
                emails = qs.values_list('user__email', flat=True)
                subject = MAIL_SUBJECT.format(addon=addon.name)
                message = MAIL_MESSAGE.format(
                    addon=addon.name,
                    addon_url=amo.helpers.absolutify(
                        addon.get_dev_url(action='versions')))
                amo.utils.send_mail(
                    subject, message, recipient_list=emails,
                    fail_silently=True,
                    headers={'Reply-To': 'amo-editors@mozilla.org'})
                addons_emailed.add(addon.pk)
|
||||
|
|
|
@ -378,34 +378,34 @@ class TestSafeUnzipFile(amo.tests.TestCase, amo.tests.AMOPaths):
|
|||
# is covered in the file viewer tests.
|
||||
@patch.object(settings, 'FILE_UNZIP_SIZE_LIMIT', 5)
|
||||
def test_unzip_limit(self):
|
||||
zip = SafeUnzip(self.xpi_path('langpack-localepicker'))
|
||||
self.assertRaises(forms.ValidationError, zip.is_valid)
|
||||
zip_file = SafeUnzip(self.xpi_path('langpack-localepicker'))
|
||||
self.assertRaises(forms.ValidationError, zip_file.is_valid)
|
||||
|
||||
def test_unzip_fatal(self):
|
||||
zip = SafeUnzip(self.xpi_path('search.xml'))
|
||||
self.assertRaises(zipfile.BadZipfile, zip.is_valid)
|
||||
zip_file = SafeUnzip(self.xpi_path('search.xml'))
|
||||
self.assertRaises(zipfile.BadZipfile, zip_file.is_valid)
|
||||
|
||||
def test_unzip_not_fatal(self):
|
||||
zip = SafeUnzip(self.xpi_path('search.xml'))
|
||||
assert not zip.is_valid(fatal=False)
|
||||
zip_file = SafeUnzip(self.xpi_path('search.xml'))
|
||||
assert not zip_file.is_valid(fatal=False)
|
||||
|
||||
def test_extract_path(self):
|
||||
zip = SafeUnzip(self.xpi_path('langpack-localepicker'))
|
||||
assert zip.is_valid()
|
||||
assert'locale browser de' in zip.extract_path('chrome.manifest')
|
||||
zip_file = SafeUnzip(self.xpi_path('langpack-localepicker'))
|
||||
assert zip_file.is_valid()
|
||||
assert'locale browser de' in zip_file.extract_path('chrome.manifest')
|
||||
|
||||
def test_not_secure(self):
|
||||
zip = SafeUnzip(self.xpi_path('extension'))
|
||||
zip.is_valid()
|
||||
assert not zip.is_signed()
|
||||
zip_file = SafeUnzip(self.xpi_path('extension'))
|
||||
zip_file.is_valid()
|
||||
assert not zip_file.is_signed()
|
||||
|
||||
def test_is_secure(self):
|
||||
zip = SafeUnzip(self.xpi_path('signed'))
|
||||
zip.is_valid()
|
||||
assert zip.is_signed()
|
||||
zip_file = SafeUnzip(self.xpi_path('signed'))
|
||||
zip_file.is_valid()
|
||||
assert zip_file.is_signed()
|
||||
|
||||
def test_is_broken(self):
|
||||
zip = SafeUnzip(self.xpi_path('signed'))
|
||||
zip.is_valid()
|
||||
zip.info[2].filename = 'META-INF/foo.sf'
|
||||
assert not zip.is_signed()
|
||||
zip_file = SafeUnzip(self.xpi_path('signed'))
|
||||
zip_file.is_valid()
|
||||
zip_file.info_list[2].filename = 'META-INF/foo.sf'
|
||||
assert not zip_file.is_signed()
|
||||
|
|
|
@ -0,0 +1,94 @@
|
|||
# -*- coding: utf8 -*-
|
||||
import mock
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from django.test.utils import override_settings
|
||||
|
||||
import amo.tests
|
||||
from files import tasks
|
||||
from files.models import File
|
||||
from versions.models import Version
|
||||
|
||||
|
||||
# Very basic js file that contains a `let`, a `const`, and a `var` in the
|
||||
# toplevel scope. Both the `let` and `const` should be rewritten to `var`.
|
||||
TEST_JS_FILE = 'apps/files/fixtures/files/test_with_toplevel_let.js'
|
||||
|
||||
|
||||
def assert_test_file_fixed(filename):
    """Check that the content of the "test file" has been fixed."""
    # Both the toplevel `let` and `const` must have been rewritten to `var`.
    expected = "var foo = 1;\nvar bar = 2;\nvar baz = 3;\n"
    with open(filename, 'r') as fixed_js_file:
        content = fixed_js_file.read()
    assert content == expected
|
||||
|
||||
|
||||
def test_fix_let_scope_bustage():
    # Use two copies: the fixage script must fix every file it's given,
    # not just the first one.
    with amo.tests.copy_file_to_temp(TEST_JS_FILE) as first_copy:
        with amo.tests.copy_file_to_temp(TEST_JS_FILE) as second_copy:
            out_err = tasks.fix_let_scope_bustage(first_copy, second_copy)
            assert out_err == ('', '')  # No output, no error.
            assert_test_file_fixed(first_copy)
            assert_test_file_fixed(second_copy)
|
||||
|
||||
|
||||
@mock.patch('files.tasks.fix_let_scope_bustage')
def test_fix_let_scope_bustage_in_xpi(mock_fixer):
    """Fix the "let scope bustage" in the test XPI.

    The content of the test XPI is as follows:
    ├── chrome.manifest
    ├── foobar
    │   └── main.js
    ├── install.rdf
    └── some_file.js

    The two files that should be fixed are some_file.js and foobar/main.js.
    Both those files have the same content as the TEST_JS_FILE.
    """
    test_xpi = 'apps/files/fixtures/files/extension-let-global-scope.xpi'
    with amo.tests.copy_file_to_temp(test_xpi) as temp_filename:
        tasks.fix_let_scope_bustage_in_xpi(temp_filename)
    mock_fixer.assert_called_once_with(mock.ANY, mock.ANY)
    # The fixer must have received both javascript files from the xpi.
    fixed_paths = mock_fixer.call_args[0]
    assert fixed_paths[0].endswith('some_file.js')
    assert fixed_paths[1].endswith('foobar/main.js')
|
||||
|
||||
|
||||
@mock.patch('files.tasks.fix_let_scope_bustage_in_xpi')
@mock.patch('files.tasks.update_version_number')
@mock.patch('files.tasks.sign_file')
def test_fix_let_scope_bustage_in_addon(mock_sign_file, mock_version_bump,
                                        mock_fixer, db):
    """The fixing task fixes, version-bumps and re-signs the last version."""
    # Create an add-on, with a version.
    addon = amo.tests.addon_factory()
    addon.update(guid='xxxxx')
    # Add another version, which is the one we want to fix.
    version = Version.objects.create(addon=addon, version='0.1')
    # So addon.versions.first() (which is the last one uploaded) works.
    future_date = datetime.now() + timedelta(days=1)
    version.update(created=future_date)
    assert addon.versions.count() == 2  # Two versions, we only fix the last.

    # Assign a file for the last version's file.
    test_xpi = 'apps/files/fixtures/files/extension-let-global-scope.xpi'
    file_ = File.objects.create(version=version, filename='foo.xpi',
                                is_signed=True)
    with override_settings(PRELIMINARY_SIGNING_SERVER='prelim_signing'):
        with amo.tests.copy_file(test_xpi, file_.file_path):
            # Fix the file!
            tasks.fix_let_scope_bustage_in_addons([addon.pk])

    # fix_let_scope_bustage_in_xpi was called.
    mock_fixer.assert_called_once_with(file_.file_path)

    # Version was bumped.
    bumped_version_number = u'0.1.1-let-fixed'
    # This comparison was missing its `assert` and was thus a no-op.
    assert version.reload().version == bumped_version_number
    mock_version_bump.assert_called_once_with(file_, bumped_version_number)

    # File was signed, against the preliminary server (status isn't PUBLIC).
    mock_sign_file.assert_called_once_with(file_, 'prelim_signing')
|
|
@ -1,4 +1,6 @@
|
|||
import json
|
||||
import os
|
||||
import zipfile
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
import mock
|
||||
|
@ -11,9 +13,8 @@ import amo
|
|||
import amo.tests
|
||||
from addons.models import Addon
|
||||
from applications.models import AppVersion
|
||||
from files import utils
|
||||
from files.models import File
|
||||
from files.utils import (find_jetpacks, is_beta, Extractor,
|
||||
ManifestJSONExtractor, PackageJSONExtractor)
|
||||
from versions.models import Version
|
||||
|
||||
|
||||
|
@ -21,56 +22,56 @@ pytestmark = pytest.mark.django_db
|
|||
|
||||
|
||||
def test_is_beta():
|
||||
assert not is_beta('1.2')
|
||||
assert not utils.is_beta('1.2')
|
||||
|
||||
assert is_beta('1.2a')
|
||||
assert is_beta('1.2a1')
|
||||
assert is_beta('1.2a123')
|
||||
assert is_beta('1.2a.1')
|
||||
assert is_beta('1.2a.123')
|
||||
assert is_beta('1.2a-1')
|
||||
assert is_beta('1.2a-123')
|
||||
assert utils.is_beta('1.2a')
|
||||
assert utils.is_beta('1.2a1')
|
||||
assert utils.is_beta('1.2a123')
|
||||
assert utils.is_beta('1.2a.1')
|
||||
assert utils.is_beta('1.2a.123')
|
||||
assert utils.is_beta('1.2a-1')
|
||||
assert utils.is_beta('1.2a-123')
|
||||
|
||||
assert is_beta('1.2alpha')
|
||||
assert is_beta('1.2alpha')
|
||||
assert is_beta('1.2alpha1')
|
||||
assert is_beta('1.2alpha123')
|
||||
assert is_beta('1.2alpha.1')
|
||||
assert is_beta('1.2alpha.123')
|
||||
assert is_beta('1.2alpha-1')
|
||||
assert is_beta('1.2alpha-123')
|
||||
assert utils.is_beta('1.2alpha')
|
||||
assert utils.is_beta('1.2alpha')
|
||||
assert utils.is_beta('1.2alpha1')
|
||||
assert utils.is_beta('1.2alpha123')
|
||||
assert utils.is_beta('1.2alpha.1')
|
||||
assert utils.is_beta('1.2alpha.123')
|
||||
assert utils.is_beta('1.2alpha-1')
|
||||
assert utils.is_beta('1.2alpha-123')
|
||||
|
||||
assert is_beta('1.2b')
|
||||
assert is_beta('1.2b1')
|
||||
assert is_beta('1.2b123')
|
||||
assert is_beta('1.2b.1')
|
||||
assert is_beta('1.2b.123')
|
||||
assert is_beta('1.2b-1')
|
||||
assert is_beta('1.2b-123')
|
||||
assert utils.is_beta('1.2b')
|
||||
assert utils.is_beta('1.2b1')
|
||||
assert utils.is_beta('1.2b123')
|
||||
assert utils.is_beta('1.2b.1')
|
||||
assert utils.is_beta('1.2b.123')
|
||||
assert utils.is_beta('1.2b-1')
|
||||
assert utils.is_beta('1.2b-123')
|
||||
|
||||
assert is_beta('1.2beta')
|
||||
assert is_beta('1.2beta1')
|
||||
assert is_beta('1.2beta123')
|
||||
assert is_beta('1.2beta.1')
|
||||
assert is_beta('1.2beta.123')
|
||||
assert is_beta('1.2beta-1')
|
||||
assert is_beta('1.2beta-123')
|
||||
assert utils.is_beta('1.2beta')
|
||||
assert utils.is_beta('1.2beta1')
|
||||
assert utils.is_beta('1.2beta123')
|
||||
assert utils.is_beta('1.2beta.1')
|
||||
assert utils.is_beta('1.2beta.123')
|
||||
assert utils.is_beta('1.2beta-1')
|
||||
assert utils.is_beta('1.2beta-123')
|
||||
|
||||
assert is_beta('1.2pre')
|
||||
assert is_beta('1.2pre1')
|
||||
assert is_beta('1.2pre123')
|
||||
assert is_beta('1.2pre.1')
|
||||
assert is_beta('1.2pre.123')
|
||||
assert is_beta('1.2pre-1')
|
||||
assert is_beta('1.2pre-123')
|
||||
assert utils.is_beta('1.2pre')
|
||||
assert utils.is_beta('1.2pre1')
|
||||
assert utils.is_beta('1.2pre123')
|
||||
assert utils.is_beta('1.2pre.1')
|
||||
assert utils.is_beta('1.2pre.123')
|
||||
assert utils.is_beta('1.2pre-1')
|
||||
assert utils.is_beta('1.2pre-123')
|
||||
|
||||
assert is_beta('1.2rc')
|
||||
assert is_beta('1.2rc1')
|
||||
assert is_beta('1.2rc123')
|
||||
assert is_beta('1.2rc.1')
|
||||
assert is_beta('1.2rc.123')
|
||||
assert is_beta('1.2rc-1')
|
||||
assert is_beta('1.2rc-123')
|
||||
assert utils.is_beta('1.2rc')
|
||||
assert utils.is_beta('1.2rc1')
|
||||
assert utils.is_beta('1.2rc123')
|
||||
assert utils.is_beta('1.2rc.1')
|
||||
assert utils.is_beta('1.2rc.123')
|
||||
assert utils.is_beta('1.2rc-1')
|
||||
assert utils.is_beta('1.2rc-123')
|
||||
|
||||
|
||||
class TestFindJetpacks(amo.tests.TestCase):
|
||||
|
@ -82,20 +83,20 @@ class TestFindJetpacks(amo.tests.TestCase):
|
|||
self.file = File.objects.filter(version__addon=3615).get()
|
||||
|
||||
def test_success(self):
|
||||
files = find_jetpacks('1.0', '1.1')
|
||||
files = utils.find_jetpacks('1.0', '1.1')
|
||||
eq_(files, [self.file])
|
||||
|
||||
def test_skip_autorepackage(self):
|
||||
Addon.objects.update(auto_repackage=False)
|
||||
eq_(find_jetpacks('1.0', '1.1'), [])
|
||||
eq_(utils.find_jetpacks('1.0', '1.1'), [])
|
||||
|
||||
def test_minver(self):
|
||||
files = find_jetpacks('1.1', '1.2')
|
||||
files = utils.find_jetpacks('1.1', '1.2')
|
||||
eq_(files, [self.file])
|
||||
eq_(files[0].needs_upgrade, False)
|
||||
|
||||
def test_maxver(self):
|
||||
files = find_jetpacks('.1', '1.0')
|
||||
files = utils.find_jetpacks('.1', '1.0')
|
||||
eq_(files, [self.file])
|
||||
eq_(files[0].needs_upgrade, False)
|
||||
|
||||
|
@ -108,13 +109,13 @@ class TestFindJetpacks(amo.tests.TestCase):
|
|||
eq_(new_file.status, amo.STATUS_UNREVIEWED)
|
||||
eq_(new_file2.status, amo.STATUS_UNREVIEWED)
|
||||
|
||||
files = find_jetpacks('1.0', '1.1')
|
||||
files = utils.find_jetpacks('1.0', '1.1')
|
||||
eq_(files, [self.file, new_file, new_file2])
|
||||
assert all(f.needs_upgrade for f in files)
|
||||
|
||||
# Now self.file will not need an upgrade since we skip old versions.
|
||||
new_file.update(status=amo.STATUS_PUBLIC)
|
||||
files = find_jetpacks('1.0', '1.1')
|
||||
files = utils.find_jetpacks('1.0', '1.1')
|
||||
eq_(files, [self.file, new_file, new_file2])
|
||||
eq_(files[0].needs_upgrade, False)
|
||||
assert all(f.needs_upgrade for f in files[1:])
|
||||
|
@ -132,7 +133,7 @@ class TestExtractor(amo.tests.TestCase):
|
|||
|
||||
def test_no_manifest(self):
|
||||
with self.assertRaises(forms.ValidationError) as exc:
|
||||
Extractor.parse('foobar')
|
||||
utils.Extractor.parse('foobar')
|
||||
assert exc.exception.message == (
|
||||
"No install.rdf or package.json or manifest.json found")
|
||||
|
||||
|
@ -144,7 +145,7 @@ class TestExtractor(amo.tests.TestCase):
|
|||
package_json_extractor,
|
||||
manifest_json_extractor):
|
||||
exists_mock.side_effect = self.os_path_exists_for('install.rdf')
|
||||
Extractor.parse('foobar')
|
||||
utils.Extractor.parse('foobar')
|
||||
assert rdf_extractor.called
|
||||
assert not package_json_extractor.called
|
||||
assert not manifest_json_extractor.called
|
||||
|
@ -157,7 +158,7 @@ class TestExtractor(amo.tests.TestCase):
|
|||
package_json_extractor,
|
||||
manifest_json_extractor):
|
||||
exists_mock.side_effect = self.os_path_exists_for('package.json')
|
||||
Extractor.parse('foobar')
|
||||
utils.Extractor.parse('foobar')
|
||||
assert not rdf_extractor.called
|
||||
assert package_json_extractor.called
|
||||
assert not manifest_json_extractor.called
|
||||
|
@ -171,7 +172,7 @@ class TestExtractor(amo.tests.TestCase):
|
|||
manifest_json_extractor):
|
||||
self.create_switch('webextensions')
|
||||
exists_mock.side_effect = self.os_path_exists_for('manifest.json')
|
||||
Extractor.parse('foobar')
|
||||
utils.Extractor.parse('foobar')
|
||||
assert not rdf_extractor.called
|
||||
assert not package_json_extractor.called
|
||||
assert manifest_json_extractor.called
|
||||
|
@ -186,7 +187,7 @@ class TestExtractor(amo.tests.TestCase):
|
|||
# Here we don't create the waffle switch to enable it.
|
||||
exists_mock.side_effect = self.os_path_exists_for('manifest.json')
|
||||
with self.assertRaises(forms.ValidationError) as exc:
|
||||
Extractor.parse('foobar')
|
||||
utils.Extractor.parse('foobar')
|
||||
assert exc.exception.message == "WebExtensions aren't allowed yet"
|
||||
assert not rdf_extractor.called
|
||||
assert not package_json_extractor.called
|
||||
|
@ -196,8 +197,8 @@ class TestExtractor(amo.tests.TestCase):
|
|||
class TestPackageJSONExtractor(amo.tests.TestCase):
|
||||
|
||||
def parse(self, base_data):
|
||||
return PackageJSONExtractor('/fake_path',
|
||||
json.dumps(base_data)).parse()
|
||||
return utils.PackageJSONExtractor('/fake_path',
|
||||
json.dumps(base_data)).parse()
|
||||
|
||||
def create_appversion(self, name, version):
|
||||
return AppVersion.objects.create(application=amo.APPS[name].id,
|
||||
|
@ -209,7 +210,7 @@ class TestPackageJSONExtractor(amo.tests.TestCase):
|
|||
with NamedTemporaryFile() as file_:
|
||||
file_.write(json.dumps(data))
|
||||
file_.flush()
|
||||
pje = PackageJSONExtractor(file_.name)
|
||||
pje = utils.PackageJSONExtractor(file_.name)
|
||||
assert pje.data == data
|
||||
|
||||
def test_guid(self):
|
||||
|
@ -314,8 +315,8 @@ class TestPackageJSONExtractor(amo.tests.TestCase):
|
|||
class TestManifestJSONExtractor(amo.tests.TestCase):
|
||||
|
||||
def parse(self, base_data):
|
||||
return ManifestJSONExtractor('/fake_path',
|
||||
json.dumps(base_data)).parse()
|
||||
return utils.ManifestJSONExtractor('/fake_path',
|
||||
json.dumps(base_data)).parse()
|
||||
|
||||
def create_appversion(self, name, version):
|
||||
return AppVersion.objects.create(application=amo.APPS[name].id,
|
||||
|
@ -327,7 +328,7 @@ class TestManifestJSONExtractor(amo.tests.TestCase):
|
|||
with NamedTemporaryFile() as file_:
|
||||
file_.write(json.dumps(data))
|
||||
file_.flush()
|
||||
mje = ManifestJSONExtractor(file_.name)
|
||||
mje = utils.ManifestJSONExtractor(file_.name)
|
||||
assert mje.data == data
|
||||
|
||||
def test_guid(self):
|
||||
|
@ -408,3 +409,77 @@ class TestManifestJSONExtractor(amo.tests.TestCase):
|
|||
'strict_min_version': '>=30.0',
|
||||
'strict_max_version': '=30.*'}}}
|
||||
assert not self.parse(data)['apps']
|
||||
|
||||
|
||||
def test_zip_folder_content():
    extension_file = 'apps/files/fixtures/files/extension.xpi'
    # Pre-assign so the `finally` clause can't hit a NameError when
    # extract_zip (or anything before the assignments) raises.
    temp_folder = None
    temp_filename = None
    try:
        temp_folder = utils.extract_zip(extension_file)
        # Sort the listing: os.listdir() order is filesystem-dependent.
        assert sorted(os.listdir(temp_folder)) == [
            'chrome', 'chrome.manifest', 'install.rdf']
        temp_filename = amo.tests.get_temp_filename()
        utils.zip_folder_content(temp_folder, temp_filename)
        # Make sure the zipped files contain the same files (the archive
        # entry order depends on the walk order, hence sorted()).
        with zipfile.ZipFile(temp_filename, mode='r') as new:
            with zipfile.ZipFile(extension_file, mode='r') as orig:
                assert sorted(new.namelist()) == sorted(orig.namelist())
    finally:
        if temp_folder is not None and os.path.exists(temp_folder):
            amo.utils.rm_local_tmp_dir(temp_folder)
        if temp_filename is not None and os.path.exists(temp_filename):
            os.unlink(temp_filename)
|
||||
|
||||
|
||||
def test_repack():
    # Warning: context managers all the way down. Because they're awesome.
    extension_file = 'apps/files/fixtures/files/extension.xpi'
    # We don't want to overwrite our fixture, so use a copy.
    with amo.tests.copy_file_to_temp(extension_file) as temp_filename:
        # This is where we're really testing the repack helper.
        with utils.repack(temp_filename) as folder_path:
            # Temporary folder contains the unzipped XPI.  Sort the
            # listing: os.listdir() order is filesystem-dependent.
            assert sorted(os.listdir(folder_path)) == [
                'chrome', 'chrome.manifest', 'install.rdf']
            # Add a file, which should end up in the repacked file.
            with open(os.path.join(folder_path, 'foo.bar'), 'w') as file_:
                file_.write('foobar')
        # Once we're done with the repack, the temporary folder is removed.
        assert not os.path.exists(folder_path)
        # And the repacked file has the added file.
        assert os.path.exists(temp_filename)
        with zipfile.ZipFile(temp_filename, mode='r') as zf:
            assert 'foo.bar' in zf.namelist()
            assert zf.read('foo.bar') == 'foobar'
|
||||
|
||||
|
||||
@pytest.fixture
def file_obj():
    """A File belonging to the current version of a freshly created add-on."""
    addon = amo.tests.addon_factory()
    addon.update(guid='xxxxx')
    return addon.current_version.all_files[0]
|
||||
|
||||
|
||||
def test_bump_version_in_install_rdf(file_obj):
    """Bumping the version updates the install.rdf manifest."""
    source = 'apps/files/fixtures/files/jetpack.xpi'
    new_version = '1.3.1-signed'
    with amo.tests.copy_file(source, file_obj.file_path):
        utils.update_version_number(file_obj, new_version)
        assert utils.parse_xpi(file_obj.file_path)['version'] == new_version
|
||||
|
||||
|
||||
def test_bump_version_in_alt_install_rdf(file_obj):
    """Bumping the version also handles the alternate install.rdf layout."""
    source = 'apps/files/fixtures/files/alt-rdf.xpi'
    new_version = '2.1.106.1-signed'
    with amo.tests.copy_file(source, file_obj.file_path):
        utils.update_version_number(file_obj, new_version)
        assert utils.parse_xpi(file_obj.file_path)['version'] == new_version
|
||||
|
||||
|
||||
def test_bump_version_in_package_json(file_obj):
    """Bumping the version updates package.json for new-format add-ons."""
    source = 'apps/files/fixtures/files/new-format-0.0.1.xpi'
    new_version = '0.0.1.1-signed'
    with amo.tests.copy_file(source, file_obj.file_path):
        utils.update_version_number(file_obj, new_version)
        assert utils.parse_xpi(file_obj.file_path)['version'] == new_version
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import collections
|
||||
import contextlib
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
|
@ -24,6 +25,7 @@ from django.core.files.storage import default_storage as storage
|
|||
|
||||
import rdflib
|
||||
import waffle
|
||||
from lxml import etree
|
||||
from tower import ugettext as _
|
||||
|
||||
import amo
|
||||
|
@ -349,7 +351,7 @@ def parse_search(fileorpath, addon=None):
|
|||
class SafeUnzip(object):
|
||||
def __init__(self, source, mode='r'):
|
||||
self.source = source
|
||||
self.info = None
|
||||
self.info_list = None
|
||||
self.mode = mode
|
||||
|
||||
def is_valid(self, fatal=True):
|
||||
|
@ -359,16 +361,16 @@ class SafeUnzip(object):
|
|||
an error, otherwise it will return False.
|
||||
"""
|
||||
try:
|
||||
zip = zipfile.ZipFile(self.source, self.mode)
|
||||
zip_file = zipfile.ZipFile(self.source, self.mode)
|
||||
except (BadZipfile, IOError):
|
||||
log.info('Error extracting %s', self.source, exc_info=True)
|
||||
if fatal:
|
||||
log.info('Error extracting', exc_info=True)
|
||||
raise
|
||||
return False
|
||||
|
||||
_info = zip.infolist()
|
||||
info_list = zip_file.infolist()
|
||||
|
||||
for info in _info:
|
||||
for info in info_list:
|
||||
if '..' in info.filename or info.filename.startswith('/'):
|
||||
log.error('Extraction error, invalid file name (%s) in '
|
||||
'archive: %s' % (info.filename, self.source))
|
||||
|
@ -385,14 +387,14 @@ class SafeUnzip(object):
|
|||
_('File exceeding size limit in archive: {0}').format(
|
||||
info.filename))
|
||||
|
||||
self.info = _info
|
||||
self.zip = zip
|
||||
self.info_list = info_list
|
||||
self.zip_file = zip_file
|
||||
return True
|
||||
|
||||
def is_signed(self):
|
||||
"""Tells us if an addon is signed."""
|
||||
finds = []
|
||||
for info in self.info:
|
||||
for info in self.info_list:
|
||||
match = SIGNED_RE.match(info.filename)
|
||||
if match:
|
||||
name, ext = match.groups()
|
||||
|
@ -413,18 +415,19 @@ class SafeUnzip(object):
|
|||
if type == 'jar':
|
||||
parts = path.split('!')
|
||||
for part in parts[:-1]:
|
||||
jar = self.__class__(StringIO.StringIO(jar.zip.read(part)))
|
||||
jar = self.__class__(
|
||||
StringIO.StringIO(jar.zip_file.read(part)))
|
||||
jar.is_valid(fatal=True)
|
||||
path = parts[-1]
|
||||
return jar.extract_path(path[1:] if path.startswith('/') else path)
|
||||
|
||||
def extract_path(self, path):
|
||||
"""Given a path, extracts the content at path."""
|
||||
return self.zip.read(path)
|
||||
return self.zip_file.read(path)
|
||||
|
||||
def extract_info_to_dest(self, info, dest):
|
||||
"""Extracts the given info to a directory and checks the file size."""
|
||||
self.zip.extract(info, dest)
|
||||
self.zip_file.extract(info, dest)
|
||||
dest = os.path.join(dest, info.filename)
|
||||
if not os.path.isdir(dest):
|
||||
# Directories consistently report their size incorrectly.
|
||||
|
@ -436,21 +439,21 @@ class SafeUnzip(object):
|
|||
|
||||
def extract_to_dest(self, dest):
|
||||
"""Extracts the zip file to a directory."""
|
||||
for info in self.info:
|
||||
for info in self.info_list:
|
||||
self.extract_info_to_dest(info, dest)
|
||||
|
||||
def close(self):
|
||||
self.zip.close()
|
||||
self.zip_file.close()
|
||||
|
||||
|
||||
def extract_zip(source, remove=False, fatal=True):
|
||||
"""Extracts the zip file. If remove is given, removes the source file."""
|
||||
tempdir = tempfile.mkdtemp()
|
||||
|
||||
zip = SafeUnzip(source)
|
||||
zip_file = SafeUnzip(source)
|
||||
try:
|
||||
if zip.is_valid(fatal):
|
||||
zip.extract_to_dest(tempdir)
|
||||
if zip_file.is_valid(fatal):
|
||||
zip_file.extract_to_dest(tempdir)
|
||||
except:
|
||||
rm_local_tmp_dir(tempdir)
|
||||
raise
|
||||
|
@ -682,3 +685,93 @@ class JetpackUpgrader(object):
|
|||
cache.set(self.file_key, newfiles)
|
||||
if not newfiles:
|
||||
cache.delete(self.version_key)
|
||||
|
||||
|
||||
def zip_folder_content(folder, filename):
|
||||
"""Compress the _content_ of a folder."""
|
||||
with zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED) as dest:
|
||||
# Add each file/folder from the folder to the zip file.
|
||||
for root, dirs, files in os.walk(folder):
|
||||
relative_dir = os.path.relpath(root, folder)
|
||||
for file_ in files:
|
||||
dest.write(os.path.join(root, file_),
|
||||
# We want the relative paths for the files.
|
||||
arcname=os.path.join(relative_dir, file_))
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def repack(xpi_path, raise_on_failure=True):
|
||||
"""Unpack the XPI, yield the temp folder, and repack on exit.
|
||||
|
||||
Usage:
|
||||
with repack('foo.xpi') as temp_folder:
|
||||
# 'foo.xpi' files are extracted to the temp_folder.
|
||||
modify_files(temp_folder) # Modify the files in the temp_folder.
|
||||
# The 'foo.xpi' extension is now repacked, with the file changes.
|
||||
"""
|
||||
# Unpack.
|
||||
tempdir = extract_zip(xpi_path, remove=False, fatal=raise_on_failure)
|
||||
yield tempdir
|
||||
try:
|
||||
# Repack.
|
||||
repacked = u'{0}.repacked'.format(xpi_path) # Temporary file.
|
||||
zip_folder_content(tempdir, repacked)
|
||||
# Overwrite the initial file with the repacked one.
|
||||
shutil.move(repacked, xpi_path)
|
||||
finally:
|
||||
rm_local_tmp_dir(tempdir)
|
||||
|
||||
|
||||
def update_version_number(file_obj, new_version_number):
|
||||
"""Update the manifest to have the new version number."""
|
||||
# Create a new xpi with the updated version.
|
||||
updated = u'{0}.updated_version_number'.format(file_obj.file_path)
|
||||
# Copy the original XPI, with the updated install.rdf or package.json.
|
||||
with zipfile.ZipFile(file_obj.file_path, 'r') as source:
|
||||
file_list = source.infolist()
|
||||
with zipfile.ZipFile(updated, 'w', zipfile.ZIP_DEFLATED) as dest:
|
||||
for file_ in file_list:
|
||||
content = source.read(file_.filename)
|
||||
if file_.filename == 'install.rdf':
|
||||
content = _update_version_in_install_rdf(
|
||||
content, new_version_number)
|
||||
if file_.filename == 'package.json':
|
||||
content = _update_version_in_package_json(
|
||||
content, new_version_number)
|
||||
dest.writestr(file_, content)
|
||||
# Move the updated file to the original file.
|
||||
shutil.move(updated, file_obj.file_path)
|
||||
|
||||
|
||||
def _update_version_in_install_rdf(content, new_version_number):
|
||||
"""Change the version number in the install.rdf provided."""
|
||||
# We need to use an XML parser, and not a RDF parser, because our
|
||||
# install.rdf files aren't really standard (they use default namespaces,
|
||||
# don't namespace the "about" attribute... rdflib can parse them, and can
|
||||
# now even serialize them, but the end result could be very different from
|
||||
# the format we need.
|
||||
tree = etree.fromstring(content)
|
||||
# There's two different formats for the install.rdf: the "standard" one
|
||||
# uses nodes for each item (like <em:version>1.2</em:version>), the other
|
||||
# alternate one sets attributes on the <RDF:Description
|
||||
# RDF:about="urn:mozilla:install-manifest"> element.
|
||||
|
||||
# Get the version node, if it's the common format, or the Description node
|
||||
# that has the "em:version" attribute if it's the alternate format.
|
||||
namespace = 'http://www.mozilla.org/2004/em-rdf#'
|
||||
version_uri = '{{{0}}}version'.format(namespace)
|
||||
for node in tree.xpath('//em:version | //*[@em:version]',
|
||||
namespaces={'em': namespace}):
|
||||
if node.tag == version_uri: # Common format, version is a node.
|
||||
node.text = new_version_number
|
||||
else: # Alternate format, version is an attribute.
|
||||
node.set(version_uri, new_version_number)
|
||||
return etree.tostring(tree, xml_declaration=True, encoding='utf-8')
|
||||
|
||||
|
||||
def _update_version_in_package_json(content, new_version_number):
|
||||
"""Change the version number in the package.json provided."""
|
||||
updated = json.loads(content)
|
||||
if 'version' in updated:
|
||||
updated['version'] = new_version_number
|
||||
return json.dumps(updated)
|
||||
|
|
|
@ -1,17 +1,14 @@
|
|||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import zipfile
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import Q
|
||||
|
||||
from lxml import etree
|
||||
|
||||
import amo
|
||||
from addons.models import AddonUser
|
||||
from amo.celery import task
|
||||
from files.utils import update_version_number
|
||||
from lib.crypto.packaged import sign_file
|
||||
from versions.compare import version_int
|
||||
from versions.models import Version
|
||||
|
@ -113,7 +110,8 @@ def sign_addons(addon_ids, force=False, **kw):
|
|||
continue
|
||||
log.info(u'Signing addon {0}, version {1}'.format(version.addon,
|
||||
version))
|
||||
bump_version = False # Did we sign at least one file?
|
||||
bumped_version_number = u'{0}.1-signed'.format(version.version)
|
||||
signed_at_least_a_file = False # Did we sign at least one file?
|
||||
for file_obj in to_sign:
|
||||
if not os.path.isfile(file_obj.file_path):
|
||||
log.info(u'File {0} does not exist, skip'.format(file_obj.pk))
|
||||
|
@ -124,14 +122,14 @@ def sign_addons(addon_ids, force=False, **kw):
|
|||
try:
|
||||
# Need to bump the version (modify install.rdf or package.json)
|
||||
# before the file is signed.
|
||||
bump_version_number(file_obj)
|
||||
update_version_number(file_obj, bumped_version_number)
|
||||
if file_obj.status == amo.STATUS_PUBLIC:
|
||||
server = settings.SIGNING_SERVER
|
||||
else:
|
||||
server = settings.PRELIMINARY_SIGNING_SERVER
|
||||
signed = bool(sign_file(file_obj, server))
|
||||
if signed: # Bump the version number if at least one signed.
|
||||
bump_version = True
|
||||
signed_at_least_a_file = True
|
||||
else: # We didn't sign, so revert the version bump.
|
||||
shutil.move(backup_path, file_obj.file_path)
|
||||
except:
|
||||
|
@ -140,17 +138,16 @@ def sign_addons(addon_ids, force=False, **kw):
|
|||
# Revert the version bump, restore the backup.
|
||||
shutil.move(backup_path, file_obj.file_path)
|
||||
# Now update the Version model, if we signed at least one file.
|
||||
if bump_version:
|
||||
bumped_version = _dot_one(version.version)
|
||||
version.update(version=bumped_version,
|
||||
version_int=version_int(bumped_version))
|
||||
if signed_at_least_a_file:
|
||||
version.update(version=bumped_version_number,
|
||||
version_int=version_int(bumped_version_number))
|
||||
addon = version.addon
|
||||
if addon.pk not in addons_emailed:
|
||||
# Send a mail to the owners/devs warning them we've
|
||||
# automatically signed their addon.
|
||||
qs = (AddonUser.objects
|
||||
.filter(role=amo.AUTHOR_ROLE_OWNER, addon=addon)
|
||||
.exclude(user__email=None))
|
||||
.exclude(user__email__isnull=True))
|
||||
emails = qs.values_list('user__email', flat=True)
|
||||
subject = MAIL_SUBJECT.format(addon=addon.name)
|
||||
message = MAIL_MESSAGE.format(
|
||||
|
@ -164,64 +161,6 @@ def sign_addons(addon_ids, force=False, **kw):
|
|||
addons_emailed.append(addon.pk)
|
||||
|
||||
|
||||
def bump_version_number(file_obj):
|
||||
"""Add a '.1-signed' to the version number."""
|
||||
# Create a new xpi with the bumped version.
|
||||
bumped = u'{0}.bumped'.format(file_obj.file_path)
|
||||
# Copy the original XPI, with the updated install.rdf or package.json.
|
||||
with zipfile.ZipFile(file_obj.file_path, 'r') as source:
|
||||
file_list = source.infolist()
|
||||
with zipfile.ZipFile(bumped, 'w', zipfile.ZIP_DEFLATED) as dest:
|
||||
for file_ in file_list:
|
||||
content = source.read(file_.filename)
|
||||
if file_.filename == 'install.rdf':
|
||||
content = _bump_version_in_install_rdf(content)
|
||||
if file_.filename == 'package.json':
|
||||
content = _bump_version_in_package_json(content)
|
||||
dest.writestr(file_, content)
|
||||
# Move the bumped file to the original file.
|
||||
shutil.move(bumped, file_obj.file_path)
|
||||
|
||||
|
||||
def _dot_one(version):
|
||||
"""Returns the version with an appended '.1-signed' on it."""
|
||||
return u'{0}.1-signed'.format(version)
|
||||
|
||||
|
||||
def _bump_version_in_install_rdf(content):
|
||||
"""Add a '.1-signed' to the version number in the install.rdf provided."""
|
||||
# We need to use an XML parser, and not a RDF parser, because our
|
||||
# install.rdf files aren't really standard (they use default namespaces,
|
||||
# don't namespace the "about" attribute... rdflib can parse them, and can
|
||||
# now even serialize them, but the end result could be very different from
|
||||
# the format we need.
|
||||
tree = etree.fromstring(content)
|
||||
# There's two different formats for the install.rdf: the "standard" one
|
||||
# uses nodes for each item (like <em:version>1.2</em:version>), the other
|
||||
# alternate one sets attributes on the <RDF:Description
|
||||
# RDF:about="urn:mozilla:install-manifest"> element.
|
||||
|
||||
# Get the version node, if it's the common format, or the Description node
|
||||
# that has the "em:version" attribute if it's the alternate format.
|
||||
namespace = 'http://www.mozilla.org/2004/em-rdf#'
|
||||
version_uri = '{{{0}}}version'.format(namespace)
|
||||
for node in tree.xpath('//em:version | //*[@em:version]',
|
||||
namespaces={'em': namespace}):
|
||||
if node.tag == version_uri: # Common format, version is a node.
|
||||
node.text = _dot_one(node.text)
|
||||
else: # Alternate format, version is an attribute.
|
||||
node.set(version_uri, _dot_one(node.get(version_uri)))
|
||||
return etree.tostring(tree, xml_declaration=True, encoding='utf-8')
|
||||
|
||||
|
||||
def _bump_version_in_package_json(content):
|
||||
"""Add a '.1-signed' to the version number in the package.json provided."""
|
||||
bumped = json.loads(content)
|
||||
if 'version' in bumped:
|
||||
bumped['version'] = _dot_one(bumped['version'])
|
||||
return json.dumps(bumped)
|
||||
|
||||
|
||||
@task
|
||||
def unsign_addons(addon_ids, force=False, **kw):
|
||||
"""Used to unsign all the versions of an addon that were previously signed.
|
||||
|
@ -306,7 +245,7 @@ def unsign_addons(addon_ids, force=False, **kw):
|
|||
# unsigned their addon and restored backups.
|
||||
qs = (AddonUser.objects
|
||||
.filter(role=amo.AUTHOR_ROLE_OWNER, addon=addon)
|
||||
.exclude(user__email=None))
|
||||
.exclude(user__email__isnull=True))
|
||||
emails = qs.values_list('user__email', flat=True)
|
||||
subject = MAIL_UNSIGN_SUBJECT.format(addon=addon.name)
|
||||
message = MAIL_UNSIGN_MESSAGE.format(
|
||||
|
|
|
@ -13,7 +13,7 @@ import pytest
|
|||
|
||||
import amo
|
||||
import amo.tests
|
||||
from files.utils import extract_xpi, parse_xpi
|
||||
from files.utils import extract_xpi
|
||||
from lib.crypto import packaged, tasks
|
||||
from versions.compare import version_int
|
||||
|
||||
|
@ -575,25 +575,3 @@ class TestTasks(amo.tests.TestCase):
|
|||
assert self.version.version_int == version_int('1.3.1-signed')
|
||||
assert file_hash != self.file_.generate_hash()
|
||||
self.assert_backup()
|
||||
|
||||
def test_bump_version_in_install_rdf(self):
|
||||
with amo.tests.copy_file('apps/files/fixtures/files/jetpack.xpi',
|
||||
self.file_.file_path):
|
||||
tasks.bump_version_number(self.file_)
|
||||
parsed = parse_xpi(self.file_.file_path)
|
||||
assert parsed['version'] == '1.3.1-signed'
|
||||
|
||||
def test_bump_version_in_alt_install_rdf(self):
|
||||
with amo.tests.copy_file('apps/files/fixtures/files/alt-rdf.xpi',
|
||||
self.file_.file_path):
|
||||
tasks.bump_version_number(self.file_)
|
||||
parsed = parse_xpi(self.file_.file_path)
|
||||
assert parsed['version'] == '2.1.106.1-signed'
|
||||
|
||||
def test_bump_version_in_package_json(self):
|
||||
with amo.tests.copy_file(
|
||||
'apps/files/fixtures/files/new-format-0.0.1.xpi',
|
||||
self.file_.file_path):
|
||||
tasks.bump_version_number(self.file_)
|
||||
parsed = parse_xpi(self.file_.file_path)
|
||||
assert parsed['version'] == '0.0.1.1-signed'
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
-r compiled.txt
|
||||
amo-validator==1.10.2
|
||||
amo-validator==1.10.3
|
||||
amqp==1.4.6
|
||||
anyjson==0.3.3
|
||||
argparse==1.2.1
|
||||
|
@ -84,7 +84,7 @@ sasl==0.1.3
|
|||
signing_clients==0.1.14
|
||||
six==1.4.1
|
||||
slumber==0.5.3
|
||||
spidermonkey==41.0a2.post1
|
||||
spidermonkey==44.0a2.post2
|
||||
SQLAlchemy==0.7.5
|
||||
statsd==2.0.3
|
||||
suds==0.4
|
||||
|
|
|
@ -0,0 +1,112 @@
|
|||
// vim:se ft=javascript sts=2 sw=2 et:
|
||||
"use strict";
|
||||
|
||||
function deepEqual(left, right) {
|
||||
if (left === right)
|
||||
return true;
|
||||
|
||||
if (!(left && right &&
|
||||
typeof left == "object" &&
|
||||
typeof right == "object"))
|
||||
return false;
|
||||
|
||||
if (Object.getPrototypeOf(left) !== Object.getPrototypeOf(right))
|
||||
return false;
|
||||
|
||||
let allKeys = obj => new Set([...Object.getOwnPropertyNames(obj),
|
||||
...Object.getOwnPropertySymbols(obj)]);
|
||||
|
||||
let leftKeys = allKeys(left);
|
||||
let rightKeys = allKeys(right);
|
||||
|
||||
if (leftKeys.size != rightKeys.size)
|
||||
return false;
|
||||
|
||||
for (let key of leftKeys) {
|
||||
if (!rightKeys.has(key))
|
||||
return false;
|
||||
|
||||
if (!deepEqual(left[key], right[key]))
|
||||
return false
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function mungeFile(text) {
|
||||
let lines = [];
|
||||
|
||||
let match;
|
||||
let expr = /[^\r\n]*\r?\n?/g;
|
||||
while ((match = expr.exec(text)) && match[0])
|
||||
lines.push(match[0]);
|
||||
|
||||
|
||||
let ast = Reflect.parse(text);
|
||||
if (ast.type != "Program")
|
||||
throw new SyntaxError;
|
||||
|
||||
|
||||
const REPLACEMENTS = {
|
||||
"let": "var",
|
||||
"const": "var ",
|
||||
};
|
||||
|
||||
let changes = 0;
|
||||
for (let node of ast.body) {
|
||||
if (!(node.type == "VariableDeclaration" &&
|
||||
node.kind in REPLACEMENTS))
|
||||
continue;
|
||||
|
||||
let kind = node.kind;
|
||||
let start = node.loc.start;
|
||||
let lineNo = start.line - 1;
|
||||
|
||||
if (lineNo >= lines.length)
|
||||
continue;
|
||||
|
||||
let line = lines[lineNo];
|
||||
if (line.substr(start.column, kind.length) != kind)
|
||||
continue;
|
||||
|
||||
line = [line.slice(0, start.column),
|
||||
REPLACEMENTS[kind],
|
||||
line.slice(start.column + kind.length)].join("");
|
||||
|
||||
lines[lineNo] = line;
|
||||
node.kind = "var";
|
||||
changes++;
|
||||
}
|
||||
|
||||
if (!changes)
|
||||
return;
|
||||
|
||||
let newText = lines.join("");
|
||||
let newAST = Reflect.parse(newText);
|
||||
|
||||
if (deepEqual(ast, newAST))
|
||||
return newText;
|
||||
}
|
||||
|
||||
for (let file of scriptArgs) {
|
||||
let input = os.file.readFile(file, "binary");
|
||||
|
||||
// Convert to a string, ignoring encoding.
|
||||
//
|
||||
// The file may not be valid UTF-8, so treating it as a single-byte
|
||||
// encoding leaves the least chance of errors during re-encoding.
|
||||
// Since we'll only be changing `var` and `const` keywords, and only
|
||||
// if the file parses, the chance of causing breakage this way is
|
||||
// vanishingly small.
|
||||
input = String.fromCharCode.apply(null, input);
|
||||
|
||||
try {
|
||||
let result = mungeFile(input);
|
||||
if (result !== undefined) {
|
||||
let array = Uint8Array.from(result, c => c.charCodeAt(0));
|
||||
os.file.writeTypedArrayToFile(file, array);
|
||||
}
|
||||
} catch (e if e instanceof SyntaxError) {
|
||||
console.log("SyntaxeError: ", e);
|
||||
}
|
||||
}
|
Загрузка…
Ссылка в новой задаче