Mirror of https://github.com/mozilla/FlightDeck.git
Refactoring: Info about the SDK is not needed to create a valid manifest
Backend for downloading a zip file: the model exports the source and creates a zip file; views to prepare, check and serve the zip file are added; the front-end is added, based on the XPI download.
Parent: e1e1ca0351
Commit: 9559179409
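The flow mirrors the existing XPI download: the front-end POSTs a hashtag to the prepare view, polls the check view until the archive exists, then fetches it from the download view. A minimal client-side sketch of that flow, assuming a running dev server; BASE, revision_id and hashtag are illustrative, and the URL paths come from the patterns added below:

# Hedged sketch of the zip download flow; BASE, revision_id and hashtag
# are made up for the example, the URL paths match the added patterns.
import json
import time
import urllib
import urllib2

BASE = 'http://localhost:8000'
revision_id = 1
hashtag = 'abcdef123456'

# 1. queue the zip build (prepare_zip view, POST)
urllib2.urlopen('%s/revision/prepare_zip/%s/' % (BASE, revision_id),
                urllib.urlencode({'hashtag': hashtag}))

# 2. poll until the archive is ready (check_zip view)
while not json.load(urllib2.urlopen(
        '%s/revision/check_zip/%s/' % (BASE, hashtag)))['ready']:
    time.sleep(1)

# 3. download the archive (get_zip view)
archive = urllib2.urlopen(
    '%s/revision/download_zip/%s/my-addon/' % (BASE, hashtag)).read()
open('my-addon.zip', 'wb').write(archive)
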
@@ -51,6 +51,7 @@ from utils.helpers import (pathify, alphanum, alphanum_plus, get_random_string,
                           sanitize_for_frontend)
from utils.os_utils import make_path
from utils.amo import AMOOAuth
from utils.zip import zipdir
from xpi import xpi_utils

from elasticutils.utils import retry_on_timeout
@@ -425,6 +426,18 @@ class PackageRevision(BaseModel):
            raise Exception('XPI might be created only from an Add-on')
        return reverse('jp_addon_revision_test', args=[self.pk])

    def get_prepare_zip_url(self):
        " returns URL to prepare ZIP "
        return reverse('jp_revision_prepare_zip', args=[self.pk])

    def get_check_zip_url(self, hashtag):
        " returns URL to check ZIP "
        return reverse('jp_revision_check_zip', args=[hashtag])

    def get_download_zip_url(self, hashtag, filename):
        " returns URL to download ZIP "
        return reverse('jp_revision_download_zip', args=[hashtag, filename])

    def get_download_xpi_url(self):
        " returns URL to download Add-on's XPI "
        if self.package.type != 'a':
@@ -472,11 +485,9 @@ class PackageRevision(BaseModel):
        for contributors in csv_r:
            return contributors

    def get_dependencies_list(self, sdk=None):
    def get_dependencies_list(self):
        " returns a list of dependencies names extended by default core "
        # breaking possibility to build jetpack SDK 0.6
        deps = ["%s" % (dep.name) \
                for dep in self.dependencies.all()]
        deps = ["%s" % (dep.name) for dep in self.dependencies.all()]
        deps.append('api-utils')
        if self.package.is_addon():
            deps.append('addon-kit')
@@ -493,8 +504,7 @@ class PackageRevision(BaseModel):
        " return description prepared for rendering "
        return "<p>%s</p>" % self.get_full_description().replace("\n", "<br/>")

    def get_manifest(self, test_in_browser=False, sdk=None,
                     package_overrides=None):
    def get_manifest(self, test_in_browser=False, package_overrides=None):
        " returns manifest dictionary "
        version = self.get_version_name()
        if test_in_browser:
@@ -521,7 +531,7 @@ class PackageRevision(BaseModel):
                else self.package.pk,
            'version': version,
            'main': self.module_main,
            'dependencies': self.get_dependencies_list(sdk),
            'dependencies': self.get_dependencies_list(),
            'license': self.package.license,
            'url': str(self.package.url),
            'contributors': self.get_contributors_list(),
@@ -538,9 +548,9 @@ class PackageRevision(BaseModel):
            manifest[key] = package_overrides.get(key, None) or value
        return manifest

    def get_manifest_json(self, sdk=None, package_overrides=None, **kwargs):
    def get_manifest_json(self, package_overrides=None, **kwargs):
        " returns manifest as JSON object "
        return simplejson.dumps(self.get_manifest(sdk=sdk,
        return simplejson.dumps(self.get_manifest(
            package_overrides=package_overrides, **kwargs))

    def get_main_module(self):
@@ -1293,6 +1303,97 @@ class PackageRevision(BaseModel):

        return self.sdk.kit_lib if self.sdk.kit_lib else self.sdk.core_lib

    def export_source(self, modules=None, attachments=None, tstart=None,
                      temp_dir=None, package_overrides=None):
        """
        Export source of the PackageRevision and all its dependencies

        :param modules: list of modules from editor - potentially unsaved
        :param attachments: list of attachments from editor - potentially
                            unsaved
        :rtype: String defining the path to exported source
        """
        if not tstart:
            tstart = time.time()
        if not modules:
            modules = []
        if not attachments:
            attachments = []
        if not temp_dir:
            temp_dir = tempfile.mkdtemp()
        package_dir = self.make_dir(temp_dir)
        # preparing manifest
        self.export_manifest(package_dir, package_overrides=package_overrides)
        t1 = (time.time() - tstart) * 1000

        # export modules with ability to use edited code (from modules var)
        lib_dir = os.path.join(package_dir, self.get_lib_dir())
        for mod in self.modules.all():
            mod_edited = False
            for e_mod in modules:
                if e_mod.pk == mod.pk:
                    mod_edited = True
                    e_mod.export_code(lib_dir)
            if not mod_edited:
                mod.export_code(lib_dir)
        t2 = (time.time() - (t1 / 1000) - tstart) * 1000
        statsd.timing('export.modules', t2)
        log.debug("[export] modules exported (time %dms)" % t2)
        # export atts with ability to use edited code (from attachments var)
        # XPI: memory/database/NFS to local
        data_dir = os.path.join(package_dir, settings.JETPACK_DATA_DIR)
        for att in self.attachments.all():
            att_edited = False
            for e_att in attachments:
                if e_att.pk == att.pk:
                    att_edited = True
                    e_att.export_code(data_dir)
            if not att_edited:
                att.export_file(data_dir)
        t3 = (time.time() - (t2 / 1000) - tstart) * 1000
        statsd.timing('export.attachments', t3)
        log.debug("[export] attachments exported (time %dms)" % t3)

        # XPI: copying to local from memory/db/files
        self.export_dependencies(temp_dir)
        t4 = (time.time() - (t3 / 1000) - tstart) * 1000
        statsd.timing('export.dependencies', t4)
        log.debug("[export] dependencies exported (time %dms)" % t4)
        if not os.path.isdir(temp_dir):
            log.error("[export] An attempt to export add-on (%s) failed." %
                      self.get_version_name())
            raise IntegrityError("Failed to export source")
        return temp_dir

    def zip_source(self, modules=None, attachments=None, hashtag=None,
                   tstart=None, package_overrides=None):
        """
        Compress exported sources into a zip file, return path to the file
        """
        if not tstart:
            tstart = time.time()
        if not hashtag:
            log.error("[zip] Attempt to build add-on (%s) but it's missing a "
                      "hashtag. Failing." % self.get_version_name())
            raise IntegrityError("Hashtag is required to create an xpi.")
        # export sources
        temp_dir = self.export_source(modules, attachments, tstart)
        # zip data
        zip_targetname = "%s.zip" % hashtag
        zip_targetpath = os.path.join(settings.XPI_TARGETDIR, zip_targetname)
        t1 = (time.time() - tstart) * 1000
        try:
            zipdir(temp_dir, zip_targetpath)
        except Exception, err:
            log.error("[zip] An attempt to compress add-on (%s) failed.\n%s" % (
                self.get_version_name(), err))
            raise
        t2 = (time.time() - (t1 / 1000) - tstart) * 1000
        statsd.timing('zip.zipped', t2)
        shutil.rmtree(temp_dir)
        log.debug("[zip] directory compressed (time %dms)" % t2)
        return zip_targetpath

    def build_xpi(self, modules=None, attachments=None, hashtag=None,
                  tstart=None, sdk=None, package_overrides=None):
        """
@@ -1344,8 +1445,7 @@ class PackageRevision(BaseModel):
        packages_dir = os.path.join(sdk_dir, 'packages')
        package_dir = self.make_dir(packages_dir)
        # XPI: create manifest (from memory to local)
        self.export_manifest(package_dir, sdk=sdk,
                             package_overrides=package_overrides)
        self.export_manifest(package_dir, package_overrides=package_overrides)

        # export modules with ability to use edited code (from modules var)
        # XPI: memory/database to local
@@ -1402,11 +1502,11 @@ class PackageRevision(BaseModel):
            f.write('private-key:%s\n' % self.package.private_key)
            f.write('public-key:%s' % self.package.public_key)

    def export_manifest(self, package_dir, sdk=None, package_overrides=None):
    def export_manifest(self, package_dir, package_overrides=None):
        """Creates a file with an Add-on's manifest."""
        manifest_file = "%s/package.json" % package_dir
        with codecs.open(manifest_file, mode='w', encoding='utf-8') as f:
            f.write(self.get_manifest_json(sdk=sdk,
            f.write(self.get_manifest_json(
                package_overrides=package_overrides))

    def export_modules(self, lib_dir):
@@ -1429,7 +1529,7 @@ class PackageRevision(BaseModel):
        package_dir = self.make_dir(packages_dir)
        if not package_dir:
            return
        self.export_manifest(package_dir, sdk=sdk)
        self.export_manifest(package_dir)
        self.export_modules(
            os.path.join(package_dir, self.get_lib_dir()))
        self.export_attachments(

@@ -1,8 +1,11 @@
import datetime
import commonware.log
import time

from statsd import statsd
from celery.decorators import task

from jetpack.models import Package
from jetpack.models import Package, PackageRevision
from elasticutils import get_es

log = commonware.log.getLogger('f.celery')
@@ -13,11 +16,25 @@ def calculate_activity_rating(pks,**kw):
    ids_str = ','.join(map(str, pks))
    log.debug('ES starting calculate_activity_rating for packages: [%s]'
              % ids_str)

    for package in Package.objects.filter(pk__in=pks):
        package.activity_rating = package.calc_activity_rating()
        package.save()

        package.save()

    log.debug('ES completed calculate_activity_rating for packages: [%s]'
              % ids_str)


@task
def zip_source(pk, hashtag, tqueued=None, **kw):
    if not hashtag:
        log.critical("[zip] No hashtag provided")
        return
    tstart = time.time()
    if tqueued:
        tinqueue = (tstart - tqueued) * 1000
        statsd.timing('zip.queued', tinqueue)
        log.info('[zip:%s] Addon job picked from queue (%dms)' % (hashtag, tinqueue))
    log.debug("[zip:%s] Compressing" % pk)
    PackageRevision.objects.get(pk=pk).zip_source(hashtag=hashtag, tstart=tstart)
    log.debug("[zip:%s] Compressed" % pk)

@@ -44,6 +44,9 @@
<li id="download" title="Download" class="UI_Editor_Menu_Button Icon_download">
    <a href="{{ revision.get_download_xpi_url() }}"><span></span></a>
</li>
<li id="zip" title="Download Source" class="UI_Editor_Menu_Button Icon_zip">
    <a href="{{ revision.get_prepare_zip_url() }}"><span></span></a>
</li>
{#
<li id="upload_to_amo" title="Upload" class="UI_Editor_Menu_Button Icon_upload">
    <a target="_new" href="{{ revision.get_upload_to_amo_url() }}"><span></span></a>

@@ -2,6 +2,7 @@
import commonware
import tempfile
import os
import shutil
import datetime
import decimal
@@ -34,10 +35,17 @@ class PackageRevisionTest(TestCase):
        self.hashtag = hashtag()
        self.xpi_file = os.path.join(settings.XPI_TARGETDIR,
                                     "%s.xpi" % self.hashtag)
        self.zip_file = os.path.join(settings.XPI_TARGETDIR,
                                     "%s.zip" % self.hashtag)
        self.temp_dir = tempfile.mkdtemp()

    def tearDown(self):
        if os.path.exists(self.xpi_file):
            os.remove(self.xpi_file)
        if os.path.exists(self.temp_dir):
            shutil.rmtree(self.temp_dir)
        if os.path.exists(self.zip_file):
            os.remove(self.zip_file)

    def test_first_revision_creation(self):
        addon = Package(author=self.author, type='a')
@@ -496,6 +504,25 @@ class PackageRevisionTest(TestCase):
        assert validator.is_valid('alphanum',
                                  self.addon.latest.get_cache_hashtag())

    def test_export_source(self):
        self.addon.latest.dependency_add(self.library.latest)
        d = self.addon.latest.export_source(temp_dir=self.temp_dir)
        eq_(d, self.temp_dir)
        assert os.path.exists(os.path.join(d, self.addon.name))
        assert os.path.exists(os.path.join(d, self.addon.name, 'package.json'))
        assert os.path.exists(os.path.join(d, self.library.name))
        assert os.path.exists(os.path.join(d, self.library.name,
                                           'package.json'))

    def test_zip_source(self):
        self.addon.latest.zip_source(hashtag=self.hashtag)
        assert os.path.isfile(self.zip_file)

    def test_zip_lib(self):
        self.library.latest.zip_source(hashtag=self.hashtag)
        assert os.path.isfile(self.zip_file)

    """
    Although not supported on view and front-end,
    there is no harm in these two

@@ -366,6 +366,19 @@ class TestEditing(TestCase):
class TestRevision(TestCase):
    fixtures = ('mozilla_user', 'core_sdk', 'users', 'packages')

    def setUp(self):
        self.hashtag = hashtag()
        self.xpi_file = os.path.join(settings.XPI_TARGETDIR,
                                     "%s.xpi" % self.hashtag)
        self.zip_file = os.path.join(settings.XPI_TARGETDIR,
                                     "%s.zip" % self.hashtag)

    def tearDown(self):
        if os.path.exists(self.xpi_file):
            os.remove(self.xpi_file)
        if os.path.exists(self.zip_file):
            os.remove(self.zip_file)

    def test_copy_revision(self):
        author = User.objects.get(username='john')
        addon = Package(author=author, type='a')
@@ -474,3 +487,34 @@ class TestRevision(TestCase):
        # there should be other package with the name created from FIXABLE
        eq_(Package.objects.filter(
            author=author, full_name__contains='Integrity Error').count(), 2)

    def test_prepare_zip_file(self):
        author = User.objects.get(username='john')
        addon = Package(author=author, type='a')
        addon.save()
        prepare_url = addon.latest.get_prepare_zip_url()
        response = self.client.post(prepare_url, {'hashtag': self.hashtag})
        eq_(response.status_code, 200)
        eq_(response.content, '{"delayed": true}')

    def test_check_zip_file(self):
        author = User.objects.get(username='john')
        addon = Package(author=author, type='a')
        addon.save()
        check_url = reverse('jp_revision_check_zip', args=[self.hashtag,])
        response = self.client.get(check_url)
        eq_(response.content, '{"ready": false}')
        addon.latest.zip_source(hashtag=self.hashtag)
        response = self.client.get(check_url)
        eq_(response.status_code, 200)
        eq_(response.content, '{"ready": true}')

    def test_download_zip_file(self):
        author = User.objects.get(username='john')
        addon = Package(author=author, type='a')
        addon.save()
        addon.latest.zip_source(hashtag=self.hashtag)
        download_url = reverse('jp_revision_download_zip', args=[self.hashtag, 'x'])
        response = self.client.get(download_url)
        eq_(response.status_code, 200)
        eq_(response['Content-Disposition'], 'attachment; filename="x.zip"')

@@ -135,4 +135,12 @@ urlpatterns = patterns('jetpack.views',
    # check libraries for latest versions
    url(r'package/check_latest_dependencies/(?P<revision_id>\d+)/$',
        'latest_dependencies', name='jp_package_check_latest_dependencies'),

    # zip file
    url(r'^revision/prepare_zip/(?P<revision_id>\d+)/$',
        'prepare_zip', name='jp_revision_prepare_zip'),
    url(r'^revision/download_zip/(?P<hashtag>[a-zA-Z0-9]+)/(?P<filename>.*)/$',
        'get_zip', name='jp_revision_download_zip'),
    url(r'^revision/check_zip/(?P<hashtag>[a-zA-Z0-9]+)/$',
        'check_zip', name='jp_revision_check_zip'),
)

@@ -7,10 +7,14 @@ import shutil
import codecs
import tempfile
import urllib2
import time

from simplejson import JSONDecodeError
from statsd import statsd

from django.contrib import messages
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.db import transaction
from django.views.static import serve
from django.shortcuts import get_object_or_404
@@ -24,11 +28,13 @@ from django.db import IntegrityError, transaction
from django.db.models import Q, ObjectDoesNotExist
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.template.defaultfilters import escape
from django.conf import settings
from django.utils import simplejson
from django.forms.fields import URLField

from tasks import zip_source
from base.shortcuts import get_object_with_related_or_404
from utils import validator
from utils.helpers import pathify, render, render_json
@@ -1059,7 +1065,7 @@ def library_autocomplete(request):
    ids = (settings.MINIMUM_PACKAGE_ID, settings.MINIMUM_PACKAGE_ID - 1)
    notAddonKit = ~(F(id_number=ids[0]) | F(id_number=ids[1]))
    onlyMyPrivateLibs = (F(active=True) | F(author=request.user.id))

    try:
        qs = (Package.search().query(or_=package_query(q)).filter(type='l')
              .filter(notAddonKit).filter(onlyMyPrivateLibs))
@@ -1218,3 +1224,75 @@ def get_revision_conflicting_modules_list(request, pk):
    revision = get_object_or_404(PackageRevision, pk=pk)
    return HttpResponse(simplejson.dumps(
        revision.get_conflicting_module_names()), mimetype="application/json")


def _get_zip_cache_key(request, hashtag):
    session = request.session.session_key
    return 'zip:timing:queued:%s:%s' % (hashtag, session)


@csrf_exempt
@require_POST
def prepare_zip(request, revision_id):
    """
    Prepare the download zip. The package is built asynchronously and we
    assume it works. It will be downloaded in ``get_zip``.
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if (not revision.package.active
            and request.user != revision.package.author):
        # pretend package doesn't exist as it's private
        raise Http404()
    hashtag = request.POST.get('hashtag')
    if not hashtag:
        return HttpResponseForbidden('Add-on Builder has been updated!'
                'We have updated this part of the application. Please '
                'empty your cache and reload to get changes.')
    if not validator.is_valid('alphanum', hashtag):
        log.warning('[security] Wrong hashtag provided')
        return HttpResponseBadRequest("{'error': 'Wrong hashtag'}")
    log.info('[zip:%s] Addon added to queue' % hashtag)
    # caching
    tqueued = time.time()
    tkey = _get_zip_cache_key(request, hashtag)
    cache.set(tkey, tqueued, 120)
    # create zip file
    zip_source(pk=revision.pk, hashtag=hashtag, tqueued=tqueued)
    return HttpResponse('{"delayed": true}')


@never_cache
def get_zip(request, hashtag, filename):
    """
    Download zip (it has to be ready)
    """
    if not validator.is_valid('alphanum', hashtag):
        log.warning('[security] Wrong hashtag provided')
        return HttpResponseForbidden("{'error': 'Wrong hashtag'}")
    path = os.path.join(settings.XPI_TARGETDIR, '%s.zip' % hashtag)
    log.info('[zip:%s] Downloading Addon from %s' % (filename, path))

    tend = time.time()
    tkey = _get_zip_cache_key(request, hashtag)
    tqueued = cache.get(tkey)
    if tqueued:
        ttotal = (tend - tqueued) * 1000
        statsd.timing('zip.total', ttotal)
        total = '%dms' % ttotal
    else:
        total = 'n/a'

    log.info('[zip:%s] Downloading Add-on (%s)' % (hashtag, total))

    response = serve(request, path, '/', show_indexes=False)
    response['Content-Disposition'] = ('attachment; '
            'filename="%s.zip"' % filename)
    return response


@never_cache
def check_zip(r, hashtag):
    """Check if zip file is prepared."""
    if not validator.is_valid('alphanum', hashtag):
        log.warning('[security] Wrong hashtag provided')
        return HttpResponseForbidden("{'error': 'Wrong hashtag'}")
    path = os.path.join(settings.XPI_TARGETDIR, '%s.zip' % hashtag)
    # check if the file exists
    if os.path.isfile(path):
        return HttpResponse('{"ready": true}')
    return HttpResponse('{"ready": false}')

@@ -21,7 +21,7 @@ def index_all():
    with establish_connection() as conn:
        for chunk in chunked(ids, 100):
            tasks.index_all.apply_async(args=[chunk], connection=conn)


@cronjobs.register
def setup_mapping():

@@ -162,6 +162,10 @@ form.UI_Editor_Menu_Descendant {
    background-position: -34px 0;
}

.UI_Editor_Menu_Button.Icon_zip a span {
    background-position: -241px 0;
}

.UI_Editor_Menu_Button.Icon_upload a span {
    background-position: -212px 0;
}

Binary data: media/jetpack/img/editor-buttons.png (binary file not shown; size before: 11 KiB, after: 11 KiB)

@@ -45,6 +45,7 @@ module.exports = new Class({
        copy_el: 'package-copy',
        test_el: 'try_in_browser',
        download_el: 'download',
        zip_el: 'zip',
        console_el: 'error-console',
        save_el: 'package-save',
        menu_el: 'UI_Editor_Menu',
@@ -144,7 +145,17 @@ module.exports = new Class({
                }
                controller.downloadAddon();
            });

        }
        this.zip_el = dom.$(this.options.zip_el);
        this.options.zip_url = this.zip_el.getElement('a').get('href');
        this.zip_el.addListener('click', function(e) {
            e.preventDefault();
            if (this.hasClass(LOADING_CLASS)) {
                return;
            }
            controller.zipRevision();
        });
        this.copy_el = dom.$(this.options.copy_el);
        if (this.copy_el) {
            this.copy_el.addListener('click', function(e) {
@@ -520,6 +531,94 @@ module.exports = new Class({
        }).send();
    },

    zipRevision: function() {
        var el = dom.$(this.options.zip_el).getElement('a');
        var hashtag = this.options.hashtag;
        var key = 'zip' + hashtag;
        var filename = this.package_.get('name');
        var that = this;
        if (el.hasClass('clicked')) {
            return;
        }
        el.addClass('clicked');

        fd().tests[key] = {
            spinner: el.addClass('loading').addClass('small')
        };
        var data = {
            hashtag: hashtag,
            filename: filename
        };
        new Request({
            url: this.options.zip_url,
            method: 'post',
            data: data,
            onComplete: function() {
                el.removeClass('clicked');
                // remove spinner
                el.removeClass('loading').removeClass('small');
            },
            onSuccess: function() {
                var time = fd().options.request_interval;
                log.debug('[zip] delayed .. try to load every %d seconds', time / 1000);
                fd().tests[key].download_request_number = 0;
                fd().tests[key].zip_ID = setInterval(function() {
                    that.tryDownloadZip(hashtag, filename);
                }, time);
            }
        }).send();
    },

    /*
     * Method: tryDownloadZip
     *
     * Try to download ZIP
     * if finished - stop periodical, stop spinner
     */
    tryDownloadZip: function (hashtag, filename) {
        var zip_request = fd().tests['zip' + hashtag];
        if (!zip_request.download_zip_request || (
                    zip_request.download_zip_request &&
                    !zip_request.download_zip_request.isRunning())) {
            zip_request.download_request_number++;
            var url = '/revision/check_zip/' + hashtag + '/';
            log.debug('checking if ' + url + ' is prepared (attempt ' +
                    zip_request.download_request_number + '/50)');
            var r = zip_request.download_zip_request = new Request({
                method: 'get',
                url: url,
                timeout: fd().options.request_interval,
                onSuccess: function(response) {
                    try {
                        response = JSON.parse(response);
                    } catch (jsonError) {
                        log.warning('JSON error: ', jsonError);
                        return;
                    }
                    if (response.ready || zip_request.download_request_number > 50) {
                        clearInterval(zip_request.zip_ID);
                        zip_request.spinner.removeClass('loading');
                        if (!response.ready) {
                            fd.error.alert('ZIP download failed',
                                    'ZIP file is not yet prepared, giving up');
                        }
                    }
                    if (response.ready) {
                        var url = '/revision/download_zip/'+hashtag+'/'+filename+'/';
                        log.debug('downloading ' + filename + '.zip from ' + url );
                        dom.window.getNode().location = url;
                    }
                }
            });

            r.addListener('failure', function() {
                clearInterval(zip_request.zip_ID);
                zip_request.spinner.removeClass('loading');
            });
            r.send();
        }
    },

    downloadAddon: function() {
        var el = dom.$(this.options.download_el).getElement('a');
        if (el.hasClass('clicked')) {

@@ -0,0 +1,19 @@
#!/usr/bin/env python

# from
# http://stackoverflow.com/questions/296499/how-do-i-zip-the-contents-of-a-folder-using-python-version-2-5

from contextlib import closing
from zipfile import ZipFile, ZIP_DEFLATED
import os

def zipdir(basedir, archivename):
    if not os.path.isdir(basedir):
        raise OSError('No such directory', basedir)
    with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
        for root, dirs, files in os.walk(basedir):
            #NOTE: ignore empty directories
            for fn in files:
                absfn = os.path.join(root, fn)
                zfn = absfn[len(basedir)+len(os.sep):]  #XXX: relative path
                z.write(absfn, zfn)
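
For reference, a minimal usage sketch of the helper above; the source directory and archive path are made up for the example. It stores files with paths relative to basedir, which is how zip_source packages the exported sources:

# Illustrative use of zipdir(); paths are hypothetical.
import os
import tempfile

from utils.zip import zipdir

src = tempfile.mkdtemp()
with open(os.path.join(src, 'package.json'), 'w') as f:
    f.write('{}')
zipdir(src, os.path.join(tempfile.gettempdir(), 'example.zip'))
# example.zip now contains 'package.json' at the top level of the archive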