Bug 1403131 - Run linters against mozharness scripts and configs. r=rail

--HG--
extra : rebase_source : 4d9458f1819de0a73af9191d560557ab01ffa32e
extra : histedit_source : 1a3c743a9a1c6a3304e4a924be90164fcf882e1f%2C440a79ff12f256a2a79aff6b592fe838c1c9e6e3
Steve Armand 2017-10-23 09:46:15 -04:00
Parent 8a54fa305a
Commit 3be377b84e
29 changed files: 435 additions and 402 deletions
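The diffs below are mechanical flake8/PEP 8 cleanups: long lines wrapped, inline-comment spacing fixed, old-style exception syntax replaced, keyword arguments and dict literals tightened, and comparisons to True rewritten as identity checks. As a rough illustration of the conventions being enforced (hypothetical example code, not taken from this patch):

def compute(max_time=0):  # keyword defaults lose the spaces around "=" (was: max_time = 0)
    return max_time

try:
    raise ValueError("boom")
except Exception:  # bare modern form (was: "except Exception, e:")
    pass

config = {'verify': True}
if config.get('verify') is True:  # identity check against the singleton (was: "== True")
    print(compute(max_time=5))  # inline comments get two leading spaces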

View file

@ -1,5 +1,3 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozfile import *

View file

@ -29,10 +29,10 @@ __all__ = ['extract_tarball',
try:
WindowsError
except NameError:
WindowsError = None # so we can unconditionally catch it later...
WindowsError = None # so we can unconditionally catch it later...
### utilities for extracting archives
# utilities for extracting archives
def extract_tarball(src, dest):
"""extract a .tar file"""
@ -54,7 +54,7 @@ def extract_zip(src, dest):
else:
try:
bundle = zipfile.ZipFile(src)
except Exception, e:
except Exception:
print "src: %s" % src
raise
@ -118,7 +118,7 @@ def extract(src, dest=None):
return top_level_files
### utilities for removal of files and directories
# utilities for removal of files and directories
def rmtree(dir):
"""Deprecated wrapper method to remove a directory tree.
@ -179,6 +179,7 @@ def remove(path):
os.chmod(path, path_stats.st_mode | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
_call_with_windows_retry(shutil.rmtree, path)
def depth(directory):
"""returns the integer depth of a directory or path relative to '/' """
@ -191,20 +192,22 @@ def depth(directory):
break
return level
# ASCII delimeters
ascii_delimeters = {
'vertical_line' : '|',
'item_marker' : '+',
'last_child' : '\\'
'vertical_line': '|',
'item_marker': '+',
'last_child': '\\'
}
# unicode delimiters
unicode_delimeters = {
'vertical_line' : '',
'item_marker' : '',
'last_child' : ''
'vertical_line': '',
'item_marker': '',
'last_child': ''
}
def tree(directory,
item_marker=unicode_delimeters['item_marker'],
vertical_line=unicode_delimeters['vertical_line'],
@ -230,8 +233,7 @@ def tree(directory,
for resource in (dirnames, filenames):
resource[:] = sorted(resource, key=sort_key)
files_end = item_marker
dirpath_marker = item_marker
files_end = item_marker
if level > len(indent):
indent.append(vertical_line)
@ -254,21 +256,19 @@ def tree(directory,
# append the directory and piece of tree structure
# if the top-level entry directory, print as passed
retval.append('%s%s%s'% (''.join(indent[:-1]),
dirpath_mark,
basename if retval else directory))
retval.append('%s%s%s' % (''.join(indent[:-1]),
dirpath_mark, basename if retval else directory))
# add the files
if filenames:
last_file = filenames[-1]
retval.extend([('%s%s%s' % (''.join(indent),
files_end if filename == last_file else item_marker,
filename))
for index, filename in enumerate(filenames)])
files_end if filename == last_file else item_marker, filename))
for index, filename in enumerate(filenames)])
return '\n'.join(retval)
### utilities for temporary resources
# utilities for temporary resources
class NamedTemporaryFile(object):
"""
@ -340,7 +340,7 @@ def TemporaryDirectory():
shutil.rmtree(tempdir)
### utilities dealing with URLs
# utilities dealing with URLs
def is_url(thing):
"""
@ -353,6 +353,7 @@ def is_url(thing):
else:
return len(parsed[0]) >= 2
def load(resource):
"""
open a file or URL for reading. If the passed resource string is not a URL,
@ -369,4 +370,3 @@ def load(resource):
return file(resource)
return urllib2.urlopen(resource)

View file

@ -52,5 +52,5 @@ Module variables:
"""
import mozinfo
from mozinfo import *
__all__ = mozinfo.__all__

View file

@ -19,19 +19,24 @@ import mozfile
# keep a copy of the os module since updating globals overrides this
_os = os
class unknown(object):
"""marker class for unknown information"""
def __nonzero__(self):
return False
def __str__(self):
return 'UNKNOWN'
unknown = unknown() # singleton
unknown = unknown() # singleton
# get system information
info = {'os': unknown,
'processor': unknown,
'version': unknown,
'bits': unknown }
'bits': unknown}
(system, node, release, version, machine, processor) = platform.uname()
(bits, linkage) = platform.architecture()
@ -66,7 +71,7 @@ elif system == "Darwin":
elif sys.platform in ('solaris', 'sunos5'):
info['os'] = 'unix'
version = sys.platform
info['version'] = version # os version
info['version'] = version # os version
# processor type and bits
if processor in ["i386", "i686"]:
@ -82,7 +87,7 @@ elif processor == "Power Macintosh":
bits = re.search('(\d+)bit', bits).group(1)
info.update({'processor': processor,
'bits': int(bits),
})
})
# standard value of choices, for easy inspection
choices = {'os': ['linux', 'bsd', 'win', 'mac', 'unix'],
@ -95,13 +100,14 @@ def sanitize(info):
to handle universal Mac builds."""
if "processor" in info and info["processor"] == "universal-x86-x86_64":
# If we're running on OS X 10.6 or newer, assume 64-bit
if release[:4] >= "10.6": # Note this is a string comparison
if release[:4] >= "10.6": # Note this is a string comparison
info["processor"] = "x86_64"
info["bits"] = 64
else:
info["processor"] = "x86"
info["bits"] = 32
# method for updating information
def update(new_info):
"""
@ -124,9 +130,10 @@ def update(new_info):
for os_name in choices['os']:
globals()['is' + os_name.title()] = info['os'] == os_name
# unix is special
if isLinux or isBsd:
if isLinux or isBsd: # noqa
globals()['isUnix'] = True
def find_and_update_from_json(*dirs):
"""
Find a mozinfo.json file, load it, and update the info with the
@ -158,6 +165,7 @@ def find_and_update_from_json(*dirs):
return None
update({})
# exports
@ -172,6 +180,7 @@ __all__ += [
'find_and_update_from_json',
]
def main(args=None):
# parse the command line
@ -199,11 +208,13 @@ def main(args=None):
print '%s choices: %s' % (key, ' '.join([str(choice)
for choice in choices[key]]))
flag = True
if flag: return
if flag:
return
# otherwise, print out all info
for key, value in info.items():
print '%s: %s' % (key, value)
if __name__ == '__main__':
main()

View file

@ -12,7 +12,6 @@ import os
import re
import sys
import signal
import socket
import subprocess
import time
import tempfile
@ -32,34 +31,35 @@ from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_opt
from mozharness.mozilla.testing.unittest import EmulatorMixin
class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin, BaseScript, MozbaseMixin):
class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin, BaseScript,
MozbaseMixin):
config_options = [[
["--test-suite"],
{"action": "store",
"dest": "test_suite",
"default": None
}
}
], [
["--adb-path"],
{"action": "store",
"dest": "adb_path",
"default": None,
"help": "Path to adb",
}
}
], [
["--total-chunk"],
{"action": "store",
"dest": "total_chunks",
"default": None,
"help": "Number of total chunks",
}
}
], [
["--this-chunk"],
{"action": "store",
"dest": "this_chunk",
"default": None,
"help": "Number of this chunk",
}
}
]] + copy.deepcopy(testing_config_options) + \
copy.deepcopy(blobupload_config_options)
@ -86,7 +86,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
'verify-emulator',
'install',
'run-tests',
],
],
default_actions=['clobber',
'start-emulator',
'download-and-extract',
@ -94,7 +94,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
'verify-emulator',
'install',
'run-tests',
],
],
require_config_file=require_config_file,
config={
'virtualenv_modules': self.virtualenv_modules,
@ -178,11 +178,11 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
if self.test_suite == 'mochitest-media':
# mochitest-media is the only thing that needs this
requirements = os.path.join(dirs['abs_mochitest_dir'],
'websocketprocessbridge',
'websocketprocessbridge_requirements.txt')
'websocketprocessbridge',
'websocketprocessbridge_requirements.txt')
elif self.test_suite == 'marionette':
requirements = os.path.join(dirs['abs_test_install_dir'],
'config', 'marionette_requirements.txt')
'config', 'marionette_requirements.txt')
if requirements:
self.register_virtualenv_module(requirements=[requirements],
two_pass=True)
@ -226,7 +226,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
"tmp_file": tmp_file,
}
def _retry(self, max_attempts, interval, func, description, max_time = 0):
def _retry(self, max_attempts, interval, func, description, max_time=0):
'''
Execute func until it returns True, up to max_attempts times, waiting for
interval seconds between each attempt. description is logged on each attempt.
@ -237,12 +237,13 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
status = False
attempts = 0
if max_time > 0:
end_time = datetime.datetime.now() + datetime.timedelta(seconds = max_time)
end_time = datetime.datetime.now() + datetime.timedelta(seconds=max_time)
else:
end_time = None
while attempts < max_attempts and not status:
if (end_time is not None) and (datetime.datetime.now() > end_time):
self.info("Maximum retry run-time of %d seconds exceeded; remaining attempts abandoned" % max_time)
self.info("Maximum retry run-time of %d seconds exceeded; "
"remaining attempts abandoned" % max_time)
break
if attempts != 0:
self.info("Sleeping %d seconds" % interval)
@ -290,11 +291,13 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
if not adb_ok:
self.warning('Unable to communicate with adb')
return False
adb_device_ok = self._retry(4, 30, self._verify_adb_device, "Verify emulator visible to adb")
adb_device_ok = self._retry(4, 30, self._verify_adb_device,
"Verify emulator visible to adb")
if not adb_device_ok:
self.warning('Unable to communicate with emulator via adb')
return False
boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed", max_time = 330)
boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed",
max_time=330)
if not boot_ok:
self.warning('Unable to verify Android boot completion')
return False
@ -319,9 +322,11 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
def _install_fennec_apk(self):
install_ok = False
if int(self.sdk_level) >= 23:
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.installer_path]
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g',
self.installer_path]
else:
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.installer_path]
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r',
self.installer_path]
out = self._run_with_timeout(300, cmd, True)
if 'Success' in out:
install_ok = True
@ -330,9 +335,11 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
def _install_robocop_apk(self):
install_ok = False
if int(self.sdk_level) >= 23:
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.robocop_path]
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g',
self.robocop_path]
else:
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.robocop_path]
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r',
self.robocop_path]
out = self._run_with_timeout(300, cmd, True)
if 'Success' in out:
install_ok = True
@ -374,7 +381,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
return
try:
tmpfd, filename = tempfile.mkstemp(prefix=prefix, suffix='.png',
dir=dirs['abs_blob_upload_dir'])
dir=dirs['abs_blob_upload_dir'])
os.close(tmpfd)
self.info("Taking screenshot with %s; saving to %s" % (utility, filename))
subprocess.call([utility, filename], env=self.query_env())
@ -455,7 +462,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
continue
if '%(app)' in option:
# only query package name if requested
cmd.extend([option % {'app' : self._query_package_name()}])
cmd.extend([option % {'app': self._query_package_name()}])
else:
cmd.extend([option % str_format_values])
@ -466,7 +473,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
try_options, try_tests = self.try_args(self.test_suite)
cmd.extend(try_options)
if self.config.get('verify') != True:
if self.config.get('verify') is not True:
cmd.extend(self.query_tests_args(
self.config["suite_definitions"][self.test_suite].get("tests"),
None,
@ -494,7 +501,8 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
# something unexpected!
repo = 'https://hg.mozilla.org/mozilla-central'
revision = 'default'
self.warning('Unable to find repo/revision for manifest; using mozilla-central/default')
self.warning('Unable to find repo/revision for manifest; '
'using mozilla-central/default')
url = '%s/raw-file/%s/%s' % (
repo,
revision,
@ -519,7 +527,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
cache=c.get("tooltool_cache", None))
##########################################
### Actions for AndroidEmulatorTest ###
# Actions for AndroidEmulatorTest #
##########################################
def setup_avds(self):
'''
@ -561,7 +569,8 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
'''
Starts the emulator
'''
if 'emulator_url' in self.config or 'emulator_manifest' in self.config or 'tools_manifest' in self.config:
if 'emulator_url' in self.config or 'emulator_manifest' in self.config or \
'tools_manifest' in self.config:
self.install_emulator()
if not os.path.isfile(self.adb_path):
@ -618,19 +627,19 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
f.write('\n\nEmulator /proc/cpuinfo:\n')
cmd = [self.adb_path, '-s', self.emulator['device_id'],
'shell', 'cat', '/proc/cpuinfo']
'shell', 'cat', '/proc/cpuinfo']
out = self._run_with_timeout(30, cmd, quiet=True)
f.write(out)
f.write('\n\nEmulator /proc/meminfo:\n')
cmd = [self.adb_path, '-s', self.emulator['device_id'],
'shell', 'cat', '/proc/meminfo']
'shell', 'cat', '/proc/meminfo']
out = self._run_with_timeout(30, cmd, quiet=True)
f.write(out)
f.write('\n\nEmulator process list:\n')
cmd = [self.adb_path, '-s', self.emulator['device_id'],
'shell', 'ps']
'shell', 'ps']
out = self._run_with_timeout(30, cmd, quiet=True)
f.write(out)
@ -641,10 +650,11 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
'''
self.mkdir_p(self.query_abs_dirs()['abs_blob_upload_dir'])
max_restarts = 5
emulator_ok = self._retry(max_restarts, 10, self._verify_emulator_and_restart_on_fail, "Check emulator")
emulator_ok = self._retry(max_restarts, 10, self._verify_emulator_and_restart_on_fail,
"Check emulator")
if not emulator_ok:
self.fatal('INFRA-ERROR: Unable to start emulator after %d attempts' % max_restarts,
EXIT_STATUS_DICT[TBPL_RETRY])
EXIT_STATUS_DICT[TBPL_RETRY])
self._dump_perf_info()
# Start logcat for the emulator. The adb process runs until the
# corresponding emulator is killed. Output is written directly to
@ -652,8 +662,8 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
# at the end of the job.
logcat_filename = 'logcat-%s.log' % self.emulator["device_id"]
logcat_path = os.path.join(self.abs_dirs['abs_blob_upload_dir'], logcat_filename)
logcat_cmd = '%s -s %s logcat -v threadtime Trace:S StrictMode:S ExchangeService:S > %s &' % \
(self.adb_path, self.emulator["device_id"], logcat_path)
logcat_cmd = '%s -s %s logcat -v threadtime Trace:S StrictMode:S '\
' ExchangeService:S > %s &' % (self.adb_path, self.emulator["device_id"], logcat_path)
self.info(logcat_cmd)
os.system(logcat_cmd)
# Get a post-boot emulator process list for diagnostics
@ -664,7 +674,8 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
"""
Download and extract fennec APK, tests.zip, host utils, and robocop (if required).
"""
super(AndroidEmulatorTest, self).download_and_extract(suite_categories=self._query_suite_categories())
super(AndroidEmulatorTest, self).download_and_extract(
suite_categories=self._query_suite_categories())
dirs = self.query_abs_dirs()
if self.test_suite and self.test_suite.startswith('robocop'):
robocop_url = self.installer_url[:self.installer_url.rfind('/')] + '/robocop.apk'
@ -687,29 +698,33 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
"""
Install APKs on the emulator
"""
install_needed = (not self.test_suite) or self.config["suite_definitions"][self.test_suite].get("install")
if install_needed == False:
install_needed = (not self.test_suite) or \
self.config["suite_definitions"][self.test_suite].get("install")
if install_needed is False:
self.info("Skipping apk installation for %s" % self.test_suite)
return
assert self.installer_path is not None, \
"Either add installer_path to the config or use --installer-path."
self.sdk_level = self._run_with_timeout(30, [self.adb_path, '-s', self.emulator['device_id'],
'shell', 'getprop', 'ro.build.version.sdk'])
self.sdk_level = self._run_with_timeout(30, [self.adb_path, '-s',
self.emulator['device_id'],
'shell', 'getprop', 'ro.build.version.sdk'])
# Install Fennec
install_ok = self._retry(3, 30, self._install_fennec_apk, "Install app APK")
if not install_ok:
self.fatal('INFRA-ERROR: Failed to install %s on %s' %
(self.installer_path, self.emulator["name"]), EXIT_STATUS_DICT[TBPL_RETRY])
(self.installer_path, self.emulator["name"]),
EXIT_STATUS_DICT[TBPL_RETRY])
# Install Robocop if required
if self.test_suite and self.test_suite.startswith('robocop'):
install_ok = self._retry(3, 30, self._install_robocop_apk, "Install Robocop APK")
if not install_ok:
self.fatal('INFRA-ERROR: Failed to install %s on %s' %
(self.robocop_path, self.emulator["name"]), EXIT_STATUS_DICT[TBPL_RETRY])
(self.robocop_path, self.emulator["name"]),
EXIT_STATUS_DICT[TBPL_RETRY])
self.info("Finished installing apps for %s" % self.emulator["name"])
@ -773,7 +788,8 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
# Verification has run out of time. That is okay! Stop running
# tests so that a task timeout is not triggered, and so that
# (partial) results are made available in a timely manner.
self.info("TinderboxPrint: Verification too long: Not all tests were verified.<br/>")
self.info("TinderboxPrint: Verification too long: "
"Not all tests were verified.<br/>")
# Signal verify time exceeded, to break out of suites and
# suite categories loops also.
return False
@ -815,7 +831,6 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
if len(verify_args) > 0:
self._dump_emulator_log()
@PostScriptAction('run-tests')
def stop_emulator(self, action, success=None):
'''
@ -837,6 +852,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
self._kill_processes(self.config["emulator_process_name"])
super(AndroidEmulatorTest, self).upload_blobber_files()
if __name__ == '__main__':
emulatorTest = AndroidEmulatorTest()
emulatorTest.run_and_exit()

View file

@ -29,32 +29,33 @@ from mozharness.mozilla.testing.codecoverage import (
code_coverage_config_options
)
class AWSY(TestingMixin, MercurialScript, BlobUploadMixin, TooltoolMixin, CodeCoverageMixin):
config_options = [
[["--e10s"],
{"action": "store_true",
"dest": "e10s",
"default": False,
"help": "Run tests with multiple processes. (Desktop builds only)",
}],
{"action": "store_true",
"dest": "e10s",
"default": False,
"help": "Run tests with multiple processes. (Desktop builds only)",
}],
[["--enable-stylo"],
{"action": "store_true",
"dest": "enable_stylo",
"default": False,
"help": "Run tests with Stylo enabled.",
}],
{"action": "store_true",
"dest": "enable_stylo",
"default": False,
"help": "Run tests with Stylo enabled.",
}],
[["--disable-stylo"],
{"action": "store_true",
"dest": "disable_stylo",
"default": False,
"help": "Run tests with Stylo disabled.",
}],
{"action": "store_true",
"dest": "disable_stylo",
"default": False,
"help": "Run tests with Stylo disabled.",
}],
[["--single-stylo-traversal"],
{"action": "store_true",
"dest": "single_stylo_traversal",
"default": False,
"help": "Set STYLO_THREADS=1.",
}]
{"action": "store_true",
"dest": "single_stylo_traversal",
"default": False,
"help": "Set STYLO_THREADS=1.",
}]
] + testing_config_options + copy.deepcopy(blobupload_config_options) \
+ copy.deepcopy(code_coverage_config_options)
@ -121,7 +122,6 @@ class AWSY(TestingMixin, MercurialScript, BlobUploadMixin, TooltoolMixin, CodeCo
self.register_virtualenv_module('awsy', self.awsy_path)
def populate_webroot(self):
"""Populate the production test slaves' webroots"""
self.info("Downloading pageset with tooltool...")
@ -140,7 +140,6 @@ class AWSY(TestingMixin, MercurialScript, BlobUploadMixin, TooltoolMixin, CodeCo
self.run_command(unzip_cmd, halt_on_failure=True)
self.run_command("ls %s" % page_load_test_dir)
def run_tests(self, args=None, **kw):
'''
AWSY test should be implemented here

View file

@ -63,7 +63,7 @@ class BouncerSubmitter(BaseScript, PurgeMixin, BouncerSubmitterMixin, BuildbotMi
'submit',
],
config={
'buildbot_json_path' : 'buildprops.json'
'buildbot_json_path': 'buildprops.json'
}
)
self.locales = None
@ -72,17 +72,21 @@ class BouncerSubmitter(BaseScript, PurgeMixin, BouncerSubmitterMixin, BuildbotMi
def _pre_config_lock(self, rw_config):
super(BouncerSubmitter, self)._pre_config_lock(rw_config)
#override properties from buildbot properties here as defined by taskcluster properties
# override properties from buildbot properties here as defined by taskcluster properties
self.read_buildbot_config()
#check if release promotion is true first before overwriting these properties
# check if release promotion is true first before overwriting these properties
if self.buildbot_config["properties"].get("release_promotion"):
for prop in ['product', 'version', 'build_number', 'revision', 'bouncer_submitter_config', ]:
for prop in \
['product', 'version', 'build_number', 'revision',
'bouncer_submitter_config', ]:
if self.buildbot_config["properties"].get(prop):
self.info("Overriding %s with %s" % (prop, self.buildbot_config["properties"].get(prop)))
self.info("Overriding %s with %s" %
(prop, self.buildbot_config["properties"].get(prop)))
self.config[prop] = self.buildbot_config["properties"].get(prop)
if self.buildbot_config["properties"].get("partial_versions"):
self.config["prev_versions"] = self.buildbot_config["properties"].get("partial_versions").split(", ")
self.config["prev_versions"] = \
self.buildbot_config["properties"].get("partial_versions").split(", ")
for opt in ["version", "credentials_file", "bouncer-api-prefix"]:
if opt not in self.config:

View file

@ -24,14 +24,15 @@ sys.path.insert(1, os.path.dirname(sys.path[0]))
from mozharness.base.script import BaseScript
# ConfigTest {{{1
class ConfigTest(BaseScript):
config_options = [[
["--test-file",],
["--test-file", ],
{"action": "extend",
"dest": "test_files",
"help": "Specify which config files to test"
}
}
]]
def __init__(self, require_config_file=False):
@ -45,7 +46,7 @@ class ConfigTest(BaseScript):
default_actions=['test-json-configs',
'test-python-configs',
'summary',
],
],
require_config_file=require_config_file)
def query_config_files(self):
@ -128,7 +129,8 @@ class ConfigTest(BaseScript):
self.info("Good.")
filecount[1] += 1
else:
self.add_summary("%s is valid python, but doesn't create a config dictionary." %
self.add_summary("%s is valid python, "
"but doesn't create a config dictionary." %
config_file, level="error")
if filecount[0]:
self.add_summary("%d of %d python config files were good." %
@ -136,6 +138,7 @@ class ConfigTest(BaseScript):
else:
self.add_summary("No python config files to test.")
# __main__ {{{1
if __name__ == '__main__':
config_test = ConfigTest()

View file

@ -34,7 +34,6 @@ from mozharness.mozilla.signing import SigningMixin
from mozharness.mozilla.updates.balrog import BalrogMixin
from mozharness.mozilla.taskcluster_helper import Taskcluster
from mozharness.base.python import VirtualenvMixin
from mozharness.mozilla.mock import ERROR_MSGS
try:
import simplejson as json
@ -158,9 +157,9 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
"help": "Specify the url of the en-us binary"}
], [
["--disable-mock"], {
"dest": "disable_mock",
"action": "store_true",
"help": "do not run under mock despite what gecko-config says"}
"dest": "disable_mock",
"action": "store_true",
"help": "do not run under mock despite what gecko-config says"}
]]
def __init__(self, require_config_file=True):
@ -484,7 +483,7 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
elif 'revision' in self.buildbot_properties:
revision = self.buildbot_properties['revision']
elif (self.buildbot_config and
self.buildbot_config.get('sourcestamp', {}).get('revision')):
self.buildbot_config.get('sourcestamp', {}).get('revision')):
revision = self.buildbot_config['sourcestamp']['revision']
elif self.buildbot_config and self.buildbot_config.get('revision'):
revision = self.buildbot_config['revision']
@ -618,7 +617,8 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
# pass through non-interpolables, like booleans
current_repo[key] = value
except KeyError:
self.error('not all the values in "{0}" can be replaced. Check your configuration'.format(value))
self.error('not all the values in "{0}" can be replaced. Check your '
'configuration'.format(value))
raise
repos.append(current_repo)
self.info("repositories: %s" % repos)
@ -819,7 +819,7 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
targets.extend(['setup.exe', 'setup-stub.exe'])
for f in matches:
target_file = next(target_file for target_file in targets
if f.endswith(target_file[6:]))
if f.endswith(target_file[6:]))
if target_file:
# Remove from list of available options for this locale
targets.remove(target_file)
@ -971,7 +971,8 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
# files
# Locale is hardcoded to en-US, for silly reasons
# The Balrog submitter translates this platform into a build target
# via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
# via
# https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
self.set_buildbot_property("completeMarSize", self.query_filesize(c_marfile))
self.set_buildbot_property("completeMarHash", self.query_sha512sum(c_marfile))
self.set_buildbot_property("completeMarUrl", c_mar_url)
@ -1097,7 +1098,8 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
'branch': self.config['branch'],
'appName': self.config['appName'],
'platform': self.config['platform'],
'completeMarUrls': {locale: self._query_complete_mar_url(locale) for locale in locales},
'completeMarUrls': {locale: self._query_complete_mar_url(locale)
for locale in locales},
}
self.info('funsize info: %s' % funsize_info)
self.set_buildbot_property('funsize_info', json.dumps(funsize_info),
@ -1172,7 +1174,7 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
self.info('Using routes: %s' % routes)
tc = Taskcluster(branch,
pushinfo.pushdate, # Use pushdate as the rank
pushinfo.pushdate, # Use pushdate as the rank
client_id,
access_token,
self.log_obj,

View file

@ -98,7 +98,6 @@ class DesktopPartnerRepacks(ReleaseMixin, BuildbotMixin, PurgeMixin,
**buildscript_kwargs
)
def _pre_config_lock(self, rw_config):
self.read_buildbot_config()
if not self.buildbot_config:
@ -106,9 +105,11 @@ class DesktopPartnerRepacks(ReleaseMixin, BuildbotMixin, PurgeMixin,
else:
if self.config.get('require_buildprops', False) is True:
if not self.buildbot_config:
self.fatal("Unable to load properties from file: %s" % self.config.get('buildbot_json_path'))
self.fatal("Unable to load properties from file: %s" %
self.config.get('buildbot_json_path'))
props = self.buildbot_config["properties"]
for prop in ['version', 'build_number', 'revision', 'repo_file', 'repack_manifests_url', 'partner']:
for prop in ['version', 'build_number', 'revision', 'repo_file',
'repack_manifests_url', 'partner']:
if props.get(prop):
self.info("Overriding %s with %s" % (prop, props[prop]))
self.config[prop] = props.get(prop)
@ -191,6 +192,7 @@ class DesktopPartnerRepacks(ReleaseMixin, BuildbotMixin, PurgeMixin,
return self.run_command(repack_cmd,
cwd=self.query_abs_dirs()['abs_scripts_dir'])
# main {{{
if __name__ == '__main__':
partner_repacks = DesktopPartnerRepacks()

View file

@ -2,6 +2,7 @@
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
"""desktop_unittest.py
@ -24,7 +25,7 @@ from datetime import datetime, timedelta
sys.path.insert(1, os.path.dirname(sys.path[0]))
from mozharness.base.errors import BaseErrorList
from mozharness.base.log import INFO, ERROR
from mozharness.base.log import INFO
from mozharness.base.script import PreScriptAction
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
@ -39,13 +40,15 @@ from mozharness.mozilla.testing.codecoverage import (
)
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell', 'mozbase', 'mozmill']
SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell',
'mozbase', 'mozmill']
SUITE_DEFAULT_E10S = ['mochitest', 'reftest']
SUITE_NO_E10S = ['xpcshell']
# DesktopUnittest {{{1
class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin, CodeCoverageMixin):
class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin,
CodeCoverageMixin):
config_options = [
[['--mochitest-suite', ], {
"action": "extend",
@ -151,7 +154,8 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
"action": "store_true",
"dest": "allow_software_gl_layers",
"default": False,
"help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}
"help": "Permits a software GL implementation (such as LLVMPipe) to use "
"the GL compositor."}
],
[["--single-stylo-traversal"], {
"action": "store_true",
@ -281,8 +285,10 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
dirs['abs_xpcshell_dir'] = os.path.join(dirs['abs_test_install_dir'], "xpcshell")
dirs['abs_cppunittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "cppunittest")
dirs['abs_gtest_dir'] = os.path.join(dirs['abs_test_install_dir'], "gtest")
dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "jit-test", "jit-test")
dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'],
'blobber_upload_dir')
dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'],
"jit-test", "jit-test")
dirs['abs_mozbase_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozbase")
dirs['abs_mozmill_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozmill")
@ -335,10 +341,8 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
self.register_virtualenv_module(name='mock')
self.register_virtualenv_module(name='simplejson')
requirements_files = [
os.path.join(dirs['abs_test_install_dir'],
'config',
'marionette_requirements.txt')]
requirements_files = [os.path.join(dirs['abs_test_install_dir'],
'config', 'marionette_requirements.txt')]
if self._query_specified_suites('mochitest') is not None:
# mochitest is the only thing that needs this
@ -391,7 +395,7 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
str_format_values = {
'binary_path': self.binary_path,
'symbols_path': self._query_symbols_url(),
'abs_work_dir' : dirs['abs_work_dir'],
'abs_work_dir': dirs['abs_work_dir'],
'abs_app_dir': abs_app_dir,
'abs_res_dir': abs_res_dir,
'raw_log_file': raw_log_file,
@ -419,11 +423,11 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
if suite_category == "mochitest":
base_cmd.append('--bisect-chunk=default')
else:
self.warning("--no-random does not currently work with suites other than mochitest.")
self.warning("--no-random does not currently work with suites other than "
"mochitest.")
if c['headless']:
base_cmd.append('--headless');
base_cmd.append('--headless')
# set pluginsPath
abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
@ -514,7 +518,8 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
return False
if suite_category not in unstructured_flavors:
return True
if not unstructured_flavors.get(suite_category) or flavor in unstructured_flavors.get(suite_category):
if not unstructured_flavors.get(suite_category) or \
flavor in unstructured_flavors.get(suite_category):
return False
return True
@ -557,10 +562,9 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
if rejected:
self.buildbot_status(TBPL_EXCEPTION)
self.fatal("There are specified suites that are incompatible with "
"--artifact try syntax flag: {}".format(', '.join(rejected)),
"--artifact try syntax flag: {}".format(', '.join(rejected)),
exit_code=self.return_code)
def download_and_extract(self):
"""
download and extract test zip / download installer
@ -698,7 +702,7 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
env = {}
if isinstance(suites[suite], dict):
options_list = suites[suite].get('options', [])
if self.config.get('verify') == True:
if self.config.get('verify') is True:
tests_list = []
else:
tests_list = suites[suite].get('tests', [])
@ -773,7 +777,8 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
# Verification has run out of time. That is okay! Stop running
# tests so that a task timeout is not triggered, and so that
# (partial) results are made available in a timely manner.
self.info("TinderboxPrint: Verification too long: Not all tests were verified.<br/>")
self.info("TinderboxPrint: Verification too long: Not all tests "
"were verified.<br/>")
# Signal verify time exceeded, to break out of suites and
# suite categories loops also.
return False

View file

@ -78,7 +78,8 @@ class FxDesktopBuild(BuildScript, TryToolsMixin, object):
],
'stage_product': 'firefox',
'platform_supports_post_upload_to_latest': True,
'build_resources_path': '%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
'build_resources_path': \
'%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
'nightly_promotion_branches': ['mozilla-central', 'mozilla-aurora'],
# try will overwrite these
@ -205,7 +206,6 @@ class FxDesktopBuild(BuildScript, TryToolsMixin, object):
self.actions = tuple(rw_config.actions)
self.all_actions = tuple(rw_config.all_actions)
def query_abs_dirs(self):
if self.abs_dirs:
return self.abs_dirs
@ -257,6 +257,7 @@ class FxDesktopBuild(BuildScript, TryToolsMixin, object):
import ctypes
ctypes.windll.kernel32.SetErrorMode(0x8001)
if __name__ == '__main__':
fx_desktop_build = FxDesktopBuild()
fx_desktop_build.run_and_exit()

View file

@ -212,11 +212,13 @@ class L10nBumper(VCSScript):
self.mkdir_p(dirs['abs_work_dir'])
self.rmtree(treestatus_json)
self.run_command(["curl", "--retry", "4", "-o", treestatus_json, treestatus_url], throw_exception=True)
self.run_command(["curl", "--retry", "4", "-o", treestatus_json, treestatus_url],
throw_exception=True)
treestatus = self._read_json(treestatus_json)
if treestatus['result']['status'] != 'closed':
self.info("treestatus is %s - assuming we can land" % repr(treestatus['result']['status']))
self.info("treestatus is %s - assuming we can land" %
repr(treestatus['result']['status']))
return True
return False

View file

@ -7,14 +7,13 @@
import copy
import os
import re
import sys
# load modules from parent dir
sys.path.insert(1, os.path.dirname(sys.path[0]))
from mozharness.base.errors import BaseErrorList, TarErrorList
from mozharness.base.log import INFO, ERROR, WARNING
from mozharness.base.log import INFO
from mozharness.base.script import PreScriptAction
from mozharness.base.transfer import TransferMixin
from mozharness.base.vcs.vcsbase import MercurialScript
@ -34,7 +33,8 @@ from mozharness.mozilla.structuredlog import StructuredOutputParser
# builds is turned off, Bug 1209180.
class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMixin, CodeCoverageMixin):
class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMixin,
CodeCoverageMixin):
config_options = [[
["--application"],
{"action": "store",
@ -54,7 +54,8 @@ class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMix
{"action": "store",
"dest": "marionette_address",
"default": None,
"help": "The host:port of the Marionette server running inside Gecko. Unused for emulator testing",
"help": "The host:port of the Marionette server running inside Gecko. "
"Unused for emulator testing",
}
], [
["--emulator"],
@ -98,7 +99,7 @@ class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMix
"dest": "headless",
"default": False,
"help": "Run tests in headless mode.",
}
}
], [
["--allow-software-gl-layers"],
{"action": "store_true",
@ -154,7 +155,8 @@ class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMix
def _pre_config_lock(self, rw_config):
super(MarionetteTest, self)._pre_config_lock(rw_config)
if not self.config.get('emulator') and not self.config.get('marionette_address'):
self.fatal("You need to specify a --marionette-address for non-emulator tests! (Try --marionette-address localhost:2828 )")
self.fatal("You need to specify a --marionette-address for non-emulator tests! "
"(Try --marionette-address localhost:2828 )")
def query_abs_dirs(self):
if self.abs_dirs:

View file

@ -20,14 +20,12 @@ import os
import pprint
import subprocess
import sys
from getpass import getpass
sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
from mozharness.base.errors import HgErrorList
from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.mozilla.selfserve import SelfServeMixin
from mozharness.mozilla.updates.balrog import BalrogMixin
from mozharness.mozilla.buildbot import BuildbotMixin
from mozharness.mozilla.repo_manipulation import MercurialRepoManipulationMixin
@ -104,13 +102,16 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
"""
message = ""
if self.config['migration_behavior'] not in VALID_MIGRATION_BEHAVIORS:
message += "%s must be one of %s!\n" % (self.config['migration_behavior'], VALID_MIGRATION_BEHAVIORS)
message += "%s must be one of %s!\n" % (self.config['migration_behavior'],
VALID_MIGRATION_BEHAVIORS)
if self.config['migration_behavior'] == 'beta_to_release':
if self.config.get("require_remove_locales") and not self.config.get("remove_locales") and 'migrate' in self.actions:
if self.config.get("require_remove_locales") \
and not self.config.get("remove_locales") and 'migrate' in self.actions:
message += "You must specify --remove-locale!\n"
else:
if self.config.get("require_remove_locales") or self.config.get("remove_locales"):
self.warning("--remove-locale isn't valid unless you're using beta_to_release migration_behavior!\n")
self.warning("--remove-locale isn't valid unless you're using beta_to_release "
"migration_behavior!\n")
if message:
self.fatal(message)
@ -221,7 +222,8 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
dirs = self.query_abs_dirs()
patch_file = os.path.join(dirs['abs_work_dir'], 'patch_file')
self.run_command(
subprocess.list2cmdline(hg + ['diff', '-r', old_head, '.hgtags', '-U9', '>', patch_file]),
subprocess.list2cmdline(hg + ['diff', '-r', old_head, '.hgtags',
'-U9', '>', patch_file]),
cwd=cwd,
)
self.run_command(
@ -324,7 +326,8 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
"""
dirs = self.query_abs_dirs()
next_mb_version = self.get_version(dirs['abs_to_dir'])[0]
self.bump_version(dirs['abs_to_dir'], next_mb_version, next_mb_version, "a1", "", use_config_suffix=True)
self.bump_version(dirs['abs_to_dir'], next_mb_version, next_mb_version, "a1", "",
use_config_suffix=True)
self.apply_replacements()
# bump m-c version
curr_mc_version = self.get_version(dirs['abs_from_dir'])[0]
@ -338,7 +341,6 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
self.touch_clobber_file(dirs['abs_from_dir'])
self.touch_clobber_file(dirs['abs_to_dir'])
def beta_to_release(self, *args, **kwargs):
""" mozilla-beta -> mozilla-release behavior.
@ -492,9 +494,11 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
)
# Call beta_to_release etc.
if not hasattr(self, self.config['migration_behavior']):
self.fatal("Don't know how to proceed with migration_behavior %s !" % self.config['migration_behavior'])
self.fatal("Don't know how to proceed with migration_behavior %s !" %
self.config['migration_behavior'])
getattr(self, self.config['migration_behavior'])(end_tag=end_tag)
self.info("Verify the diff, and apply any manual changes, such as disabling features, and --commit-changes")
self.info("Verify the diff, and apply any manual changes, such as disabling features, "
"and --commit-changes")
# __main__ {{{1

View file

@ -111,7 +111,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
{"dest": "disable_mock",
"action": "store_true",
"help": "do not run under mock despite what gecko-config says",
}
}
], [
['--revision', ],
{"action": "store",
@ -183,7 +183,8 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
rc = self.query_release_config()
repack_env['EN_US_BINARY_URL'] = c['base_en_us_binary_url'] % replace_dict
if 'MOZ_SIGNING_SERVERS' in os.environ:
repack_env['MOZ_SIGN_CMD'] = subprocess.list2cmdline(self.query_moz_sign_cmd(formats=['jar']))
repack_env['MOZ_SIGN_CMD'] = \
subprocess.list2cmdline(self.query_moz_sign_cmd(formats=['jar']))
self.repack_env = repack_env
return self.repack_env
@ -271,7 +272,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
elif 'revision' in self.buildbot_properties:
revision = self.buildbot_properties['revision']
elif (self.buildbot_config and
self.buildbot_config.get('sourcestamp', {}).get('revision')):
self.buildbot_config.get('sourcestamp', {}).get('revision')):
revision = self.buildbot_config['sourcestamp']['revision']
elif self.buildbot_config and self.buildbot_config.get('revision'):
revision = self.buildbot_config['revision']
@ -347,20 +348,18 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
self.error("Can't determine the upload url for %s!" % locale)
def query_abs_dirs(self):
if self.abs_dirs:
return self.abs_dirs
abs_dirs = super(MobileSingleLocale, self).query_abs_dirs()
if self.abs_dirs:
return self.abs_dirs
abs_dirs = super(MobileSingleLocale, self).query_abs_dirs()
dirs = {
'abs_tools_dir':
os.path.join(abs_dirs['base_work_dir'], 'tools'),
'build_dir':
os.path.join(abs_dirs['base_work_dir'], 'build'),
}
dirs = {
'abs_tools_dir': os.path.join(abs_dirs['base_work_dir'], 'tools'),
'build_dir': os.path.join(abs_dirs['base_work_dir'], 'build'),
}
abs_dirs.update(dirs)
self.abs_dirs = abs_dirs
return self.abs_dirs
abs_dirs.update(dirs)
self.abs_dirs = abs_dirs
return self.abs_dirs
def add_failure(self, locale, message, **kwargs):
self.locales_property[locale] = "Failed"
@ -379,7 +378,8 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
locales = self.query_locales()
for locale in locales:
self.locales_property.setdefault(locale, "Success")
self.set_buildbot_property("locales", json.dumps(self.locales_property), write_to_file=True)
self.set_buildbot_property("locales", json.dumps(self.locales_property),
write_to_file=True)
# Actions {{{2
def clobber(self):
@ -410,7 +410,8 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
# pass through non-interpolables, like booleans
current_repo[key] = value
except KeyError:
self.error('not all the values in "{0}" can be replaced. Check your configuration'.format(value))
self.error('not all the values in "{0}" can be replaced. Check '
'your configuration'.format(value))
raise
repos.append(current_repo)
self.info("repositories: %s" % repos)
@ -423,7 +424,6 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
# list_locales() is defined in LocalesMixin.
def _setup_configure(self, buildid=None):
c = self.config
dirs = self.query_abs_dirs()
env = self.query_repack_env()
make = self.query_exe("make")
@ -510,7 +510,8 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
env=repack_env,
error_list=MakefileErrorList,
halt_on_failure=False):
self.add_failure(locale, message="%s failed in make installers-%s!" % (locale, locale))
self.add_failure(locale, message="%s failed in make installers-%s!" %
(locale, locale))
continue
success_count += 1
self.summarize_success_count(success_count, total_count,
@ -544,7 +545,8 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
continue
success_count += 1
self.summarize_success_count(success_count, total_count,
message="Validated signatures on %d of %d binaries successfully.")
message="Validated signatures on %d of %d "
"binaries successfully.")
def taskcluster_upload(self):
auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
@ -602,7 +604,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
self.info('Using routes: %s' % routes)
tc = Taskcluster(branch,
pushinfo.pushdate, # Use pushdate as the rank
pushinfo.pushdate, # Use pushdate as the rank
client_id,
access_token,
self.log_obj,
@ -633,7 +635,9 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
continue
total_count += 1
if c.get('base_post_upload_cmd'):
upload_env['POST_UPLOAD_CMD'] = c['base_post_upload_cmd'] % {'version': version, 'locale': locale, 'buildnum': str(buildnum), 'post_upload_extra': ' '.join(c.get('post_upload_extra', []))}
upload_env['POST_UPLOAD_CMD'] = c['base_post_upload_cmd'] % \
{'version': version, 'locale': locale, 'buildnum': str(buildnum),
'post_upload_extra': ' '.join(c.get('post_upload_extra', []))}
output = self.get_output_from_command_m(
# Ugly hack to avoid |make upload| stderr from showing up
# as get_output_from_command errors
@ -687,18 +691,19 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
rev = self.vcs_checkout(**repos[0])
self.set_buildbot_property("tools_revision", rev, write_to_file=True)
def query_apkfile_path(self,locale):
def query_apkfile_path(self, locale):
dirs = self.query_abs_dirs()
apkdir = os.path.join(dirs['abs_objdir'], 'dist')
r = r"(\.)" + re.escape(locale) + r"(\.*)"
r = r"(\.)" + re.escape(locale) + r"(\.*)"
apks = []
for f in os.listdir(apkdir):
if f.endswith(".apk") and re.search(r, f):
apks.append(f)
if len(apks) == 0:
self.fatal("Found no apks files in %s, don't know what to do:\n%s" % (apkdir, apks), exit_code=1)
self.fatal("Found no apks files in %s, don't know what to do:\n%s" %
(apkdir, apks), exit_code=1)
return os.path.join(apkdir, apks[0])
@ -721,11 +726,13 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
for locale in locales:
apk_url = self.query_upload_url(locale)
if not apk_url:
self.add_failure(locale, message="Failed to detect %s url in make upload!" % (locale))
self.add_failure(locale, message="Failed to detect %s url in make upload!" %
(locale))
balrogReady = False
continue
if not balrogReady:
return self.fatal(message="Not all repacks successful, abort without submitting to balrog")
return self.fatal(message="Not all repacks successful, abort without "
"submitting to balrog.")
env = self.query_upload_env()
for locale in locales:
@ -740,15 +747,15 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
self.set_buildbot_property("completeMarUrl", apk_url)
# The Balrog submitter translates this platform into a build target
# via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
# via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23 # noqa
self.set_buildbot_property(
"platform",
self.buildbot_config["properties"]["platform"])
#TODO: Is there a better way to get this?
# TODO: Is there a better way to get this?
# Set other necessary properties for Balrog submission. None need to
# be passed back to buildbot, so we won't write them to the properties
#files.
# files.
self.set_buildbot_property("locale", locale)
self.set_buildbot_property("appVersion", self.query_version())
@ -778,6 +785,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
if not self.query_is_nightly():
self.submit_balrog_release_pusher(dirs)
# main {{{1
if __name__ == '__main__':
single_locale = MobileSingleLocale()

View file

@ -173,7 +173,8 @@ class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
total_count += 1
if not self.download_file(url, file_path):
self.add_failure(platform, locale,
message="Unable to download %(platform)s:%(locale)s installer!")
message="Unable to "
"download %(platform)s:%(locale)s installer!")
else:
success_count += 1
self.summarize_success_count(success_count, total_count,
@ -237,19 +238,23 @@ class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
success_count = total_count = 0
for platform in c['platforms']:
for locale in locales:
installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
installer_name = c['installer_base_names'][platform] % \
{'version': rc['version'], 'locale': locale}
if self.query_failure(platform, locale):
self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
continue
original_path = '%s/original/%s/%s/%s' % (dirs['abs_work_dir'], platform, locale, installer_name)
original_path = '%s/original/%s/%s/%s' % \
(dirs['abs_work_dir'], platform, locale, installer_name)
for partner in c['partner_config'].keys():
repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % \
(dirs['abs_work_dir'], partner, platform, locale, installer_name)
total_count += 1
if self._repack_apk(partner, original_path, repack_path):
success_count += 1
else:
self.add_failure(platform, locale,
message="Unable to repack %(platform)s:%(locale)s installer!")
message="Unable to repack %(platform)s:%(locale)s "
"installer!")
self.summarize_success_count(success_count, total_count,
message="Repacked %d of %d installers successfully.")
@ -287,13 +292,16 @@ class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
success_count = total_count = 0
for platform in c['platforms']:
for locale in locales:
installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
installer_name = c['installer_base_names'][platform] % \
{'version': rc['version'], 'locale': locale}
if self.query_failure(platform, locale):
self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
continue
for partner in c['partner_config'].keys():
unsigned_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
signed_dir = '%s/partner-repacks/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale)
unsigned_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % \
(dirs['abs_work_dir'], partner, platform, locale, installer_name)
signed_dir = '%s/partner-repacks/%s/%s/%s' % \
(dirs['abs_work_dir'], partner, platform, locale)
signed_path = "%s/%s" % (signed_dir, installer_name)
total_count += 1
self.info("Signing %s %s." % (platform, locale))
@ -303,7 +311,8 @@ class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
if self.sign_apk(unsigned_path, c['keystore'],
self.store_passphrase, self.key_passphrase,
c['key_alias']) != 0:
self.add_summary("Unable to sign %s:%s apk!" % (platform, locale), level=FATAL)
self.add_summary("Unable to sign %s:%s apk!" % (platform, locale),
level=FATAL)
else:
self.mkdir_p(signed_dir)
if self.align_apk(unsigned_path, signed_path):

View file

@ -75,34 +75,34 @@ class AntivirusScan(BaseScript, VirtualenvMixin):
def __init__(self):
BaseScript.__init__(self,
config_options=self.config_options,
require_config_file=False,
config={
"virtualenv_modules": [
"pip==1.5.5",
"boto",
"redo",
"mar",
],
"virtualenv_path": "venv",
},
all_actions=[
"create-virtualenv",
"activate-virtualenv",
"get-extract-script",
"get-files",
"scan-files",
"cleanup-cache",
],
default_actions=[
"create-virtualenv",
"activate-virtualenv",
"get-extract-script",
"get-files",
"scan-files",
"cleanup-cache",
],
)
config_options=self.config_options,
require_config_file=False,
config={
"virtualenv_modules": [
"pip==1.5.5",
"boto",
"redo",
"mar",
],
"virtualenv_path": "venv",
},
all_actions=[
"create-virtualenv",
"activate-virtualenv",
"get-extract-script",
"get-files",
"scan-files",
"cleanup-cache",
],
default_actions=[
"create-virtualenv",
"activate-virtualenv",
"get-extract-script",
"get-files",
"scan-files",
"cleanup-cache",
],
)
self.excludes = self.config.get('excludes', self.DEFAULT_EXCLUDES)
self.dest_dir = self.CACHE_DIR
@ -122,8 +122,8 @@ class AntivirusScan(BaseScript, VirtualenvMixin):
def get_extract_script(self):
"""Gets a copy of extract_and_run_command.py from tools, and the supporting mar.py,
so that we can unpack various files for clam to scan them."""
remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py".format(self.config["tools_repo"],
self.config["tools_revision"])
remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py"\
.format(self.config["tools_repo"], self.config["tools_revision"])
self.download_file(remote_file, file_name="extract_and_run_command.py")
def get_files(self):
@ -166,7 +166,8 @@ class AntivirusScan(BaseScript, VirtualenvMixin):
if self._matches_exclude(keyname):
self.debug("Excluding {}".format(keyname))
else:
destination = os.path.join(self.dest_dir, keyname.replace(candidates_prefix, ''))
destination = os.path.join(self.dest_dir,
keyname.replace(candidates_prefix, ''))
dest_dir = os.path.dirname(destination)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)

View file

@ -137,7 +137,7 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
'generate-candidates-manifest',
'refresh-antivirus',
'verify-bits', # beets
'download-bits', # beets
'download-bits', # beets
'scan-bits', # beets
'upload-bits', # beets
],
@ -145,7 +145,8 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
# Default configuration
'config': {
# base index url where to find taskcluster artifact based on taskid
"artifact_base_url": 'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
"artifact_base_url": \
'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
"virtualenv_modules": [
"boto",
"PyYAML",
@ -157,7 +158,7 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
"virtualenv_path": "venv",
},
}
#todo do excludes need to be configured via command line for specific builds?
# todo do excludes need to be configured via command line for specific builds?
super(BeetMover, self).__init__(**beetmover_kwargs)
c = self.config
@ -166,7 +167,8 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
self.virtualenv_imports = None
self.bucket = c['bucket']
if not all(aws_creds):
self.fatal('credentials must be passed in env: "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
self.fatal('credentials must be passed in env: '
'"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
self.aws_key_id, self.aws_secret_key = aws_creds
# if excludes is set from command line, use it otherwise use defaults
self.excludes = self.config.get('excludes', DEFAULT_EXCLUDES)
@ -256,7 +258,8 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
for locale in self.manifest['mapping']:
for deliverable in self.manifest['mapping'][locale]:
self.log("downloading '{}' deliverable for '{}' locale".format(deliverable, locale))
self.log("downloading '{}' deliverable for '{}' locale".format(deliverable,
locale))
source = self.manifest['mapping'][locale][deliverable]['artifact']
self.retry(
self.download_file,
@ -288,7 +291,8 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
# we have already downloaded the files locally so we can use that version
source = self.manifest['mapping'][locale][deliverable]['artifact']
s3_key = self.manifest['mapping'][locale][deliverable]['s3_key']
downloaded_file = os.path.join(dirs['abs_work_dir'], self.get_filename_from_url(source))
downloaded_file = os.path.join(dirs['abs_work_dir'],
self.get_filename_from_url(source))
# generate checksums for every uploaded file
beet_file_name = '{}.beet'.format(downloaded_file)
# upload checksums to a separate subdirectory
@ -310,7 +314,6 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
bucket=bucket)
self.log('Success!')
def upload_bit(self, source, s3_key, bucket):
boto = self.virtualenv_imports['boto']
self.info('uploading to s3 with key: {}'.format(s3_key))
@ -324,8 +327,8 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
key = bucket.new_key(s3_key)
# set key value
mime_type, _ = mimetypes.guess_type(source)
self.retry(lambda: key.set_contents_from_filename(source, headers={'Content-Type': mime_type}),
error_level=FATAL),
self.retry(lambda: key.set_contents_from_filename(
source, headers={'Content-Type': mime_type}), error_level=FATAL),
else:
if not get_hash(key.get_contents_as_string()) == get_hash(open(source).read()):
# for now, let's halt. If necessary, we can revisit this and allow for overwrites
@ -337,14 +340,16 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
dirs = self.query_abs_dirs()
filenames = [f for f in listdir(dirs['abs_work_dir']) if isfile(join(dirs['abs_work_dir'], f))]
filenames = [f for f in listdir(dirs['abs_work_dir'])
if isfile(join(dirs['abs_work_dir'], f))]
self.mkdir_p(self.dest_dir)
for file_name in filenames:
if self._matches_exclude(file_name):
self.info("Excluding {} from virus scan".format(file_name))
else:
self.info('Copying {} to {}'.format(file_name,self.dest_dir))
self.copyfile(os.path.join(dirs['abs_work_dir'], file_name), os.path.join(self.dest_dir,file_name))
self.info('Copying {} to {}'.format(file_name, self.dest_dir))
self.copyfile(os.path.join(dirs['abs_work_dir'], file_name),
os.path.join(self.dest_dir, file_name))
self._scan_files()
self.info('Emptying {}'.format(self.dest_dir))
self.rmtree(self.dest_dir)
@ -352,20 +357,22 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
def _scan_files(self):
"""Scan the files we've collected. We do the download and scan concurrently to make
it easier to have a coherent log afterwards. Uses the venv python."""
external_tools_path = os.path.join(
os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))), 'external_tools')
self.run_command([self.query_python_path(), os.path.join(external_tools_path,'extract_and_run_command.py'),
'-j{}'.format(self.config['scan_parallelization']),
'clamscan', '--no-summary', '--', self.dest_dir])
external_tools_path = os.path.join(os.path.abspath(os.path.dirname(
os.path.dirname(mozharness.__file__))), 'external_tools')
self.run_command([self.query_python_path(), os.path.join(external_tools_path,
'extract_and_run_command.py'),
'-j{}'.format(self.config['scan_parallelization']),
'clamscan', '--no-summary', '--', self.dest_dir])
def _matches_exclude(self, keyname):
return any(re.search(exclude, keyname) for exclude in self.excludes)
return any(re.search(exclude, keyname) for exclude in self.excludes)
def mime_fix(self):
""" Add mimetypes for custom extensions """
mimetypes.init()
map(lambda (ext, mime_type,): mimetypes.add_type(mime_type, ext), MIME_MAP.items())
if __name__ == '__main__':
beet_mover = BeetMover(pop_aws_auth_from_env())
beet_mover.run_and_exit()


@ -16,27 +16,31 @@ from mozharness.mozilla.signing import SigningMixin
from mozharness.mozilla.buildbot import BuildbotMixin
from mozharness.mozilla.merkle import MerkleTree
class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, BuildbotMixin):
config_options = [
[["--stage-product"], {
"dest": "stage_product",
"help": "Name of product used in file server's directory structure, eg: firefox, mobile",
"help": "Name of product used in file server's directory structure, "
"e.g.: firefox, mobile",
}],
[["--version"], {
"dest": "version",
"help": "Version of release, eg: 39.0b5",
"help": "Version of release, e.g.: 39.0b5",
}],
[["--build-number"], {
"dest": "build_number",
"help": "Build number of release, eg: 2",
"help": "Build number of release, e.g.: 2",
}],
[["--bucket-name-prefix"], {
"dest": "bucket_name_prefix",
"help": "Prefix of bucket name, eg: net-mozaws-prod-delivery. This will be used to generate a full bucket name (such as net-mozaws-prod-delivery-{firefox,archive}.",
"help": "Prefix of bucket name, e.g.: net-mozaws-prod-delivery. This will be used to "
"generate a full bucket name (such as "
"net-mozaws-prod-delivery-{firefox,archive}.",
}],
[["--bucket-name-full"], {
"dest": "bucket_name_full",
"help": "Full bucket name, eg: net-mozaws-prod-delivery-firefox",
"help": "Full bucket name, e.g.: net-mozaws-prod-delivery-firefox",
}],
[["-j", "--parallelization"], {
"dest": "parallelization",
@ -54,7 +58,8 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
"dest": "includes",
"default": [],
"action": "append",
"help": "List of patterns to include in big checksums file. See script source for default.",
"help": "List of patterns to include in big checksums file. See script "
"source for default.",
}],
[["--tools-repo"], {
"dest": "tools_repo",
@ -68,34 +73,34 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
def __init__(self):
BaseScript.__init__(self,
config_options=self.config_options,
require_config_file=False,
config={
"virtualenv_modules": [
"pip==1.5.5",
"boto",
],
"virtualenv_path": "venv",
'buildbot_json_path': 'buildprops.json',
},
all_actions=[
"create-virtualenv",
"collect-individual-checksums",
"create-big-checksums",
"create-summary",
"sign",
"upload",
"copy-info-files",
],
default_actions=[
"create-virtualenv",
"collect-individual-checksums",
"create-big-checksums",
"create-summary",
"sign",
"upload",
],
)
config_options=self.config_options,
require_config_file=False,
config={
"virtualenv_modules": [
"pip==1.5.5",
"boto",
],
"virtualenv_path": "venv",
'buildbot_json_path': 'buildprops.json',
},
all_actions=[
"create-virtualenv",
"collect-individual-checksums",
"create-big-checksums",
"create-summary",
"sign",
"upload",
"copy-info-files",
],
default_actions=[
"create-virtualenv",
"collect-individual-checksums",
"create-big-checksums",
"create-summary",
"sign",
"upload",
],
)
self.checksums = {}
self.bucket = None
@ -144,7 +149,8 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
return self.config['bucket_name_full']
suffix = "archive"
# Firefox has a special bucket, per https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
# Firefox has a special bucket, per
# https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
if self.config["stage_product"] == "firefox":
suffix = "firefox"
@ -188,6 +194,7 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
# Temporary holding place for checksums
raw_checksums = []
def worker(item):
self.debug("Downloading {}".format(item))
# TODO: It would be nice to download the associated .asc file
@ -222,7 +229,8 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
for pattern in self.config["includes"]:
if re.search(pattern, f):
if f in self.checksums:
self.fatal("Found duplicate checksum entry for {}, don't know which one to pick.".format(f))
self.fatal("Found duplicate checksum entry for {}, "
"don't know which one to pick.".format(f))
if not set(self.config["formats"]) <= set(info["hashes"]):
self.fatal("Missing necessary format for file {}".format(f))
self.debug("Adding checksums for file: {}".format(f))
@ -244,7 +252,8 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
tree = MerkleTree(hash_fn, data)
head = tree.head().encode("hex")
proofs = [tree.inclusion_proof(i).to_rfc6962_bis().encode("hex") for i in range(len(files))]
proofs = [tree.inclusion_proof(i).to_rfc6962_bis().encode("hex")
for i in range(len(files))]
summary = self._get_summary_filename(fmt)
self.info("Creating summary file: {}".format(summary))


@ -185,6 +185,7 @@ class PostReleaseVersionBump(MercurialScript, BuildbotMixin,
revision=self.config["revision"], message=message,
user=self.config["hg_user"], force=True)
# __main__ {{{1
if __name__ == '__main__':
PostReleaseVersionBump().run_and_exit()


@ -166,7 +166,6 @@ class PublishBalrog(MercurialScript, BuildbotMixin):
error_level=FATAL)
# __main__ {{{1
if __name__ == '__main__':
PublishBalrog().run_and_exit()


@ -66,27 +66,27 @@ class ReleasePusher(BaseScript, VirtualenvMixin):
def __init__(self, aws_creds):
BaseScript.__init__(self,
config_options=self.config_options,
require_config_file=False,
config={
"virtualenv_modules": [
"pip==1.5.5",
"boto",
"redo",
],
"virtualenv_path": "venv",
},
all_actions=[
"create-virtualenv",
"activate-virtualenv",
"push-to-releases",
],
default_actions=[
"create-virtualenv",
"activate-virtualenv",
"push-to-releases",
],
)
config_options=self.config_options,
require_config_file=False,
config={
"virtualenv_modules": [
"pip==1.5.5",
"boto",
"redo",
],
"virtualenv_path": "venv",
},
all_actions=[
"create-virtualenv",
"activate-virtualenv",
"push-to-releases",
],
default_actions=[
"create-virtualenv",
"activate-virtualenv",
"push-to-releases",
],
)
# validate aws credentials
if not (all(aws_creds) or self.config.get('credentials')):
@ -152,11 +152,12 @@ class ReleasePusher(BaseScript, VirtualenvMixin):
def copy_key():
source_key = bucket.get_key(source)
dest_key = bucket.get_key(destination)
# According to http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
# According to
# http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
# S3 key MD5 is represented as ETag, except when objects are
# uploaded using multipart method. In this case the object's ETag
# is constructed using its MD5, a minus symbol, and the number of
# parts. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823
# parts. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823 # noqa
source_md5 = source_key.etag.split("-")[0]
if dest_key:
dest_md5 = dest_key.etag.split("-")[0]
@ -173,7 +174,8 @@ class ReleasePusher(BaseScript, VirtualenvMixin):
destination, dest_md5))
else:
self.fatal(
"{} already exists with the different content (src ETag: {}, dest ETag: {}), aborting".format(
"{} already exists with the different content "
"(src ETag: {}, dest ETag: {}), aborting".format(
destination, source_key.etag, dest_key.etag))
return retry(copy_key, sleeptime=5, max_sleeptime=60,
@ -195,6 +197,7 @@ class ReleasePusher(BaseScript, VirtualenvMixin):
pool = ThreadPool(self.config["parallelization"])
pool.map(worker, find_release_files())
if __name__ == "__main__":
myScript = ReleasePusher(pop_aws_auth_from_env())
myScript.run_and_exit()
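
The ETag comparison in copy_key above leans on the behaviour described in its comment: for a single-part upload an S3 key's ETag is just the hex MD5 of the object, while multipart uploads append a "-<part count>" suffix, so only the portion before the dash is comparable. As a minimal standalone sketch of that check (not part of the patch; the quote-stripping reflects how boto typically exposes the ETag string and is an assumption here):

import hashlib

def md5_matches_etag(local_path, etag):
    """Return True if a local file's MD5 matches a (non-multipart) S3 ETag."""
    # boto exposes the ETag with surrounding quotes; multipart uploads also
    # append "-<number of parts>", so keep only the leading MD5 portion.
    etag_md5 = etag.strip('"').split("-")[0]
    with open(local_path, "rb") as f:
        local_md5 = hashlib.md5(f.read()).hexdigest()
    return local_md5 == etag_md5

For a multipart upload the suffix means the ETag is no longer a plain MD5, which is why the script only ever compares the part of the ETag before the dash.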


@ -16,7 +16,8 @@ class CTSubmitter(BaseScript, VirtualenvMixin):
config_options = [
[["--chain"], {
"dest": "chain",
"help": "URL from which to download the cert chain to be submitted to CT (in PEM format)"
"help": "URL from which to download the cert chain to be "
"submitted to CT (in PEM format)"
}],
[["--log"], {
"dest": "log",
@ -30,19 +31,19 @@ class CTSubmitter(BaseScript, VirtualenvMixin):
def __init__(self):
BaseScript.__init__(self,
config_options=self.config_options,
config={
"virtualenv_modules": [
"pem",
"redo",
"requests",
],
"virtualenv_path": "venv",
},
require_config_file=False,
all_actions=["add-chain"],
default_actions=["add-chain"],
)
config_options=self.config_options,
config={
"virtualenv_modules": [
"pem",
"redo",
"requests",
],
"virtualenv_path": "venv",
},
require_config_file=False,
all_actions=["add-chain"],
default_actions=["add-chain"],
)
self.chain_url = self.config["chain"]
self.log_url = self.config["log"]
@ -60,7 +61,7 @@ class CTSubmitter(BaseScript, VirtualenvMixin):
chain = retry(get_chain)
req = { "chain": [] }
req = {"chain": []}
chain = pem.parse(chain)
for i in range(len(chain)):
cert = crypto.load_certificate(crypto.FILETYPE_PEM, str(chain[i]))
@ -76,6 +77,7 @@ class CTSubmitter(BaseScript, VirtualenvMixin):
sct = SignedCertificateTimestamp(resp)
self.write_to_file(self.sct_filename, sct.to_rfc6962())
if __name__ == "__main__":
myScript = CTSubmitter()
myScript.run_and_exit()

testing/mozharness/scripts/release/updates.py Normal file → Executable file

@ -94,8 +94,7 @@ class UpdatesBumper(MercurialScript, BuildbotMixin,
# TODO: version and appVersion should come from repo
props = self.buildbot_config["properties"]
for prop in ['product', 'version', 'build_number', 'revision',
'appVersion', 'balrog_api_root', "channels",
'generate_bz2_blob']:
'appVersion', 'balrog_api_root', "channels"]:
if props.get(prop):
self.info("Overriding %s with %s" % (prop, props[prop]))
self.config[prop] = props.get(prop)
@ -270,10 +269,6 @@ class UpdatesBumper(MercurialScript, BuildbotMixin,
def submit_to_balrog(self):
for _, channel_config in self.query_channel_configs():
self._submit_to_balrog(channel_config)
if 'generate_bz2_blob' in self.config and \
self.config['generate_bz2_blob']:
for _, channel_config in self.query_channel_configs():
self._submit_to_balrog_bz2(channel_config)
def _submit_to_balrog(self, channel_config):
dirs = self.query_abs_dirs()
@ -311,59 +306,6 @@ class UpdatesBumper(MercurialScript, BuildbotMixin,
self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
def _submit_to_balrog_bz2(self, channel_config):
if "bz2_blob_suffix" not in channel_config:
self.info("No need to generate BZ2 blob")
return
dirs = self.query_abs_dirs()
# Use env variable instead of command line to avoid issues with blob
# names starting with "-", e.g. "-bz2"
env = {"BALROG_BLOB_SUFFIX": channel_config["bz2_blob_suffix"]}
auth = os.path.join(os.getcwd(), self.config['credentials_file'])
cmd = [
sys.executable,
os.path.join(dirs["abs_tools_dir"],
"scripts/build-promotion/balrog-release-pusher.py")]
cmd.extend([
"--api-root", self.config["balrog_api_root"],
"--download-domain", self.config["download_domain"],
"--archive-domain", self.config["archive_domain"],
"--credentials-file", auth,
"--product", self.config["product"],
"--version", self.config["version"],
"--build-number", str(self.config["build_number"]),
"--app-version", self.config["appVersion"],
"--username", self.config["balrog_username"],
"--complete-mar-filename-pattern",
channel_config["complete_mar_filename_pattern"],
"--complete-mar-bouncer-product-pattern",
channel_config["complete_mar_bouncer_product_pattern"],
"--verbose",
])
for v, build_number in self.query_matching_partials(channel_config):
if v < "56.0":
self.info("Adding %s to partials" % v)
partial = "{version}build{build_number}".format(
version=v, build_number=build_number)
cmd.extend(["--partial-update", partial])
else:
self.info("Not adding %s to partials" % v)
for c in channel_config["channel_names"]:
cmd.extend(["--channel", c])
for r in channel_config["bz2_rules_to_update"]:
cmd.extend(["--rule-to-update", r])
for p in self.config["platforms"]:
cmd.extend(["--platform", p])
if channel_config["requires_mirrors"]:
cmd.append("--requires-mirrors")
if self.config["balrog_use_dummy_suffix"]:
cmd.append("--dummy")
self.retry(lambda: self.run_command(cmd, halt_on_failure=True, env=env))
# __main__ {{{1
if __name__ == '__main__':


@ -151,7 +151,8 @@ class SpidermonkeyBuild(MockMixin,
if self.buildbot_config:
bb_props = [('mock_target', 'mock_target', None),
('hgurl', 'hgurl', None),
('clobberer_url', 'clobberer_url', 'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
('clobberer_url', 'clobberer_url',
'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
('force_clobber', 'force_clobber', None),
('branch', 'blob_upload_branch', None),
]
@ -166,7 +167,7 @@ class SpidermonkeyBuild(MockMixin,
dirs = self.query_abs_dirs()
replacements = self.config['env_replacements'].copy()
for k,v in replacements.items():
for k, v in replacements.items():
replacements[k] = v % dirs
self.env = self.query_env(replace_dict=replacements,
@ -236,14 +237,16 @@ class SpidermonkeyBuild(MockMixin,
def query_compiler_manifest(self):
dirs = self.query_abs_dirs()
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['compiler_manifest'])
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'],
self.config['compiler_manifest'])
if os.path.exists(manifest):
return manifest
return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])
def query_sixgill_manifest(self):
dirs = self.query_abs_dirs()
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'],
self.config['sixgill_manifest'])
if os.path.exists(manifest):
return manifest
return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])


@ -18,7 +18,7 @@ GECKO_SRCDIR = os.path.join(os.path.expanduser('~'), 'checkouts', 'gecko')
TELEMETRY_TEST_HOME = os.path.join(GECKO_SRCDIR, 'toolkit', 'components', 'telemetry',
'tests', 'marionette')
from mozharness.base.python import PostScriptRun, PreScriptAction
from mozharness.base.python import PreScriptAction
from mozharness.mozilla.structuredlog import StructuredOutputParser
from mozharness.mozilla.testing.testbase import (
TestingMixin,
@ -36,7 +36,8 @@ telemetry_tests_config_options = [
"action": "store_true",
"dest": "allow_software_gl_layers",
"default": False,
"help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor.",
"help": "Permits a software GL implementation (such as LLVMPipe) "
"to use the GL compositor.",
}],
[["--enable-webrender"], {
"action": "store_true",
@ -101,7 +102,6 @@ class TelemetryTests(TestingMixin, VCSToolsScript, CodeCoverageMixin):
@PreScriptAction('create-virtualenv')
def _pre_create_virtualenv(self, action):
dirs = self.query_abs_dirs()
requirements = os.path.join(GECKO_SRCDIR, 'testing',
'config', 'telemetry_tests_requirements.txt')
@ -158,7 +158,8 @@ class TelemetryTests(TestingMixin, VCSToolsScript, CodeCoverageMixin):
strict=False)
# Add the default tests to run
tests = [os.path.join(dirs['abs_telemetry_dir'], 'tests', test) for test in self.default_tests]
tests = [os.path.join(dirs['abs_telemetry_dir'], 'tests', test)
for test in self.default_tests]
cmd.extend(tests)
# Set further environment settings


@ -5,8 +5,6 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
import copy
import glob
import json
import os
import sys
@ -17,7 +15,7 @@ from mozharness.base.errors import BaseErrorList
from mozharness.base.script import PreScriptAction
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options, TOOLTOOL_PLATFORM_DIR
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
from mozharness.mozilla.testing.codecoverage import (
CodeCoverageMixin,
code_coverage_config_options
@ -27,6 +25,7 @@ from mozharness.mozilla.testing.errors import HarnessErrorList
from mozharness.mozilla.structuredlog import StructuredOutputParser
from mozharness.base.log import INFO
class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCoverageMixin):
config_options = [
[['--test-type'], {
@ -54,7 +53,8 @@ class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCovera
"action": "store_true",
"dest": "allow_software_gl_layers",
"default": False,
"help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}
"help": "Permits a software GL implementation (such as LLVMPipe) "
"to use the GL compositor."}
],
[["--enable-webrender"], {
"action": "store_true",
@ -168,7 +168,7 @@ class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCovera
def _query_cmd(self):
if not self.binary_path:
self.fatal("Binary path could not be determined")
#And exit
# And exit
c = self.config
dirs = self.query_abs_dirs()
@ -266,7 +266,8 @@ class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCovera
if not sys.platform.startswith("darwin"):
font_path = os.path.join(os.path.dirname(self.binary_path), "fonts")
else:
font_path = os.path.join(os.path.dirname(self.binary_path), os.pardir, "Resources", "res", "fonts")
font_path = os.path.join(os.path.dirname(self.binary_path), os.pardir,
"Resources", "res", "fonts")
if not os.path.exists(font_path):
os.makedirs(font_path)
ahem_src = os.path.join(dirs["abs_wpttest_dir"], "tests", "fonts", "Ahem.ttf")


@ -2,10 +2,6 @@
flake8:
description: Python linter
include:
- build/moz.configure/*.configure
- build/*.py
- configure.py
- config/check_macroassembler_style.py
- config/mozunit.py
- layout/tools/reftest
- python/mach
@ -20,8 +16,11 @@ flake8:
- testing/marionette/client
- testing/marionette/harness
- testing/marionette/puppeteer
- testing/mozbase
- testing/mochitest
- testing/mozbase
- testing/mozharness/mozfile
- testing/mozharness/mozinfo
- testing/mozharness/scripts
- testing/remotecppunittests.py
- testing/runcppunittests.py
- testing/talos/
@ -34,7 +33,6 @@ flake8:
# Excludes should be added to topsrcdir/.flake8 due to a bug in flake8 where
# specifying --exclude causes custom configuration files to be ignored.
exclude: []
# The configure option is used by the build system
extensions: ['configure', 'py']
extensions: ['py']
type: external
payload: python.flake8:lint
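
For reference, the flake8 checks that mozlint drives from this configuration can also be reproduced directly through flake8's public legacy API; the snippet below is a minimal sketch (it assumes flake8 3.x is installed and is run from the checkout root, and it is not how the in-tree mach lint wrapper invokes the linter):

from flake8.api import legacy as flake8

# Paths mirror the include entries added above; adjust as needed.
paths = [
    "testing/mozharness/mozfile",
    "testing/mozharness/mozinfo",
    "testing/mozharness/scripts",
]
style_guide = flake8.get_style_guide()
report = style_guide.check_files(paths)
print("flake8 reported %d issue(s)" % report.total_errors)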