Backed out 9 changesets (bug 1630809, bug 1653476) for Gecko Decision failures. CLOSED TREE

Backed out changeset 02a27bfc76dd (bug 1653476)
Backed out changeset afb5df61943a (bug 1630809)
Backed out changeset 04628c1f98e9 (bug 1630809)
Backed out changeset 4b4d50e0b1bf (bug 1630809)
Backed out changeset 2fa2deb5c993 (bug 1630809)
Backed out changeset d6652114cac3 (bug 1630809)
Backed out changeset ad5e4caa3291 (bug 1630809)
Backed out changeset d3d841cd14f3 (bug 1630809)
Backed out changeset b3746502e227 (bug 1630809)
This commit is contained in:
Butkovits Atila 2020-08-28 01:15:03 +03:00
Parent 6fe4fd09b5
Commit b8629b8d1e
28 changed files: 108 additions and 1794 deletions

View file

@ -1,191 +0,0 @@
#! /usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
import argparse
import logging
import mmap
import json
import os
import shutil
import struct
import sys
import tempfile
import urllib.parse
# Module-level logger: INFO by default, message-only format suitable for CI logs.
logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
log = logging.getLogger()
def write_attribution_data(filepath, data):
    """Insert data into a prepared certificate in a signed PE file.

    Returns False if the file isn't a valid PE file, or if the necessary
    certificate was not found.

    This function assumes that somewhere in the given file's certificate table
    there exists a 1024-byte space which begins with the tag "__MOZCUSTOM__:".
    The given data will be inserted into the file following this tag.

    We don't bother updating the optional header checksum.
    Windows doesn't check it for executables, only drivers and certain DLL's.
    """
    with open(filepath, "r+b") as file:
        # Use the mmap as a context manager so the mapping is flushed and
        # closed on every exit path (the original leaked it on success).
        with mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_WRITE) as mapped:
            # Get the location of the PE header and the optional header
            pe_header_offset = struct.unpack("<I", mapped[0x3C:0x40])[0]
            optional_header_offset = pe_header_offset + 24
            # Look up the magic number in the optional header,
            # so we know if we have a 32 or 64-bit executable.
            # We need to know that so that we can find the data directories.
            pe_magic_number = struct.unpack(
                "<H", mapped[optional_header_offset : optional_header_offset + 2]
            )[0]
            if pe_magic_number == 0x10B:
                # 32-bit
                cert_dir_entry_offset = optional_header_offset + 128
            elif pe_magic_number == 0x20B:
                # 64-bit. Certain header fields are wider.
                cert_dir_entry_offset = optional_header_offset + 144
            else:
                # Not any known PE format
                return False

            # The certificate table offset and length give us the valid range
            # to search through for where we should put our data.
            cert_table_offset = struct.unpack(
                "<I", mapped[cert_dir_entry_offset : cert_dir_entry_offset + 4]
            )[0]
            cert_table_size = struct.unpack(
                "<I", mapped[cert_dir_entry_offset + 4 : cert_dir_entry_offset + 8]
            )[0]
            if cert_table_offset == 0 or cert_table_size == 0:
                # The file isn't signed
                return False

            tag = b"__MOZCUSTOM__:"
            tag_index = mapped.find(
                tag, cert_table_offset, cert_table_offset + cert_table_size
            )
            if tag_index == -1:
                return False

            # convert to quoted-url byte-string for insertion
            data = urllib.parse.quote(data).encode("utf-8")
            mapped[tag_index + len(tag) : tag_index + len(tag) + len(data)] = data
            return True
def validate_attribution_code(attribution):
    """Sanity-check a partner attribution string.

    Logs every problem found and returns True only when the code is
    non-empty, within the length limit, and uses only the allowed,
    unrepeated query keys with all required keys present.
    """
    log.info("Checking attribution %s" % attribution)
    valid = True

    if len(attribution) == 0:
        log.error("Attribution code has 0 length")
        return False

    # Set to match https://searchfox.org/mozilla-central/rev/a92ed79b0bc746159fc31af1586adbfa9e45e264/browser/components/attribution/AttributionCode.jsm#24 # noqa
    MAX_LENGTH = 1010
    if len(attribution) > MAX_LENGTH:
        log.error("Attribution code longer than %s chars" % MAX_LENGTH)
        valid = False

    allowed_keys = (
        "source",
        "medium",
        "campaign",
        "content",
        "experiment",
        "variation",
        "ua",
    )
    # parse_qsl leaves out empty values like 'foo='
    seen_keys = set()
    for key, _value in urllib.parse.parse_qsl(attribution):
        if key not in allowed_keys:
            log.error("Invalid key %s" % key)
            valid = False
        # avoid ambiguity from repeated keys
        if key in seen_keys:
            log.error("Repeated key %s" % key)
            valid = False
        seen_keys.add(key)

    # TODO the service checks for valid source, should we do that here too ?
    # some keys are required
    for required in ("source", "medium", "campaign", "content"):
        if required not in seen_keys:
            log.error("key '%s' must be set, use '(not set)' if not needed" % required)
            valid = False

    return valid
def main():
    """Attribute one or more Windows installers.

    Work items come from the ATTRIBUTION_CONFIG environment variable (a JSON
    list) or, failing that, from the --input/--output/--attribution arguments.
    Returns 1 on configuration or validation failure, None (exit 0) otherwise.
    """
    parser = argparse.ArgumentParser(
        description="Add attribution to Windows installer(s).",
        epilog="""
By default, configuration from envvar ATTRIBUTION_CONFIG is used, with
expected format
[{"input": "in/abc.exe", "output": "out/def.exe", "attribution": "abcdef"},
{"input": "in/ghi.exe", "output": "out/jkl.exe", "attribution": "ghijkl"}]
for 1 or more attributions. Or the script arguments may be used for a single attribution.
The attribution code should be a string which is not url-encoded.
""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument("--input", help="Source installer to attribute a copy of")
    parser.add_argument("--output", help="Location to write the attributed installer")
    parser.add_argument("--attribution", help="Attribution code")
    args = parser.parse_args()

    # The env var wins over command-line arguments when both are present.
    if os.environ.get("ATTRIBUTION_CONFIG"):
        work = json.loads(os.environ["ATTRIBUTION_CONFIG"])
    elif args.input and args.output and args.attribution:
        work = [
            {
                "input": args.input,
                "output": args.output,
                "attribution": args.attribution,
            }
        ]
    else:
        log.error("No configuration found. Set ATTRIBUTION_CONFIG or pass arguments.")
        return 1

    # Validate each distinct attribution code only once across all jobs.
    cached_code_checks = []
    for job in work:
        if job["attribution"] not in cached_code_checks:
            status = validate_attribution_code(job["attribution"])
            if status:
                cached_code_checks.append(job["attribution"])
            else:
                log.error("Failed attribution code check")
                return 1
        # Work on a temp copy so the source installer is never modified.
        with tempfile.TemporaryDirectory() as td:
            log.info("Attributing installer %s ..." % job["input"])
            tf = shutil.copy(job["input"], td)
            # NOTE(review): if write_attribution_data returns False the job is
            # silently skipped with no output written and exit status 0 —
            # confirm that is intended.
            if write_attribution_data(tf, job["attribution"]):
                os.makedirs(os.path.dirname(job["output"]), exist_ok=True)
                shutil.move(tf, job["output"])
                log.info("Wrote %s" % job["output"])


if __name__ == "__main__":
    sys.exit(main())

View file

@ -1,817 +0,0 @@
#!/usr/bin/env python
# Documentation: https://firefox-source-docs.mozilla.org/taskcluster/partner-repacks.html
import sys
import os
from os import path
import re
from shutil import copy, copytree, move
from subprocess import Popen
from optparse import OptionParser
import urllib.request
import urllib.parse
import logging
import json
import tarfile
import zipfile
from redo import retry
logging.basicConfig(stream=sys.stdout, level=logging.INFO,
                    format="%(asctime)-15s - %(levelname)s - %(message)s")
log = logging.getLogger(__name__)

# Set default values.
PARTNERS_DIR = path.join('..', '..', 'workspace', 'partners')
# No platform in this path because script only supports repacking a single platform at once
DEFAULT_OUTPUT_DIR = '%(partner)s/%(partner_distro)s/%(locale)s'
TASKCLUSTER_ARTIFACTS = (
    os.environ.get('TASKCLUSTER_ROOT_URL', 'https://firefox-ci-tc.services.mozilla.com')
    + '/api/queue/v1/task/{taskId}/artifacts'
)
# Artifact path templates. getArtifactNames() interpolates these with
# str.format(filename=..., locale=...), so the placeholders must be present;
# a fixed literal here would resolve every artifact to the same wrong name.
UPSTREAM_ENUS_PATH = 'public/build/{filename}'
UPSTREAM_L10N_PATH = 'public/build/{locale}/{filename}'
WINDOWS_DEST_DIR = 'firefox'
MAC_DEST_DIR = '{}/Contents/Resources'
LINUX_DEST_DIR = 'firefox'
BOUNCER_PRODUCT_TEMPLATE = 'partner-firefox-{release_type}-{partner}-{partner_distro}-latest'
class StrictFancyURLopener(urllib.request.FancyURLopener):
    """Unlike FancyURLopener this class raises exceptions for generic HTTP
    errors, like 404, 500. It reuses URLopener.http_error_default redefined in
    FancyURLopener"""

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        # Delegate to the strict base-class handler (which raises IOError)
        # instead of FancyURLopener's lenient override.
        urllib.request.URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                                    headers)
# Source:
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program):
    """Locate *program* like the shell `which`: return the path of an
    executable, either the given path itself or the first match on PATH,
    or None when nothing runnable is found."""
    def runnable(candidate):
        return path.exists(candidate) and os.access(candidate, os.X_OK)

    try:
        head = path.dirname(program)
    except AttributeError:
        return None

    if head:
        # An explicit path was given; accept it only if it is executable.
        if runnable(program):
            return program
    else:
        # Bare command name: scan every PATH entry in order.
        for directory in os.environ["PATH"].split(os.pathsep):
            candidate = path.join(directory, program)
            if runnable(candidate):
                return candidate
    return None
def rmdirRecursive(directory):
    """Recursively delete *directory* and everything beneath it.

    A replacement for shutil.rmtree that works better under Windows:
    read-only entries are made writable before removal, and symlinks are
    unlinked rather than followed. (Borrowed from buildbot.slave.commands,
    thanks to Bear at the OSAF.)
    """
    if not path.exists(directory):
        # A dangling symlink reports exists() False but still needs unlinking.
        if path.islink(directory):
            os.remove(directory)
        return
    if path.islink(directory):
        # Remove the link itself, never the tree it points at.
        os.remove(directory)
        return

    # Verify the directory is read/write/execute for the current user
    os.chmod(directory, 0o700)

    for entry in os.listdir(directory):
        entry_path = path.join(directory, entry)
        # On Windows a file without write permission can't be removed,
        # so grant it first.
        if os.name == 'nt' and not os.access(entry_path, os.W_OK):
            os.chmod(entry_path, 0o600)
        if path.isdir(entry_path):
            rmdirRecursive(entry_path)
        else:
            # Don't try to chmod links
            if not path.islink(entry_path):
                os.chmod(entry_path, 0o700)
            os.remove(entry_path)
    os.rmdir(directory)
def printSeparator():
    # Visual divider between per-partner sections in the log output.
    log.info("##################################################")
def shellCommand(cmd):
    """Run *cmd* through the shell; exit the whole script on failure.

    Returns True on success. On a non-zero exit status the decoded code is
    logged and the process terminates via sys.exit().
    """
    log.debug('Executing %s' % cmd)
    log.debug('in %s' % os.getcwd())
    # Shell command output gets dumped immediately to stdout, whereas
    # print statements get buffered unless we flush them explicitly.
    sys.stdout.flush()
    # NOTE(review): shell=True means cmd undergoes shell interpolation —
    # callers must pass only trusted strings.
    p = Popen(cmd, shell=True)
    # os.waitpid is POSIX-only; the exit code lives in the high byte of the
    # 16-bit status word.
    (_, ret) = os.waitpid(p.pid, 0)
    if ret != 0:
        ret_real = (ret & 0xFF00) >> 8
        log.error('Error: shellCommand had non-zero exit status: %d' %
                  ret_real)
        log.error('Command: %s' % cmd, exc_info=True)
        sys.exit(ret_real)
    return True
def mkdir(directory, mode=0o755):
    """Create *directory* (with parents) using *mode* when it doesn't exist.

    Returns True when the directory already exists; otherwise returns the
    result of os.makedirs (None).
    """
    if path.exists(directory):
        return True
    return os.makedirs(directory, mode)
def isLinux(platform):
    """True for any Linux platform string."""
    return 'linux' in platform


def isLinux32(platform):
    """True for 32-bit Linux platform strings (including bare 'linux')."""
    if platform == 'linux':
        return True
    return 'linux32' in platform or 'linux-i686' in platform


def isLinux64(platform):
    """True for 64-bit Linux platform strings."""
    return 'linux64' in platform or 'linux-x86_64' in platform


def isMac(platform):
    """True for mac platform strings."""
    return 'mac' in platform


def isWin(platform):
    """True for any Windows platform string."""
    return 'win' in platform


def isWin32(platform):
    """True for 32-bit Windows platform strings."""
    return 'win32' in platform


def isWin64(platform):
    """True only for the exact 'win64' platform string."""
    return platform == 'win64'


def isWin64Aarch64(platform):
    """True only for the exact 'win64-aarch64' platform string."""
    return platform == 'win64-aarch64'


def isValidPlatform(platform):
    """True when *platform* matches any platform this script can repack."""
    checks = (isLinux64, isLinux32, isMac, isWin64, isWin64Aarch64, isWin32)
    return any(check(platform) for check in checks)
def parseRepackConfig(filename, platform):
    """ Did you hear about this cool file format called yaml ? json ? Yeah, me neither

    Parse a partner repack.cfg (flat key="value" lines) and return a config
    dict when the file enables *platform*; otherwise returns None implicitly.
    """
    config = {}
    config['platforms'] = []
    # 'with' guarantees the file handle is closed (it was leaked before).
    with open(filename, 'r') as f:
        for line in f:
            line = line.rstrip("\n")
            # Ignore empty lines
            if line.strip() == "":
                continue
            # Ignore comments
            if line.startswith("#"):
                continue
            # Split on the first '=' only: values such as
            # replacement_setup_exe URLs may themselves contain '='.
            # The previous maxsplit=2 raised ValueError on such lines
            # (three items unpacked into two names).
            [key, value] = line.split('=', 1)
            value = value.strip('"')
            # strings that don't need special handling
            if key in ('dist_id', 'replacement_setup_exe'):
                config[key] = value
                continue
            # booleans that don't need special handling
            if key in ('migrationWizardDisabled', 'oem', 'repack_stub_installer'):
                if value.lower() == 'true':
                    config[key] = True
                continue
            # special cases
            if key == 'locales':
                config['locales'] = value.split(' ')
                continue
            if key.startswith("locale."):
                config[key] = value
                continue
            if key == 'deb_section':
                # Escape '/' (e.g. "net/web") for later sed-style use; a plain
                # replace avoids both the regex machinery and the invalid
                # '\/' string escape of the original.
                config['deb_section'] = value.replace('/', r'\/')
                continue
            if isValidPlatform(key):
                ftp_platform = getFtpPlatform(key)
                if ftp_platform == getFtpPlatform(platform) \
                        and value.lower() == 'true':
                    config['platforms'].append(ftp_platform)
                continue

    # this only works for one locale because setup.exe is localised
    if config.get('replacement_setup_exe') and len(config.get('locales', [])) > 1:
        log.error("Error: replacement_setup_exe is only supported for one locale, got %s" %
                  config['locales'])
        sys.exit(1)
    # also only works for one platform because setup.exe is platform-specific
    if config['platforms']:
        return config
def getFtpPlatform(platform):
    '''Returns the platform in the format used in building package names.
    Note: we rely on this code being idempotent
    i.e. getFtpPlatform(getFtpPlatform(foo)) should work
    '''
    # Order matters: the more specific 64-bit checks must run before the
    # generic substring checks.
    mapping = (
        (isLinux64, "linux-x86_64"),
        (isLinux, "linux-i686"),
        (isMac, "mac"),
        (isWin64Aarch64, "win64-aarch64"),
        (isWin64, "win64"),
        (isWin32, "win32"),
    )
    for predicate, ftp_name in mapping:
        if predicate(platform):
            return ftp_name
def getFileExtension(platform):
    ''' The extension for the output file, which may be passed to the internal-signing task
    '''
    if isLinux(platform):
        return "tar.bz2"
    if isMac(platform):
        return "tar.gz"
    if isWin(platform):
        return "zip"
    return None
def getFilename(platform):
    '''Returns the filename to be repacked for the platform,
    e.g. target.zip on Windows.
    '''
    extension = getFileExtension(platform)
    return "target.%s" % extension
def getAllFilenames(platform, repack_stub_installer):
    '''Returns the full list of filenames we want to download for each platform
    '''
    wanted = [getFilename(platform)]
    # we want to copy forward setup.exe from upstream tasks to make it easier to repackage
    # windows installers later
    if isWin(platform):
        wanted.append('setup.exe')
    # Same for the stub installer with setup-stub.exe, but only in win32 repack jobs
    if isWin32(platform) and repack_stub_installer:
        wanted.append('setup-stub.exe')
    return tuple(wanted)
def getTaskArtifacts(taskId):
    """Fetch and return the artifact list for *taskId* from the Taskcluster queue.

    Downloads the listing to tc_artifacts.json in the cwd. Logs and re-raises
    on a malformed or artifact-less response.
    """
    try:
        retrieveFile(TASKCLUSTER_ARTIFACTS.format(taskId=taskId), 'tc_artifacts.json')
        # Close the file handle promptly instead of leaking it
        # (the original passed a bare open() to json.load).
        with open('tc_artifacts.json') as f:
            tc_index = json.load(f)
        return tc_index['artifacts']
    except (ValueError, KeyError):
        log.error('Failed to get task artifacts from TaskCluster')
        raise
def getUpstreamArtifacts(upstream_tasks, repack_stub_installer):
    """Map each wanted artifact name to the taskId that provides it.

    Iterates *upstream_tasks*, keeps only artifacts whose names end with one
    of the filenames we repack, and exits with an error if two tasks provide
    the same artifact name.
    """
    useful_artifacts = getAllFilenames(options.platform, repack_stub_installer)

    artifact_ids = {}
    for taskId in upstream_tasks:
        for artifact in getTaskArtifacts(taskId):
            name = artifact['name']
            if not name.endswith(useful_artifacts):
                continue
            if name in artifact_ids:
                # Bug fix: the original referenced an undefined name
                # ('artifacts') here, raising NameError instead of logging
                # which task already supplied this artifact.
                log.error('Duplicated artifact %s processing tasks %s & %s',
                          name, taskId, artifact_ids[name])
                sys.exit(1)
            else:
                artifact_ids[name] = taskId
    log.debug('Found artifacts: %s' % json.dumps(artifact_ids, indent=4, sort_keys=True))
    return artifact_ids
def getArtifactNames(platform, locale, repack_stub_installer):
    """Return the upstream artifact paths for *platform*/*locale*."""
    filenames = getAllFilenames(platform, repack_stub_installer)
    # en-US artifacts live at the top level; localized ones under the locale dir.
    if locale == 'en-US':
        return [UPSTREAM_ENUS_PATH.format(filename=f) for f in filenames]
    return [UPSTREAM_L10N_PATH.format(locale=locale, filename=f) for f in filenames]
def retrieveFile(url, file_path):
    """Download *url* to *file_path*, retrying transient failures.

    Returns True on success, False on failure; a partially-written file is
    removed before returning False.
    """
    success = True
    # Quote unsafe characters in the URL while keeping ':' and '/' intact.
    url = urllib.parse.quote(url, safe=':/')
    log.info('Downloading from %s' % url)
    log.info('To: %s', file_path)
    log.info('CWD: %s' % os.getcwd())
    try:
        # use URLopener, which handles errors properly
        retry(StrictFancyURLopener().retrieve,
              kwargs=dict(url=url, filename=file_path))
    except IOError:
        log.error("Error downloading %s" % url, exc_info=True)
        success = False
        # Don't leave a half-downloaded file behind.
        try:
            os.remove(file_path)
        except OSError:
            log.info("Cannot remove %s" % file_path, exc_info=True)
    return success
def getBouncerProduct(partner, partner_distro):
    """Build the bouncer product alias for a partner/distro pair.

    Requires RELEASE_TYPE in the environment; exits with an error otherwise.
    """
    try:
        release_type = os.environ['RELEASE_TYPE']
    except KeyError:
        log.fatal('RELEASE_TYPE must be set in the environment')
        sys.exit(1)
    # For X.0 releases we get 'release-rc' but the alias should use 'release'
    if release_type == 'release-rc':
        release_type = 'release'
    return BOUNCER_PRODUCT_TEMPLATE.format(
        release_type=release_type,
        partner=partner,
        partner_distro=partner_distro,
    )
class RepackBase(object):
    """Drives the repack of one build for one partner/locale.

    Subclasses provide the platform-specific unpack/copy/repack steps;
    doRepack() runs the whole sequence inside self.working_dir.
    """

    def __init__(self, build, partner_dir, build_dir, final_dir,
                 ftp_platform, repack_info, file_mode=0o644,
                 quiet=False, source_locale=None, locale=None):
        # Remember where we started so doRepack can chdir back.
        self.base_dir = os.getcwd()
        self.build = build
        self.full_build_path = path.join(build_dir, build)
        if not os.path.isabs(self.full_build_path):
            self.full_build_path = path.join(self.base_dir,
                                             self.full_build_path)
        self.full_partner_path = path.join(self.base_dir, partner_dir)
        self.working_dir = path.join(final_dir, "working")
        self.final_dir = final_dir
        self.final_build = os.path.join(final_dir, os.path.basename(build))
        self.ftp_platform = ftp_platform
        self.repack_info = repack_info
        self.file_mode = file_mode
        self.quiet = quiet
        self.source_locale = source_locale
        self.locale = locale
        mkdir(self.working_dir)

    def announceStart(self):
        log.info('Repacking %s %s build %s' % (self.ftp_platform, self.locale, self.build))

    def announceSuccess(self):
        log.info('Done repacking %s %s build %s' % (self.ftp_platform, self.locale, self.build))

    def unpackBuild(self):
        # Copy the pristine build into the working dir (cwd at this point).
        copy(self.full_build_path, '.')

    def createOverrideIni(self, partner_path):
        ''' If this is a partner specific locale (like en-HK), set the
        distribution.ini to use that locale, not the default locale.
        '''
        if self.locale != self.source_locale:
            filename = path.join(partner_path, 'distribution', 'distribution.ini')
            # Append when the file already exists, otherwise create it.
            f = open(filename, path.isfile(filename) and 'a' or 'w')
            f.write('[Locale]\n')
            f.write('locale=' + self.locale + '\n')
            f.close()

        ''' Some partners need to override the migration wizard. This is done
        by adding an override.ini file to the base install dir.
        '''
        # modify distribution.ini if 44 or later and we have migrationWizardDisabled
        if int(options.version.split('.')[0]) >= 44:
            filename = path.join(partner_path, 'distribution', 'distribution.ini')
            f = open(filename, 'r')
            ini = f.read()
            f.close()
            # Already configured — nothing more to do.
            if ini.find('EnableProfileMigrator') >= 0:
                return
        else:
            # Pre-44 builds use browser/override.ini instead.
            browserDir = path.join(partner_path, "browser")
            if not path.exists(browserDir):
                mkdir(browserDir)
            filename = path.join(browserDir, 'override.ini')
        if 'migrationWizardDisabled' in self.repack_info:
            log.info("Adding EnableProfileMigrator to %r" % (filename,))
            f = open(filename, path.isfile(filename) and 'a' or 'w')
            f.write('[XRE]\n')
            f.write('EnableProfileMigrator=0\n')
            f.close()

    def copyFiles(self, platform_dir):
        log.info('Copying files into %s' % platform_dir)
        # Check whether we've already copied files over for this partner.
        if not path.exists(platform_dir):
            mkdir(platform_dir)
            for i in ['distribution', 'extensions', 'searchplugins']:
                full_path = path.join(self.full_partner_path, i)
                if path.exists(full_path):
                    copytree(full_path, path.join(platform_dir, i))
            self.createOverrideIni(platform_dir)

    def repackBuild(self):
        # No-op in the base class; platform subclasses re-add the files.
        pass

    def stage(self):
        move(self.build, self.final_dir)
        os.chmod(self.final_build, self.file_mode)

    def cleanup(self):
        os.remove(self.final_build)

    def doRepack(self):
        """Run the full repack sequence in the working directory."""
        self.announceStart()
        os.chdir(self.working_dir)
        self.unpackBuild()
        # NOTE(review): called with no argument, so this relies on subclasses
        # overriding copyFiles() with a zero-arg signature; calling doRepack
        # on RepackBase itself would raise TypeError.
        self.copyFiles()
        self.repackBuild()
        self.stage()
        os.chdir(self.base_dir)
        rmdirRecursive(self.working_dir)
        self.announceSuccess()
class RepackLinux(RepackBase):
    """Linux repack: bunzip2 the tarball, append partner files, re-bzip2."""

    def __init__(self, build, partner_dir, build_dir, final_dir,
                 ftp_platform, repack_info, **kwargs):
        super(RepackLinux, self).__init__(build, partner_dir, build_dir,
                                          final_dir,
                                          ftp_platform, repack_info,
                                          **kwargs)
        # target.tar.bz2 -> target.tar once uncompressed
        self.uncompressed_build = build.replace('.bz2', '')

    def unpackBuild(self):
        super(RepackLinux, self).unpackBuild()
        bunzip2_cmd = "bunzip2 %s" % self.build
        shellCommand(bunzip2_cmd)
        if not path.exists(self.uncompressed_build):
            log.error("Error: Unable to uncompress build %s" % self.build)
            sys.exit(1)

    def copyFiles(self):
        super(RepackLinux, self).copyFiles(LINUX_DEST_DIR)

    def repackBuild(self):
        # 'r' appends to the existing tar; 'v' only when not --quiet.
        if options.quiet:
            tar_flags = "rf"
        else:
            tar_flags = "rvf"
        tar_cmd = "tar %s %s %s" % (tar_flags, self.uncompressed_build, LINUX_DEST_DIR)
        shellCommand(tar_cmd)
        bzip2_command = "bzip2 %s" % self.uncompressed_build
        shellCommand(bzip2_command)
class RepackMac(RepackBase):
    """Mac repack: gunzip the tarball, add partner files inside the .app
    bundle's Contents/Resources, then re-gzip."""

    def __init__(self, build, partner_dir, build_dir, final_dir,
                 ftp_platform, repack_info, **kwargs):
        super(RepackMac, self).__init__(build, partner_dir, build_dir,
                                        final_dir,
                                        ftp_platform, repack_info,
                                        **kwargs)
        # target.tar.gz -> target.tar once uncompressed
        self.uncompressed_build = build.replace('.gz', '')

    def unpackBuild(self):
        super(RepackMac, self).unpackBuild()
        gunzip_cmd = "gunzip %s" % self.build
        shellCommand(gunzip_cmd)
        if not path.exists(self.uncompressed_build):
            log.error("Error: Unable to uncompress build %s" % self.build)
            sys.exit(1)
        self.appName = self.getAppName()

    def getAppName(self):
        # Cope with Firefox.app vs Firefox Nightly.app by returning the first entry that
        # ends with .app
        # NOTE(review): the tarfile handle is never closed here.
        t = tarfile.open(self.build.rsplit('.', 1)[0])
        for name in t.getnames():
            if name.endswith('.app'):
                return name

    def copyFiles(self):
        super(RepackMac, self).copyFiles(MAC_DEST_DIR.format(self.appName))

    def repackBuild(self):
        if options.quiet:
            tar_flags = "rf"
        else:
            tar_flags = "rvf"
        # the final arg is quoted because it may contain a space, eg Firefox Nightly.app/....
        tar_cmd = "tar %s %s \'%s\'" % (
            tar_flags, self.uncompressed_build, MAC_DEST_DIR.format(self.appName))
        shellCommand(tar_cmd)
        gzip_command = "gzip %s" % self.uncompressed_build
        shellCommand(gzip_command)
class RepackWin(RepackBase):
    """Windows repack: add partner files into target.zip and stage setup.exe
    (plus stub-installer files on win32) alongside it."""

    def __init__(self, build, partner_dir, build_dir, final_dir,
                 ftp_platform, repack_info, **kwargs):
        super(RepackWin, self).__init__(build, partner_dir, build_dir,
                                        final_dir,
                                        ftp_platform, repack_info,
                                        **kwargs)

    def copyFiles(self):
        super(RepackWin, self).copyFiles(WINDOWS_DEST_DIR)

    def repackBuild(self):
        if options.quiet:
            zip_flags = "-rq"
        else:
            zip_flags = "-r"
        zip_cmd = "zip %s %s %s" % (zip_flags,
                                    self.build,
                                    WINDOWS_DEST_DIR)
        shellCommand(zip_cmd)

        # we generate the stub installer during the win32 build, so repack it on win32 too
        if isWin32(options.platform) and self.repack_info.get('repack_stub_installer'):
            log.info("Creating target-stub.zip to hold custom urls")
            dest = self.final_build.replace('target.zip', 'target-stub.zip')
            z = zipfile.ZipFile(dest, 'w')
            # load the partner.ini template and interpolate %LOCALE% to the actual locale
            with open(path.join(self.full_partner_path, 'stub', 'partner.ini')) as f:
                partner_ini_template = f.readlines()
            partner_ini = ""
            for l in partner_ini_template:
                l = l.replace('%LOCALE%', self.locale)
                l = l.replace('%BOUNCER_PRODUCT%', self.repack_info['bouncer_product'])
                partner_ini += l
            z.writestr('partner.ini', partner_ini)
            # we need an empty firefox directory to use the repackage code
            d = zipfile.ZipInfo('firefox/')
            # https://stackoverflow.com/a/6297838, zip's representation of drwxr-xr-x permissions
            # is 040755 << 16L, bitwise OR with 0x10 for the MS-DOS directory flag
            d.external_attr = 1106051088
            z.writestr(d, "")
            z.close()

    def stage(self):
        super(RepackWin, self).stage()
        setup_dest = self.final_build.replace('target.zip', 'setup.exe')
        if 'replacement_setup_exe' in self.repack_info:
            log.info("Overriding setup.exe with custom copy")
            retrieveFile(self.repack_info['replacement_setup_exe'], setup_dest)
        else:
            # otherwise copy forward the vanilla copy
            log.info("Copying vanilla setup.exe forward for installer creation")
            setup = self.full_build_path.replace('target.zip', 'setup.exe')
            copy(setup, setup_dest)
        os.chmod(setup_dest, self.file_mode)

        # we generate the stub installer in the win32 build, so repack it on win32 too
        if isWin32(options.platform) and self.repack_info.get('repack_stub_installer'):
            log.info("Copying vanilla setup-stub.exe forward for stub installer creation")
            setup_dest = self.final_build.replace('target.zip', 'setup-stub.exe')
            setup_source = self.full_build_path.replace('target.zip', 'setup-stub.exe')
            copy(setup_source, setup_dest)
            os.chmod(setup_dest, self.file_mode)
if __name__ == '__main__':
    error = False
    partner_builds = {}
    # Map each ftp-style platform name to the class that repacks it.
    repack_build = {
        'linux-i686': RepackLinux,
        'linux-x86_64': RepackLinux,
        'mac': RepackMac,
        'win32': RepackWin,
        'win64': RepackWin,
        'win64-aarch64': RepackWin,
    }

    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option(
        "-d", "--partners-dir", dest="partners_dir", default=PARTNERS_DIR,
        help="Specify the directory where the partner config files are found"
    )
    parser.add_option(
        "-p", "--partner", dest="partner",
        help="Repack for a single partner, specified by name"
    )
    parser.add_option(
        "-v", "--version", dest="version",
        help="Set the version number for repacking"
    )
    parser.add_option(
        "-n", "--build-number", dest="build_number", default=1,
        help="Set the build number for repacking"
    )
    parser.add_option(
        "--platform", dest="platform",
        help="Set the platform to repack"
    )
    parser.add_option(
        "--include-oem", action="store_true", dest="include_oem", default=False,
        help="Process partners marked as OEM (these are usually one-offs)"
    )
    parser.add_option(
        "-q", "--quiet", action="store_true", dest="quiet",
        default=False,
        help="Suppress standard output from the packaging tools"
    )
    parser.add_option(
        "--taskid", action="append", dest="upstream_tasks",
        help="Specify taskIds for upstream artifacts, using 'internal sign' tasks. Multiples "
             "expected, e.g. --taskid foo --taskid bar. Alternatively, use a space-separated list "
             "stored in UPSTREAM_TASKIDS in the environment."
    )
    parser.add_option(
        "-l", "--limit-locale", action="append", dest="limit_locales", default=[],
    )
    (options, args) = parser.parse_args()

    if not options.quiet:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.WARNING)

    # ---- Validate the command line before doing any work. ----
    options.partners_dir = options.partners_dir.rstrip("/")
    if not path.isdir(options.partners_dir):
        log.error("Error: partners dir %s is not a directory." %
                  options.partners_dir)
        error = True

    if not options.version:
        log.error("Error: you must specify a version number.")
        error = True
    if not options.platform:
        log.error('No platform specified.')
        error = True
    if not isValidPlatform(options.platform):
        log.error('Invalid platform %s.' % options.platform)
        error = True

    upstream_tasks = options.upstream_tasks or os.getenv('UPSTREAM_TASKIDS')
    if not upstream_tasks:
        log.error('upstream tasks should be defined using --taskid args or '
                  'UPSTREAM_TASKIDS in env.')
        error = True

    # Every external tool we shell out to must be present.
    for tool in ('tar', 'bunzip2', 'bzip2', 'gunzip', 'gzip', 'zip'):
        if not which(tool):
            log.error("Error: couldn't find the %s executable in PATH." %
                      tool)
            error = True

    if error:
        sys.exit(1)

    base_workdir = os.getcwd()

    # Look up the artifacts available on our upstreams, but only if we need to
    artifact_ids = {}

    # Local directories for builds
    script_directory = os.getcwd()
    original_builds_dir = path.join(script_directory, "original_builds",
                                    options.version,
                                    "build%s" % options.build_number)
    repack_version = "%s-%s" % (options.version, options.build_number,)
    if os.getenv('MOZ_AUTOMATION'):
        # running in production
        repacked_builds_dir = '/builds/worker/artifacts'
    else:
        # local development
        repacked_builds_dir = path.join(script_directory, "artifacts")
    mkdir(original_builds_dir)
    mkdir(repacked_builds_dir)
    printSeparator()

    # For each partner in the partners dir:
    #   Read/check the config file
    #   Download required builds (if not already on disk)
    #   Perform repacks

    # walk the partner dirs, find valid repack.cfg configs, and load them
    partner_dirs = []
    need_stub_installers = False
    for root, _, files in os.walk(options.partners_dir):
        root = root.lstrip('/')
        partner = root[len(options.partners_dir) + 1:].split("/")[0]
        partner_distro = os.path.split(root)[-1]
        if options.partner:
            # --partner matches either the partner name or a distro-name prefix
            if options.partner != partner and \
               options.partner != partner_distro[:len(options.partner)]:
                continue

        for f in files:
            if f == 'repack.cfg':
                log.debug("Found partner config: {} ['{}'] {}".format(root, "', '".join(_), f))
                # partner_dirs[os.path.split(root)[-1]] = (root, os.path.join(root, f))
                repack_cfg = os.path.join(root, f)
                repack_info = parseRepackConfig(repack_cfg, options.platform)
                if not repack_info:
                    log.debug("no repack_info for platform %s in %s, skipping" %
                              (options.platform, repack_cfg))
                    continue
                if repack_info.get('repack_stub_installer'):
                    need_stub_installers = True
                repack_info['bouncer_product'] = getBouncerProduct(partner, partner_distro)
                partner_dirs.append((partner, partner_distro, root, repack_info))

    log.info('Retrieving artifact lists from upstream tasks')
    artifact_ids = getUpstreamArtifacts(upstream_tasks, need_stub_installers)
    if not artifact_ids:
        log.fatal("No upstream artifacts were found")
        sys.exit(1)

    for partner, partner_distro, full_partner_dir, repack_info in partner_dirs:
        log.info("Starting repack process for partner: %s/%s" % (partner, partner_distro))
        if 'oem' in repack_info and options.include_oem is False:
            log.info("Skipping partner: %s - marked as OEM and --include-oem was not set" %
                     partner)
            continue

        repack_stub_installer = repack_info.get('repack_stub_installer')
        # where everything ends up
        partner_repack_dir = path.join(repacked_builds_dir, DEFAULT_OUTPUT_DIR)

        # Figure out which base builds we need to repack.
        for locale in repack_info['locales']:
            if options.limit_locales and locale not in options.limit_locales:
                log.info("Skipping %s because it is not in limit_locales list", locale)
                continue
            source_locale = locale
            # Partner has specified a different locale to
            # use as the base for their custom locale.
            if 'locale.' + locale in repack_info:
                source_locale = repack_info['locale.' + locale]
            for platform in repack_info['platforms']:
                # ja-JP-mac only exists for Mac, so skip non-existent
                # platform/locale combos.
                if (source_locale == 'ja' and isMac(platform)) or \
                   (source_locale == 'ja-JP-mac' and not isMac(platform)):
                    continue
                ftp_platform = getFtpPlatform(platform)

                local_filepath = path.join(original_builds_dir, ftp_platform,
                                           locale)
                mkdir(local_filepath)
                final_dir = partner_repack_dir % dict(
                    partner=partner,
                    partner_distro=partner_distro,
                    locale=locale,
                )
                if path.exists(final_dir):
                    rmdirRecursive(final_dir)
                mkdir(final_dir)

                # for the main repacking artifact
                filename = getFilename(ftp_platform)
                local_filename = path.join(local_filepath, filename)

                # Check to see if this build is already on disk, i.e.
                # has already been downloaded.
                artifacts = getArtifactNames(platform, locale, repack_stub_installer)
                for artifact in artifacts:
                    local_artifact = os.path.join(local_filepath, os.path.basename(artifact))
                    if os.path.exists(local_artifact):
                        log.info("Found %s on disk, not downloading" % local_artifact)
                        continue
                    if artifact not in artifact_ids:
                        log.fatal("Can't determine what taskID to retrieve %s from", artifact)
                        sys.exit(1)
                    original_build_url = '%s/%s' % (
                        TASKCLUSTER_ARTIFACTS.format(taskId=artifact_ids[artifact]),
                        artifact
                    )
                    retrieveFile(original_build_url, local_artifact)

                # Make sure we have the local file now
                if not path.exists(local_filename):
                    log.info("Error: Unable to retrieve %s\n" % filename)
                    sys.exit(1)

                repackObj = repack_build[ftp_platform](
                    filename, full_partner_dir, local_filepath,
                    final_dir, ftp_platform,
                    repack_info,
                    locale=locale,
                    source_locale=source_locale,
                )
                repackObj.doRepack()

View file

@ -217,23 +217,15 @@ release-promotion:
product: firefox
target-tasks-method: promote_desktop
partial-updates: true
promote_firefox_partner_repack:
promote_firefox_partners:
product: firefox
rebuild-kinds:
- release-partner-repack
- release-partner-beetmover
- release-partner-repack-chunking-dummy
- release-partner-repack-signing
- release-partner-repack-notarization-part-1
- release-partner-repack-notarization-poller
- release-partner-repack-repackage
- release-partner-repack-repackage-signing
- release-partner-repack-beetmover
target-tasks-method: promote_desktop
promote_firefox_partner_attribution:
product: firefox
rebuild-kinds:
- release-partner-attribution
- release-partner-attribution-beetmover
- release-partner-repackage-signing
- release-partner-repackage
- release-partner-signing
target-tasks-method: promote_desktop
promote_firefox_rc:
product: firefox
@ -413,16 +405,6 @@ partner-urls:
by-release-level:
production: 'git@github.com:mozilla-partners/esr-repack-manifests.git'
staging: 'git@github.com:moz-releng-automation-stage/esr-repack-manifests.git'
release-partner-attribution:
by-release-product:
default: null
firefox:
by-release-type:
default: null
beta|release.*:
by-release-level:
production: 'git@github.com:mozilla-partners/repack-manifests.git'
staging: 'git@github.com:moz-releng-automation-stage/repack-manifests.git'
release-eme-free-repack:
by-release-product:
default: null

View file

@ -1,23 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
---
loader: taskgraph.loader.single_dep:loader
transforms:
- taskgraph.transforms.name_sanity:transforms
- taskgraph.transforms.partner_attribution_beetmover:transforms
- taskgraph.transforms.task:transforms
kind-dependencies:
- release-partner-attribution
job-template:
shipping-product: firefox
shipping-phase: promote
partner-bucket-scope:
by-release-level:
production: beetmover:bucket:partner
staging: beetmover:bucket:dep-partner
partner-public-path: "partner-repacks/{partner}/{subpartner}/v{release_partner_build_number}/{platform}/{locale}"
partner-private-path: "{partner}/{version}-{build_number}/{subpartner}/{platform}/{locale}"

Просмотреть файл

@ -1,40 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
---
loader: taskgraph.loader.transform:loader
transforms:
- taskgraph.transforms.release_deps:transforms
- taskgraph.transforms.partner_attribution:transforms
- taskgraph.transforms.job:transforms
- taskgraph.transforms.task:transforms
kind-dependencies:
- repackage-signing
- repackage-signing-l10n
# move this into the single job ??
job-defaults:
name: partner-attribution
description: Release Promotion partner attribution
run-on-projects: [] # to make sure this never runs as part of CI
shipping-product: firefox
shipping-phase: promote
worker-type: b-linux
worker:
docker-image:
in-tree: "partner-repack"
chain-of-trust: true
max-run-time: 1800
run:
using: mach
mach: python python/mozrelease/mozrelease/partner_attribution.py
jobs:
partner-attribution:
attributes:
build_platform: linux-shippable
build_type: opt
artifact_prefix: releng/partner
shippable: true

Просмотреть файл

@ -451,10 +451,6 @@ release-partner-repack
----------------------
Generates customized versions of releases for partners.
release-partner-attribution
---------------------------
Generates attributed versions of releases for partners.
release-partner-repack-chunking-dummy
-------------------------------------
Chunks the partner repacks by locale.
@ -487,10 +483,6 @@ release-partner-repack-beetmover
--------------------------------
Moves the partner repacks to S3 buckets.
release-partner-attribution-beetmover
-------------------------------------
Moves the partner attributions to S3 buckets.
release-partner-repack-bouncer-sub
----------------------------------
Sets up bouncer products for partners.

Просмотреть файл

@ -173,24 +173,21 @@ Release Promotion
``release_eta``
The time and date when a release is scheduled to live. This value is passed to Balrog.
``release_enable_partner_repack``
``release_enable_partners``
Boolean which controls repacking vanilla Firefox builds for partners.
``release_enable_partner_attribution``
Boolean which controls adding attribution to vanilla Firefox builds for partners.
``release_enable_emefree``
Boolean which controls repacking vanilla Firefox builds into EME-free builds.
``release_partners``
List of partners to repack or attribute if a subset of the whole config. A null value defaults to all.
List of partners to repack. A null value defaults to all.
``release_partner_config``
Configuration for partner repacks & attribution, as well as EME-free repacks.
Configuration for partner repacks.
``release_partner_build_number``
The build number for partner repacks. We sometimes have multiple partner build numbers per release build number; this parameter lets us bump them independently. Defaults to 1.
``release_enable_emefree``
Boolean which controls repacking vanilla Firefox builds into EME-free builds.
``release_product``
The product that is being released.

Просмотреть файл

@ -1,121 +0,0 @@
Partner attribution
===================
.. _partner attribution:
In contrast to :ref:`partner repacks`, attributed builds only differ from the normal Firefox
builds by adding a string to the dummy Windows signing certificate. We support doing this for
full installers but not stub. The parameters of the string are carried into the telemetry system,
tagging an install into a cohort of users. This is a lighter-weight process because we don't
repackage or re-sign the builds.
Parameters & Scheduling
-----------------------
Partner attribution uses a number of parameters to control how they work:
* ``release_enable_partner_attribution``
* ``release_partner_config``
* ``release_partner_build_number``
* ``release_partners``
The enable parameter is a boolean, a simple on/off switch. We set it in shipit's
`is_partner_enabled() <https://github.com/mozilla-releng/shipit/blob/main/api/src/shipit_api/admin/release.py#L93>`_ when starting a
release. It's true for Firefox betas >= b8 and releases, but otherwise false, the same as
partner repacks.
``release_partner_config`` is a dictionary of configuration data which drives the task generation
logic. It's usually looked up during the release promotion action task, using the Github
GraphQL API in the `get_partner_config_by_url()
<python/taskgraph.util.html#taskgraph.util.partners.get_partner_config_by_url>`_ function, with the
url defined in `taskcluster/ci/config.yml <https://searchfox.org/mozilla-central/search?q=partner-urls&path=taskcluster%2Fci%2Fconfig.yml&case=true&regexp=false&redirect=true>`_.
``release_partner_build_number`` is an integer used to create unique upload paths in the firefox
candidates directory, while ``release_partners`` is a list of partners that should be
attributed (i.e. a subset of the whole config). Both are intended for use when respinning a partner after
the regular Firefox has shipped. More information on that can be found in the
`RelEng Docs <https://moz-releng-docs.readthedocs.io/en/latest/procedures/misc-operations/off-cycle-partner-repacks-and-funnelcake.html>`_.
``release_partners`` is shared with partner repacks but we don't support doing both at the same time.
Configuration
-------------
This is done using an ``attribution_config.yml`` file which lives next to the ``default.xml`` used
for partner repacks. There are no repos for each partner, the whole configuration exists in the one
file because the amount of information to be tracked is much smaller.
An example config looks like this:
.. code-block:: yaml
defaults:
medium: distribution
source: mozilla
configs:
- campaign: sample
content: sample-001
locales:
- en-US
- de
- ru
platforms:
- win64-shippable
- win32-shippable
upload_to_candidates: true
The four main parameters are ``medium, source, campaign, content``, of which the first two are
common to all attributions. The combination of ``campaign`` and ``content`` should be unique
to avoid confusion in telemetry data. They correspond to the repo name and sub-directory in partner repacks,
so avoid any overlap between values in partner repacks and attribution.
The optional parameters ``variation`` and ``experiment`` may also be specified.
Non-empty lists of locales and platforms are required parameters (NB the `-shippable` suffix should be used on
the platforms).
``upload_to_candidates`` is an optional setting which controls whether the Firefox installers
are uploaded into the `candidates directory <https://archive.mozilla.org/pub/firefox/candidates/>`_.
If not set the files are uploaded to the private S3 bucket for partner builds.
Repacking process
-----------------
Attribution only has two kinds:
* attribution - add attribution code to the regular builds
* beetmover - move the files to a partner-specific destination
Attribution
^^^^^^^^^^^
* kinds: ``release-partner-attribution``
* platforms: Any Windows, runs on linux
* upstreams: ``repackage-signing`` ``repackage-signing-l10n``
There is one task, calling out to `python/mozrelease/mozrelease/partner_attribution.py
<https://hg.mozilla.org/releases/mozilla-release/file/default/python/mozrelease/mozrelease/partner_attribution.py>`_.
It takes as input the repackage-signing and repackage-signing-l10n artifacts, which are all
target.exe full installers. The ``ATTRIBUTION_CONFIG`` environment variable controls the script.
It produces more target.exe installers.
The size of ``ATTRIBUTION_CONFIG`` variable may grow large if the number of configurations
increases, and it may be necessary to pass the content of ``attribution_config.yml`` to the
script instead, or via an artifact of the promotion task.
Beetmover
^^^^^^^^^
* kinds: ``release-partner-attribution-beetmover``
* platforms: N/A, scriptworker
* upstreams: ``release-partner-attribution``
Moves and renames the artifacts to their public location in the `candidates directory
<https://archive.mozilla.org/pub/firefox/candidates/>`_, or a private S3 bucket. There is one task
for public artifacts and another for private.
Each task will have the ``project:releng:beetmover:action:push-to-partner`` scope, with public uploads having
``project:releng:beetmover:bucket:release`` and private uploads using
``project:releng:beetmover:bucket:partner``. There's a partner-specific code path in
`beetmoverscript <https://github.com/mozilla-releng/scriptworker-scripts/tree/master/beetmoverscript>`_.

Просмотреть файл

@ -1,6 +1,5 @@
Partner repacks
===============
.. _partner repacks:
We create slightly-modified Firefox releases for some extra audiences
@ -16,8 +15,6 @@ We produce partner repacks for some beta builds, and for release builds, as part
automation. We don't produce any files to update these builds as they are handled automatically
(see updates_).
We also produce :ref:`partner attribution` builds, which are Firefox Windows installers with a cohort identifier
added.
Parameters & Scheduling
-----------------------
@ -25,7 +22,7 @@ Parameters & Scheduling
Partner repacks have a number of parameters which control how they work:
* ``release_enable_emefree``
* ``release_enable_partner_repack``
* ``release_enable_partners``
* ``release_partner_config``
* ``release_partner_build_number``
* ``release_partners``
@ -33,7 +30,8 @@ Partner repacks have a number of parameters which control how they work:
We split the repacks into two 'paths', EME-free and everything else, to retain some
flexibility over enabling/disabling them separately. This costs us some duplication of the kinds
in the repacking stack. The two enable parameters are booleans to turn these two paths
on/off. We set them in shipit's `is_partner_enabled() <https://github.com/mozilla-releng/shipit/blob/main/api/src/shipit_api/admin/release.py#L93>`_ when starting a
on/off. We set them in release-runner3's `is_partner_enabled() <https://dxr.mozilla
.org/build-central/search?q=function%3Ais_partner_enabled&redirect=true>`_ when starting a
release. They're both true for Firefox betas >= b8 and releases, but otherwise disabled.
``release_partner_config`` is a dictionary of configuration data which drives the task generation
@ -46,8 +44,9 @@ url defined in `taskcluster/ci/config.yml <https://dxr.mozilla
``release_partner_build_number`` is an integer used to create unique upload paths in the firefox
candidates directory, while ``release_partners`` is a list of partners that should be
repacked (i.e. a subset of the whole config). Both are intended for use when respinning a few partners after
the regular Firefox has shipped. More information on that can be found in the
`RelEng Docs <https://moz-releng-docs.readthedocs.io/en/latest/procedures/misc-operations/off-cycle-partner-repacks-and-funnelcake.html>`_.
the regular Firefox has shipped. More information on that can be found in the `release-warrior docs
<https://github.com/mozilla-releng/releasewarrior-2
.0/blob/master/docs/misc-operations/off-cycle-partner-repacks-and-funnelcake.md>`_.
Most of the machine time for generating partner repacks takes place in the `promote` phase of the
automation, or `promote_rc` in the case of X.0 release candidates. The EME-free builds are copied into the
@ -151,10 +150,8 @@ Partner repack
* upstreams: ``build-signing`` ``l10n-signing``
There is one task per platform in this step, calling out to `scripts/desktop_partner_repacks.py
<https://hg.mozilla.org/mozilla-central/file/default/testing/mozharness/scripts
<https://hg.mozilla.org/releases/mozilla-release/file/default/testing/mozharness/scripts
/desktop_partner_repacks.py>`_ in mozharness to prepare an environment and then perform the repacks.
The actual repacking is done by `python/mozrelease/mozrelease/partner_repack.py
<https://hg.mozilla.org/mozilla-central/file/default/python/mozrelease/mozrelease/partner_repack.py>`_.
It takes as input the build-signing and l10n-signing artifacts, which are all zip/tar.gz/tar.bz2
archives, simplifying the repack process by avoiding dmg and exe. Windows produces ``target.zip``
@ -164,17 +161,15 @@ archives, simplifying the repack process by avoiding dmg and exe. Windows produc
Signing
^^^^^^^
* kinds: ``release-partner-repack-notarization-part-1`` ``release-partner-repack-notarization-poller`` ``release-partner-repack-signing``
* kinds: ``release-partner-repack-signing`` ``release-eme-free-repack-signing``
* platforms: Mac
* upstreams: ``release-partner-repack`` ``release-eme-free-repack``
We chunk the single partner repack task out to a signing task with 5 artifacts each. For
example, EME-free will become 19 tasks. We collect the target.tar.gz from the
We chunk the single partner repack task out to a signing task per artifact at this point. For
example, EME-free will become ~95 tasks, one for each locale. We collect the target.tar.gz from the
upstream, and return a signed target.tar.gz. We use a ``target.dmg`` artifact for
nightlies/regular releases, but this is converted to ``target.tar.gz`` by the signing
scriptworker before sending it to the signing server, so partners are equivalent. The ``part-1`` task
uploads the binaries to apple, while the ``poller`` task waits for their approval, then
``release-partner-repack-signing`` staples on the notarization ticket.
scriptworker before sending it to the signing server, so partners are equivalent.
Repackage
^^^^^^^^^
@ -226,7 +221,8 @@ Moves and renames the artifacts to their public location in the `candidates dire
have the ``project:releng:beetmover:action:push-to-partner`` scope, with public uploads having
``project:releng:beetmover:bucket:release`` and private uploads using
``project:releng:beetmover:bucket:partner``. The ``upload_to_candidates`` key in the partner config
controls the second scope. There's a separate partner code path in `beetmoverscript <https://github.com/mozilla-releng/scriptworker-scripts/tree/master/beetmoverscript>`_.
controls the second scope. There's a separate partner code path in `beetmoverscript <https://github
.com/mozilla-releng/beetmoverscript>`_.
Beetmover checksums
^^^^^^^^^^^^^^^^^^^

Просмотреть файл

@ -50,4 +50,3 @@ In-depth relpro guide
partials
signing
partner-repacks
partner-attribution

Просмотреть файл

@ -241,27 +241,6 @@ def download_to_path(url, path, sha256=None, size=None):
raise Exception("Download failed, no more retries!")
def download_to_memory(url, sha256=None, size=None):
"""Download a URL to memory, possibly with verification."""
data = b""
for _ in retrier(attempts=5, sleeptime=60):
try:
log('Downloading %s' % (url))
for chunk in stream_download(url, sha256=sha256, size=size):
data += chunk
return data
except IntegrityError:
raise
except Exception as e:
log("Download failed: {}".format(e))
continue
raise Exception("Download failed, no more retries!")
def gpg_verify_path(path: pathlib.Path, public_key_data: bytes,
signature_data: bytes):
"""Verify that a filesystem path verifies using GPG.
@ -625,14 +604,6 @@ def api(root_url, service, version, path):
root_url=root_url, service=service, version=version, path=path)
def get_hash(fetch, root_url):
path = 'task/{task}/artifacts/{artifact}'.format(
task=fetch['task'], artifact='public/chain-of-trust.json')
url = api(root_url, 'queue', 'v1', path)
cot = json.loads(download_to_memory(url))
return cot['artifacts'][fetch['artifact']]['sha256']
def command_task_artifacts(args):
start = time.monotonic()
fetches = json.loads(os.environ['MOZ_FETCHES'])
@ -644,9 +615,6 @@ def command_task_artifacts(args):
extdir = pathlib.Path(os.path.normpath(str(extdir.joinpath(fetch['dest']))))
extdir.mkdir(parents=True, exist_ok=True)
root_url = os.environ['TASKCLUSTER_ROOT_URL']
sha256 = None
if fetch.get('verify-hash'):
sha256 = get_hash(fetch, root_url)
if fetch['artifact'].startswith('public/'):
path = 'task/{task}/artifacts/{artifact}'.format(
task=fetch['task'], artifact=fetch['artifact'])
@ -656,7 +624,7 @@ def command_task_artifacts(args):
proxy_url=os.environ['TASKCLUSTER_PROXY_URL'],
task=fetch['task'],
artifact=fetch['artifact'])
downloads.append((url, extdir, fetch['extract'], sha256))
downloads.append((url, extdir, fetch['extract']))
fetch_urls(downloads)
end = time.monotonic()

Просмотреть файл

@ -187,14 +187,10 @@ def get_flavors(graph_config, param):
'type': 'string',
'default': '',
},
'release_enable_partner_repack': {
'release_enable_partners': {
'type': 'boolean',
'description': 'Toggle for creating partner repacks',
},
'release_enable_partner_attribution': {
'type': 'boolean',
'description': 'Toggle for creating partner attribution',
},
'release_partner_build_number': {
'type': 'integer',
'default': 1,
@ -320,36 +316,24 @@ def release_promotion_action(parameters, graph_config, input, task_group_id, tas
# previous graphs.
parameters['optimize_target_tasks'] = True
if release_promotion_flavor == 'promote_firefox_partner_repack':
release_enable_partner_repack = True
release_enable_partner_attribution = False
release_enable_emefree = False
elif release_promotion_flavor == 'promote_firefox_partner_attribution':
release_enable_partner_repack = False
release_enable_partner_attribution = True
release_enable_emefree = False
else:
# for promotion or ship phases, we use the action input to turn the repacks/attribution off
release_enable_partner_repack = input.get('release_enable_partner_repack', True)
release_enable_partner_attribution = input.get('release_enable_partner_attribution', True)
release_enable_emefree = input.get('release_enable_emefree', True)
# Partner/EMEfree are enabled by default when get_partner_url_config() returns a non-null url
# The action input may override by sending False. It's an error to send True with no url found
partner_url_config = get_partner_url_config(parameters, graph_config)
if release_enable_partner_repack and not partner_url_config['release-partner-repack']:
release_enable_partners = partner_url_config['release-partner-repack'] is not None
release_enable_emefree = partner_url_config['release-eme-free-repack'] is not None
if input.get('release_enable_partners') is False:
release_enable_partners = False
elif input.get('release_enable_partners') is True and not release_enable_partners:
raise Exception("Can't enable partner repacks when no config url found")
if release_enable_partner_attribution and \
not partner_url_config['release-partner-attribution']:
raise Exception("Can't enable partner attribution when no config url found")
if release_enable_emefree and not partner_url_config['release-eme-free-repack']:
raise Exception("Can't enable EMEfree repacks when no config url found")
parameters['release_enable_partner_repack'] = release_enable_partner_repack
parameters['release_enable_partner_attribution'] = release_enable_partner_attribution
if input.get('release_enable_emefree') is False:
release_enable_emefree = False
elif input.get('release_enable_emefree') is True and not release_enable_emefree:
raise Exception("Can't enable EMEfree when no config url found")
parameters['release_enable_partners'] = release_enable_partners
parameters['release_enable_emefree'] = release_enable_emefree
partner_config = input.get('release_partner_config')
if not partner_config and any([release_enable_partner_repack,
release_enable_partner_attribution,
release_enable_emefree]):
if not partner_config and (release_enable_emefree or release_enable_partners):
github_token = get_token(parameters)
partner_config = get_partner_config(partner_url_config, github_token)
if partner_config:

Просмотреть файл

@ -92,9 +92,6 @@ graph_config_schema = Schema({
Required('release-partner-repack'):
optionally_keyed_by('release-product', 'release-level', 'release-type',
Any(text_type, None)),
Optional('release-partner-attribution'):
optionally_keyed_by('release-product', 'release-level', 'release-type',
Any(text_type, None)),
Required('release-eme-free-repack'):
optionally_keyed_by('release-product', 'release-level', 'release-type',
Any(text_type, None)),

Просмотреть файл

@ -336,8 +336,7 @@ def get_decision_parameters(graph_config, options):
parameters['phabricator_diff'] = None
parameters['release_type'] = ''
parameters['release_eta'] = ''
parameters['release_enable_partner_repack'] = False
parameters['release_enable_partner_attribution'] = False
parameters['release_enable_partners'] = False
parameters['release_partners'] = []
parameters['release_partner_config'] = {}
parameters['release_partner_build_number'] = 1

Просмотреть файл

@ -87,8 +87,7 @@ base_schema = Schema({
Required('pushdate'): int,
Required('pushlog_id'): text_type,
Required('release_enable_emefree'): bool,
Required('release_enable_partner_repack'): bool,
Required('release_enable_partner_attribution'): bool,
Required('release_enable_partners'): bool,
Required('release_eta'): Any(None, text_type),
Required('release_history'): {text_type: dict},
Required('release_partners'): Any(None, [text_type]),
@ -171,8 +170,7 @@ class Parameters(ReadOnlyDict):
'pushdate': seconds_from_epoch,
'pushlog_id': '0',
'release_enable_emefree': False,
'release_enable_partner_repack': False,
'release_enable_partner_attribution': False,
'release_enable_partners': False,
'release_eta': '',
'release_history': {},
'release_partners': [],

Просмотреть файл

@ -39,8 +39,7 @@ class TestParameters(unittest.TestCase):
'pushdate': 0,
'pushlog_id': 'pushlog_id',
'release_enable_emefree': False,
'release_enable_partner_repack': False,
'release_enable_partner_attribution': False,
'release_enable_partners': False,
'release_eta': None,
'release_history': {},
'release_partners': [],

Просмотреть файл

@ -13,6 +13,7 @@ from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.util.partners import (
check_if_partners_enabled,
get_ftp_platform,
get_partner_config_by_kind,
)
@ -49,6 +50,7 @@ beetmover_description_schema = schema.extend({
})
transforms = TransformSequence()
transforms.add(check_if_partners_enabled)
transforms.add_validate(beetmover_description_schema)

Просмотреть файл

@ -12,7 +12,8 @@ import copy
from mozbuild.chunkify import chunkify
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.partners import (
get_repack_ids_by_platform,
get_partner_config_by_kind,
locales_per_build_platform,
apply_partner_priority,
)
@ -20,8 +21,22 @@ transforms = TransformSequence()
transforms.add(apply_partner_priority)
def _get_repack_ids_by_platform(partner_configs, build_platform):
    """Return sorted "partner/subpartner/locale" repack ids for build_platform.

    Only (partner, subpartner) configs that list build_platform in their
    "platforms" contribute; their locale lists are filtered through
    locales_per_build_platform before being expanded into ids.
    """
    repack_ids = []
    for partner_name, sub_configs in partner_configs.items():
        for sub_name, sub_cfg in sub_configs.items():
            if build_platform not in sub_cfg.get("platforms", []):
                continue
            usable_locales = locales_per_build_platform(
                build_platform, sub_cfg.get('locales', [])
            )
            repack_ids.extend(
                "{}/{}/{}".format(partner_name, sub_name, loc)
                for loc in usable_locales
            )
    return sorted(repack_ids)
@transforms.add
def chunk_partners(config, jobs):
partner_configs = get_partner_config_by_kind(config, config.kind)
for job in jobs:
dep_job = job['primary-dependency']
build_platform = dep_job.attributes["build_platform"]
@ -37,7 +52,7 @@ def chunk_partners(config, jobs):
yield job
# first downstream of the repack task, no chunking or fanout has been done yet
elif not any([repack_id, repack_ids]):
platform_repack_ids = get_repack_ids_by_platform(config, build_platform)
platform_repack_ids = _get_repack_ids_by_platform(partner_configs, build_platform)
# we chunk mac signing
if config.kind in ("release-partner-repack-signing",
"release-eme-free-repack-signing",

Просмотреть файл

@ -88,7 +88,6 @@ job_description_schema = Schema({
Required('artifact'): text_type,
Optional('dest'): text_type,
Optional('extract'): bool,
Optional('verify-hash'): bool,
}],
},
@ -299,12 +298,10 @@ def use_fetches(config, jobs):
path = artifact
dest = None
extract = True
verify_hash = False
else:
path = artifact['artifact']
dest = artifact.get('dest')
extract = artifact.get('extract', True)
verify_hash = artifact.get('verify-hash', False)
fetch = {
'artifact': '{prefix}/{path}'.format(prefix=prefix, path=path)
@ -314,8 +311,6 @@ def use_fetches(config, jobs):
}
if dest is not None:
fetch['dest'] = dest
if verify_hash:
fetch['verify-hash'] = verify_hash
job_fetches.append(fetch)
if job.get('use-sccache') and not has_sccache:

Просмотреть файл

@ -1,133 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Transform the partner attribution task into an actual task description.
"""
from __future__ import absolute_import, print_function, unicode_literals
from collections import defaultdict
import json
import logging
import six
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.partners import (
apply_partner_priority,
check_if_partners_enabled,
get_partner_config_by_kind,
generate_attribution_code,
)
log = logging.getLogger(__name__)
transforms = TransformSequence()
transforms.add(check_if_partners_enabled)
transforms.add(apply_partner_priority)
@transforms.add
def add_command_arguments(config, tasks):
    """Attach partner-attribution work to the incoming tasks.

    Walks the partner attribution config and, for every enabled
    (partner, platform, locale) combination, resolves the upstream
    repackage-signing task and installer artifact, accumulating:

    * ``dependencies`` -- only the upstream tasks actually needed,
    * ``fetches`` -- which installer artifacts to download per upstream,
    * ``attributions`` -- input/output/attribution-code triples passed to
      the attribution script via the ``ATTRIBUTION_CONFIG`` env var,
    * ``release_artifacts`` -- the artifacts each task will produce.

    Yields each incoming task augmented with this data; yields nothing at
    all when no attribution work is configured (the kind then produces no
    tasks).
    """
    enabled_partners = config.params.get("release_partners")
    dependencies = {}
    fetches = defaultdict(set)
    attributions = []
    release_artifacts = []

    attribution_config = get_partner_config_by_kind(config, config.kind)

    for partner_config in attribution_config.get("configs", []):
        # we might only be interested in a subset of all partners, eg for a respin
        if enabled_partners and partner_config["campaign"] not in enabled_partners:
            continue
        attribution_code = generate_attribution_code(
            attribution_config["defaults"], partner_config
        )
        for platform in partner_config["platforms"]:
            # artifact paths use the bare platform name, without the
            # "-shippable" taskgraph suffix
            stage_platform = platform.replace("-shippable", "")
            for locale in partner_config["locales"]:
                # find the upstream, throw away locales we don't have, somehow. Skip ?
                if locale == "en-US":
                    # en-US installers come from the non-l10n repackage-signing kind
                    upstream_label = "repackage-signing-{platform}/opt".format(
                        platform=platform
                    )
                    upstream_artifact = "target.installer.exe"
                else:
                    upstream_label = "repackage-signing-l10n-{locale}-{platform}/opt".format(
                        locale=locale, platform=platform
                    )
                    upstream_artifact = "{locale}/target.installer.exe".format(
                        locale=locale
                    )
                if upstream_label not in config.kind_dependencies_tasks:
                    raise Exception(
                        "Can't find upstream task for {} {}".format(
                            platform, locale
                        )
                    )
                upstream = config.kind_dependencies_tasks[upstream_label]

                # set the dependencies to just what we need rather than all of l10n
                dependencies.update({upstream.label: upstream.label})

                fetches[upstream_label].add(
                    (upstream_artifact, stage_platform, locale)
                )

                artifact_part = "{platform}/{locale}/target.installer.exe".format(
                    platform=stage_platform, locale=locale
                )
                artifact = "releng/partner/{partner}/{sub_partner}/{artifact_part}".format(
                    partner=partner_config["campaign"],
                    sub_partner=partner_config["content"],
                    artifact_part=artifact_part,
                )
                # config for script
                # TODO - generalise input & output ??
                # add releng/partner prefix via get_artifact_prefix..()
                attributions.append(
                    {
                        "input": "/builds/worker/fetches/{}".format(artifact_part),
                        "output": "/builds/worker/artifacts/{}".format(artifact),
                        "attribution": attribution_code,
                    }
                )
                release_artifacts.append(artifact)

    # bail-out early if we don't have any attributions to do
    if not attributions:
        return

    for task in tasks:
        worker = task.get("worker", {})
        # chain-of-trust artifacts let downstream beetmover verify hashes
        worker["chain-of-trust"] = True

        task.setdefault("dependencies", {}).update(dependencies)
        task.setdefault("fetches", {})
        for upstream_label, upstream_artifacts in fetches.items():
            task["fetches"][upstream_label] = [
                {
                    "artifact": upstream_artifact,
                    "dest": "{platform}/{locale}".format(
                        platform=platform, locale=locale
                    ),
                    "extract": False,
                    "verify-hash": True,
                }
                for upstream_artifact, platform, locale in upstream_artifacts
            ]
        # sort_keys keeps the env value deterministic so task definitions
        # are stable across regenerations
        worker.setdefault("env", {})["ATTRIBUTION_CONFIG"] = six.ensure_text(
            json.dumps(attributions, sort_keys=True)
        )
        worker["artifacts"] = [
            {
                "name": "releng/partner",
                "path": "/builds/worker/artifacts/releng/partner",
                "type": "directory",
            }
        ]
        task["release-artifacts"] = release_artifacts
        task["label"] = config.kind
        yield task

Просмотреть файл

@ -1,205 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Transform the beetmover task into an actual task description.
"""
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.util.partners import (
get_partner_config_by_kind,
apply_partner_priority,
)
from taskgraph.util.schema import (
optionally_keyed_by,
resolve_keyed_by,
)
from taskgraph.util.scriptworker import (
add_scope_prefix,
get_beetmover_bucket_scope,
)
from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.transforms.task import task_description_schema
from voluptuous import Any, Required, Optional
from collections import defaultdict
from copy import deepcopy
beetmover_description_schema = schema.extend(
{
# depname is used in taskref's to identify the taskID of the unsigned things
Required("depname", default="build"): text_type,
# unique label to describe this beetmover task, defaults to {dep.label}-beetmover
Optional("label"): text_type,
Required("partner-bucket-scope"): optionally_keyed_by(
"release-level", text_type
),
Required("partner-public-path"): Any(None, text_type),
Required("partner-private-path"): Any(None, text_type),
Optional("extra"): object,
Required("shipping-phase"): task_description_schema["shipping-phase"],
Optional("shipping-product"): task_description_schema["shipping-product"],
Optional("priority"): task_description_schema["priority"],
}
)
transforms = TransformSequence()
transforms.add_validate(beetmover_description_schema)
transforms.add(apply_partner_priority)
@transforms.add
def resolve_keys(config, jobs):
    """Resolve the release-level keyed-by value of ``partner-bucket-scope``."""
    release_level = config.params.release_level()
    for job in jobs:
        resolve_keyed_by(
            job,
            "partner-bucket-scope",
            item_name=job["label"],
            **{"release-level": release_level}
        )
        yield job
@transforms.add
def split_public_and_private(config, jobs):
    """Split each job into at most one public and one private beetmover task.

    Artifacts are grouped by destination (public candidates bucket vs.
    private partner bucket) and one task is yielded per non-empty group,
    since a single beetmover task only targets one destination.
    """
    # we need to separate private vs public destinations because beetmover supports one
    # in a single task. Only use a single task for each type though.
    partner_config = get_partner_config_by_kind(config, config.kind)
    for job in jobs:
        upstream_artifacts = job["primary-dependency"].release_artifacts
        attribution_task_ref = "<{}>".format(job["primary-dependency"].label)
        prefix = get_artifact_prefix(job["primary-dependency"])
        artifacts = defaultdict(list)
        for artifact in upstream_artifacts:
            # After stripping the artifact prefix, paths are split as
            # <partner>/<subpartner>/<platform>/<locale>/<rest>.
            partner, sub_partner, platform, locale, _ = artifact.replace(
                prefix + "/", ""
            ).split("/", 4)
            destination = "private"
            this_config = [p for p in partner_config["configs"] if (
                p["campaign"] == partner and p["content"] == sub_partner)
            ]
            # NOTE(review): assumes at least one matching partner config
            # exists; an artifact with no matching campaign/content pair
            # would raise IndexError here — confirm that is intended.
            if this_config[0].get("upload_to_candidates"):
                destination = "public"
            artifacts[destination].append(
                (artifact, partner, sub_partner, platform, locale)
            )
        action_scope = add_scope_prefix(config, "beetmover:action:push-to-partner")
        public_bucket_scope = get_beetmover_bucket_scope(config)
        partner_bucket_scope = add_scope_prefix(config, job["partner-bucket-scope"])
        # Template substitutions for the partner path. The per-artifact keys
        # map to literal "{...}" strings on purpose, so the path keeps those
        # placeholders for generate_upstream_artifacts() to fill in later.
        repl_dict = {
            "build_number": config.params["build_number"],
            "release_partner_build_number": config.params[
                "release_partner_build_number"
            ],
            "version": config.params["version"],
            "partner": "{partner}", # we'll replace these later, per artifact
            "subpartner": "{subpartner}",
            "platform": "{platform}",
            "locale": "{locale}",
        }
        # Yield one copy of the job per destination that has artifacts,
        # with scopes and upstream-artifacts set for that destination.
        for destination, destination_artifacts in artifacts.items():
            this_job = deepcopy(job)
            if destination == "public":
                this_job["scopes"] = [public_bucket_scope, action_scope]
                this_job["partner_public"] = True
            else:
                this_job["scopes"] = [partner_bucket_scope, action_scope]
                this_job["partner_public"] = False
            partner_path_key = "partner-{destination}-path".format(
                destination=destination
            )
            partner_path = this_job[partner_path_key].format(**repl_dict)
            this_job.setdefault("worker", {})[
                "upstream-artifacts"
            ] = generate_upstream_artifacts(
                attribution_task_ref, destination_artifacts, partner_path
            )
            yield this_job
@transforms.add
def make_task_description(config, jobs):
    """Turn each split job into a full task description.

    The label and description get a public/private suffix matching the
    destination chosen by split_public_and_private. The original code
    assigned `attributes = dep_job.attributes` only to clobber it with
    copy_attributes_from_dependent_job() a few lines later; the redundant,
    shadowed assignment is removed here and build_platform is read directly.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        build_platform = dep_job.attributes.get("build_platform")
        if not build_platform:
            raise Exception("Cannot find build platform!")

        label = config.kind
        description = "Beetmover for partner attribution"
        if job["partner_public"]:
            label = "{}-public".format(label)
            description = "{} public".format(description)
        else:
            label = "{}-private".format(label)
            description = "{} private".format(description)

        # Task attributes are derived from the dependency, not copied verbatim.
        attributes = copy_attributes_from_dependent_job(dep_job)

        task = {
            "label": label,
            "description": description,
            "dependencies": {dep_job.kind: dep_job.label},
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "shipping-phase": job["shipping-phase"],
            "shipping-product": job.get("shipping-product"),
            "partner_public": job["partner_public"],
            "worker": job["worker"],
            "scopes": job["scopes"],
        }
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]
        yield task
def generate_upstream_artifacts(attribution_task, artifacts, partner_path):
    """Build the beetmover ``upstream-artifacts`` payload.

    ``artifacts`` is an iterable of
    (path, partner, subpartner, platform, locale) tuples; ``partner_path``
    is a template whose {partner}/{subpartner}/{platform}/{locale}
    placeholders are filled in per artifact and stored under "locale".
    Raises if no artifacts were supplied.
    """
    entries = []
    for path, partner, subpartner, platform, locale in artifacts:
        destination = partner_path.format(
            partner=partner,
            subpartner=subpartner,
            platform=platform,
            locale=locale,
        )
        entries.append(
            {
                "taskId": {"task-reference": attribution_task},
                "taskType": "repackage",
                "paths": [path],
                "locale": destination,
            }
        )
    if not entries:
        raise Exception("Couldn't find any upstream artifacts.")
    return entries
@transforms.add
def make_task_worker(config, jobs):
    """Attach the beetmover worker definition to each task and consume
    the temporary ``partner_public`` key."""
    for task in jobs:
        task["worker-type"] = "beetmover"
        task["worker"].update(
            {
                "implementation": "beetmover",
                "release-properties": craft_release_properties(config, task),
                "partner-public": task.pop("partner_public"),
            }
        )
        yield task

Просмотреть файл

@ -12,29 +12,15 @@ from taskgraph.util.schema import resolve_keyed_by
from taskgraph.util.scriptworker import get_release_config
from taskgraph.util.partners import (
check_if_partners_enabled,
get_partner_config_by_kind,
get_partner_url_config,
get_repack_ids_by_platform,
apply_partner_priority,
)
transforms = TransformSequence()
transforms.add(check_if_partners_enabled)
transforms.add(apply_partner_priority)
@transforms.add
def skip_unnecessary_platforms(config, tasks):
for task in tasks:
if config.kind == "release-partner-repack":
platform = task['attributes']['build_platform']
repack_ids = get_repack_ids_by_platform(config, platform)
if not repack_ids:
continue
yield task
@transforms.add
def populate_repack_manifests_url(config, tasks):
for task in tasks:
@ -75,14 +61,11 @@ def make_label(config, tasks):
@transforms.add
def add_command_arguments(config, tasks):
release_config = get_release_config(config)
# staging releases - pass reduced set of locales to the repacking script
all_locales = set()
partner_config = get_partner_config_by_kind(config, config.kind)
for partner in partner_config.values():
for sub_partner in partner.values():
all_locales.update(sub_partner.get('locales', []))
for partner_class in config.params['release_partner_config'].values():
for partner in partner_class.values():
for sub_partner in partner.values():
all_locales.update(sub_partner.get('locales', []))
for task in tasks:
# add the MOZHARNESS_OPTIONS, eg version=61.0, build-number=1, platform=win64
if not task['attributes']['build_platform'].endswith('-shippable'):
@ -115,3 +98,8 @@ def add_command_arguments(config, tasks):
task['worker']['env']['RELEASE_TYPE'] = config.params['release_type']
yield task
# This needs to be run at the *end*, because the generators are called in
# reverse order, when each downstream transform references `tasks`.
transforms.add(check_if_partners_enabled)

Просмотреть файл

@ -9,11 +9,13 @@ from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.util.partners import get_partner_config_by_kind
from taskgraph.util.partners import (get_partner_config_by_kind, check_if_partners_enabled)
from taskgraph.util.signed_artifacts import generate_specifications_of_artifacts_to_sign
transforms = TransformSequence()
transforms.add(check_if_partners_enabled)
@transforms.add
def set_mac_label(config, jobs):

Просмотреть файл

@ -18,7 +18,7 @@ from taskgraph.util.schema import (
resolve_keyed_by,
)
from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.util.partners import get_partner_config_by_kind
from taskgraph.util.partners import check_if_partners_enabled, get_partner_config_by_kind
from taskgraph.util.platforms import archive_format, executable_extension
from taskgraph.util.workertypes import worker_type_implementation
from taskgraph.transforms.task import task_description_schema
@ -71,6 +71,7 @@ packaging_description_schema = schema.extend({
})
transforms = TransformSequence()
transforms.add(check_if_partners_enabled)
transforms.add_validate(packaging_description_schema)

Просмотреть файл

@ -11,7 +11,7 @@ from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.util.partners import get_partner_config_by_kind
from taskgraph.util.partners import check_if_partners_enabled, get_partner_config_by_kind
from taskgraph.util.scriptworker import (
get_signing_cert_scope_per_platform,
)
@ -29,6 +29,7 @@ repackage_signing_description_schema = schema.extend({
Optional('priority'): task_description_schema['priority'],
})
transforms.add(check_if_partners_enabled)
transforms.add_validate(repackage_signing_description_schema)

Просмотреть файл

@ -15,7 +15,6 @@ import xml.etree.ElementTree as ET
from taskgraph.util.attributes import release_level
from taskgraph.util.schema import resolve_keyed_by
import six
import yaml
# Suppress chatty requests logging
logging.getLogger("requests").setLevel(logging.WARNING)
@ -40,7 +39,7 @@ LOGIN_QUERY = """query {
# Returns the contents of default.xml from a manifest repository
MANIFEST_QUERY = """query {
repository(owner:"%(owner)s", name:"%(repo)s") {
object(expression: "master:%(file)s") {
object(expression: "master:default.xml") {
... on Blob {
text
}
@ -196,12 +195,12 @@ def get_repo_params(repo):
def get_partners(manifestRepo, token):
""" Given the url to a manifest repository, retrieve the default.xml and parse it into a
list of partner repos.
""" Given the url to a manifest repository, retieve the default.xml and parse it into a
list of parter repos.
"""
log.debug("Querying for manifest default.xml in %s", manifestRepo)
log.debug("Querying for manifest in %s", manifestRepo)
owner, repo = get_repo_params(manifestRepo)
query = MANIFEST_QUERY % {'owner': owner, 'repo': repo, 'file': 'default.xml'}
query = MANIFEST_QUERY % {'owner': owner, 'repo': repo}
raw_manifest = query_api(query, token)
log.debug("Raw manifest: %s", raw_manifest)
if not raw_manifest['data']['repository']:
@ -278,24 +277,6 @@ def get_repack_configs(repackRepo, token):
return configs
def get_attribution_config(manifestRepo, token):
log.debug("Querying for manifest attribution_config.yml in %s", manifestRepo)
owner, repo = get_repo_params(manifestRepo)
query = MANIFEST_QUERY % {'owner': owner, 'repo': repo, 'file': 'attribution_config.yml'}
raw_manifest = query_api(query, token)
if not raw_manifest['data']['repository']:
raise RuntimeError(
"Couldn't load partner manifest at %s, insufficient permissions ?" %
manifestRepo
)
# no file has been set up, gracefully continue
if raw_manifest['data']['repository']['object'] is None:
log.debug('No attribution_config.yml file found')
return {}
return yaml.safe_load(raw_manifest['data']['repository']['object']['text'])
def get_partner_config_by_url(manifest_url, kind, token, partner_subset=None):
""" Retrieve partner data starting from the manifest url, which points to a repository
containing a default.xml that is intended to be drive the Google tool 'repo'. It
@ -311,30 +292,23 @@ def get_partner_config_by_url(manifest_url, kind, token, partner_subset=None):
if kind not in partner_configs:
log.info('Looking up data for %s from %s', kind, manifest_url)
check_login(token)
if kind == 'release-partner-attribution':
partner_configs[kind] = get_attribution_config(manifest_url, token)
else:
partners = get_partners(manifest_url, token)
partner_configs[kind] = {}
for partner, partner_url in partners.items():
if partner_subset and partner not in partner_subset:
continue
partner_configs[kind][partner] = get_repack_configs(partner_url, token)
partners = get_partners(manifest_url, token)
partner_configs[kind] = {}
for partner, partner_url in partners.items():
if partner_subset and partner not in partner_subset:
continue
partner_configs[kind][partner] = get_repack_configs(partner_url, token)
return partner_configs[kind]
def check_if_partners_enabled(config, tasks):
if (
config.params['release_enable_partner_repack'] and
config.params['release_enable_partners'] and
config.kind.startswith('release-partner-repack')
) or (
config.params['release_enable_partner_attribution'] and
config.kind.startswith('release-partner-attribution')
) or (
config.params['release_enable_emefree'] and
config.kind.startswith('release-eme-free-')
config.kind.startswith('release-eme-free-repack')
):
for task in tasks:
yield task
@ -360,16 +334,11 @@ def get_partner_config_by_kind(config, kind):
return {}
# if we're only interested in a subset of partners we remove the rest
if partner_subset:
if kind.startswith('release-partner-repack'):
# TODO - should be fatal to have an unknown partner in partner_subset
for partner in [p for p in kind_config.keys() if p not in partner_subset]:
# TODO - should be fatal to have an unknown partner in partner_subset
for partner in kind_config.keys():
if partner not in partner_subset:
del(kind_config[partner])
elif kind.startswith('release-partner-attribution'):
all_configs = deepcopy(kind_config["configs"])
kind_config["configs"] = []
for this_config in all_configs:
if this_config["campaign"] in partner_subset:
kind_config["configs"].append(this_config)
return kind_config
@ -390,23 +359,14 @@ def fix_partner_config(orig_config):
if 'en-US' not in all_locales:
all_locales.append('en-US')
for kind, kind_config in six.iteritems(orig_config):
if kind == 'release-partner-attribution':
pc[kind] = {}
if kind_config:
pc[kind] = {"defaults": kind_config["defaults"]}
for config in kind_config["configs"]:
# Make sure our locale list is a subset of all_locales
pc[kind].setdefault("configs", []).append(
_fix_subpartner_locales(config, all_locales))
else:
for partner, partner_config in six.iteritems(kind_config):
for subpartner, subpartner_config in six.iteritems(partner_config):
# get rid of empty subpartner configs
if not subpartner_config:
continue
# Make sure our locale list is a subset of all_locales
pc.setdefault(kind, {}).setdefault(partner, {})[subpartner] = \
_fix_subpartner_locales(subpartner_config, all_locales)
for partner, partner_config in six.iteritems(kind_config):
for subpartner, subpartner_config in six.iteritems(partner_config):
# get rid of empty subpartner configs
if not subpartner_config:
continue
# Make sure our locale list is a subset of all_locales
pc.setdefault(kind, {}).setdefault(partner, {})[subpartner] = \
_fix_subpartner_locales(subpartner_config, all_locales)
return pc
@ -448,24 +408,9 @@ def get_partner_url_config(parameters, graph_config):
**substitutions)
resolve_keyed_by(partner_url_config, 'release-partner-repack', 'partner manifest url',
**substitutions)
resolve_keyed_by(partner_url_config, 'release-partner-attribution', 'partner attribution url',
**substitutions)
return partner_url_config
def get_repack_ids_by_platform(config, build_platform):
partner_config = get_partner_config_by_kind(config, config.kind)
combinations = []
for partner, subconfigs in partner_config.items():
for sub_config_name, sub_config in subconfigs.items():
if build_platform not in sub_config.get("platforms", []):
continue
locales = locales_per_build_platform(build_platform, sub_config.get('locales', []))
for locale in locales:
combinations.append("{}/{}/{}".format(partner, sub_config_name, locale))
return sorted(combinations)
def get_partners_to_be_published(config):
# hardcoded kind because release-bouncer-aliases doesn't match otherwise
partner_config = get_partner_config_by_kind(config, 'release-partner-repack')
@ -485,26 +430,10 @@ def apply_partner_priority(config, jobs):
# medium is the same as mozilla-central, see taskcluster/ci/config.yml. ie higher than
# integration branches because we don't want to wait a lot for the graph to be done, but
# for multiple releases the partner tasks always wait for non-partner.
if (config.kind.startswith(('release-partner-repack', 'release-partner-attribution')) and
if (config.kind.startswith('release-partner-repack') and
config.params.release_level() == "production"):
priority = 'medium'
for job in jobs:
if priority:
job['priority'] = priority
yield job
def generate_attribution_code(defaults, partner):
params = {
"medium": defaults["medium"],
"source": defaults["source"],
"campaign": partner["campaign"],
"content": partner["content"],
}
if partner.get("variation"):
params["variation"] = partner["variation"]
if partner.get("experiment"):
params["experiment"] = partner["experiment"]
code = six.moves.urllib.parse.urlencode(params)
return code

Просмотреть файл

@ -106,6 +106,7 @@ class DesktopPartnerRepacks(AutomationMixin, BaseScript, VirtualenvMixin, Secret
dirs = {}
dirs['abs_repo_dir'] = os.path.join(abs_dirs['abs_work_dir'], '.repo')
dirs['abs_partners_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'partners')
dirs['abs_scripts_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'scripts')
for key in dirs.keys():
if key not in abs_dirs:
abs_dirs[key] = dirs[key]
@ -116,6 +117,7 @@ class DesktopPartnerRepacks(AutomationMixin, BaseScript, VirtualenvMixin, Secret
def _repo_cleanup(self):
self.rmtree(self.query_abs_dirs()['abs_repo_dir'])
self.rmtree(self.query_abs_dirs()['abs_partners_dir'])
self.rmtree(self.query_abs_dirs()['abs_scripts_dir'])
def _repo_init(self, repo):
partial_env = {
@ -149,8 +151,7 @@ class DesktopPartnerRepacks(AutomationMixin, BaseScript, VirtualenvMixin, Secret
def repack(self):
"""creates the repacks"""
repack_cmd = ["./mach", "python",
"python/mozrelease/mozrelease/partner_repack.py",
repack_cmd = [sys.executable, "tc-partner-repacks.py",
"-v", self.config['version'],
"-n", str(self.config['build_number'])]
if self.config.get('platform'):
@ -165,7 +166,7 @@ class DesktopPartnerRepacks(AutomationMixin, BaseScript, VirtualenvMixin, Secret
repack_cmd.extend(["--limit-locale", locale])
self.run_command(repack_cmd,
cwd=os.environ["GECKO_PATH"],
cwd=self.query_abs_dirs()['abs_scripts_dir'],
halt_on_failure=True)

Просмотреть файл

@ -37,7 +37,6 @@ py2:
- python/mozbuild/mozbuild/html_build_viewer.py
- python/mozlint
- python/mozperftest
- python/mozrelease/mozrelease/partner_repack.py
- tools/crashreporter/system-symbols/win/symsrv-fetch.py
- tools/github-sync
- tools/lint