Bug 1133074 - Use routes.json for mozharness TC uploads; r=jlund

Mike Shal 2015-07-27 11:03:54 -04:00
Parent fce87bada5
Commit 56bc9f06cd
3 changed files: 109 additions and 32 deletions

View file

@@ -181,3 +181,37 @@ class BuildbotMixin(object):
         retcode = self.run_command(buildbot + sendchange)
         if retcode != 0:
             self.info("The sendchange failed but we don't want to turn the build orange: %s" % retcode)
+    def query_build_name(self):
+        build_name = self.config.get('platform')
+        if not build_name:
+            self.fatal('Must specify "platform" in the mozharness config for indexing')
+        return build_name
+    def query_build_type(self):
+        if self.config.get('build_type'):
+            build_type = self.config['build_type']
+        elif self.config.get('pgo_build'):
+            build_type = 'pgo'
+        elif self.config.get('debug_build', False):
+            build_type = 'debug'
+        else:
+            build_type = 'opt'
+        return build_type
+    def buildid_to_dict(self, buildid):
+        """Returns a dict with the year, month, day, hour, minute, and second
+        as keys, as parsed from the buildid"""
+        buildidDict = {}
+        try:
+            # strptime is no good here because it strips leading zeros
+            buildidDict['year'] = buildid[0:4]
+            buildidDict['month'] = buildid[4:6]
+            buildidDict['day'] = buildid[6:8]
+            buildidDict['hour'] = buildid[8:10]
+            buildidDict['minute'] = buildid[10:12]
+            buildidDict['second'] = buildid[12:14]
+        except:
+            self.fatal('Could not parse buildid into YYYYMMDDHHMMSS: %s' % buildid)
+        return buildidDict
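
The new BuildbotMixin helpers exist so that the route templates read from routes.json can be filled in with str.format(): query_build_name() and query_build_type() supply the {build_name}/{build_type} keys, and buildid_to_dict() supplies {year} through {second} while preserving leading zeros. Below is a minimal standalone sketch of that interaction; the template string and buildid are made up for illustration and are not taken from routes.json.

    # Standalone sketch; mirrors buildid_to_dict() above without the mozharness mixin.
    def buildid_to_dict(buildid):
        keys = ('year', 'month', 'day', 'hour', 'minute', 'second')
        # Fixed-width slicing keeps leading zeros, unlike a strptime round-trip.
        return dict(zip(keys, (buildid[0:4], buildid[4:6], buildid[6:8],
                               buildid[8:10], buildid[10:12], buildid[12:14])))

    # Hypothetical template; the real templates live in testing/taskcluster/routes.json.
    template = '{index}.{project}.{year}.{month}.{day}.{build_name}-{build_type}'
    fmt = {
        'index': 'index.garbage.staging',
        'project': 'mozilla-central',
        'build_name': 'linux64',
        'build_type': 'opt',
    }
    fmt.update(buildid_to_dict('20150727110354'))
    print(template.format(**fmt))
    # -> index.garbage.staging.mozilla-central.2015.07.27.linux64-opt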

View file

@@ -1370,6 +1370,7 @@ or run without that action (ie: --no-{action})"
         self.generated_build_props = True
     def upload_files(self):
+        dirs = self.query_abs_dirs()
         auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
         credentials = {}
         execfile(auth, credentials)
@@ -1391,6 +1392,34 @@ or run without that action (ie: --no-{action})"
         # messages while we are testing uploads.
         logging.getLogger('taskcluster').setLevel(logging.DEBUG)
+        routes_json = os.path.join(dirs['abs_src_dir'],
+                                   'testing/taskcluster/routes.json')
+        with open(routes_json) as f:
+            contents = json.load(f)
+            if self.query_is_nightly():
+                templates = contents['nightly']
+                # Nightly builds with l10n counterparts also publish to the
+                # 'en-US' locale.
+                if self.config.get('publish_nightly_en_US_routes'):
+                    templates.extend(contents['l10n'])
+            else:
+                templates = contents['routes']
+        routes = []
+        for template in templates:
+            fmt = {
+                'index': 'index.garbage.staging.mshal-testing', # TODO
+                'project': self.buildbot_config['properties']['branch'],
+                'head_rev': self.query_revision(),
+                'build_product': self.config['stage_product'],
+                'build_name': self.query_build_name(),
+                'build_type': self.query_build_type(),
+                'locale': 'en-US',
+            }
+            fmt.update(self.buildid_to_dict(self.query_buildid()))
+            routes.append(template.format(**fmt))
+        self.info("Using routes: %s" % routes)
         tc = Taskcluster(self.branch,
                          self.query_pushdate(), # Use pushdate as the rank
                          client_id,
@@ -1400,10 +1429,10 @@ or run without that action (ie: --no-{action})"
         index = self.config.get('taskcluster_index', 'index.garbage.staging')
         # TODO: Bug 1165980 - these should be in tree
-        routes = [
+        routes.extend([
             "%s.buildbot.branches.%s.%s" % (index, self.branch, self.stage_platform),
             "%s.buildbot.revisions.%s.%s.%s" % (index, self.query_revision(), self.branch, self.stage_platform),
-        ]
+        ])
         task = tc.create_task(routes)
         tc.claim_task(task)
@@ -1473,7 +1502,6 @@ or run without that action (ie: --no-{action})"
             files.extend([os.path.join(self.log_obj.abs_log_dir, x) for x in self.log_obj.log_files.values()])
         # Also upload our buildprops.json file.
-        dirs = self.query_abs_dirs()
         files.extend([os.path.join(dirs['base_work_dir'], 'buildprops.json')])
         for upload_file in files:
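
For reference, the upload_files() change above selects a template list from routes.json by top-level key: 'nightly' (optionally extended with 'l10n' for the en-US routes) for nightly builds, 'routes' otherwise. Here is a rough sketch of that selection logic, decoupled from mozharness; the route strings in the sample JSON are invented, and only the key names come from the patch.

    import json

    # Invented sample contents; only the 'routes'/'nightly'/'l10n' keys are real.
    SAMPLE_ROUTES_JSON = """
    {
      "routes": ["{index}.buildbot.branches.{project}.{build_name}-{build_type}"],
      "nightly": ["{index}.nightly.{year}.{month}.{day}.{project}.{build_name}-{build_type}"],
      "l10n": ["{index}.l10n.{project}.{head_rev}.{build_name}-{build_type}.{locale}"]
    }
    """

    def select_templates(contents, is_nightly, publish_en_us_routes):
        if is_nightly:
            templates = list(contents['nightly'])
            # Nightly builds with l10n counterparts also publish the 'en-US' locale.
            if publish_en_us_routes:
                templates.extend(contents['l10n'])
        else:
            templates = list(contents['routes'])
        return templates

    contents = json.loads(SAMPLE_ROUTES_JSON)
    print(select_templates(contents, is_nightly=True, publish_en_us_routes=True))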

View file

@@ -206,10 +206,8 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
         self.l10n_dir = None
         self.package_urls = {}
         self.pushdate = None
-        # Each locale adds its list of files to upload_files - some will be
-        # duplicates (like the mar binaries), so we use a set to prune those
-        # when uploading to taskcluster.
-        self.upload_files = set()
+        # upload_files is a dictionary of files to upload, keyed by locale.
+        self.upload_files = {}
         if 'mock_target' in self.config:
             self.enable_mock()
@@ -749,9 +747,9 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
             self.error('failed to get upload file list for locale %s' % (locale))
             return FAILURE
-        for f in files:
-            abs_file = os.path.abspath(os.path.join(cwd, f))
-            self.upload_files.update([abs_file])
+        self.upload_files[locale] = [
+            os.path.abspath(os.path.join(cwd, f)) for f in files
+        ]
         return SUCCESS
     def make_installers(self, locale):
@@ -1018,31 +1016,48 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
         branch = self.config['branch']
         platform = self.config['platform']
         revision = self._query_revision()
-        tc = Taskcluster(self.config['branch'],
-                         self.query_pushdate(),
-                         client_id,
-                         access_token,
-                         self.log_obj,
-                         )
-        index = self.config.get('taskcluster_index', 'index.garbage.staging')
-        # TODO: Bug 1165980 - these should be in tree. Note the '.l10n' suffix.
-        routes = [
-            "%s.buildbot.branches.%s.%s.l10n" % (index, branch, platform),
-            "%s.buildbot.revisions.%s.%s.%s.l10n" % (index, revision, branch, platform),
-        ]
+        routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
+                                   'testing/taskcluster/routes.json')
+        with open(routes_json) as f:
+            contents = json.load(f)
+            templates = contents['l10n']
-        task = tc.create_task(routes)
-        tc.claim_task(task)
+        for locale, files in self.upload_files.iteritems():
+            self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
+            routes = []
+            for template in templates:
+                fmt = {
+                    # TODO: Bug 1133074
+                    #index = self.config.get('taskcluster_index', 'index.garbage.staging')
+                    'index': 'index.garbage.staging.mshal-testing',
+                    'project': branch,
+                    'head_rev': revision,
+                    'build_product': self.config['stage_product'],
+                    'build_name': self.query_build_name(),
+                    'build_type': self.query_build_type(),
+                    'locale': locale,
+                }
+                fmt.update(self.buildid_to_dict(self._query_buildid()))
+                routes.append(template.format(**fmt))
+            self.info('Using routes: %s' % routes)
-        self.info("Uploading files to S3: %s" % self.upload_files)
-        for upload_file in self.upload_files:
-            # Create an S3 artifact for each file that gets uploaded. We also
-            # check the uploaded file against the property conditions so that we
-            # can set the buildbot config with the correct URLs for package
-            # locations.
-            tc.create_artifact(task, upload_file)
-        tc.report_completed(task)
+            tc = Taskcluster(branch,
+                             self.query_pushdate(),
+                             client_id,
+                             access_token,
+                             self.log_obj,
+                             )
+            task = tc.create_task(routes)
+            tc.claim_task(task)
+            for upload_file in files:
+                # Create an S3 artifact for each file that gets uploaded. We also
+                # check the uploaded file against the property conditions so that we
+                # can set the buildbot config with the correct URLs for package
+                # locations.
+                tc.create_artifact(task, upload_file)
+            tc.report_completed(task)
     def query_pushdate(self):
         if self.pushdate:
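
The desktop_l10n change above replaces one task covering every locale with one task per locale: upload_files is now keyed by locale, and each locale gets its own routes (via the {locale} key) and its own create_task/claim_task/create_artifact/report_completed cycle. Below is a sketch of that flow with a stubbed Taskcluster client so it runs standalone; the stub's method names mirror the patch, while the file names and template string are illustrative only.

    class StubTaskcluster(object):
        # Stand-in for the mozharness Taskcluster helper used in the patch.
        def create_task(self, routes):
            print('create_task with routes: %s' % routes)
            return {'routes': routes}

        def claim_task(self, task):
            pass

        def create_artifact(self, task, path):
            print('  artifact: %s' % path)

        def report_completed(self, task):
            print('task completed')

    # upload_files keyed by locale, as in the new DesktopSingleLocale.__init__.
    upload_files = {
        'de': ['obj/dist/firefox-42.0a1.de.linux-x86_64.tar.bz2'],
        'fr': ['obj/dist/firefox-42.0a1.fr.linux-x86_64.tar.bz2'],
    }
    template = '{index}.l10n.{project}.{build_name}-{build_type}.{locale}'  # hypothetical

    for locale, files in upload_files.items():
        routes = [template.format(index='index.garbage.staging', project='mozilla-central',
                                  build_name='linux64', build_type='opt', locale=locale)]
        tc = StubTaskcluster()
        task = tc.create_task(routes)
        tc.claim_task(task)
        for upload_file in files:
            # One S3 artifact per uploaded file, as in the patch.
            tc.create_artifact(task, upload_file)
        tc.report_completed(task)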