2016-05-17 01:53:22 +03:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
# This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
|
|
|
|
2016-08-09 00:03:38 +03:00
|
|
|
|
2016-05-17 01:53:22 +03:00
|
|
|
from __future__ import absolute_import, print_function, unicode_literals
|
|
|
|
|
2016-06-07 06:09:48 +03:00
|
|
|
import json
|
2016-05-18 21:02:51 +03:00
|
|
|
import logging
|
2017-07-20 20:11:28 +03:00
|
|
|
import os
|
2016-05-17 01:53:22 +03:00
|
|
|
import sys
|
2016-05-16 20:44:24 +03:00
|
|
|
import traceback
|
2016-11-18 02:08:28 +03:00
|
|
|
import re
|
2016-05-17 01:53:22 +03:00
|
|
|
|
|
|
|
from mach.decorators import (
|
|
|
|
CommandArgument,
|
|
|
|
CommandProvider,
|
|
|
|
Command,
|
|
|
|
SubCommand,
|
|
|
|
)
|
|
|
|
|
|
|
|
from mozbuild.base import MachCommandBase
|
|
|
|
|
|
|
|
|
|
|
|
class ShowTaskGraphSubCommand(SubCommand):
    """A SubCommand with TaskGraph-specific arguments"""

    def __call__(self, func):
        """Decorate ``func`` as a SubCommand, then attach the set of
        command-line arguments shared by all `mach taskgraph` display
        subcommands."""
        after = SubCommand.__call__(self, func)
        args = [
            CommandArgument('--root', '-r',
                            help="root of the taskgraph definition relative to topsrcdir"),
            CommandArgument('--quiet', '-q', action="store_true",
                            help="suppress all logging output"),
            CommandArgument('--verbose', '-v', action="store_true",
                            help="include debug-level logging output"),
            CommandArgument('--json', '-J', action="store_const",
                            dest="format", const="json",
                            help="Output task graph as a JSON object"),
            CommandArgument('--labels', '-L', action="store_const",
                            dest="format", const="labels",
                            help="Output the label for each task in the task graph (default)"),
            CommandArgument('--parameters', '-p', default="project=mozilla-central",
                            help="parameters file (.yml or .json; see "
                                 "`taskcluster/docs/parameters.rst`)"),
            # store_false flag: the default must be the boolean True (meaning
            # "optimize the graph"), not the string "true", so that
            # options['optimize'] is always a bool.
            CommandArgument('--no-optimize', dest="optimize", action="store_false",
                            default=True,
                            help="do not remove tasks from the graph that are found in the "
                                 "index (a.k.a. optimize the graph)"),
            CommandArgument('--tasks-regex', '--tasks', default=None,
                            help="only return tasks with labels matching this regular "
                                 "expression."),
            CommandArgument('-F', '--fast', dest='fast', default=False, action='store_true',
                            help="enable fast task generation for local debugging.")
        ]
        for arg in args:
            after = arg(after)
        return after
|
|
|
|
|
|
|
|
|
|
|
|
@CommandProvider
class MachCommands(MachCommandBase):
    """Mach commands for generating and inspecting the in-tree TaskCluster
    task graph."""

    @Command('taskgraph', category="ci",
             description="Manipulate TaskCluster task graphs defined in-tree")
    def taskgraph(self):
        """The taskgraph subcommands all relate to the generation of task graphs
        for Gecko continuous integration. A task graph is a set of tasks linked
        by dependencies: for example, a binary must be built before it is tested,
        and that build may further depend on various toolchains, libraries, etc.
        """

    @ShowTaskGraphSubCommand('taskgraph', 'tasks',
                             description="Show all tasks in the taskgraph")
    def taskgraph_tasks(self, **options):
        return self.show_taskgraph('full_task_set', options)

    @ShowTaskGraphSubCommand('taskgraph', 'full',
                             description="Show the full taskgraph")
    def taskgraph_full(self, **options):
        return self.show_taskgraph('full_task_graph', options)

    @ShowTaskGraphSubCommand('taskgraph', 'target',
                             description="Show the target task set")
    def taskgraph_target(self, **options):
        return self.show_taskgraph('target_task_set', options)

    @ShowTaskGraphSubCommand('taskgraph', 'target-graph',
                             description="Show the target taskgraph")
    def taskgraph_target_taskgraph(self, **options):
        return self.show_taskgraph('target_task_graph', options)

    @ShowTaskGraphSubCommand('taskgraph', 'optimized',
                             description="Show the optimized taskgraph")
    def taskgraph_optimized(self, **options):
        return self.show_taskgraph('optimized_task_graph', options)

    @ShowTaskGraphSubCommand('taskgraph', 'morphed',
                             description="Show the morphed taskgraph")
    def taskgraph_morphed(self, **options):
        return self.show_taskgraph('morphed_task_graph', options)

    @SubCommand('taskgraph', 'actions',
                description="Write actions.json to stdout")
    @CommandArgument('--root', '-r',
                     help="root of the taskgraph definition relative to topsrcdir")
    @CommandArgument('--quiet', '-q', action="store_true",
                     help="suppress all logging output")
    @CommandArgument('--verbose', '-v', action="store_true",
                     help="include debug-level logging output")
    @CommandArgument('--parameters', '-p', default="project=mozilla-central",
                     help="parameters file (.yml or .json; see "
                          "`taskcluster/docs/parameters.rst`)")
    def taskgraph_actions(self, **options):
        return self.show_actions(options)

    @SubCommand('taskgraph', 'decision',
                description="Run the decision task")
    @CommandArgument('--root', '-r',
                     help="root of the taskgraph definition relative to topsrcdir")
    @CommandArgument('--base-repository',
                     required=True,
                     help='URL for "base" repository to clone')
    @CommandArgument('--head-repository',
                     required=True,
                     help='URL for "head" repository to fetch revision from')
    @CommandArgument('--head-ref',
                     required=True,
                     help='Reference (this is same as rev usually for hg)')
    @CommandArgument('--head-rev',
                     required=True,
                     help='Commit revision to use from head repository')
    @CommandArgument('--comm-base-repository',
                     required=False,
                     help='URL for "base" comm-* repository to clone')
    @CommandArgument('--comm-head-repository',
                     required=False,
                     help='URL for "head" comm-* repository to fetch revision from')
    @CommandArgument('--comm-head-ref',
                     required=False,
                     help='comm-* Reference (this is same as rev usually for hg)')
    @CommandArgument('--comm-head-rev',
                     required=False,
                     help='Commit revision to use from head comm-* repository')
    @CommandArgument('--message',
                     required=True,
                     help='Commit message to be parsed. Example: "try: -b do -p all -u all"')
    @CommandArgument('--project',
                     required=True,
                     help='Project to use for creating task graph. Example: --project=try')
    @CommandArgument('--pushlog-id',
                     dest='pushlog_id',
                     required=True,
                     default=0)
    @CommandArgument('--pushdate',
                     dest='pushdate',
                     required=True,
                     type=int,
                     default=0)
    @CommandArgument('--owner',
                     required=True,
                     help='email address of who owns this graph')
    @CommandArgument('--level',
                     required=True,
                     help='SCM level of this repository')
    @CommandArgument('--target-tasks-method',
                     help='method for selecting the target tasks to generate')
    @CommandArgument('--try-task-config-file',
                     help='path to try task configuration file')
    def taskgraph_decision(self, **options):
        """Run the decision task: generate a task graph and submit to
        TaskCluster. This is only meant to be called within decision tasks,
        and requires a great many arguments. Commands like `mach taskgraph
        optimized` are better suited to use on the command line, and can take
        the parameters file generated by a decision task."""
        import taskgraph.decision
        try:
            self.setup_logging()
            return taskgraph.decision.taskgraph_decision(options)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    @SubCommand('taskgraph', 'cron',
                description="Run the cron task")
    @CommandArgument('--base-repository',
                     required=False,
                     help='(ignored)')
    @CommandArgument('--head-repository',
                     required=True,
                     help='URL for "head" repository to fetch')
    @CommandArgument('--head-ref',
                     required=False,
                     help='(ignored)')
    @CommandArgument('--project',
                     required=True,
                     help='Project to use for creating tasks. Example: --project=mozilla-central')
    @CommandArgument('--level',
                     required=True,
                     help='SCM level of this repository')
    @CommandArgument('--force-run',
                     required=False,
                     help='If given, force this cronjob to run regardless of time, '
                          'and run no others')
    @CommandArgument('--no-create',
                     required=False,
                     action='store_true',
                     help='Do not actually create tasks')
    @CommandArgument('--root', '-r',
                     required=False,
                     help="root of the repository to get cron task definitions from")
    def taskgraph_cron(self, **options):
        """Run the cron task; this task creates zero or more decision tasks. It is run
        from the hooks service on a regular basis."""
        import taskgraph.cron
        try:
            self.setup_logging()
            return taskgraph.cron.taskgraph_cron(options)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    @SubCommand('taskgraph', 'action-callback',
                description='Run action callback used by action tasks')
    @CommandArgument('--root', '-r', default='taskcluster/ci',
                     help="root of the taskgraph definition relative to topsrcdir")
    def action_callback(self, **options):
        """Invoke the action callback named in the ACTION_* environment
        variables; used from within action tasks themselves."""
        import taskgraph.actions
        try:
            self.setup_logging()

            # the target task for this action (or null if it's a group action)
            task_id = json.loads(os.environ.get('ACTION_TASK_ID', 'null'))
            # the target task group for this action
            task_group_id = os.environ.get('ACTION_TASK_GROUP_ID', None)
            input = json.loads(os.environ.get('ACTION_INPUT', 'null'))
            callback = os.environ.get('ACTION_CALLBACK', None)
            parameters = json.loads(os.environ.get('ACTION_PARAMETERS', '{}'))
            root = options['root']

            return taskgraph.actions.trigger_action_callback(
                task_group_id=task_group_id,
                task_id=task_id,
                input=input,
                callback=callback,
                parameters=parameters,
                root=root,
                test=False)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    @SubCommand('taskgraph', 'test-action-callback',
                description='Run an action callback in a testing mode')
    @CommandArgument('--root', '-r', default='taskcluster/ci',
                     help="root of the taskgraph definition relative to topsrcdir")
    @CommandArgument('--parameters', '-p', default='project=mozilla-central',
                     help='parameters file (.yml or .json; see '
                          '`taskcluster/docs/parameters.rst`)')
    @CommandArgument('--task-id', default=None,
                     help='TaskId to which the action applies')
    @CommandArgument('--task-group-id', default=None,
                     help='TaskGroupId to which the action applies')
    @CommandArgument('--input', default=None,
                     help='Action input (.yml or .json)')
    @CommandArgument('callback', default=None,
                     help='Action callback name (Python function name)')
    def test_action_callback(self, **options):
        """Run an action callback locally with test=True, loading its input
        and parameters from files rather than the environment."""
        import taskgraph.parameters
        import taskgraph.actions
        import yaml

        def load_data(filename):
            # dispatch on extension: YAML or JSON input files are accepted
            with open(filename) as f:
                if filename.endswith('.yml'):
                    return yaml.safe_load(f)
                elif filename.endswith('.json'):
                    return json.load(f)
                else:
                    raise Exception("unknown filename {}".format(filename))

        try:
            self.setup_logging()
            task_id = options['task_id']

            if options['input']:
                input = load_data(options['input'])
            else:
                input = None

            parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
            parameters.check()

            root = options['root']

            return taskgraph.actions.trigger_action_callback(
                task_group_id=options['task_group_id'],
                task_id=task_id,
                input=input,
                callback=options['callback'],
                parameters=parameters,
                root=root,
                test=True)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    def setup_logging(self, quiet=False, verbose=True):
        """
        Set up Python logging for all loggers, sending results to stderr (so
        that command output can be redirected easily) and adding the typical
        mach timestamp.
        """
        # remove the old terminal handler
        old = self.log_manager.replace_terminal_handler(None)

        # re-add it, with level and fh set appropriately
        if not quiet:
            level = logging.DEBUG if verbose else logging.INFO
            self.log_manager.add_terminal_logging(
                fh=sys.stderr, level=level,
                write_interval=old.formatter.write_interval,
                write_times=old.formatter.write_times)

        # all of the taskgraph logging is unstructured logging
        self.log_manager.enable_unstructured()

    def show_taskgraph(self, graph_attr, options):
        """Generate the task graph and display the attribute named by
        ``graph_attr`` (e.g. 'full_task_set'), filtered and formatted per
        ``options``."""
        import taskgraph.parameters
        import taskgraph.target_tasks
        import taskgraph.generator
        import taskgraph
        if options['fast']:
            taskgraph.fast = True

        try:
            self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
            parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
            parameters.check()

            tgg = taskgraph.generator.TaskGraphGenerator(
                root_dir=options.get('root'),
                parameters=parameters)

            tg = getattr(tgg, graph_attr)

            show_method = getattr(self, 'show_taskgraph_' + (options['format'] or 'labels'))
            tg = self.get_filtered_taskgraph(tg, options["tasks_regex"])
            show_method(tg)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    def show_taskgraph_labels(self, taskgraph):
        # print one label per line, in postorder (dependencies first)
        for index in taskgraph.graph.visit_postorder():
            print(taskgraph.tasks[index].label)

    def show_taskgraph_json(self, taskgraph):
        print(json.dumps(taskgraph.to_json(),
                         sort_keys=True, indent=2, separators=(',', ': ')))

    def get_filtered_taskgraph(self, taskgraph, tasksregex):
        """
        Filter all the tasks on the basis of a regular expression
        and return a new TaskGraph object
        """
        # NOTE: the docstring must be the first statement of the method;
        # previously it sat below the imports and was a dead string expression.
        from taskgraph.graph import Graph
        from taskgraph.taskgraph import TaskGraph
        # return original taskgraph if no regular expression is passed
        if not tasksregex:
            return taskgraph
        named_links_dict = taskgraph.graph.named_links_dict()
        filteredtasks = {}
        filterededges = set()
        regexprogram = re.compile(tasksregex)

        for key in taskgraph.graph.visit_postorder():
            task = taskgraph.tasks[key]
            if regexprogram.match(task.label):
                filteredtasks[key] = task
                # .items() rather than the Python-2-only .iteritems(), so the
                # method works under both Python 2 and 3.
                for depname, dep in named_links_dict[key].items():
                    if regexprogram.match(dep):
                        filterededges.add((key, dep, depname))
        filtered_taskgraph = TaskGraph(filteredtasks, Graph(set(filteredtasks), filterededges))
        return filtered_taskgraph

    def show_actions(self, options):
        """Generate the task graph configuration and print actions.json to
        stdout."""
        import taskgraph.parameters
        import taskgraph.target_tasks
        import taskgraph.generator
        import taskgraph
        import taskgraph.actions

        try:
            self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
            parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
            parameters.check()

            tgg = taskgraph.generator.TaskGraphGenerator(
                root_dir=options.get('root'),
                parameters=parameters)

            actions = taskgraph.actions.render_actions_json(parameters, tgg.graph_config)
            print(json.dumps(actions, sort_keys=True, indent=2, separators=(',', ': ')))
        except Exception:
            traceback.print_exc()
            sys.exit(1)
|
|
|
|
|
2016-06-04 22:40:35 +03:00
|
|
|
|
|
|
|
@CommandProvider
class TaskClusterImagesProvider(MachCommandBase):
    """Mach commands for loading and building TaskCluster Docker images."""

    def _ensure_zstd(self):
        # The zstandard module is required to decompress image tarballs;
        # install it into the build virtualenv on demand if it cannot be
        # imported.
        try:
            import zstandard  # noqa: F401
        except (ImportError, AttributeError):
            self._activate_virtualenv()
            self.virtualenv_manager.install_pip_package('zstandard==0.9.0')

    @Command('taskcluster-load-image', category="ci",
             description="Load a pre-built Docker image")
    @CommandArgument('--task-id',
                     help="Load the image at public/image.tar.zst in this task,"
                          "rather than searching the index")
    @CommandArgument('-t', '--tag',
                     help="tag that the image should be loaded as. If not "
                          "image will be loaded with tag from the tarball",
                     metavar="name:tag")
    @CommandArgument('image_name', nargs='?',
                     help="Load the image of this name based on the current"
                          "contents of the tree (as built for mozilla-central"
                          "or mozilla-inbound)")
    def load_image(self, image_name, task_id, tag):
        """Load a pre-built docker image, either from a specific task's
        artifact or by image name via the index."""
        self._ensure_zstd()
        from taskgraph.docker import load_image_by_name, load_image_by_task_id
        # at least one way of identifying the image is required
        if not (image_name or task_id):
            print("Specify either IMAGE-NAME or TASK-ID")
            sys.exit(1)
        try:
            if task_id:
                succeeded = load_image_by_task_id(task_id, tag)
            else:
                succeeded = load_image_by_name(image_name, tag)
            if not succeeded:
                sys.exit(1)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    @Command('taskcluster-build-image', category='ci',
             description='Build a Docker image')
    @CommandArgument('image_name',
                     help='Name of the image to build')
    @CommandArgument('-t', '--tag',
                     help="tag that the image should be built as.",
                     metavar="name:tag")
    @CommandArgument('--context-only',
                     help="File name the context tarball should be written to."
                          "with this option it will only build the context.tar.",
                     metavar='context.tar')
    def build_image(self, image_name, tag, context_only):
        """Build a docker image, or (with --context-only) just its context
        tarball."""
        from taskgraph.docker import build_context, build_image
        try:
            if context_only is None:
                build_image(image_name, tag, os.environ)
            else:
                build_context(image_name, context_only, os.environ)
        except Exception:
            traceback.print_exc()
            sys.exit(1)
|
2017-09-18 15:36:36 +03:00
|
|
|
|
|
|
|
|
|
|
|
@CommandProvider
class TaskClusterPartialsData(object):
    """Mach command for querying balrog release history, used to enable
    partial-update generation."""

    @Command('release-history', category="ci",
             description="Query balrog for release history used by enable partials generation")
    @CommandArgument('-b', '--branch',
                     help="The gecko project branch used in balrog, such as "
                          "mozilla-central, release, maple")
    @CommandArgument('--product', default='Firefox',
                     help="The product identifier, such as 'Firefox'")
    def generate_partials_builds(self, product, branch):
        """Print the release history for the given product/branch as YAML."""
        from taskgraph.util.partials import populate_release_history
        try:
            import yaml
            history = {'release_history': populate_release_history(product, branch)}
            print(yaml.safe_dump(history, allow_unicode=True, default_flow_style=False))
        except Exception:
            traceback.print_exc()
            sys.exit(1)
|