# -*- coding: utf-8 -*-

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals

import argparse
import json
import logging
import os
from six import text_type
import six
import sys
import time
import traceback
import re

from mach.decorators import (
    CommandArgument,
    CommandProvider,
    Command,
    SubCommand,
)

from mozbuild.base import MachCommandBase


def strtobool(value):
    """Convert string to boolean.

    Wraps "distutils.util.strtobool", deferring the import of the package
    in case it's not installed. Otherwise, we have a "chicken and egg problem" where
    |mach bootstrap| would install the required package to enable "distutils.util", but
    it can't because mach fails to interpret this file.
    """
    from distutils.util import strtobool

    return bool(strtobool(value))
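# Example (illustrative): strtobool("yes") -> True, strtobool("0") -> False;
# distutils.util.strtobool raises ValueError for unrecognized values.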


class ShowTaskGraphSubCommand(SubCommand):
    """A SubCommand with TaskGraph-specific arguments"""

    def __call__(self, func):
        after = SubCommand.__call__(self, func)
        args = [
            CommandArgument(
                "--root",
                "-r",
                help="root of the taskgraph definition relative to topsrcdir",
            ),
            CommandArgument(
                "--quiet", "-q", action="store_true", help="suppress all logging output"
            ),
            CommandArgument(
                "--verbose",
                "-v",
                action="store_true",
                help="include debug-level logging output",
            ),
            CommandArgument(
                "--json",
                "-J",
                action="store_const",
                dest="format",
                const="json",
                help="Output task graph as a JSON object",
            ),
            CommandArgument(
                "--labels",
                "-L",
                action="store_const",
                dest="format",
                const="labels",
                help="Output the label for each task in the task graph (default)",
            ),
            CommandArgument(
                "--parameters",
                "-p",
                default="project=mozilla-central",
                help="parameters file (.yml or .json; see "
                "`taskcluster/docs/parameters.rst`)",
            ),
            CommandArgument(
                "--no-optimize",
                dest="optimize",
                action="store_false",
                default=True,
                help="do not remove tasks from the graph that are found in the "
                "index (a.k.a. optimize the graph)",
            ),
            CommandArgument(
                "--tasks-regex",
                "--tasks",
                default=None,
                help="only return tasks with labels matching this regular "
                "expression.",
            ),
            CommandArgument(
                "--target-kind",
                default=None,
                help="only return tasks that are of the given kind, "
                "or their dependencies.",
            ),
            CommandArgument(
                "-F",
                "--fast",
                dest="fast",
                default=False,
                action="store_true",
                help="enable fast task generation for local debugging.",
            ),
            CommandArgument(
                "-o",
                "--output-file",
                default=None,
                help="file path to store generated output.",
            ),
        ]
        for arg in args:
            after = arg(after)
        return after


@CommandProvider
class MachCommands(MachCommandBase):
    @Command(
        "taskgraph",
        category="ci",
        description="Manipulate TaskCluster task graphs defined in-tree",
    )
    def taskgraph(self):
        """The taskgraph subcommands all relate to the generation of task graphs
        for Gecko continuous integration. A task graph is a set of tasks linked
        by dependencies: for example, a binary must be built before it is tested,
        and that build may further depend on various toolchains, libraries, etc.
        """

    @ShowTaskGraphSubCommand(
        "taskgraph", "tasks", description="Show all tasks in the taskgraph"
    )
    def taskgraph_tasks(self, **options):
        return self.show_taskgraph("full_task_set", options)

    @ShowTaskGraphSubCommand("taskgraph", "full", description="Show the full taskgraph")
    def taskgraph_full(self, **options):
        return self.show_taskgraph("full_task_graph", options)

    @ShowTaskGraphSubCommand(
        "taskgraph", "target", description="Show the target task set"
    )
    def taskgraph_target(self, **options):
        return self.show_taskgraph("target_task_set", options)

    @ShowTaskGraphSubCommand(
        "taskgraph", "target-graph", description="Show the target taskgraph"
    )
    def taskgraph_target_taskgraph(self, **options):
        return self.show_taskgraph("target_task_graph", options)

    @ShowTaskGraphSubCommand(
        "taskgraph", "optimized", description="Show the optimized taskgraph"
    )
    def taskgraph_optimized(self, **options):
        return self.show_taskgraph("optimized_task_graph", options)

    @ShowTaskGraphSubCommand(
        "taskgraph", "morphed", description="Show the morphed taskgraph"
    )
    def taskgraph_morphed(self, **options):
        return self.show_taskgraph("morphed_task_graph", options)
@SubCommand("taskgraph", "actions", description="Write actions.json to stdout")
|
|
|
|
@CommandArgument(
|
|
|
|
"--root", "-r", help="root of the taskgraph definition relative to topsrcdir"
|
|
|
|
)
|
|
|
|
@CommandArgument(
|
|
|
|
"--quiet", "-q", action="store_true", help="suppress all logging output"
|
|
|
|
)
|
|
|
|
@CommandArgument(
|
|
|
|
"--verbose",
|
|
|
|
"-v",
|
|
|
|
action="store_true",
|
|
|
|
help="include debug-level logging output",
|
|
|
|
)
|
|
|
|
@CommandArgument(
|
|
|
|
"--parameters",
|
|
|
|
"-p",
|
|
|
|
default="project=mozilla-central",
|
|
|
|
help="parameters file (.yml or .json; see "
|
|
|
|
"`taskcluster/docs/parameters.rst`)`",
|
|
|
|
)
|
|
|
|
def taskgraph_actions(self, **options):
|
|
|
|
return self.show_actions(options)
|
|
|
|
|
2016-05-17 01:53:22 +03:00
|
|
|
@SubCommand("taskgraph", "decision", description="Run the decision task")
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--root",
|
|
|
|
"-r",
|
|
|
|
type=text_type,
|
2016-06-21 04:06:55 +03:00
|
|
|
help="root of the taskgraph definition relative to topsrcdir",
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--base-repository",
|
|
|
|
type=text_type,
|
|
|
|
required=True,
|
2016-06-21 04:06:55 +03:00
|
|
|
help='URL for "base" repository to clone',
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--head-repository",
|
|
|
|
type=text_type,
|
|
|
|
required=True,
|
2016-06-21 04:06:55 +03:00
|
|
|
help='URL for "head" repository to fetch revision from',
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--head-ref",
|
|
|
|
type=text_type,
|
|
|
|
required=True,
|
2016-06-21 04:06:55 +03:00
|
|
|
help="Reference (this is same as rev usually for hg)",
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--head-rev",
|
|
|
|
type=text_type,
|
|
|
|
required=True,
|
2016-06-21 04:06:55 +03:00
|
|
|
help="Commit revision to use from head repository",
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--comm-base-repository",
|
|
|
|
type=text_type,
|
|
|
|
required=False,
|
2017-07-27 21:26:48 +03:00
|
|
|
help='URL for "base" comm-* repository to clone',
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--comm-head-repository",
|
|
|
|
type=text_type,
|
|
|
|
required=False,
|
2017-07-27 21:26:48 +03:00
|
|
|
help='URL for "head" comm-* repository to fetch revision from',
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--comm-head-ref",
|
|
|
|
type=text_type,
|
|
|
|
required=False,
|
|
|
|
help="comm-* Reference (this is same as rev usually for hg)",
|
|
|
|
)
|
|
|
|
@CommandArgument(
|
|
|
|
"--comm-head-rev",
|
|
|
|
type=text_type,
|
|
|
|
required=False,
|
|
|
|
help="Commit revision to use from head comm-* repository",
|
|
|
|
)
|
|
|
|
@CommandArgument(
|
|
|
|
"--project",
|
|
|
|
type=text_type,
|
|
|
|
required=True,
|
2017-07-27 21:26:48 +03:00
|
|
|
help="Project to use for creating task graph. Example: --project=try",
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--pushlog-id", type=text_type, dest="pushlog_id", required=True, default="0"
|
|
|
|
)
|
|
|
|
@CommandArgument("--pushdate", dest="pushdate", required=True, type=int, default=0)
|
2018-04-11 21:02:19 +03:00
|
|
|
@CommandArgument(
|
2020-01-21 20:12:08 +03:00
|
|
|
"--owner",
|
|
|
|
type=text_type,
|
|
|
|
required=True,
|
2016-06-21 04:06:55 +03:00
|
|
|
help="email address of who owns this graph",
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--level", type=text_type, required=True, help="SCM level of this repository"
|
|
|
|
)
|
|
|
|
@CommandArgument(
|
|
|
|
"--target-tasks-method",
|
|
|
|
type=text_type,
|
2016-09-02 20:29:07 +03:00
|
|
|
help="method for selecting the target tasks to generate",
|
|
|
|
)
|
2018-12-26 17:43:34 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--optimize-target-tasks",
|
2020-05-29 23:17:27 +03:00
|
|
|
type=lambda flag: strtobool(flag),
|
2018-12-26 17:43:34 +03:00
|
|
|
nargs="?",
|
|
|
|
const="true",
|
|
|
|
help="If specified, this indicates whether the target "
|
|
|
|
"tasks are eligible for optimization. Otherwise, "
|
|
|
|
"the default for the project is used.",
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--try-task-config-file",
|
|
|
|
type=text_type,
|
2017-11-21 21:26:56 +03:00
|
|
|
help="path to try task configuration file",
|
|
|
|
)
|
2020-01-21 20:12:08 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--tasks-for",
|
|
|
|
type=text_type,
|
|
|
|
required=True,
|
|
|
|
help="the tasks_for value used to generate this task",
|
|
|
|
)
|
2019-03-27 22:34:53 +03:00
|
|
|
@CommandArgument(
|
|
|
|
"--include-push-tasks",
|
|
|
|
action="store_true",
|
|
|
|
help="Whether tasks from the on-push graph should be re-used "
|
|
|
|
"in this graph. This allows cron graphs to avoid rebuilding "
|
|
|
|
"jobs that were built on-push.",
|
|
|
|
)
|
|
|
|
@CommandArgument(
|
|
|
|
"--rebuild-kind",
|
|
|
|
dest="rebuild_kinds",
|
|
|
|
action="append",
|
|
|
|
default=argparse.SUPPRESS,
|
|
|
|
help="Kinds that should not be re-used from the on-push graph.",
|
|
|
|
)
|
2016-05-17 01:53:22 +03:00
|
|
|
    def taskgraph_decision(self, **options):
        """Run the decision task: generate a task graph and submit to
        TaskCluster. This is only meant to be called within decision tasks,
        and requires a great many arguments. Commands like `mach taskgraph
        optimized` are better suited to use on the command line, and can take
        the parameters file generated by a decision task."""

        import taskgraph.decision

        try:
            self.setup_logging()
            start = time.monotonic()
            ret = taskgraph.decision.taskgraph_decision(options)
            end = time.monotonic()
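
            # When running in automation, report the decision task's wall-clock
            # duration as a PERFHERDER_DATA suite so it can be tracked over time.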
            if os.environ.get("MOZ_AUTOMATION") == "1":
                perfherder_data = {
                    "framework": {"name": "build_metrics"},
                    "suites": [
                        {
                            "name": "decision",
                            "value": end - start,
                            "lowerIsBetter": True,
                            "shouldAlert": True,
                            "subtests": [],
                        }
                    ],
                }
                print(
                    "PERFHERDER_DATA: {}".format(json.dumps(perfherder_data)),
                    file=sys.stderr,
                )
            return ret
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    @SubCommand(
        "taskgraph",
        "cron",
        description="Provide a pointer to the new `.cron.yml` handler.",
    )
    def taskgraph_cron(self, **options):
        print(
            'Handling of ".cron.yml" files has moved to '
            "https://hg.mozilla.org/ci/ci-admin/file/default/build-decision."
        )
        sys.exit(1)

    @SubCommand(
        "taskgraph",
        "action-callback",
        description="Run action callback used by action tasks",
    )
    @CommandArgument(
        "--root",
        "-r",
        default="taskcluster/ci",
        help="root of the taskgraph definition relative to topsrcdir",
    )
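    # Note: the ACTION_* environment variables read below are normally provided
    # by the action task's definition; this command is not intended to be run by
    # hand (use `mach taskgraph test-action-callback` for local testing).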
    def action_callback(self, **options):
        from taskgraph.actions import trigger_action_callback
        from taskgraph.actions.util import get_parameters

        try:
            self.setup_logging()

            # the target task for this action (or null if it's a group action)
            task_id = json.loads(os.environ.get("ACTION_TASK_ID", "null"))
            # the target task group for this action
            task_group_id = os.environ.get("ACTION_TASK_GROUP_ID", None)
            input = json.loads(os.environ.get("ACTION_INPUT", "null"))
            callback = os.environ.get("ACTION_CALLBACK", None)
            root = options["root"]

            parameters = get_parameters(task_group_id)

            return trigger_action_callback(
                task_group_id=task_group_id,
                task_id=task_id,
                input=input,
                callback=callback,
                parameters=parameters,
                root=root,
                test=False,
            )
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    @SubCommand(
        "taskgraph",
        "test-action-callback",
        description="Run an action callback in a testing mode",
    )
    @CommandArgument(
        "--root",
        "-r",
        default="taskcluster/ci",
        help="root of the taskgraph definition relative to topsrcdir",
    )
    @CommandArgument(
        "--parameters",
        "-p",
        default="project=mozilla-central",
        help="parameters file (.yml or .json; see "
        "`taskcluster/docs/parameters.rst`)",
    )
    @CommandArgument(
        "--task-id", default=None, help="TaskId to which the action applies"
    )
    @CommandArgument(
        "--task-group-id", default=None, help="TaskGroupId to which the action applies"
    )
    @CommandArgument("--input", default=None, help="Action input (.yml or .json)")
    @CommandArgument(
        "callback", default=None, help="Action callback name (Python function name)"
    )
    def test_action_callback(self, **options):
        import taskgraph.parameters
        import taskgraph.actions
        from taskgraph.util import yaml

        def load_data(filename):
            with open(filename) as f:
                if filename.endswith(".yml"):
                    return yaml.load_stream(f)
                elif filename.endswith(".json"):
                    return json.load(f)
                else:
                    raise Exception("unknown filename {}".format(filename))

        try:
            self.setup_logging()
            task_id = options["task_id"]

            if options["input"]:
                input = load_data(options["input"])
            else:
                input = None

            parameters = taskgraph.parameters.load_parameters_file(
                options["parameters"],
                strict=False,
                # FIXME: There should be a way to parameterize this.
                trust_domain="gecko",
            )
            parameters.check()

            root = options["root"]

            return taskgraph.actions.trigger_action_callback(
                task_group_id=options["task_group_id"],
                task_id=task_id,
                input=input,
                callback=options["callback"],
                parameters=parameters,
                root=root,
                test=True,
            )
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    def setup_logging(self, quiet=False, verbose=True):
        """
        Set up Python logging for all loggers, sending results to stderr (so
        that command output can be redirected easily) and adding the typical
        mach timestamp.
        """
        # remove the old terminal handler
        old = self.log_manager.replace_terminal_handler(None)

        # re-add it, with level and fh set appropriately
        if not quiet:
            level = logging.DEBUG if verbose else logging.INFO
            self.log_manager.add_terminal_logging(
                fh=sys.stderr,
                level=level,
                write_interval=old.formatter.write_interval,
                write_times=old.formatter.write_times,
            )

        # all of the taskgraph logging is unstructured logging
        self.log_manager.enable_unstructured()

    def show_taskgraph(self, graph_attr, options):
        import taskgraph.parameters
        import taskgraph.generator
        import taskgraph

        if options["fast"]:
            taskgraph.fast = True

        try:
            self.setup_logging(quiet=options["quiet"], verbose=options["verbose"])
            parameters = taskgraph.parameters.parameters_loader(
                options["parameters"],
                overrides={"target-kind": options.get("target_kind")},
                strict=False,
            )

            tgg = taskgraph.generator.TaskGraphGenerator(
                root_dir=options.get("root"),
                parameters=parameters,
            )
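
            # graph_attr names one of the generator's graph properties (e.g.
            # "full_task_set" or "optimized_task_graph"); accessing it runs the
            # corresponding stages of task-graph generation.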
            tg = getattr(tgg, graph_attr)

            show_method = getattr(
                self, "show_taskgraph_" + (options["format"] or "labels")
            )
            tg = self.get_filtered_taskgraph(tg, options["tasks_regex"])

            fh = options["output_file"]
            if fh:
                fh = open(fh, "w")
            show_method(tg, file=fh)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    def show_taskgraph_labels(self, taskgraph, file=None):
        for index in taskgraph.graph.visit_postorder():
            print(taskgraph.tasks[index].label, file=file)

    def show_taskgraph_json(self, taskgraph, file=None):
        print(
            json.dumps(
                taskgraph.to_json(), sort_keys=True, indent=2, separators=(",", ": ")
            ),
            file=file,
        )

    def get_filtered_taskgraph(self, taskgraph, tasksregex):
        """
        Filter the tasks on the basis of a regular expression
        and return a new TaskGraph object.
        """
        from taskgraph.graph import Graph
        from taskgraph.taskgraph import TaskGraph

        # return original taskgraph if no regular expression is passed
        if not tasksregex:
            return taskgraph
        named_links_dict = taskgraph.graph.named_links_dict()
        filteredtasks = {}
        filterededges = set()
        regexprogram = re.compile(tasksregex)
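
        # Keep only the tasks whose label matches the regex, and only the edges
        # whose source and destination both survive the filter.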
        for key in taskgraph.graph.visit_postorder():
            task = taskgraph.tasks[key]
            if regexprogram.match(task.label):
                filteredtasks[key] = task
                for depname, dep in six.iteritems(named_links_dict[key]):
                    if regexprogram.match(dep):
                        filterededges.add((key, dep, depname))
        filtered_taskgraph = TaskGraph(
            filteredtasks, Graph(set(filteredtasks), filterededges)
        )
        return filtered_taskgraph

    def show_actions(self, options):
        import taskgraph.parameters
        import taskgraph.generator
        import taskgraph
        import taskgraph.actions

        try:
            self.setup_logging(quiet=options["quiet"], verbose=options["verbose"])
            parameters = taskgraph.parameters.parameters_loader(options["parameters"])

            tgg = taskgraph.generator.TaskGraphGenerator(
                root_dir=options.get("root"),
                parameters=parameters,
            )

            actions = taskgraph.actions.render_actions_json(
                tgg.parameters,
                tgg.graph_config,
                decision_task_id="DECISION-TASK",
            )
            print(json.dumps(actions, sort_keys=True, indent=2, separators=(",", ": ")))
        except Exception:
            traceback.print_exc()
            sys.exit(1)


@CommandProvider
class TaskClusterImagesProvider(MachCommandBase):
    @Command(
        "taskcluster-load-image",
        category="ci",
        description="Load a pre-built Docker image. Note that you need to "
        "have docker installed and running for this to work.",
    )
    @CommandArgument(
        "--task-id",
        help="Load the image at public/image.tar.zst in this task, "
        "rather than searching the index",
    )
    @CommandArgument(
        "-t",
        "--tag",
        help="tag that the image should be loaded as. If not given, the "
        "image will be loaded with the tag from the tarball",
        metavar="name:tag",
    )
    @CommandArgument(
        "image_name",
        nargs="?",
        help="Load the image of this name based on the current "
        "contents of the tree (as built for mozilla-central "
        "or mozilla-inbound)",
    )
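    # Example usage (the image name here is a placeholder):
    #   ./mach taskcluster-load-image --tag my-image:latest <image-name>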
    def load_image(self, image_name, task_id, tag):
        from taskgraph.docker import load_image_by_name, load_image_by_task_id

        if not image_name and not task_id:
            print("Specify either IMAGE-NAME or TASK-ID")
            sys.exit(1)
        try:
            if task_id:
                ok = load_image_by_task_id(task_id, tag)
            else:
                ok = load_image_by_name(image_name, tag)
            if not ok:
                sys.exit(1)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    @Command(
        "taskcluster-build-image", category="ci", description="Build a Docker image"
    )
    @CommandArgument("image_name", help="Name of the image to build")
    @CommandArgument(
        "-t", "--tag", help="tag that the image should be built as.", metavar="name:tag"
    )
    @CommandArgument(
        "--context-only",
        help="File name the context tarball should be written to; "
        "with this option it will only build the context.tar.",
        metavar="context.tar",
    )
    def build_image(self, image_name, tag, context_only):
        from taskgraph.docker import build_image, build_context

        try:
            if context_only is None:
                build_image(image_name, tag, os.environ)
            else:
                build_context(image_name, context_only, os.environ)
        except Exception:
            traceback.print_exc()
            sys.exit(1)


@CommandProvider
class TaskClusterPartialsData(MachCommandBase):
    @Command(
        "release-history",
        category="ci",
        description="Query balrog for release history used to enable partials generation",
    )
    @CommandArgument(
        "-b",
        "--branch",
        help="The gecko project branch used in balrog, such as "
        "mozilla-central, release, maple",
    )
    @CommandArgument(
        "--product", default="Firefox", help="The product identifier, such as 'Firefox'"
    )
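    # Example usage: ./mach release-history -b mozilla-central --product Firefox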
    def generate_partials_builds(self, product, branch):
        from taskgraph.util.partials import populate_release_history

        try:
            import yaml

            release_history = {
                "release_history": populate_release_history(product, branch)
            }
            print(
                yaml.safe_dump(
                    release_history, allow_unicode=True, default_flow_style=False
                )
            )
        except Exception:
            traceback.print_exc()
            sys.exit(1)