Bug 1850045: Upgrade vendored taskgraph module to 6.2.1 r=taskgraph-reviewers,mach-reviewers,gabriel,ahal

Differential Revision: https://phabricator.services.mozilla.com/D186820
Ben Hearsum 2023-09-07 15:42:09 +00:00
Parent 8f7c8e399d
Commit 6ff7df4e0e
22 changed files: 389 additions and 205 deletions

third_party/python/poetry.lock (generated, vendored)

@@ -1153,14 +1153,14 @@ test = ["aiofiles", "coverage", "flake8", "httmock", "httptest", "hypothesis", "
[[package]]
name = "taskcluster-taskgraph"
-version = "5.7.0"
+version = "6.2.1"
description = "Build taskcluster taskgraphs"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "taskcluster-taskgraph-5.7.0.tar.gz", hash = "sha256:fb1e2b3d45a8dac713932d7849b919c7bf5d4c553a7ea791a8c3f3b222e845ae"},
{file = "taskcluster_taskgraph-5.7.0-py3-none-any.whl", hash = "sha256:a25bb0b68b9460902025ebd78aa5f00fe6674d07d0b1ccfa541a486d0112a82a"},
{file = "taskcluster-taskgraph-6.2.1.tar.gz", hash = "sha256:30b9f1ace27af870c77a0989e647b6ae9469a58acc6045f4c80e130b4e9ffc8a"},
{file = "taskcluster_taskgraph-6.2.1-py3-none-any.whl", hash = "sha256:04d794917af42e08ff18fda897d370791d193fe1c29194ea49fe727e6b25ddc8"},
]
[package.dependencies]
@@ -1171,7 +1171,6 @@ mozilla-repo-urls = "*"
PyYAML = ">=5.3.1"
redo = ">=2.0"
requests = ">=2.25"
-requests-unixsocket = ">=0.2"
slugid = ">=2.0"
taskcluster-urls = ">=11.0"
voluptuous = ">=0.12.1"
@@ -1404,4 +1403,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
[metadata]
lock-version = "2.0"
python-versions = "^3.7"
-content-hash = "e7924fac6d9f167af4deeed520e4f607e7ea7492cc03068232b89e397905ac59"
+content-hash = "c2d4197f554df102e18431111d839d2c048c6d2a58bbc46cb04f11f0f41df032"

third_party/python/requirements.in (vendored)

@@ -45,7 +45,7 @@ setuptools==51.2.0
six==1.13.0
slugid==2.0.0
taskcluster==44.2.2
-taskcluster-taskgraph==5.7.0
+taskcluster-taskgraph==6.2.1
taskcluster-urls==13.0.1
toml==0.10.2
tomlkit==0.11.8

third_party/python/requirements.txt (vendored)

@@ -354,9 +354,9 @@ six==1.13.0 ; python_version >= "3.7" and python_version < "4.0" \
slugid==2.0.0 ; python_version >= "3.7" and python_version < "4.0" \
    --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
    --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
-taskcluster-taskgraph==5.7.0 ; python_version >= "3.7" and python_version < "4.0" \
-    --hash=sha256:a25bb0b68b9460902025ebd78aa5f00fe6674d07d0b1ccfa541a486d0112a82a \
-    --hash=sha256:fb1e2b3d45a8dac713932d7849b919c7bf5d4c553a7ea791a8c3f3b222e845ae
+taskcluster-taskgraph==6.2.1 ; python_version >= "3.7" and python_version < "4.0" \
+    --hash=sha256:04d794917af42e08ff18fda897d370791d193fe1c29194ea49fe727e6b25ddc8 \
+    --hash=sha256:30b9f1ace27af870c77a0989e647b6ae9469a58acc6045f4c80e130b4e9ffc8a
taskcluster-urls==13.0.1 ; python_version >= "3.7" and python_version < "4.0" \
    --hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
    --hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \

third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.2.1.dist-info/METADATA (vendored)

@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: taskcluster-taskgraph
-Version: 5.7.0
+Version: 6.2.1
Summary: Build taskcluster taskgraphs
Home-page: https://github.com/taskcluster/taskgraph
Classifier: Development Status :: 5 - Production/Stable
@@ -20,7 +20,6 @@ Requires-Dist: mozilla-repo-urls
Requires-Dist: PyYAML (>=5.3.1)
Requires-Dist: redo (>=2.0)
Requires-Dist: requests (>=2.25)
-Requires-Dist: requests-unixsocket (>=0.2)
Requires-Dist: slugid (>=2.0)
Requires-Dist: taskcluster-urls (>=11.0)
Requires-Dist: voluptuous (>=0.12.1)

third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.2.1.dist-info/RECORD (vendored)

@@ -1,13 +1,13 @@
-taskgraph/__init__.py,sha256=SnTKobCPUED34yMA4oMNOcRw5JOSY61QVx1OiGY9eeg,729
+taskgraph/__init__.py,sha256=aInyG7m4elr01O8pYKyXvuWQSfgXhqHKCBEBTspy1xc,729
taskgraph/config.py,sha256=XJYKaA9Egn7aiyZ0v70VCq3Kc-XkK08CK2LDsDfsDR8,4822
taskgraph/create.py,sha256=MeWVr5gKJefjwK_3_xZUcDDu2NVH97gbUuu1dw_I9hA,5184
taskgraph/decision.py,sha256=qARBTlLYJ7NVw3aflrspRn_hFmvKcrXJ058yao_4b7A,12882
-taskgraph/docker.py,sha256=UtUfv3F7YBmrI7tJ1XODG_VvfwG0oWpNlsv59Bst728,7834
+taskgraph/docker.py,sha256=6tdGVrKFNonznRJSP4IDZEhKnjV-wYKsR0nXnoDOvZk,7924
taskgraph/files_changed.py,sha256=W3_gEgUT-mVH9DaaU_8X6gYpftrqBU3kgveGbzPLziU,2793
taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
taskgraph/generator.py,sha256=8d59-CK8LcnaKLa_qJG_R2G1gofiHqCFY7OWRqBkn2o,15667
taskgraph/graph.py,sha256=bHUsv2pPa2SSaWgBY-ItIj7REPd0o4fFYrwoQbwFKTY,4680
-taskgraph/main.py,sha256=-BC0J4PhLL-6nvzHgk2YGHfPH8yfjYVbgeKBKiJ25QQ,26201
+taskgraph/main.py,sha256=wuEHkMvdQ7ls7bvNpk6sWXGyXLrOofoCV8oNtnSBKsw,26480
taskgraph/morph.py,sha256=Q6weAi-xpJM4XoKA2mM6gVXQYLnE1YSws53vTZygMkY,9192
taskgraph/optimize.py,sha256=NVshvkqRKr7SQvRdqz5CELmnIXeiODkDxlK0D9QMi9k,16487
taskgraph/parameters.py,sha256=TYB5P2rIdGn-C8a2fcBoub_HZ4Svk2Rn3TK0MetFvjc,11918
@@ -44,31 +44,33 @@ taskgraph/optimize/__pycache__/strategies.cpython-38.pyc,sha256=fjYsf_6YNa-kIA4v
taskgraph/run-task/fetch-content,sha256=G1aAvZlTg0yWHqxhSxi4RvfxW-KBJ5JwnGtWRqfH_bg,29990
taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
taskgraph/run-task/robustcheckout.py,sha256=vPKvHb3fIIJli9ZVZG88XYoa8Sohy2JrpmH6pDgBDHI,30813
-taskgraph/run-task/run-task,sha256=KSIUkIfZUzjfJtiIPtwXBU4B0vA9hZnZslJ-heKRIuU,45128
+taskgraph/run-task/run-task,sha256=Mpr195iq9eOh6B4MBpPzEDlxeNyJq0Fa2yrtlJunlXE,45434
taskgraph/transforms/__init__.py,sha256=aw1dz2sRWZcbTILl6SVDuqIEw0mDdjSYu3LCVs-RLXE,110
taskgraph/transforms/base.py,sha256=LFw2NwhrSriI3vbcCttArTFb7uHxckQpHeFZmatofvM,5146
taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607
+taskgraph/transforms/chunking.py,sha256=7z9oXiA2dDguYwJPaZYCi-fEzbc--O9avZAFS3vP_kg,2592
taskgraph/transforms/code_review.py,sha256=eE2xrDtdD_n3HT3caQ2HGAkPm6Uutdm4hDCpCoFjEps,707
taskgraph/transforms/docker_image.py,sha256=AUuWMx43FcQfgbXy4_2Sjae0cWrh5XWMMcJ3ItcoKes,7606
taskgraph/transforms/fetch.py,sha256=ORnxpVidOQtI1q1xeHl1c1jlShXD8R_jTGC2CX3lLM4,10479
-taskgraph/transforms/from_deps.py,sha256=aMqzvjC9ckK7T8-u4MoA0QyqSIceXfjJp4whExmUWHE,6647
+taskgraph/transforms/from_deps.py,sha256=1mdjIWYshVI2zBywzB3JEqOyvqgVjFvarcQt9PLDSc4,8950
taskgraph/transforms/notify.py,sha256=0sga-Ls9dhWLAsL0FBjXmVbbduee8LAZp_1pHBQR0iI,6019
taskgraph/transforms/release_notifications.py,sha256=jrb9CCT-z_etDf690T-AeCvdzIoVWBAeM_FGoW7FIzA,3305
taskgraph/transforms/task.py,sha256=0oQYH7Upjus0-gzCrYbE0tUKZQUEv6Uq1adGBqiNM60,52254
+taskgraph/transforms/task_context.py,sha256=FxZwT69ozierogtlCTNvk7zCW52d0HdhCaJN7EDmI1s,4272
taskgraph/transforms/__pycache__/__init__.cpython-38.pyc,sha256=XHsSgZEVDiQqINzElOjBvjhPjyfaNjAwTtV5Aj6ubDQ,232
taskgraph/transforms/__pycache__/base.cpython-38.pyc,sha256=s5IC570o9P2J12benwBfeAN7RSRWKKxPErkEHNZbq_c,4510
-taskgraph/transforms/job/__init__.py,sha256=FPr9rGFYtcVT0zPk7CwzowAsRmSYlPL3RtFgztjIFMI,17324
+taskgraph/transforms/job/__init__.py,sha256=JbNpqdoJRId24QVGe821r6u7Zvm2fTNvME_PMGunaoU,17706
taskgraph/transforms/job/common.py,sha256=ldlbRI8sdEd-eUcre4GtXMerUg0RQZ_XSe9GwAkfI3I,5897
taskgraph/transforms/job/index_search.py,sha256=Ngh9FFu1bx2kHVTChW2vcrbnb3SzMneRHopXk18RfB4,1220
-taskgraph/transforms/job/run_task.py,sha256=5vmSwjWBNniSU2UcbnE_BQGct4bUTULIivYXlFSqB-4,9814
+taskgraph/transforms/job/run_task.py,sha256=s9gq1bPdzBB0j2OguXJpWn1-S5Ctltqo4aLsB4kzpUc,8385
taskgraph/transforms/job/toolchain.py,sha256=GOqIvp1MgtV-6whi2ofgSCFB7GolikZbfLXz0C1h0vc,6015
taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/util/archive.py,sha256=nzYn8cQ3NfLAeV-2SuTNoeQ6hg8m40f6FQcSTyVIKwQ,2855
taskgraph/util/attributes.py,sha256=pPOFmwkDQQ-IqfDpVghZ10YI_qXRY4Bi5JP3xr6XVvc,2964
taskgraph/util/cached_tasks.py,sha256=o-yJ91wlWbzoDB2GvKPpGcDE27_IEMgczp_figEBjV8,3406
taskgraph/util/decision.py,sha256=uTC143FpTKQkGff5jIz3voWRYXBCHgx-XAm7FMW53hE,2433
-taskgraph/util/dependencies.py,sha256=U9nncoFvE4aXWeOr_Q-igeKNkzqGvgSTveBZO3OMyI4,2592
-taskgraph/util/docker.py,sha256=vdTruZT2Z_GVcyAYilaHt8VaRj4b-dtBKVWlq_GwYvE,11699
+taskgraph/util/dependencies.py,sha256=3Qba3zI87JYR5fk5FndGzEVW-5NIzzZrBf9rVYcnLD0,2734
+taskgraph/util/docker.py,sha256=rTbzUt8S6s3N1r8gmwHrqsIY9VZ7TDWBM-jZQ5w0P_U,7762
taskgraph/util/hash.py,sha256=31sQmDwQOavA5hWsmzWDNFoFTaTp5a7qLSQLNTEALD8,1661
taskgraph/util/keyed_by.py,sha256=cgBH4tG8eH5UUrm5q4ODG7A4fzkGAOI7feVoZy3V8Ho,3419
taskgraph/util/memoize.py,sha256=XDlwc-56gzoY8QTwOoiCOYL-igX7JoMcY-9Ih80Euc8,1331
@@ -80,10 +82,10 @@ taskgraph/util/schema.py,sha256=JGd0Imjfv6JKCY_tjJtOYwI6uwKUaNgzAcvcZj5WE6A,8323
taskgraph/util/shell.py,sha256=MB9zHVSvxgOuszgmKr2rWUDahANZkbHHNkjjagZG_3I,1317
taskgraph/util/taskcluster.py,sha256=cGUGvkrefRHngjyZm_iQRYKRlGi4jMIr7ky0fi_YBrg,12445
taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969
-taskgraph/util/templates.py,sha256=Dqxfl244u-PX7dnsk3_vYyzDwpDgJtANK6NmZwN3Qow,1417
+taskgraph/util/templates.py,sha256=HGTaIKCpAwEzBDHq0cDai1HJjPJrdnHsjJz6N4LVpKI,2139
taskgraph/util/time.py,sha256=pNFcTH-iYRfm2-okm1lMATc4B5wO-_FXbOFXEtXD27g,3390
taskgraph/util/treeherder.py,sha256=A3rpPUQB60Gn1Yx-OZgKuWWGJ8x0-6tcdeeslzco9ag,2687
-taskgraph/util/vcs.py,sha256=wyDcz1oIvxyS7HbLFUP-G8Y1io3mV5dgfYagnDMSJ90,18780
+taskgraph/util/vcs.py,sha256=54Haq2XyC5CmPnjrPRQZY5wUeoFsaV9pWTYvBjPcVMA,18917
taskgraph/util/verify.py,sha256=cSd7EeP9hUvp-5WOvKDHrvpFAGb_LuiNPxPp0-YmNEA,8947
taskgraph/util/workertypes.py,sha256=1wgM6vLrlgtyv8854anVIs0Bx11kV8JJJaKcOHJc2j0,2498
taskgraph/util/yaml.py,sha256=hfKI_D8Q7dimq4_VvO3WEh8CJsTrsIMwN6set7HIQbY,990
@@ -97,9 +99,9 @@ taskgraph/util/__pycache__/schema.cpython-38.pyc,sha256=0Hew9ATBeA1amGZ_EjL7yULT
taskgraph/util/__pycache__/verify.cpython-38.pyc,sha256=8U5zC7jsLhwX6741yjcuH5quED3PxKItqDuACPc6yW0,7721
taskgraph/util/__pycache__/workertypes.cpython-38.pyc,sha256=hD8JOa_1TnhyQSyNcAZRpltqzgazoa2ukQB5gDGTNB4,2014
taskgraph/util/__pycache__/yaml.cpython-38.pyc,sha256=qOzXDWZxoUTcfeHjrKZKUKoI1y4vFKrMl93s7tqAAF4,1271
-taskcluster_taskgraph-5.7.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
-taskcluster_taskgraph-5.7.0.dist-info/METADATA,sha256=xH6ezBoxrpvd-kNaM9_4LUX9CqBhpZR08hqxV4G9p4w,1089
-taskcluster_taskgraph-5.7.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
-taskcluster_taskgraph-5.7.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
-taskcluster_taskgraph-5.7.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
-taskcluster_taskgraph-5.7.0.dist-info/RECORD,,
+taskcluster_taskgraph-6.2.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+taskcluster_taskgraph-6.2.1.dist-info/METADATA,sha256=IPinMIVrCoWs7yeylvxKwQgKOfpuJViBdNPjvNLbAGk,1046
+taskcluster_taskgraph-6.2.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+taskcluster_taskgraph-6.2.1.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
+taskcluster_taskgraph-6.2.1.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
+taskcluster_taskgraph-6.2.1.dist-info/RECORD,,

third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.2.1.dist-info/WHEEL (vendored)

@@ -1,5 +1,5 @@
Wheel-Version: 1.0
-Generator: bdist_wheel (0.40.0)
+Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: true
Tag: py3-none-any

third_party/python/taskcluster_taskgraph/taskgraph/__init__.py (vendored)

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

-__version__ = "5.7.0"
+__version__ = "6.2.1"
# Maximum number of dependencies a single task can have
# https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask

third_party/python/taskcluster_taskgraph/taskgraph/docker.py (vendored)

@@ -5,6 +5,7 @@

import json
import os
+import subprocess
import tarfile
from io import BytesIO
from textwrap import dedent
@@ -101,7 +102,9 @@ def build_image(name, tag, args=None):
    buf = BytesIO()
    docker.stream_context_tar(".", image_dir, buf, "", args)
-    docker.post_to_docker(buf.getvalue(), "/build", nocache=1, t=tag)
+    subprocess.run(
+        ["docker", "image", "build", "--no-cache", "-t", tag, "-"], input=buf.getvalue()
+    )

    print(f"Successfully built {name} and tagged with {tag}")
@@ -205,7 +208,9 @@ def load_image(url, imageName=None, imageTag=None):
        reader.close()

-    docker.post_to_docker(download_and_modify_image(), "/images/load", quiet=0)
+    subprocess.run(
+        ["docker", "image", "load"], input=b"".join(download_and_modify_image())
+    )

    # Check that we found a repositories file
    if not info.get("image") or not info.get("tag") or not info.get("layer"):
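Context for the two hunks above: image building and loading now shell out to the docker CLI instead of POSTing tarballs to the daemon's unix socket, which is why post_to_docker is deleted from taskgraph/util/docker.py below and the requests-unixsocket dependency is dropped. A minimal sketch of the new pattern (the helper name is invented, and the vendored code does not pass check=True):

import subprocess

def build_from_context(tar_bytes, tag):
    # "-" tells the docker CLI to read the build context tarball from stdin.
    subprocess.run(
        ["docker", "image", "build", "--no-cache", "-t", tag, "-"],
        input=tar_bytes,
        check=True,  # assumption: fail loudly; the diff omits this flag
    )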

third_party/python/taskcluster_taskgraph/taskgraph/main.py (vendored)

@@ -16,6 +16,7 @@ import traceback
from collections import namedtuple
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
+from textwrap import dedent
from typing import Any, List

import appdirs
@@ -799,16 +800,28 @@ def init_taskgraph(options):
    # Populate some defaults from the current repository.
    context = {"project_name": root.name}
-    repo_url = repo.get_url()
+    try:
+        repo_url = repo.get_url(remote=repo.remote_name)
+    except RuntimeError:
+        repo_url = ""

    if repo.tool == "git" and "github.com" in repo_url:
        context["repo_host"] = "github"
    elif repo.tool == "hg" and "hg.mozilla.org" in repo_url:
        context["repo_host"] = "hgmo"
    else:
-        raise RuntimeError(
-            "Repository not supported! Taskgraph currently only "
-            "supports repositories hosted on Github or hg.mozilla.org."
+        print(
+            dedent(
+                """\
+                Repository not supported!
+                Taskgraph only supports repositories hosted on Github or hg.mozilla.org.
+                Ensure you have a remote that points to one of these locations.
+                """
+            ),
+            file=sys.stderr,
        )
+        return 1
# Generate the project.
cookiecutter(

third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task (vendored)

@@ -42,6 +42,11 @@ from typing import Optional

SECRET_BASEURL_TPL = "http://taskcluster/secrets/v1/secret/{}"

GITHUB_SSH_FINGERPRINT = (
+    b"github.com ssh-ed25519 "
+    b"AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl\n"
+    b"github.com ecdsa-sha2-nistp256 "
+    b"AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB"
+    b"9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=\n"
    b"github.com ssh-rsa "
    b"AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY"
    b"4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDP"

third_party/python/taskcluster_taskgraph/taskgraph/transforms/chunking.py (new file, vendored)

@@ -0,0 +1,82 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import copy
from textwrap import dedent

from voluptuous import ALLOW_EXTRA, Optional, Required

from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import Schema
from taskgraph.util.templates import substitute

CHUNK_SCHEMA = Schema(
    {
        # Optional, so it can be used for a subset of tasks in a kind
        Optional(
            "chunk",
            description=dedent(
                """
                `chunk` can be used to split one task into `total-chunks`
                tasks, substituting `this_chunk` and `total_chunks` into any
                fields in `substitution-fields`.
                """.lstrip()
            ),
        ): {
            Required(
                "total-chunks",
                description=dedent(
                    """
                    The total number of chunks to split the task into.
                    """.lstrip()
                ),
            ): int,
            Optional(
                "substitution-fields",
                description=dedent(
                    """
                    A list of fields that need to have `{this_chunk}` and/or
                    `{total_chunks}` replaced in them.
                    """.lstrip()
                ),
            ): [str],
        }
    },
    extra=ALLOW_EXTRA,
)

transforms = TransformSequence()
transforms.add_validate(CHUNK_SCHEMA)


@transforms.add
def chunk_tasks(config, tasks):
    for task in tasks:
        chunk_config = task.pop("chunk", None)
        if not chunk_config:
            yield task
            continue

        total_chunks = chunk_config["total-chunks"]

        for this_chunk in range(1, total_chunks + 1):
            subtask = copy.deepcopy(task)

            subs = {
                "this_chunk": this_chunk,
                "total_chunks": total_chunks,
            }
            subtask.setdefault("attributes", {})
            subtask["attributes"].update(subs)

            for field in chunk_config["substitution-fields"]:
                container, subfield = subtask, field
                while "." in subfield:
                    f, subfield = subfield.split(".", 1)
                    container = container[f]

                subcontainer = copy.deepcopy(container[subfield])
                subfield = substitute(subfield, **subs)
                container[subfield] = substitute(subcontainer, **subs)

            yield subtask
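A minimal sketch of what chunk_tasks (defined above) yields, using an invented task definition; config is unused by the transform, so None suffices here:

task = {
    "name": "fetch-data",
    "chunk": {"total-chunks": 2, "substitution-fields": ["run.command"]},
    "run": {"command": "fetch --chunk {this_chunk} --of {total_chunks}"},
}
subtasks = list(chunk_tasks(None, [task]))
assert subtasks[0]["run"]["command"] == "fetch --chunk 1 --of 2"
assert subtasks[1]["attributes"] == {"this_chunk": 2, "total_chunks": 2}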

third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py (vendored)

@@ -16,9 +16,10 @@ from textwrap import dedent

from voluptuous import Any, Extra, Optional, Required

from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.job import fetches_schema
from taskgraph.util.attributes import attrmatch
-from taskgraph.util.dependencies import GROUP_BY_MAP
-from taskgraph.util.schema import Schema
+from taskgraph.util.dependencies import GROUP_BY_MAP, get_dependencies
+from taskgraph.util.schema import Schema, validate_schema
FROM_DEPS_SCHEMA = Schema(
{
@@ -36,6 +37,16 @@ FROM_DEPS_SCHEMA = Schema(
                    """.lstrip()
                ),
            ): list,
+            Optional(
+                "set-name",
+                description=dedent(
+                    """
+                    When True, `from_deps` will derive a name for the generated
+                    tasks from the name of the primary dependency. Defaults to
+                    True.
+                    """.lstrip()
+                ),
+            ): bool,
            Optional(
                "with-attributes",
                description=dedent(
@@ -80,6 +91,17 @@ FROM_DEPS_SCHEMA = Schema(
                    """.lstrip()
                ),
            ): bool,
+            Optional(
+                "fetches",
+                description=dedent(
+                    """
+                    If present, a `fetches` entry will be added for each task
+                    dependency. Attributes of the upstream task may be used as
+                    substitution values in the `artifact` or `dest` values of the
+                    `fetches` entry.
+                    """.lstrip()
+                ),
+            ): {str: [fetches_schema]},
        },
        Extra: object,
    },
@@ -139,15 +161,19 @@ def from_deps(config, tasks):
            group_by, arg = group_by.popitem()
            func = GROUP_BY_MAP[group_by]
            if func.schema:
-                func.schema(arg)
+                validate_schema(
+                    func.schema, arg, f"Invalid group-by {group_by} argument"
+                )
            groups = func(config, deps, arg)
        else:
            func = GROUP_BY_MAP[group_by]
            groups = func(config, deps)

        # Split the task, one per group.
+        set_name = from_deps.get("set-name", True)
        copy_attributes = from_deps.get("copy-attributes", False)
        unique_kinds = from_deps.get("unique-kinds", True)
+        fetches = from_deps.get("fetches", [])
for group in groups:
# Verify there is only one task per kind in each group.
group_kinds = {t.kind for t in group}
@@ -157,9 +183,10 @@ def from_deps(config, tasks):
                )
            new_task = deepcopy(task)
-            new_task["dependencies"] = {
-                dep.kind if unique_kinds else dep.label: dep.label for dep in group
-            }
+            new_task.setdefault("dependencies", {})
+            new_task["dependencies"].update(
+                {dep.kind if unique_kinds else dep.label: dep.label for dep in group}
+            )

            # Set name and copy attributes from the primary kind.
            for kind in kinds:
@@ -175,14 +202,41 @@ def from_deps(config, tasks):
            primary_dep = [dep for dep in group if dep.kind == primary_kind][0]

+            if set_name:
                if primary_dep.label.startswith(primary_kind):
                    new_task["name"] = primary_dep.label[len(primary_kind) + 1 :]
                else:
                    new_task["name"] = primary_dep.label

            if copy_attributes:
-                attrs = new_task.get("attributes", {})
+                attrs = new_task.setdefault("attributes", {})
                new_task["attributes"] = primary_dep.attributes.copy()
                new_task["attributes"].update(attrs)

+            if fetches:
+                task_fetches = new_task.setdefault("fetches", {})
+                for dep_task in get_dependencies(config, new_task):
+                    # Nothing to do if this kind has no fetches listed
+                    if dep_task.kind not in fetches:
+                        continue
+                    fetches_from_dep = []
+                    for kind, kind_fetches in fetches.items():
+                        if kind != dep_task.kind:
+                            continue
+                        for fetch in kind_fetches:
+                            entry = fetch.copy()
+                            entry["artifact"] = entry["artifact"].format(
+                                **dep_task.attributes
+                            )
+                            if "dest" in entry:
+                                entry["dest"] = entry["dest"].format(
+                                    **dep_task.attributes
+                                )
+                            fetches_from_dep.append(entry)
+                    task_fetches[dep_task.label] = fetches_from_dep

            yield new_task
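A sketch of the new `fetches` handling above: each entry's `artifact` (and optional `dest`) is treated as a format string filled from the upstream task's attributes. The values below are invented:

fetch = {"artifact": "target-{locale}.zip", "dest": "fetches"}
dep_attributes = {"locale": "de"}  # hypothetical attributes of the upstream task

entry = fetch.copy()
entry["artifact"] = entry["artifact"].format(**dep_attributes)
assert entry == {"artifact": "target-de.zip", "dest": "fetches"}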

third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/__init__.py (vendored)

@@ -27,6 +27,16 @@ from taskgraph.util.workertypes import worker_type_implementation

logger = logging.getLogger(__name__)

+# Fetches may be accepted in other transforms and eventually passed along
+# to a `job` (eg: from_deps). Defining this here allows them to re-use
+# the schema and avoid duplication.
+fetches_schema = {
+    Required("artifact"): str,
+    Optional("dest"): str,
+    Optional("extract"): bool,
+    Optional("verify-hash"): bool,
+}
+
# Schema for a build description
job_description_schema = Schema(
    {
@@ -76,12 +86,7 @@ job_description_schema = Schema(
            Any("toolchain", "fetch"): [str],
            str: [
                str,
-                {
-                    Required("artifact"): str,
-                    Optional("dest"): str,
-                    Optional("extract"): bool,
-                    Optional("verify-hash"): bool,
-                },
+                fetches_schema,
            ],
        },
        # A description of how to run this job.
@@ -241,9 +246,10 @@ def use_fetches(config, jobs):
        worker = job.setdefault("worker", {})
        env = worker.setdefault("env", {})
        prefix = get_artifact_prefix(job)
-        for kind, artifacts in fetches.items():
+        for kind in sorted(fetches):
+            artifacts = fetches[kind]
            if kind in ("fetch", "toolchain"):
-                for fetch_name in artifacts:
+                for fetch_name in sorted(artifacts):
                    label = f"{kind}-{fetch_name}"
                    label = aliases.get(label, label)
                    if label not in artifact_names:
@@ -295,7 +301,13 @@ def use_fetches(config, jobs):
            prefix = get_artifact_prefix(dep_tasks[0])

-            for artifact in artifacts:
+            def cmp_artifacts(a):
+                if isinstance(a, str):
+                    return a
+                else:
+                    return a["artifact"]
+
+            for artifact in sorted(artifacts, key=cmp_artifacts):
                if isinstance(artifact, str):
                    path = artifact
                    dest = None
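The two hunks above sort fetch kinds and artifacts so that regenerating the graph yields byte-identical task definitions; cmp_artifacts normalizes the two artifact shapes. The same key, sketched standalone with sample values:

artifacts = [{"artifact": "b.zip"}, "a.tar"]
ordered = sorted(artifacts, key=lambda a: a if isinstance(a, str) else a["artifact"])
assert ordered == ["a.tar", {"artifact": "b.zip"}]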

third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/run_task.py (vendored)

@@ -8,14 +8,13 @@ Support for running jobs that are invoked via the `run-task` script.
import dataclasses
import os

-from voluptuous import Any, Extra, Optional, Required
+from voluptuous import Any, Optional, Required

from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import support_vcs_checkout
from taskgraph.transforms.task import taskref_or_string
from taskgraph.util import path, taskcluster
from taskgraph.util.schema import Schema
-from taskgraph.util.yaml import load_yaml
EXEC_COMMANDS = {
"bash": ["bash", "-cx"],
@@ -46,16 +45,6 @@ run_task_schema = Schema(
        # it will be included in a single argument to the command specified by
        # `exec-with`.
        Required("command"): Any([taskref_or_string], taskref_or_string),
-        # Context to substitute into the command using format string
-        # substitution (e.g {value}). This is useful if certain aspects of the
-        # command need to be generated in transforms.
-        Optional("command-context"): {
-            # If present, loads a set of context variables from an unnested yaml
-            # file. If a value is present in both the provided file and directly
-            # in command-context, the latter will take priority.
-            Optional("from-file"): str,
-            Extra: object,
-        },
        # What to execute the command with in the event command is a string.
        Optional("exec-with"): Any(*list(EXEC_COMMANDS)),
# Command used to invoke the `run-task` script. Can be used if the script
@ -137,25 +126,6 @@ def script_url(config, script):
return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}"
def substitute_command_context(command_context, command):
from_file = command_context.pop("from-file", None)
full_context = {}
if from_file:
full_context = load_yaml(from_file)
else:
full_context = {}
full_context.update(command_context)
if isinstance(command, list):
for i in range(len(command)):
command[i] = command[i].format(**full_context)
else:
command = command.format(**full_context)
return command
@run_job_using(
"docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
)
@@ -177,13 +147,6 @@ def docker_worker_run_task(config, job, taskdesc):
    run_command = run["command"]

-    if run.get("command-context"):
-        run_command = substitute_command_context(
-            run.get("command-context"), run["command"]
-        )
-    else:
-        run_command = run["command"]

    # dict is for the case of `{'task-reference': str}`.
    if isinstance(run_command, str) or isinstance(run_command, dict):
        exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")]
@@ -250,11 +213,6 @@ def generic_worker_run_task(config, job, taskdesc):
        exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")]
        run_command = exec_cmd + [run_command]

-    if run.get("command-context"):
-        run_command = substitute_command_context(
-            run.get("command-context"), run_command
-        )

    if run["run-as-root"]:
        command.extend(("--user", "root", "--group", "root"))
    command.append("--")

third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py (new file, vendored)

@@ -0,0 +1,121 @@
from textwrap import dedent

from voluptuous import ALLOW_EXTRA, Any, Optional, Required

from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import Schema
from taskgraph.util.templates import deep_get, substitute
from taskgraph.util.yaml import load_yaml

SCHEMA = Schema(
    {
        Required(
            "task-context",
            description=dedent(
                """
                `task-context` can be used to substitute values into any field in a
                task with data that is not known until `taskgraph` runs.

                This data can be provided via `from-parameters` or `from-file`,
                which can pull in values from parameters and a defined yml file
                respectively.

                Data may also be provided directly in the `from-object` section of
                `task-context`. This can be useful in `kinds` that define most of
                their contents in `task-defaults`, but have some values that may
                differ for various concrete `tasks` in the `kind`.

                If the same key is found in multiple places the order of precedence
                is as follows:
                  - Parameters
                  - `from-object` keys
                  - File

                That is to say: parameters will always override anything else.
                """.lstrip(),
            ),
        ): {
            Optional(
                "from-parameters",
                description=dedent(
                    """
                    Retrieve task context values from parameters. A single
                    parameter may be provided or a list of parameters in
                    priority order. The latter can be useful in implementing a
                    "default" value if some other parameter is not provided.
                    """.lstrip()
                ),
            ): {str: Any([str], str)},
            Optional(
                "from-file",
                description=dedent(
                    """
                    Retrieve task context values from a yaml file. The provided
                    file should usually only contain top level keys and values
                    (eg: nested objects will not be interpolated - they will be
                    substituted as text representations of the object).
                    """.lstrip()
                ),
            ): str,
            Optional(
                "from-object",
                description="Key/value pairs to be used as task context",
            ): object,
            Required(
                "substitution-fields",
                description=dedent(
                    """
                    A list of fields in the task to substitute the provided values
                    into.
                    """.lstrip()
                ),
            ): [str],
        },
    },
    extra=ALLOW_EXTRA,
)

transforms = TransformSequence()
transforms.add_validate(SCHEMA)


@transforms.add
def render_task(config, jobs):
    for job in jobs:
        sub_config = job.pop("task-context")
        params_context = {}
        for var, path in sub_config.pop("from-parameters", {}).items():
            if isinstance(path, str):
                params_context[var] = deep_get(config.params, path)
            else:
                for choice in path:
                    value = deep_get(config.params, choice)
                    if value is not None:
                        params_context[var] = value
                        break

        file_context = {}
        from_file = sub_config.pop("from-file", None)
        if from_file:
            file_context = load_yaml(from_file)

        fields = sub_config.pop("substitution-fields")

        subs = {}
        subs.update(file_context)
        # We've popped away the configuration; everything left in `sub_config` is
        # substitution key/value pairs.
        subs.update(sub_config.pop("from-object", {}))
        subs.update(params_context)

        # Now that we have our combined context, we can substitute.
        for field in fields:
            container, subfield = job, field
            while "." in subfield:
                f, subfield = subfield.split(".", 1)
                container = container[f]

            container[subfield] = substitute(container[subfield], **subs)

        yield job
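A sketch of the documented precedence in render_task (defined above): parameters override `from-object`, which overrides file values. The job and parameters below are invented, and SimpleNamespace stands in for taskgraph's real config object:

from types import SimpleNamespace

job = {
    "description": "Build for {channel}",
    "task-context": {
        "from-object": {"channel": "beta"},
        "from-parameters": {"channel": "release_channel"},
        "substitution-fields": ["description"],
    },
}
config = SimpleNamespace(params={"release_channel": "nightly"})
assert next(render_task(config, [job]))["description"] == "Build for nightly"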

third_party/python/taskcluster_taskgraph/taskgraph/util/dependencies.py (vendored)

@@ -14,6 +14,9 @@ GROUP_BY_MAP = {}

def group_by(name, schema=None):
    def wrapper(func):
+        assert (
+            name not in GROUP_BY_MAP
+        ), f"duplicate group_by function name {name} ({func} and {GROUP_BY_MAP[name]})"
        GROUP_BY_MAP[name] = func
        func.schema = schema
        return func
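A sketch of the guard added above: registering a second grouping function under an existing name (the strategy name here is invented) now fails immediately instead of silently overwriting the first:

@group_by("example")
def group_example(config, deps):
    return [[dep] for dep in deps]

try:
    @group_by("example")  # same name registered twice
    def group_example_too(config, deps):
        return [[dep] for dep in deps]
except AssertionError as e:
    print(e)  # duplicate group_by function name example (...)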

third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py (vendored)

@@ -5,13 +5,8 @@

import hashlib
import io
-import json
import os
-import re
-import sys
-import urllib.parse
-import requests_unixsocket

from taskgraph.util.archive import create_tar_gz_from_files
from taskgraph.util.memoize import memoize
@@ -21,106 +16,6 @@ IMAGE_DIR = os.path.join(".", "taskcluster", "docker")
from .yaml import load_yaml
def docker_url(path, **kwargs):
docker_socket = os.environ.get("DOCKER_SOCKET", "/var/run/docker.sock")
return urllib.parse.urlunparse(
(
"http+unix",
urllib.parse.quote(docker_socket, safe=""),
path,
"",
urllib.parse.urlencode(kwargs),
"",
)
)
def post_to_docker(tar, api_path, **kwargs):
"""POSTs a tar file to a given docker API path.
The tar argument can be anything that can be passed to requests.post()
as data (e.g. iterator or file object).
The extra keyword arguments are passed as arguments to the docker API.
"""
req = requests_unixsocket.Session().post(
docker_url(api_path, **kwargs),
data=tar,
stream=True,
headers={"Content-Type": "application/x-tar"},
)
if req.status_code != 200:
message = req.json().get("message")
if not message:
message = f"docker API returned HTTP code {req.status_code}"
raise Exception(message)
status_line = {}
buf = b""
for content in req.iter_content(chunk_size=None):
if not content:
continue
# Sometimes, a chunk of content is not a complete json, so we cumulate
# with leftovers from previous iterations.
buf += content
try:
data = json.loads(buf)
except Exception:
continue
buf = b""
# data is sometimes an empty dict.
if not data:
continue
# Mimic how docker itself presents the output. This code was tested
# with API version 1.18 and 1.26.
if "status" in data:
if "id" in data:
if sys.stderr.isatty():
total_lines = len(status_line)
line = status_line.setdefault(data["id"], total_lines)
n = total_lines - line
if n > 0:
# Move the cursor up n lines.
sys.stderr.write(f"\033[{n}A")
# Clear line and move the cursor to the beginning of it.
sys.stderr.write("\033[2K\r")
sys.stderr.write(
"{}: {} {}\n".format(
data["id"], data["status"], data.get("progress", "")
)
)
if n > 1:
# Move the cursor down n - 1 lines, which, considering
# the carriage return on the last write, gets us back
# where we started.
sys.stderr.write(f"\033[{n - 1}B")
else:
status = status_line.get(data["id"])
# Only print status changes.
if status != data["status"]:
sys.stderr.write("{}: {}\n".format(data["id"], data["status"]))
status_line[data["id"]] = data["status"]
else:
status_line = {}
sys.stderr.write("{}\n".format(data["status"]))
elif "stream" in data:
sys.stderr.write(data["stream"])
elif "aux" in data:
sys.stderr.write(repr(data["aux"]))
elif "error" in data:
sys.stderr.write("{}\n".format(data["error"]))
# Sadly, docker doesn't give more than a plain string for errors,
# so the best we can do to propagate the error code from the command
# that failed is to parse the error message...
errcode = 1
m = re.search(r"returned a non-zero code: (\d+)", data["error"])
if m:
errcode = int(m.group(1))
sys.exit(errcode)
else:
raise NotImplementedError(repr(data))
sys.stderr.flush()
def docker_image(name, by_tag=False):
"""
Resolve in-tree prebuilt docker image to ``<registry>/<repository>@sha256:<digest>``,

third_party/python/taskcluster_taskgraph/taskgraph/util/templates.py (vendored)

@@ -48,3 +48,33 @@ def merge(*objects):
    if len(objects) == 1:
        return copy.deepcopy(objects[0])
    return merge_to(objects[-1], merge(*objects[:-1]))
+
+
+def deep_get(dict_, field):
+    container, subfield = dict_, field
+    while "." in subfield:
+        f, subfield = subfield.split(".", 1)
+        if f not in container:
+            return None
+
+        container = container[f]
+
+    return container.get(subfield)
+
+
+def substitute(item, **subs):
+    if isinstance(item, list):
+        for i in range(len(item)):
+            item[i] = substitute(item[i], **subs)
+    elif isinstance(item, dict):
+        new_dict = {}
+        for k, v in item.items():
+            k = k.format(**subs)
+            new_dict[k] = substitute(v, **subs)
+        item = new_dict
+    elif isinstance(item, str):
+        item = item.format(**subs)
+    else:
+        item = item
+
+    return item
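A brief illustration of the two helpers added above, with invented data:

params = {"release": {"channel": "beta"}}
assert deep_get(params, "release.channel") == "beta"
assert deep_get(params, "release.missing") is None

item = {"command-{os}": ["run --channel {channel}"]}
assert substitute(item, os="linux", channel="beta") == {
    "command-linux": ["run --channel beta"]
}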

third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py (vendored)

@@ -353,13 +353,19 @@ class GitRepository(Repository):
    def remote_name(self):
        try:
            remote_branch_name = self.run(
-                "rev-parse", "--verify", "--abbrev-ref", "--symbolic-full-name", "@{u}"
+                "rev-parse",
+                "--verify",
+                "--abbrev-ref",
+                "--symbolic-full-name",
+                "@{u}",
+                stderr=subprocess.PIPE,
            ).strip()
            return remote_branch_name.split("/")[0]
        except subprocess.CalledProcessError as e:
            # Error code 128 comes with the message:
            # "fatal: no upstream configured for branch $BRANCH"
            if e.returncode != 128:
+                print(e.stderr)
                raise

        return self._get_most_suitable_remote("`git remote add origin $URL`")