Bug 1775521 - Update vendored taskgraph to 1.7.1, r=releng-reviewers,gabriel

Differential Revision: https://phabricator.services.mozilla.com/D150160
Andrew Halberstadt 2022-06-23 21:07:03 +00:00
Parent a42db3a559
Commit 5cb6993eca
56 changed files with 1034 additions and 379 deletions

8
third_party/python/poetry.lock generated vendored
View file

@ -607,7 +607,7 @@ test = ["pytest", "pytest-cov", "pytest-mock", "httmock", "mock", "setuptools-li
[[package]]
name = "taskcluster-taskgraph"
version = "1.4.0"
version = "1.7.1"
description = "Build taskcluster taskgraphs"
category = "main"
optional = false
@ -729,7 +729,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt
[metadata]
lock-version = "1.1"
python-versions = "^3.6"
content-hash = "f473795bdc4bd0ee2360afd9b5346ede7792e1befae9cc7dd5bf6b9b814a7874"
content-hash = "5e8aec06392b2810bc395d02c3c526dcf107ef680ced6bd8911f3fc37833a01e"
[metadata.files]
aiohttp = [
@ -1113,8 +1113,8 @@ taskcluster = [
{file = "taskcluster-44.2.2.tar.gz", hash = "sha256:0266a6a901e1a2ec838984a7f24e7adb6d58f9f2e221a7f613388f8f23f786fc"},
]
taskcluster-taskgraph = [
{file = "taskcluster-taskgraph-1.4.0.tar.gz", hash = "sha256:237ed0399ef55e9eef537e6163b456c04d58a7b72bc1b55e8c61a05331170b1e"},
{file = "taskcluster_taskgraph-1.4.0-py3-none-any.whl", hash = "sha256:2be5ac94745180029dfad260486aafeb14f564f5a95d1d3269e0d17834273f11"},
{file = "taskcluster-taskgraph-1.7.1.tar.gz", hash = "sha256:9cb13e19ddf74331c51a9f1dd68afde179f148e7856269e8e5f6182e0a2e5732"},
{file = "taskcluster_taskgraph-1.7.1-py3-none-any.whl", hash = "sha256:736054efee8c56987417d4ad2960cfc99c91338253580eadece072a43e042718"},
]
taskcluster-urls = [
{file = "taskcluster-urls-13.0.1.tar.gz", hash = "sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367"},

2
third_party/python/requirements.in vendored
View file

@ -37,7 +37,7 @@ setuptools==51.2.0
six==1.13.0
slugid==2.0.0
taskcluster==44.2.2
taskcluster-taskgraph==1.4.0
taskcluster-taskgraph==1.7.1
taskcluster-urls==13.0.1
tqdm==4.62.3
urllib3==1.26

6
third_party/python/requirements.txt vendored
View file

@ -319,9 +319,9 @@ six==1.13.0; (python_version >= "2.6" and python_full_version < "3.0.0") or (pyt
slugid==2.0.0 \
--hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c \
--hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297
taskcluster-taskgraph==1.4.0 \
--hash=sha256:237ed0399ef55e9eef537e6163b456c04d58a7b72bc1b55e8c61a05331170b1e \
--hash=sha256:2be5ac94745180029dfad260486aafeb14f564f5a95d1d3269e0d17834273f11
taskcluster-taskgraph==1.7.1 \
--hash=sha256:9cb13e19ddf74331c51a9f1dd68afde179f148e7856269e8e5f6182e0a2e5732 \
--hash=sha256:736054efee8c56987417d4ad2960cfc99c91338253580eadece072a43e042718
taskcluster-urls==13.0.1 \
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
--hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \

View file

@ -1,102 +0,0 @@
taskgraph/__init__.py,sha256=jwOtU7TkmU317LP_IsgIswpj2T1OPUXXgMRv4sIU7nE,707
taskgraph/config.py,sha256=Bm0RU1LyEj4RuX_lBnpQrr5fGmx7zCbKleN1Zy6fQ5w,4410
taskgraph/create.py,sha256=VeAYxtLK8f5HufgUSjzDRP7WBSwQza--_O7RzRRhzA4,5190
taskgraph/decision.py,sha256=TpKJyWJIT6mCI_wPORKHqSqWVwmZGzpR7VJuf-3kj-M,9724
taskgraph/docker.py,sha256=HlCEtfW2coUAyGce8ToVfn8OdLfQJT7Vq_mYQntO4gQ,7485
taskgraph/files_changed.py,sha256=30ONF-m1hbGQnQ31E-TCsHG1jhdNjLTOolSXu2jzOCs,2159
taskgraph/filter_tasks.py,sha256=ty4SK0ujcZ8824F4ikwdCaGX1TRSq_90TFArDogJHTo,875
taskgraph/generator.py,sha256=y89WzZmfeHNNxKM3k60x7L2IJMCciXad5u6lNhZ0mdY,15079
taskgraph/graph.py,sha256=W7I-lfDiBMqSj5ZKgqVmebwTaNxdmbKLx2MqFT_cD_I,4666
taskgraph/main.py,sha256=Dz-uH6QJ3Xtd_t-9gD5iwS4a0CmKypM9aQSW2eZCFao,23254
taskgraph/morph.py,sha256=xX6bU-4iJtwwMw60_0Qt9mG8N4i6zhp4fB_BUZOjQ4o,9567
taskgraph/optimize.py,sha256=qcYcxSmhN9joPZGFEdW6D27G5F4xorS3wmX6TO-gC1Q,16487
taskgraph/parameters.py,sha256=QSVVMhoOQ0Uck4rE9_McFkDByUvidAG0yJFLLxLGuEE,10744
taskgraph/target_tasks.py,sha256=mspItlKD-HCuQR1x_UD6HT_Qd1v5kEHvgRWIUbweRDg,3166
taskgraph/task.py,sha256=QCrOzMaTsy5QHShKUo89XgjJVMl3cSZGZJPLuHCXItE,3132
taskgraph/taskgraph.py,sha256=DfldF0Fp_hQeDK2lQs4SoWgrp_9tG6y2VfuBEp8C3OM,2397
taskgraph/actions/__init__.py,sha256=lVP1e0YyELg7-_42MWWDbT0cKv_p53BApVE6vWOiPww,416
taskgraph/actions/add_new_jobs.py,sha256=mX_DFDJaQUHetjyMNi5b8zPCCeqfzDrCjDg5DxTaA-I,1831
taskgraph/actions/cancel.py,sha256=vrCVtbkpYTCyW9s9IHCHYI18yuRs1C8g5x8DNRaBnW8,1307
taskgraph/actions/cancel_all.py,sha256=aa8rcM5-Wee8UcDsGAAsfc3AvwBbnM-ac19Lb-G_tXs,1945
taskgraph/actions/registry.py,sha256=fcakjIZ1KlYGl2IJ3hFzGT7nkSnhysBK2gQ5eH7VE6A,13370
taskgraph/actions/retrigger.py,sha256=TAhq1yDqkGz0z4Di40WT79RhFhNJdhQajHHg1lDUKSQ,9385
taskgraph/actions/util.py,sha256=za6ZjGpZOFKHhNouglyUlLBikAVo6y4ckY6kqKPBXpU,10661
taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/loader/transform.py,sha256=6DNQ5xIp4RFPJobCzDhisCzarH92YVoGFZOqLbJAFqY,2086
taskgraph/run-task/fetch-content,sha256=auxWFlOG8lSSIKRJZsia7irmA45AoyPiOOK2tdanq0E,23771
taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
taskgraph/run-task/robustcheckout.py,sha256=P41ZGrec5aa8hVTEqOkKJ9wTygsgaXtjoQnl7hkfW-k,28978
taskgraph/run-task/run-task,sha256=XDT8N9NPIYBZIptcLliHDlRVuVClXcmrCOlSflYd-7s,39869
taskgraph/test/__init__.py,sha256=7LTScvkVcLPqivjL-wCb_Znk2GGajaJi1fJ4SjcLMoA,289
taskgraph/test/automationrelevance.json,sha256=ttGejNZeVhcTWXFdoU0xME0YEYRYvaYcAr8zBz0ewrw,17980
taskgraph/test/conftest.py,sha256=GoaEjxBClxzByVNkhwq1UDBC9uzOr4vy8PoWtqqNyeU,3615
taskgraph/test/mockedopen.py,sha256=Ccr2qGJSLeWIUYd03Poy8eKKRSW2aTpRGI-0AA7xYrw,4055
taskgraph/test/test_create.py,sha256=oY7DeVW8usjVNe-QPPyTrE3hsvwMo9HvMxPSCllOsMQ,3657
taskgraph/test/test_decision.py,sha256=twp07KM4iaw0hPOnX0YkmNOXD1jVlAmWc2ry5sqLPSk,2681
taskgraph/test/test_files_changed.py,sha256=MoHr_M-qtbi9PbKQ9loDlKOOzecUUyE1N0SgxjXfP5Y,2608
taskgraph/test/test_generator.py,sha256=6-ZpG45F3YlTWTCILrktz7LeSs3tEfQWCzekN6OHYHw,4284
taskgraph/test/test_graph.py,sha256=IEly2SS9NZTN3F0AM4VhxxYx0WTj5u7x2UFyojr1Ddg,7064
taskgraph/test/test_main.py,sha256=sytpicpCGX4dVKmBiliAzO2s-oQqTMHhSP1FN_uqSSA,5222
taskgraph/test/test_morph.py,sha256=XJVuhNQkGxhd3yCOs2AmsZgOFH_j_-V50wxpUpm1V-4,2195
taskgraph/test/test_optimize.py,sha256=C5TTrgfZEkOIx3f7umpU0YrhCEu1ULxM-1xIuOn_bnU,12929
taskgraph/test/test_parameters.py,sha256=EwyzbWiGgwbCBK2my-M5T3gEZvxhFf0WH8H9ZwD1dFQ,6856
taskgraph/test/test_scripts_run_task.py,sha256=ucv3oVin-gTwf7DUtFPG_bA3cqvQdRIKaOnv2pLpmNk,4046
taskgraph/test/test_target_tasks.py,sha256=AzvuEw1NI4b_f14rPFkWuK0bJyxknkPIQi5KI6k2K8A,12046
taskgraph/test/test_taskgraph.py,sha256=EA09CzcgZBUoXVqhAWeEDPrnUF8Vw2gbTPwHrZGCWDc,3760
taskgraph/test/test_transforms_base.py,sha256=Vo9slzCB2GePvMoLmkrSdhYVWh2nQYn5bRxMjsx40Mw,873
taskgraph/test/test_transforms_job.py,sha256=eEbbnet6Pvunz7lxiN09lIpTyVoXJDcDEPhkWYl0nAc,4656
taskgraph/test/test_util_attributes.py,sha256=K_Wro-p5oA-S4yrsLqT8HMBlOAN4L0krQQQ82WYgGAQ,3596
taskgraph/test/test_util_docker.py,sha256=TKe9D5d3q8rK0iA5rMhzbmhQ4c4OSnVKrjioeZ1uNuo,8800
taskgraph/test/test_util_memoize.py,sha256=yq-PTegHBdQZkRm6Iv1NdCRcKCVzsb4pmZPvIz0P6j8,2340
taskgraph/test/test_util_parameterization.py,sha256=TEI3WVpL9Z54xVk8RZk7u3NwzddJxdmuAWfZa1IPaFw,7485
taskgraph/test/test_util_path.py,sha256=icJyH1DyMNVuZ5xfPXvrEQwQ0pQYTg4ORlZq3RK6_V8,5906
taskgraph/test/test_util_python_path.py,sha256=VQo4hwsJ0It-jLIqe4nErPmZn9AQ7rliN25h2oO_zMg,1216
taskgraph/test/test_util_readonlydict.py,sha256=KRgjLvSBsZZj4EUhwcqeUsM1T--iGklVE0QJuC6Xv4o,1234
taskgraph/test/test_util_schema.py,sha256=6lLB-ToEyEt6OC6qQFPZ_yKcNAdv5hyApK-skA4B-KA,5683
taskgraph/test/test_util_taskcluster.py,sha256=hZqtF7OC58OO4nW4URxF5ZTyxnNiwh2Jkqbtx1ZZjww,9457
taskgraph/test/test_util_templates.py,sha256=u3ckrzmx1eyk7vXiqRiQlETtVZvSx4FHEXN5xB4GZDQ,1676
taskgraph/test/test_util_time.py,sha256=SG4WmSupTQiL1UhE2UMsMNZEugfPdoaxO4aWSxLGXBM,1803
taskgraph/test/test_util_treeherder.py,sha256=20zzGcMd0BL0ayTFQj6Etj39afdxZPgtZxSGUZ0iL5M,912
taskgraph/test/test_util_vcs.py,sha256=gWQcbEUYW0ApaGrXJDplE2Oze1i1_LxIchXEAcJjn_Y,6838
taskgraph/test/test_util_verify.py,sha256=hTia5J9DkaGejCnHU9FafBSjhZdE7LfmedFeUcmn8Bc,2486
taskgraph/test/test_util_yaml.py,sha256=zymZxaAZBIBn5u-p91QsA---IqCH_CVVk3YqMoshLlQ,1019
taskgraph/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/transforms/base.py,sha256=oIQdBKrHG_FZXAoRyiazxgLa8zbAI4TEVlduqz8H12I,5277
taskgraph/transforms/cached_tasks.py,sha256=fMCbxqA-HPSMG6mILYCfdIpnUg9uSKlQGKkUcWG9w28,2597
taskgraph/transforms/code_review.py,sha256=du7npIGOpVMK9QZtdkHrkNecRiaqd-fHjHavDVkfL58,698
taskgraph/transforms/docker_image.py,sha256=14FPWvQ1cAOpSMX1hDV2vTQw3-E99DKI78GnDBIWCo8,7590
taskgraph/transforms/fetch.py,sha256=z-SAZTQSOcVRiFa2E8z0dAEhkIUhdOJdwQgdUah2LzA,9400
taskgraph/transforms/task.py,sha256=u4P7vk5d_g9395HhspQc5xGhdogVptuUnQOfA38D-QM,47778
taskgraph/transforms/job/__init__.py,sha256=imtb3MHVQbKtcCngSnvgumtBfOwxOPiRsJDwHKUtYn0,16891
taskgraph/transforms/job/common.py,sha256=onHnerPcmmvbSk0oHt8mvJmOo7AnjHQya0ombgMNLG8,7106
taskgraph/transforms/job/index_search.py,sha256=zPldmHSalHJjvULAMF9_QAeOZzIeWpr89kOVeP2IJAE,1220
taskgraph/transforms/job/run_task.py,sha256=p_sE5XfbO-EIRgJ5YseeY_YJAqsDl_MUdcCQyKXi0e4,8258
taskgraph/transforms/job/toolchain.py,sha256=z2Z7sxI4yn_dI8zzcMWcrcmfTHeK6mgfSNSM6MAgrCU,4649
taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/util/archive.py,sha256=Hcm8YHsCtazX7efDSd8vXm8Pw87Z1UP_Bo-gIUEzb_4,2856
taskgraph/util/attributes.py,sha256=zSaRws02rdF1TgvOoFzVNLg2XmwbtVVCTsp4M_qm3RI,2617
taskgraph/util/cached_tasks.py,sha256=lvPtfs9mpm0Wv7Mbajen0vUxuiRAP-ZPRNKVlMgCoXE,3408
taskgraph/util/decision.py,sha256=E2Vm1F--yB2iTIA3ePpMsxOk6Qw5zl9lEgs6BKlqZPI,2432
taskgraph/util/docker.py,sha256=kj9V58ZqE12qtNDeRZjz6mxmgoJzZp_eZTzHQoU5lVA,11676
taskgraph/util/hash.py,sha256=_59JUSZeuSu6fo4XjP36Ubs4vbQ5_4RBv61mcmau-t8,1560
taskgraph/util/keyed_by.py,sha256=cgBH4tG8eH5UUrm5q4ODG7A4fzkGAOI7feVoZy3V8Ho,3419
taskgraph/util/memoize.py,sha256=XDlwc-56gzoY8QTwOoiCOYL-igX7JoMcY-9Ih80Euc8,1331
taskgraph/util/parameterization.py,sha256=rjc_dviIGQIo-C8Hrx1ZcHHOMmxKbE5CLJqkciWW8pQ,3184
taskgraph/util/path.py,sha256=lZKXxtxjbVTSvto0X-M6zKaNVRwpdoR5Y8IaJg-_Xag,4688
taskgraph/util/python_path.py,sha256=93R0mADSe1MeTTOsrDWEjLTW6MVpf2COuf1jXbxuQOk,821
taskgraph/util/readonlydict.py,sha256=XzTG-gqGqWVlSkDxSyOL6Ur7Z0ONhIJ9DVLWV3q4q1w,787
taskgraph/util/schema.py,sha256=rtb3ebLsC0aOdoc2g6DaMZF-Md2G9jfIXAoN65oPDjg,6829
taskgraph/util/taskcluster.py,sha256=zOoMKRxHrASJfdv-ni4evRskjanZUnoSNiAMkXPBDlQ,11395
taskgraph/util/taskgraph.py,sha256=OfaclpaZzVfxViMXaBIAYmSJlI-ZFsBudGVndgH7znw,1982
taskgraph/util/templates.py,sha256=Dqxfl244u-PX7dnsk3_vYyzDwpDgJtANK6NmZwN3Qow,1417
taskgraph/util/time.py,sha256=cMRYsBiz7rgPwgZk77p0P7h9JzeEJENBZCoetBaEHqY,3490
taskgraph/util/treeherder.py,sha256=XrdE-Je0ZvXe6_8f0DvvqNbrHherUk-hUuxirImPEIo,2138
taskgraph/util/vcs.py,sha256=p_cQ9iIqALzVNBVPUIG9nDz4IGMdqom_5o3eidL1vCY,6510
taskgraph/util/verify.py,sha256=ie3JTNRfooGhd3-M8_FSGglJFk1-7jUp1ZJ0T2kreP4,7398
taskgraph/util/workertypes.py,sha256=5g2mgIbEKMzDpZNnmPMoMNyy7Wahi-jmWcV1amDAcPo,2341
taskgraph/util/yaml.py,sha256=hfKI_D8Q7dimq4_VvO3WEh8CJsTrsIMwN6set7HIQbY,990
taskcluster_taskgraph-1.4.0.dist-info/METADATA,sha256=FT-KN56SvLSAGRPk2ZJbwGqE51fW1j5XEMkH--K1ooQ,995
taskcluster_taskgraph-1.4.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
taskcluster_taskgraph-1.4.0.dist-info/entry_points.txt,sha256=VoXNtZpN4LvyXYB1wq47AU9CO-DMYMJ0VktKxjugzbY,51
taskcluster_taskgraph-1.4.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
taskcluster_taskgraph-1.4.0.dist-info/RECORD,,

View file

@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

View file

@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: taskcluster-taskgraph
Version: 1.4.0
Version: 1.7.1
Summary: Build taskcluster taskgraphs
Home-page: https://github.com/taskcluster/taskgraph
License: UNKNOWN
@ -14,6 +14,7 @@ Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Topic :: Software Development
License-File: LICENSE
Requires-Dist: appdirs (>=1.4)
Requires-Dist: attrs (>=19.1.0)
Requires-Dist: json-e (>=2.7)

View file

@ -0,0 +1,71 @@
taskgraph/__init__.py,sha256=jwOtU7TkmU317LP_IsgIswpj2T1OPUXXgMRv4sIU7nE,707
taskgraph/config.py,sha256=MoFLjKPUViWYGALi_acWDVXZs7M8cy0zQpUKsJSlBMs,4411
taskgraph/create.py,sha256=1z2AyLvHMkZfDkmPy6um86HG9xTRhE0Sphnbpd-kuEg,5190
taskgraph/decision.py,sha256=TpKJyWJIT6mCI_wPORKHqSqWVwmZGzpR7VJuf-3kj-M,9724
taskgraph/docker.py,sha256=hsMIvRVXiqC8DIGD34WwQrC1JnjaYHSvVWq_lEeNQEE,7471
taskgraph/files_changed.py,sha256=VElSrr-5dVHUH4N4dPSNbQc1I07XgsalhEiPpeXyWXY,2161
taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
taskgraph/generator.py,sha256=lF4B7zZeWbJD_M442pxEBgomvHXIhS1Dkh6rM4YTB2o,15052
taskgraph/graph.py,sha256=9tE3bSSBRHvRLgJzK4dTieGT3RrzQZdR1YbKizEhzlw,4667
taskgraph/main.py,sha256=ot8nMrW8hyQCicRpPCg-eT2CfiDE4OFqstop6i0bRSE,23238
taskgraph/morph.py,sha256=ASOaCed_YuLTWdj_pB8qH1_3RFJTu1V6VDwcX3JLn8w,9567
taskgraph/optimize.py,sha256=NVshvkqRKr7SQvRdqz5CELmnIXeiODkDxlK0D9QMi9k,16487
taskgraph/parameters.py,sha256=rBajNO7gb0vwkHKVJ9IG_wvrDjLPXcjXHLIWo49gosM,10926
taskgraph/target_tasks.py,sha256=fUdm3j6C6oj2BCrlz2vTqdMMbyTfZCgGcFZYYZADiGU,3166
taskgraph/task.py,sha256=QCrOzMaTsy5QHShKUo89XgjJVMl3cSZGZJPLuHCXItE,3132
taskgraph/taskgraph.py,sha256=tfj0ZMqjuwEQDET0W57EcP-_KBEbqkxJci9Z6DkeOEQ,2397
taskgraph/actions/__init__.py,sha256=lVP1e0YyELg7-_42MWWDbT0cKv_p53BApVE6vWOiPww,416
taskgraph/actions/add_new_jobs.py,sha256=mX_DFDJaQUHetjyMNi5b8zPCCeqfzDrCjDg5DxTaA-I,1831
taskgraph/actions/cancel.py,sha256=UQSt_6y3S6PXNmUo_mNaUOuDvK2bixWjzdjTKXieEEg,1309
taskgraph/actions/cancel_all.py,sha256=-ETWKl8BHkk5HjGZRIJpUsFOySE6co0pL0dBDupolu8,1947
taskgraph/actions/registry.py,sha256=p-YTqnhRPSouOqhSoRL5QgUkpO_ab4XIMSFKreu7E_8,13252
taskgraph/actions/retrigger.py,sha256=_1J7nxIGHBUUyhg7_nn0wwsq5wA-65U9Cprog7z5oo4,9386
taskgraph/actions/util.py,sha256=hSKOx_5T38rr-_U7dYVsYGV3lBtvPf5j9nu6gq4eFws,10652
taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/loader/transform.py,sha256=8lVeHRNRTGru_LHs4jm5q1DEx8pN_1VGUouZdfC8jmg,2085
taskgraph/run-task/fetch-content,sha256=uUoyua3OdIgynY5Q9K6EojBwuaM2zo2OiN9bmNS646Q,24291
taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
taskgraph/run-task/robustcheckout.py,sha256=xc24zaBd6dyuoga1ace0M27jo14K4UXNwhqcbHutJ7U,28977
taskgraph/run-task/run-task,sha256=wUGP0QDP_uSeMrtxdJJAhtOf7FBVMTyNWqPCHP_vq8A,44701
taskgraph/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/transforms/base.py,sha256=7eyfKlexzz3I4HmVUKYRu2fADeAVWeYZaVSnVUw-e8g,5276
taskgraph/transforms/cached_tasks.py,sha256=In7Dj-vfOaeQx_e9JXbYN167TuInY6XEhFBuObztngc,2598
taskgraph/transforms/code_review.py,sha256=du7npIGOpVMK9QZtdkHrkNecRiaqd-fHjHavDVkfL58,698
taskgraph/transforms/docker_image.py,sha256=ZH8xlDo8iupPBcI5XR5zqsNUgmYr7wt-jZVqHg0F7vc,7557
taskgraph/transforms/fetch.py,sha256=AECr8p3mDh69nfFzrnBO_Qpc1utmPlHLVOxTat6FwRg,9583
taskgraph/transforms/task.py,sha256=Wxc5zskc__aEe9WUYpRFS9OvGJvKM1Slkr9LPzN75oE,47837
taskgraph/transforms/job/__init__.py,sha256=yKPRJirTnQi3uMFmjtXSLHt0AoaPKgTnswgh8JtrI_4,16908
taskgraph/transforms/job/common.py,sha256=onHnerPcmmvbSk0oHt8mvJmOo7AnjHQya0ombgMNLG8,7106
taskgraph/transforms/job/index_search.py,sha256=Ngh9FFu1bx2kHVTChW2vcrbnb3SzMneRHopXk18RfB4,1220
taskgraph/transforms/job/run_task.py,sha256=oRR-is7dRKRrSCY3WntmJ-pKK3wx9-BMJpY9qru2FWY,8654
taskgraph/transforms/job/toolchain.py,sha256=WWsj6L_db9rJxzo26TdEf_0jcrK4MCoHHJDzFBkSFpI,5978
taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/util/archive.py,sha256=nzYn8cQ3NfLAeV-2SuTNoeQ6hg8m40f6FQcSTyVIKwQ,2855
taskgraph/util/attributes.py,sha256=zSaRws02rdF1TgvOoFzVNLg2XmwbtVVCTsp4M_qm3RI,2617
taskgraph/util/cached_tasks.py,sha256=o-yJ91wlWbzoDB2GvKPpGcDE27_IEMgczp_figEBjV8,3406
taskgraph/util/decision.py,sha256=uTC143FpTKQkGff5jIz3voWRYXBCHgx-XAm7FMW53hE,2433
taskgraph/util/docker.py,sha256=pf_aJk8EzSlLGJH5YyywSIV3d4gVyZrMSUN8wBR0bDc,11670
taskgraph/util/hash.py,sha256=71R979-mlDnwTXC5GXrOWTS5VpW4DFWWK9S8Urm_Uic,1560
taskgraph/util/keyed_by.py,sha256=cgBH4tG8eH5UUrm5q4ODG7A4fzkGAOI7feVoZy3V8Ho,3419
taskgraph/util/memoize.py,sha256=XDlwc-56gzoY8QTwOoiCOYL-igX7JoMcY-9Ih80Euc8,1331
taskgraph/util/parameterization.py,sha256=dzxh8Bc8MBKoDMwj2V2AQab9UrC-JcM3tg0hDVTWpjc,3184
taskgraph/util/path.py,sha256=GOWPdvC144PVy8rsLda8SPenofwSnBaD0L5aJdDNtao,4688
taskgraph/util/python_path.py,sha256=ed4F5z2mId56LauVczgxm_LGxgQi8XlxlYDgXOPZyII,1576
taskgraph/util/readonlydict.py,sha256=XzTG-gqGqWVlSkDxSyOL6Ur7Z0ONhIJ9DVLWV3q4q1w,787
taskgraph/util/schema.py,sha256=tJX4nee7WoeZIi3obSZUR_P0Ft_v61k9uEHBtantNYk,8231
taskgraph/util/shell.py,sha256=MB9zHVSvxgOuszgmKr2rWUDahANZkbHHNkjjagZG_3I,1317
taskgraph/util/taskcluster.py,sha256=ItMBSmQSKTEkvjBM35XMcte4gJm6-YxVGFgScRQw6lE,11519
taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969
taskgraph/util/templates.py,sha256=Dqxfl244u-PX7dnsk3_vYyzDwpDgJtANK6NmZwN3Qow,1417
taskgraph/util/time.py,sha256=dmR9Y0IGKuE1eHfFZjDuBUroK63XLBxEMM5ploO4li4,3490
taskgraph/util/treeherder.py,sha256=XrdE-Je0ZvXe6_8f0DvvqNbrHherUk-hUuxirImPEIo,2138
taskgraph/util/vcs.py,sha256=uDQtziKfA7UvYADW8NoL_tf_yANb-U01p4wuAF-uXH8,6492
taskgraph/util/verify.py,sha256=AXnb3OEgjHaoNxeYg0Sr5xjgW03uxZyK03dQaOsqOLI,8272
taskgraph/util/workertypes.py,sha256=5g2mgIbEKMzDpZNnmPMoMNyy7Wahi-jmWcV1amDAcPo,2341
taskgraph/util/yaml.py,sha256=hfKI_D8Q7dimq4_VvO3WEh8CJsTrsIMwN6set7HIQbY,990
taskcluster_taskgraph-1.7.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
taskcluster_taskgraph-1.7.1.dist-info/METADATA,sha256=kztTo3ubdXP092HOQ3QKpFyfHBKgh4DlHFubO6evLvA,1017
taskcluster_taskgraph-1.7.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
taskcluster_taskgraph-1.7.1.dist-info/entry_points.txt,sha256=VoXNtZpN4LvyXYB1wq47AU9CO-DMYMJ0VktKxjugzbY,51
taskcluster_taskgraph-1.7.1.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
taskcluster_taskgraph-1.7.1.dist-info/RECORD,,

View file

@ -4,9 +4,11 @@
import logging
import requests
from taskgraph.util.taskcluster import cancel_task
from .registry import register_callback_action
logger = logging.getLogger(__name__)

View file

@ -6,13 +6,15 @@
import concurrent.futures as futures
import logging
import os
import requests
from taskgraph.util.taskcluster import (
list_task_group_incomplete_tasks,
cancel_task,
CONCURRENCY,
cancel_task,
list_task_group_incomplete_tasks,
)
from .registry import register_callback_action
logger = logging.getLogger(__name__)

View file

@ -4,17 +4,15 @@
import json
import os
from types import FunctionType
from collections import namedtuple
from types import FunctionType
from taskgraph import create
from taskgraph.config import load_graph_config
from taskgraph.util import taskcluster, yaml, hash
from taskgraph.parameters import Parameters
from taskgraph.util import hash, taskcluster, yaml
from taskgraph.util.memoize import memoize
from taskgraph.util.python_path import import_sibling_modules
actions = []
callbacks = {}
@ -103,7 +101,7 @@ def register_callback_action(
Otherwise, if ``context = [{'k': 'b', 'p': 'l'}, {'k': 't'}]`` will only
be displayed in the context menu for tasks that has
``task.tags.k == 'b' && task.tags.p = 'l'`` or ``task.tags.k = 't'``.
Esentially, this allows filtering on ``task.tags``.
Essentially, this allows filtering on ``task.tags``.
If this is a function, it is given the decision parameters and must return
a value of the form described above.
@ -344,10 +342,7 @@ def trigger_action_callback(
def _load(graph_config):
# Load all modules from this folder, relying on the side-effects of register_
# functions to populate the action registry.
actions_dir = os.path.dirname(__file__)
for f in os.listdir(actions_dir):
if f.endswith(".py") and f not in ("__init__.py", "registry.py", "util.py"):
__import__("taskgraph.actions." + f[:-3])
import_sibling_modules(exceptions=("util.py",))
return callbacks, actions
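For reference, the helper that replaces the manual loop above is only assumed here from its call sites in this commit (exceptions=("util.py",) in registry.py, exceptions=("common.py",) in the job transforms); a rough, hypothetical sketch of such a sibling-module importer might look like:

# Hypothetical sketch only; the real taskgraph.util.python_path.import_sibling_modules may differ.
import os
import sys

def import_sibling_modules(exceptions=None):
    """Import every .py module next to the calling module, skipping
    __init__.py and any file names listed in `exceptions`."""
    caller = sys._getframe(1).f_globals
    pkg_dir = os.path.dirname(caller["__file__"])
    for fname in os.listdir(pkg_dir):
        if not fname.endswith(".py") or fname == "__init__.py":
            continue
        if exceptions and fname in exceptions:
            continue
        __import__(caller["__package__"] + "." + fname[:-3])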

View file

@ -9,15 +9,16 @@ import textwrap
from slugid import nice as slugid
from taskgraph.util import taskcluster
from .registry import register_callback_action
from .util import (
combine_task_graph_files,
create_task_from_def,
create_tasks,
fetch_graph_and_labels,
relativize_datestamps,
create_task_from_def,
)
from .registry import register_callback_action
from taskgraph.util import taskcluster
logger = logging.getLogger(__name__)

View file

@ -13,19 +13,17 @@ from functools import reduce
from requests.exceptions import HTTPError
from taskgraph import create
from taskgraph.decision import read_artifact, write_artifact, rename_artifact
from taskgraph.taskgraph import TaskGraph
from taskgraph.decision import read_artifact, rename_artifact, write_artifact
from taskgraph.optimize import optimize_task_graph
from taskgraph.taskgraph import TaskGraph
from taskgraph.util.taskcluster import (
get_session,
CONCURRENCY,
get_artifact,
get_session,
list_tasks,
parse_time,
CONCURRENCY,
)
from taskgraph.util.taskgraph import (
find_decision_task,
)
from taskgraph.util.taskgraph import find_decision_task
logger = logging.getLogger(__name__)

View file

@ -3,15 +3,16 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import logging
import os
import sys
import attr
from .util import path
import attr
from voluptuous import All, Any, Extra, Length, Optional, Required
from .util import path
from .util.python_path import find_object
from .util.schema import validate_schema, Schema, optionally_keyed_by
from voluptuous import Required, Extra, Any, Optional, Length, All
from .util.schema import Schema, optionally_keyed_by, validate_schema
from .util.yaml import load_yaml
logger = logging.getLogger(__name__)

View file

@ -5,14 +5,14 @@
import concurrent.futures as futures
import json
import sys
import logging
import sys
from slugid import nice as slugid
from taskgraph.util.parameterization import resolve_timestamps
from taskgraph.util.taskcluster import CONCURRENCY, get_session
from taskgraph.util.time import current_json_time
from taskgraph.util.taskcluster import get_session, CONCURRENCY
logger = logging.getLogger(__name__)

View file

@ -9,10 +9,7 @@ import tarfile
from io import BytesIO
from taskgraph.util import docker
from taskgraph.util.taskcluster import (
get_artifact_url,
get_session,
)
from taskgraph.util.taskcluster import get_artifact_url, get_session
def get_image_digest(image_name):
@ -133,7 +130,7 @@ def load_image(url, imageName=None, imageTag=None):
def download_and_modify_image():
# This function downloads and edits the downloaded tar file on the fly.
# It emits chunked buffers of the editted tar file, as a generator.
# It emits chunked buffers of the edited tar file, as a generator.
print(f"Downloading from {url}")
# get_session() gets us a requests.Session set to retry several times.
req = get_session().get(url, stream=True)

View file

@ -8,10 +8,12 @@ Support for optimizing tasks based on the set of files that have changed.
import logging
import requests
from redo import retry
from .util.path import match as match_path
from .util.memoize import memoize
from .util.path import match as match_path
logger = logging.getLogger(__name__)

View file

@ -5,9 +5,7 @@
import logging
from . import (
target_tasks,
)
from . import target_tasks
logger = logging.getLogger(__name__)

View file

@ -2,26 +2,25 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import copy
import logging
import os
import copy
import attr
from typing import AnyStr
import attr
from . import filter_tasks
from .config import GraphConfig, load_graph_config
from .graph import Graph
from .taskgraph import TaskGraph
from .task import Task
from .morph import morph
from .optimize import optimize_task_graph
from .parameters import Parameters
from .morph import morph
from .task import Task
from .taskgraph import TaskGraph
from .transforms.base import TransformConfig, TransformSequence
from .util.python_path import find_object
from .transforms.base import TransformSequence, TransformConfig
from .util.verify import (
verifications,
)
from .util.verify import verifications
from .util.yaml import load_yaml
from .config import load_graph_config, GraphConfig
logger = logging.getLogger(__name__)
@ -126,7 +125,7 @@ class TaskGraphGenerator:
):
"""
@param root_dir: root directory, with subdirectories for each kind
@param paramaters: parameters for this task-graph generation, or callable
@param parameters: parameters for this task-graph generation, or callable
taking a `GraphConfig` and returning parameters
@type parameters: Union[Parameters, Callable[[GraphConfig], Parameters]]
"""
@ -172,7 +171,7 @@ class TaskGraphGenerator:
@property
def target_task_set(self):
"""
The set of targetted tasks (a graph without edges)
The set of targeted tasks (a graph without edges)
@type: TaskGraph
"""
@ -181,7 +180,7 @@ class TaskGraphGenerator:
@property
def target_task_graph(self):
"""
The set of targetted tasks and all of their dependencies
The set of targeted tasks and all of their dependencies
@type: TaskGraph
"""
@ -190,7 +189,7 @@ class TaskGraphGenerator:
@property
def optimized_task_graph(self):
"""
The set of targetted tasks and all of their dependencies; tasks that
The set of targeted tasks and all of their dependencies; tasks that
have been optimized out are either omitted or replaced with a Task
instance containing only a task_id.
@ -265,8 +264,8 @@ class TaskGraphGenerator:
else:
parameters = self._parameters
logger.info("Using {}".format(parameters))
logger.debug("Dumping parameters:\n{}".format(repr(parameters)))
logger.info(f"Using {parameters}")
logger.debug(f"Dumping parameters:\n{repr(parameters)}")
filters = parameters.get("filters", [])
# Always add legacy target tasks method until we deprecate that API.

View file

@ -3,9 +3,10 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import attr
import collections
import attr
@attr.s(frozen=True)
class Graph:

View file

@ -5,7 +5,6 @@
import logging
from ..util.templates import merge
from ..util.yaml import load_yaml

View file

@ -2,7 +2,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import atexit
import json
import logging
import os
import re
import shutil
@ -10,9 +13,6 @@ import subprocess
import sys
import tempfile
import traceback
import argparse
import logging
import json
from collections import namedtuple
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
@ -163,7 +163,7 @@ def dump_output(out, path=None, params_spec=None):
fh = open(path, "w")
else:
print(
"Dumping result with parameters from {}:".format(params_name),
f"Dumping result with parameters from {params_name}:",
file=sys.stderr,
)
print(out + "\n", file=fh)
@ -486,7 +486,7 @@ def show_taskgraph(options):
)
if len(parameters) > 1:
print("See '{}' for logs".format(logdir), file=sys.stderr)
print(f"See '{logdir}' for logs", file=sys.stderr)
@command("build-image", help="Build a Docker image")
@ -501,7 +501,7 @@ def show_taskgraph(options):
metavar="context.tar",
)
def build_image(args):
from taskgraph.docker import build_image, build_context
from taskgraph.docker import build_context, build_image
if args["context_only"] is None:
build_image(args["image_name"], args["tag"], os.environ)
@ -671,10 +671,10 @@ def action_callback(options):
@argument("--input", default=None, help="Action input (.yml or .json)")
@argument("callback", default=None, help="Action callback name (Python function name)")
def test_action_callback(options):
import taskgraph.parameters
import taskgraph.actions
from taskgraph.util import yaml
import taskgraph.parameters
from taskgraph.config import load_graph_config
from taskgraph.util import yaml
def load_data(filename):
with open(filename) as f:

View file

@ -24,8 +24,8 @@ import re
from slugid import nice as slugid
from .task import Task
from .graph import Graph
from .task import Task
from .taskgraph import TaskGraph
from .util.workertypes import get_worker_type

View file

@ -18,11 +18,11 @@ from collections import defaultdict
from slugid import nice as slugid
from .graph import Graph
from . import files_changed
from .graph import Graph
from .taskgraph import TaskGraph
from .util.taskcluster import find_task_id
from .util.parameterization import resolve_task_references
from .util.taskcluster import find_task_id
logger = logging.getLogger(__name__)

View file

@ -2,26 +2,25 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import gzip
import hashlib
import json
import os
import time
from datetime import datetime
from io import BytesIO
from pprint import pformat
from subprocess import CalledProcessError
from urllib.parse import urlparse
from urllib.request import urlopen
from voluptuous import ALLOW_EXTRA, Optional, Required, Schema
from taskgraph.util import yaml
from taskgraph.util.readonlydict import ReadOnlyDict
from taskgraph.util.schema import validate_schema
from taskgraph.util.taskcluster import find_task_id, get_artifact_url
from taskgraph.util.vcs import get_repository
from voluptuous import (
ALLOW_EXTRA,
Required,
Optional,
Schema,
)
class ParameterMismatch(Exception):
@ -271,8 +270,6 @@ def load_parameters_file(
task-id=fdtgsD5DQUmAQZEaGMvQ4Q
project=mozilla-central
"""
from taskgraph.util.taskcluster import get_artifact_url, find_task_id
from taskgraph.util import yaml
if overrides is None:
overrides = {}
@ -305,6 +302,11 @@ def load_parameters_file(
spec = get_artifact_url(task_id, "public/parameters.yml")
f = urlopen(spec)
# Decompress gzipped parameters.
if f.info().get("Content-Encoding") == "gzip":
buf = BytesIO(f.read())
f = gzip.GzipFile(fileobj=buf)
if spec.endswith(".yml"):
kwargs = yaml.load_stream(f)
elif spec.endswith(".json"):

View file

@ -153,7 +153,7 @@ def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=
time.sleep(sleeptime_real)
def stream_download(url, sha256=None, size=None):
def stream_download(url, sha256=None, size=None, headers=None):
"""Download a URL to a generator, optionally with content verification.
If ``sha256`` or ``size`` are defined, the downloaded URL will be
@ -167,12 +167,19 @@ def stream_download(url, sha256=None, size=None):
on after the generator is exhausted without raising.
"""
log('Downloading %s' % url)
headers = headers or []
h = hashlib.sha256()
length = 0
t0 = time.time()
with urllib.request.urlopen(url) as fh:
req_headers = {}
for header in headers:
key, val = header.split(":")
req_headers[key.strip()] = val.strip()
req = urllib.request.Request(url, None, req_headers)
with urllib.request.urlopen(req) as fh:
if not url.endswith('.gz') and fh.info().get('Content-Encoding') == 'gzip':
fh = gzip.GzipFile(fileobj=fh)
@ -207,7 +214,7 @@ def stream_download(url, sha256=None, size=None):
url, sha256, digest))
def download_to_path(url, path, sha256=None, size=None):
def download_to_path(url, path, sha256=None, size=None, headers=None):
"""Download a URL to a filesystem path, possibly with verification."""
# We download to a temporary file and rename at the end so there's
@ -223,7 +230,7 @@ def download_to_path(url, path, sha256=None, size=None):
log('Downloading %s to %s' % (url, path))
with rename_after_close(path, 'wb') as fh:
for chunk in stream_download(url, sha256=sha256, size=size):
for chunk in stream_download(url, sha256=sha256, size=size, headers=headers):
fh.write(chunk)
return
@ -551,7 +558,8 @@ def command_static_url(args):
dl_dest = dest.parent / basename
try:
download_to_path(args.url, dl_dest, sha256=args.sha256, size=args.size)
download_to_path(args.url, dl_dest, sha256=args.sha256, size=args.size,
headers=args.headers)
if gpg_sig_url:
gpg_verify_path(dl_dest, gpg_key, gpg_signature)
@ -652,6 +660,9 @@ def main():
url.add_argument('--add-prefix', default='',
help='Prefix to add to file names in the downloaded '
'archive')
url.add_argument('-H', '--header', default=[], action='append', dest='headers',
help='Header to send as part of the request, can be passed '
'multiple times')
url.add_argument('url', help='URL to fetch')
url.add_argument('dest', help='Destination path')
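For illustration, a minimal, self-contained sketch of the header-forwarding technique that the patched stream_download() uses above (the URL and header value are placeholders):

# Self-contained sketch; mirrors the request construction in the hunk above.
import urllib.request

headers = ['X-Example-Auth: placeholder-token']  # "Name: value" strings, as accepted by --header
req_headers = {}
for header in headers:
    key, val = header.split(":")
    req_headers[key.strip()] = val.strip()

req = urllib.request.Request('https://example.com/artifact.tar.gz', None, req_headers)
# urllib.request.urlopen(req) would then perform the download with those headers
# attached, the same way download_to_path()/stream_download() now forward them.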

View file

@ -711,4 +711,3 @@ def extsetup(ui):
extensions.find(ext)
except KeyError:
extensions.load(ui, ext, None)

View file

@ -115,6 +115,109 @@ def print_line(prefix, m):
sys.stdout.buffer.flush()
def _call_windows_retry(func, args=(), retry_max=5, retry_delay=0.5):
"""
It's possible to see spurious errors on Windows due to various things
keeping a handle to the directory open (explorer, virus scanners, etc)
So we try a few times if it fails with a known error.
retry_delay is multiplied by the number of failed attempts to increase
the likelihood of success in subsequent attempts.
"""
retry_count = 0
while True:
try:
func(*args)
except OSError as e:
# Error codes are defined in:
# https://docs.python.org/3/library/errno.html#module-errno
if e.errno not in (errno.EACCES, errno.ENOTEMPTY, errno.ENOENT):
raise
if retry_count == retry_max:
raise
retry_count += 1
print(
'%s() failed for "%s". Reason: %s (%s). Retrying...'
% (func.__name__, args, e.strerror, e.errno)
)
time.sleep(retry_count * retry_delay)
else:
# If no exception has been thrown it should be done
break
def remove(path):
"""Removes the specified file, link, or directory tree.
This is a replacement for shutil.rmtree that works better under
windows. It does the following things:
- check path access for the current user before trying to remove
- retry operations on some known errors due to various things keeping
a handle on file paths - like explorer, virus scanners, etc. The
known errors are errno.EACCES and errno.ENOTEMPTY, and it will
retry up to 5 five times with a delay of (failed_attempts * 0.5) seconds
between each attempt.
Note that no error will be raised if the given path does not exists.
:param path: path to be removed
"""
def _update_permissions(path):
"""Sets specified pemissions depending on filetype"""
if os.path.islink(path):
# Path is a symlink which we don't have to modify
# because it should already have all the needed permissions
return
stats = os.stat(path)
if os.path.isfile(path):
mode = stats.st_mode | stat.S_IWUSR
elif os.path.isdir(path):
mode = stats.st_mode | stat.S_IWUSR | stat.S_IXUSR
else:
# Not supported type
return
_call_windows_retry(os.chmod, (path, mode))
if not os.path.lexists(path):
print_line(b'remove', b'WARNING: %s does not exists!\n' % path.encode('utf-8'))
return
"""
On Windows, adds '\\\\?\\' to paths which match ^[A-Za-z]:\\.* to access
files or directories that exceed MAX_PATH(260) limitation or that ends
with a period.
"""
if (
sys.platform in ("win32", "cygwin")
and len(path) >= 3
and path[1] == ":"
and path[2] == "\\"
):
path = u"\\\\?\\%s" % path
if os.path.isfile(path) or os.path.islink(path):
# Verify the file or link is read/write for the current user
_update_permissions(path)
_call_windows_retry(os.remove, (path,))
elif os.path.isdir(path):
# Verify the directory is read/write/execute for the current user
_update_permissions(path)
# We're ensuring that every nested item has writable permission.
for root, dirs, files in os.walk(path):
for entry in dirs + files:
_update_permissions(os.path.join(root, entry))
_call_windows_retry(shutil.rmtree, (path,))
def run_required_command(prefix, args, *, extra_env=None, cwd=None):
res = run_command(prefix, args, extra_env=extra_env, cwd=cwd)
if res:
@ -416,6 +519,40 @@ def configure_volume_posix(volume, user, group, running_as_root):
set_dir_permissions(volume, user.pw_uid, group.gr_gid)
def _clean_git_checkout(destination_path):
# Delete untracked files (i.e. build products)
print_line(b'vcs', b'cleaning git checkout...\n')
args = [
'git',
'clean',
# Two -f`s causes subdirectories with `.git`
# directories to be cleaned as well.
'-nxdff',
]
print_line(b'vcs', b'executing %r\n' % args)
p = subprocess.Popen(args,
# Disable buffering because we want to receive output
# as it is generated so timestamps in logs are
# accurate.
bufsize=0,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=sys.stdin.fileno(),
cwd=destination_path,
env=os.environ)
stdout = io.TextIOWrapper(p.stdout, encoding='latin1')
ret = p.wait()
if ret:
sys.exit(ret)
data = stdout.read()
prefix = 'Would remove '
filenames = [os.path.join(destination_path, line[len(prefix):]) for line in data.splitlines()]
print_line(b'vcs', b'removing %r\n' % filenames)
for filename in filenames:
remove(filename)
print_line(b'vcs', b'successfully cleaned git checkout!\n')
def git_checkout(
destination_path: str,
head_repo: str,
@ -474,15 +611,7 @@ def git_checkout(
run_required_command(b'vcs', args, cwd=destination_path)
args = [
'git',
'clean',
# Two -f`s causes subdirectories with `.git` directories to be removed
# as well.
'-xdff',
]
run_required_command(b'vcs', args, cwd=destination_path)
_clean_git_checkout(destination_path)
args = [
'git',
@ -806,7 +935,10 @@ def install_pip_requirements(repositories):
if not requirements:
return
cmd = [sys.executable, '-mpip', 'install', '--require-hashes']
cmd = [sys.executable, '-mpip', 'install']
if os.environ.get("PIP_DISABLE_REQUIRE_HASHES") != "1":
cmd.append("--require-hashes")
for path in requirements:
cmd.extend(['-r', path])
@ -869,7 +1001,8 @@ def maybe_run_resource_monitoring():
def main(args):
print_line(b'setup', b'run-task started in %s\n' % os.getcwd().encode('utf-8'))
os.environ["TASK_WORKDIR"] = os.getcwd()
print_line(b'setup', b'run-task started in %s\n' % os.environ["TASK_WORKDIR"].encode('utf-8'))
running_as_root = IS_POSIX and os.getuid() == 0
# Arguments up to '--' are ours. After are for the main task
@ -1110,7 +1243,7 @@ def main(args):
fetches_dir = os.environ.get('MOZ_FETCHES_DIR')
if fetches_dir and os.path.isdir(fetches_dir):
print_line(b'fetches', b'removing %s\n' % fetches_dir.encode('utf-8'))
shutil.rmtree(fetches_dir)
remove(fetches_dir)
print_line(b'fetches', b'finished\n')

View file

@ -4,9 +4,9 @@
from taskgraph.util.attributes import (
match_run_on_git_branches,
match_run_on_projects,
match_run_on_tasks_for,
match_run_on_git_branches,
)
_target_task_methods = {}

View file

@ -3,11 +3,11 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import attr
from .graph import Graph
from .task import Task
import attr
@attr.s(frozen=True)
class TaskGraph:

View file

@ -8,11 +8,10 @@ from typing import AnyStr
import attr
from ..config import GraphConfig
from ..parameters import Parameters
from ..util.schema import Schema, validate_schema
from ..util.memoize import memoize
from ..util.schema import Schema, validate_schema
@attr.s(frozen=True)

View file

@ -4,6 +4,7 @@
from collections import deque
import taskgraph
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.cached_tasks import add_optimization

View file

@ -8,19 +8,13 @@ import logging
import os
import re
from voluptuous import Optional, Required
import taskgraph
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.docker import (
generate_context_hash,
create_context_tar,
)
from taskgraph.util.schema import (
Schema,
)
from voluptuous import (
Optional,
Required,
)
from taskgraph.util.docker import create_context_tar, generate_context_hash
from taskgraph.util.schema import Schema
from .task import task_description_schema
logger = logging.getLogger(__name__)

View file

@ -6,31 +6,19 @@
# it as task artifacts.
import attr
import os
import re
from voluptuous import (
Optional,
Required,
Extra,
)
import attr
from voluptuous import Extra, Optional, Required
import taskgraph
from .base import (
TransformSequence,
)
from ..util.cached_tasks import (
add_optimization,
)
from ..util.schema import Schema, validate_schema
from ..util.treeherder import (
join_symbol,
)
from ..util import path
from ..util.cached_tasks import add_optimization
from ..util.schema import Schema, validate_schema
from ..util.treeherder import join_symbol
from .base import TransformSequence
CACHE_TYPE = "content.v1"
@ -214,6 +202,10 @@ def make_task(config, jobs):
# Add the given prefix to each file entry in the archive.
# Requires an artifact-name ending with .tar.zst.
Optional("add-prefix"): str,
# Headers to pass alongside the request.
Optional("headers"): {
str: str,
},
# IMPORTANT: when adding anything that changes the behavior of the task,
# it is important to update the digest data used to compute cache hits.
},
@ -250,7 +242,7 @@ def create_fetch_url_task(config, name, fetch):
sig_url = fetch["gpg-signature"]["sig-url"].format(url=fetch["url"])
key_path = os.path.join(taskgraph.GECKO, fetch["gpg-signature"]["key-path"])
with open(key_path, "r") as fh:
with open(key_path) as fh:
gpg_key = fh.read()
env["FETCH_GPG_KEY"] = gpg_key
@ -263,6 +255,10 @@ def create_fetch_url_task(config, name, fetch):
]
)
if "headers" in fetch:
for k, v in fetch["headers"].items():
command.extend(["-H", f"{k}:{v}"])
command.extend(
[
fetch["url"],

@ -11,28 +11,19 @@ run-using handlers in `taskcluster/taskgraph/transforms/job`.
import copy
import logging
import json
import os
import logging
from voluptuous import Any, Exclusive, Extra, Optional, Required
from taskgraph.transforms.base import TransformSequence
from taskgraph.util import path as mozpath
from taskgraph.util.schema import (
validate_schema,
Schema,
)
from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.util.workertypes import worker_type_implementation
from taskgraph.transforms.cached_tasks import order_tasks
from taskgraph.transforms.task import task_description_schema
from voluptuous import (
Extra,
Any,
Optional,
Required,
Exclusive,
)
from taskgraph.util import path as mozpath
from taskgraph.util.python_path import import_sibling_modules
from taskgraph.util.schema import Schema, validate_schema
from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.util.workertypes import worker_type_implementation
logger = logging.getLogger(__name__)
@ -52,6 +43,7 @@ job_description_schema = Schema(
Optional("job-from"): task_description_schema["job-from"],
Optional("dependencies"): task_description_schema["dependencies"],
Optional("soft-dependencies"): task_description_schema["soft-dependencies"],
Optional("if-dependencies"): task_description_schema["if-dependencies"],
Optional("requires"): task_description_schema["requires"],
Optional("expires-after"): task_description_schema["expires-after"],
Optional("routes"): task_description_schema["routes"],
@ -207,6 +199,7 @@ def get_attribute(dict, key, attributes, attribute_name):
def use_fetches(config, jobs):
artifact_names = {}
aliases = {}
extra_env = {}
if config.kind in ("toolchain", "fetch"):
jobs = list(jobs)
@ -226,6 +219,7 @@ def use_fetches(config, jobs):
task.attributes,
f"{task.kind}-artifact",
)
get_attribute(extra_env, task.label, task.attributes, f"{task.kind}-env")
value = task.attributes.get(f"{task.kind}-alias")
if value:
aliases[f"{task.kind}-{value}"] = task.label
@ -243,6 +237,7 @@ def use_fetches(config, jobs):
name = job.get("name", job.get("label"))
dependencies = job.setdefault("dependencies", {})
worker = job.setdefault("worker", {})
env = worker.setdefault("env", {})
prefix = get_artifact_prefix(job)
for kind, artifacts in fetches.items():
if kind in ("fetch", "toolchain"):
@ -255,6 +250,8 @@ def use_fetches(config, jobs):
kind=config.kind, name=name, fetch=fetch_name
)
)
if label in extra_env:
env.update(extra_env[label])
path = artifact_names[label]
@ -329,7 +326,6 @@ def use_fetches(config, jobs):
if scope not in job.setdefault("scopes", []):
job["scopes"].append(scope)
env = worker.setdefault("env", {})
env["MOZ_FETCHES"] = {"task-reference": json.dumps(job_fetches, sort_keys=True)}
env.setdefault("MOZ_FETCHES_DIR", "fetches")
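The `extra_env` bookkeeping added above lets an upstream task inject environment variables into jobs that fetch its artifacts. A rough sketch with invented labels and variables:

# Hypothetical: a toolchain task exposed {"CC": "clang"} via its
# "toolchain-env" attribute, collected per label by get_attribute().
extra_env = {"toolchain-linux64-clang": {"CC": "clang", "CXX": "clang++"}}
job = {"worker": {"env": {}}}
env = job["worker"]["env"]
label = "toolchain-linux64-clang"
if label in extra_env:
    env.update(extra_env[label])  # the consuming job now sees CC/CXX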
@ -341,7 +337,8 @@ def use_fetches(config, jobs):
def make_task_description(config, jobs):
"""Given a build description, create a task description"""
# import plugin modules first, before iterating over jobs
import_all()
import_sibling_modules(exceptions=("common.py",))
for job in jobs:
# always-optimized tasks never execute, so have no workdir
if job["worker"]["implementation"] in ("docker-worker", "generic-worker"):
@ -434,11 +431,3 @@ def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
),
)
func(config, job, taskdesc)
def import_all():
"""Import all modules that are siblings of this one, triggering the decorator
above in the process."""
for f in os.listdir(os.path.dirname(__file__)):
if f.endswith(".py") and f not in ("commmon.py", "__init__.py"):
__import__("taskgraph.transforms.job." + f[:-3])

@ -9,12 +9,12 @@ phase will replace the task with the task from the other graph.
"""
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import Schema
from taskgraph.transforms.job import run_job_using
from voluptuous import Required
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.job import run_job_using
from taskgraph.util.schema import Schema
transforms = TransformSequence()
run_task_schema = Schema(

@ -9,13 +9,18 @@ Support for running jobs that are invoked via the `run-task` script.
import os
import attr
from voluptuous import Any, Optional, Required
from taskgraph.transforms.task import taskref_or_string
from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import support_vcs_checkout
from taskgraph.transforms.task import taskref_or_string
from taskgraph.util import path, taskcluster
from taskgraph.util.schema import Schema
from taskgraph.transforms.job.common import support_vcs_checkout
from voluptuous import Required, Any, Optional
EXEC_COMMANDS = {
"bash": ["bash", "-cx"],
"powershell": ["powershell.exe", "-ExecutionPolicy", "Bypass"],
}
run_task_schema = Schema(
{
@ -38,12 +43,15 @@ run_task_schema = Schema(
Required("sparse-profile"): Any(str, None),
# The command arguments to pass to the `run-task` script, after the
# checkout arguments. If a list, it will be passed directly; otherwise
# it will be included in a single argument to `bash -cx`.
# it will be included in a single argument to the command specified by
# `exec-with`.
Required("command"): Any([taskref_or_string], taskref_or_string),
# Context to substitute into the command using format string
# substitution (e.g {value}). This is useful if certain aspects of the
# command need to be generated in transforms.
Optional("command-context"): dict,
# What to execute the command with in the event command is a string.
Optional("exec-with"): Any(*list(EXEC_COMMANDS)),
# Base work directory used to set up the task.
Required("workdir"): str,
# Whether to run as root. (defaults to False)
@ -57,14 +65,14 @@ def common_setup(config, job, taskdesc, command):
if run["checkout"]:
repo_configs = config.repo_configs
if len(repo_configs) > 1 and run["checkout"] is True:
raise Exception("Must explicitly sepcify checkouts with multiple repos.")
raise Exception("Must explicitly specify checkouts with multiple repos.")
elif run["checkout"] is not True:
repo_configs = {
repo: attr.evolve(repo_configs[repo], **config)
for (repo, config) in run["checkout"].items()
}
vcs_path = support_vcs_checkout(
support_vcs_checkout(
config,
job,
taskdesc,
@ -147,7 +155,8 @@ def docker_worker_run_task(config, job, taskdesc):
# dict is for the case of `{'task-reference': str}`.
if isinstance(run_command, str) or isinstance(run_command, dict):
run_command = ["bash", "-cx", run_command]
exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")]
run_command = exec_cmd + [run_command]
command.append("--fetch-hgfingerprint")
if run["run-as-root"]:
command.extend(("--user", "root", "--group", "root"))
@ -206,7 +215,8 @@ def generic_worker_run_task(config, job, taskdesc):
if isinstance(run_command, str):
if is_win:
run_command = f'"{run_command}"'
run_command = ["bash", "-cx", run_command]
exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")]
run_command = exec_cmd + [run_command]
command_context = run.get("command-context")
if command_context:
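A hedged sketch of how the new `exec-with` option changes command wrapping; the job fragment and script path are invented:

EXEC_COMMANDS = {
    "bash": ["bash", "-cx"],
    "powershell": ["powershell.exe", "-ExecutionPolicy", "Bypass"],
}
run = {"exec-with": "powershell", "command": "taskcluster/scripts/build.ps1"}  # hypothetical
run_command = run["command"]
if isinstance(run_command, str):
    # Previously this was hard-coded to ["bash", "-cx", run_command].
    exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")]
    run_command = exec_cmd + [run_command]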

@ -5,21 +5,18 @@
Support for running toolchain-building jobs via dedicated scripts
"""
from voluptuous import Any, Optional, Required
from taskgraph.util.schema import Schema
from voluptuous import Optional, Required, Any
from taskgraph.transforms.job import (
configure_taskdesc_for_run,
run_job_using,
)
import taskgraph
from taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
from taskgraph.transforms.job.common import (
docker_worker_add_artifacts,
generic_worker_add_artifacts,
get_vcsdir_name,
)
from taskgraph.util.hash import hash_paths
import taskgraph
from taskgraph.util.schema import Schema
from taskgraph.util.shell import quote as shell_quote
CACHE_TYPE = "toolchains.v3"
@ -45,7 +42,11 @@ toolchain_run_schema = Schema(
"toolchain-alias",
description="An alias that can be used instead of the real toolchain job name in "
"fetch stanzas for jobs.",
): str,
): Any(str, [str]),
Optional(
"toolchain-env",
description="Additional env variables to add to the worker when using this toolchain",
): {str: object},
# Base work directory used to set up the task.
Required("workdir"): str,
}
@ -60,6 +61,8 @@ def get_digest_data(config, run, taskdesc):
# Accumulate dependency hashes for index generation.
data = [hash_paths(config.graph_config.vcs_root, files)]
data.append(taskdesc["attributes"]["toolchain-artifact"])
# If the task uses an in-tree docker image, we want it to influence
# the index path as well. Ideally, the content of the docker image itself
# should have an influence, but at the moment, we can't get that
@ -78,18 +81,7 @@ def get_digest_data(config, run, taskdesc):
return data
toolchain_defaults = {
"sparse-profile": "toolchain-build",
}
@run_job_using(
"docker-worker",
"toolchain-script",
schema=toolchain_run_schema,
defaults=toolchain_defaults,
)
def docker_worker_toolchain(config, job, taskdesc):
def common_toolchain(config, job, taskdesc, is_docker):
run = job["run"]
worker = taskdesc["worker"] = job["worker"]
@ -97,14 +89,18 @@ def docker_worker_toolchain(config, job, taskdesc):
srcdir = get_vcsdir_name(worker["os"])
# If the task doesn't have a docker-image, set a default
worker.setdefault("docker-image", {"in-tree": "toolchain-build"})
if is_docker:
# If the task doesn't have a docker-image, set a default
worker.setdefault("docker-image", {"in-tree": "toolchain-build"})
# Allow the job to specify where artifacts come from, but add
# public/build if it's not there already.
artifacts = worker.setdefault("artifacts", [])
if not any(artifact.get("name") == "public/build" for artifact in artifacts):
docker_worker_add_artifacts(config, job, taskdesc)
if is_docker:
docker_worker_add_artifacts(config, job, taskdesc)
else:
generic_worker_add_artifacts(config, job, taskdesc)
env = worker["env"]
env.update(
@ -118,6 +114,8 @@ def docker_worker_toolchain(config, job, taskdesc):
attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
if "toolchain-alias" in run:
attributes["toolchain-alias"] = run.pop("toolchain-alias")
if "toolchain-env" in run:
attributes["toolchain-env"] = run.pop("toolchain-env")
if not taskgraph.fast:
name = taskdesc["label"].replace(f"{config.kind}-", "", 1)
@ -127,10 +125,50 @@ def docker_worker_toolchain(config, job, taskdesc):
"digest-data": get_digest_data(config, run, taskdesc),
}
script = run.pop("script")
run["using"] = "run-task"
run["cwd"] = "{checkout}/.."
run["command"] = [
"{}/taskcluster/scripts/toolchain/{}".format(srcdir, run.pop("script"))
] + run.pop("arguments", [])
if script.endswith(".ps1"):
run["exec-with"] = "powershell"
command = [f"{srcdir}/taskcluster/scripts/toolchain/{script}"] + run.pop(
"arguments", []
)
if not is_docker:
# Don't quote the first item in the command because it purposely contains
# an environment variable that is not meant to be quoted.
if len(command) > 1:
command = command[0] + " " + shell_quote(*command[1:])
else:
command = command[0]
run["command"] = command
configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
toolchain_defaults = {
"sparse-profile": "toolchain-build",
}
@run_job_using(
"docker-worker",
"toolchain-script",
schema=toolchain_run_schema,
defaults=toolchain_defaults,
)
def docker_worker_toolchain(config, job, taskdesc):
common_toolchain(config, job, taskdesc, is_docker=True)
@run_job_using(
"generic-worker",
"toolchain-script",
schema=toolchain_run_schema,
defaults=toolchain_defaults,
)
def generic_worker_toolchain(config, job, taskdesc):
common_toolchain(config, job, taskdesc, is_docker=False)
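Taken together, a toolchain job can now run on generic-worker, expose several aliases, and hand environment variables to its consumers. A hypothetical run stanza exercising the new schema (all values are illustrative):

job_run = {
    "using": "toolchain-script",
    "script": "build-clang.ps1",  # a .ps1 script selects exec-with: powershell
    "toolchain-artifact": "public/build/clang.tar.zst",
    "toolchain-alias": ["clang", "clang-latest"],  # now a str or a list of str
    "toolchain-env": {"CC": "clang", "CXX": "clang++"},
    "workdir": "/builds/worker",
}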

@ -16,23 +16,24 @@ import time
from copy import deepcopy
import attr
from voluptuous import All, Any, Extra, NotIn, Optional, Required
from taskgraph import MAX_DEPENDENCIES
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.hash import hash_path
from taskgraph.util.keyed_by import evaluate_keyed_by
from taskgraph.util.memoize import memoize
from taskgraph.util.treeherder import split_symbol
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import (
validate_schema,
OptimizationSchema,
Schema,
optionally_keyed_by,
resolve_keyed_by,
OptimizationSchema,
taskref_or_string,
validate_schema,
)
from taskgraph.util.treeherder import split_symbol
from taskgraph.util.workertypes import worker_type_implementation
from voluptuous import Any, Required, Optional, Extra, All, NotIn
from taskgraph import MAX_DEPENDENCIES
from ..util import docker as dockerutil
from ..util.workertypes import get_worker_type
@ -66,7 +67,7 @@ task_description_schema = Schema(
str,
NotIn(
["self", "decision"],
"Can't use 'self` or 'decision' as depdency names.",
"Can't use 'self` or 'decision' as dependency names.",
),
): object,
},
@ -118,7 +119,7 @@ task_description_schema = Schema(
# Type of gecko v2 index to use
"type": str,
# The rank that the task will receive in the TaskCluster
# index. A newly completed task supercedes the currently
# index. A newly completed task supersedes the currently
# indexed task iff it has a higher rank. If unspecified,
# 'by-tier' behavior will be used.
"rank": Any(
@ -317,7 +318,7 @@ def verify_index(config, index):
# the exit status code(s) that indicates the caches used by the task
# should be purged
Optional("purge-caches-exit-status"): [int],
# Wether any artifacts are assigned to this worker
# Whether any artifacts are assigned to this worker
Optional("skip-artifacts"): bool,
},
)
@ -482,7 +483,9 @@ def build_docker_worker_payload(config, task, task_def):
suffix = f"{cache_version}-{_run_task_suffix()}"
if out_of_tree_image:
name_hash = hashlib.sha256(out_of_tree_image).hexdigest()
name_hash = hashlib.sha256(
out_of_tree_image.encode("utf-8")
).hexdigest()
suffix += name_hash[0:12]
else:
@ -597,7 +600,7 @@ def build_docker_worker_payload(config, task, task_def):
# optional features
Required("chain-of-trust"): bool,
Optional("taskcluster-proxy"): bool,
# Wether any artifacts are assigned to this worker
# Whether any artifacts are assigned to this worker
Optional("skip-artifacts"): bool,
},
)

@ -3,12 +3,11 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import gzip
import os
import stat
import tarfile
# 2016-01-01T00:00:00+0000
DEFAULT_MTIME = 1451606400

@ -6,7 +6,6 @@
import hashlib
import time
TARGET_CACHE_INDEX = "{cache_prefix}.cache.level-{level}.{type}.{name}.hash.{digest}"
EXTRA_CACHE_INDEXES = [
"{cache_prefix}.cache.level-{level}.{type}.{name}.latest",
@ -27,7 +26,7 @@ def add_optimization(
:param dict taskdesc: The description of the current task.
:param str cache_type: The type of task result being cached.
:param str cache_name: The name of the object being cached.
:param digest: A unique string indentifying this version of the artifacts
:param digest: A unique string identifying this version of the artifacts
being generated. Typically this will be the hash of inputs to the task.
:type digest: bytes or None
:param digest_data: A list of bytes representing the inputs of this task.

@ -7,14 +7,15 @@ Utilities for generating a decision task from :file:`.taskcluster.yml`.
"""
import jsone
import yaml
import os
import slugid
from .vcs import find_hg_revision_push_info
import jsone
import slugid
import yaml
from .templates import merge
from .time import current_json_time
from .vcs import find_hg_revision_push_info
def make_decision_task(params, root, context, head_rev=None):

@ -8,14 +8,14 @@ import io
import json
import os
import re
import requests_unixsocket
import sys
import urllib.parse
import requests_unixsocket
from .archive import create_tar_gz_from_files
from .memoize import memoize
IMAGE_DIR = os.path.join(".", "taskcluster", "docker")
from .yaml import load_yaml
@ -70,7 +70,7 @@ def post_to_docker(tar, api_path, **kwargs):
# data is sometimes an empty dict.
if not data:
continue
# Mimick how docker itself presents the output. This code was tested
# Mimic how docker itself presents the output. This code was tested
# with API version 1.18 and 1.26.
if "status" in data:
if "id" in data:
@ -247,13 +247,13 @@ def stream_context_tar(topsrcdir, context_dir, out_file, image_name=None, args=N
# Parse Dockerfile for special syntax of extra files to include.
content = []
with open(os.path.join(context_dir, "Dockerfile"), "r") as fh:
with open(os.path.join(context_dir, "Dockerfile")) as fh:
for line in fh:
if line.startswith("# %ARG"):
p = line[len("# %ARG ") :].strip()
if not args or p not in args:
raise Exception(f"missing argument: {p}")
replace.append((re.compile(fr"\${p}\b"), args[p]))
replace.append((re.compile(rf"\${p}\b"), args[p]))
continue
for regexp, s in replace:

@ -5,8 +5,8 @@
import hashlib
from pathlib import Path
from taskgraph.util.memoize import memoize
from taskgraph.util import path as mozpath
from taskgraph.util.memoize import memoize
@memoize

@ -5,8 +5,8 @@
import re
from taskgraph.util.time import json_time_from_now
from taskgraph.util.taskcluster import get_artifact_url
from taskgraph.util.time import json_time_from_now
TASK_REFERENCE_PATTERN = re.compile("<([^>]+)>")
ARTIFACT_REFERENCE_PATTERN = re.compile("<([^/]+)/([^>]+)>")

@ -12,8 +12,8 @@ Also contains a few additional utilities not found in :py:mod:`os.path`.
# https://searchfox.org/mozilla-central/rev/c3ebaf6de2d481c262c04bb9657eaf76bf47e2ac/python/mozbuild/mozpack/path.py
import posixpath
import os
import posixpath
import re

@ -2,6 +2,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import inspect
import os
def find_object(path):
"""
@ -22,3 +25,28 @@ def find_object(path):
for a in objectpath.split("."):
obj = getattr(obj, a)
return obj
def import_sibling_modules(exceptions=None):
"""
Import all Python modules that are siblings of the calling module.
Args:
exceptions (list): A list of file names to exclude (caller and
__init__.py are implicitly excluded).
"""
frame = inspect.stack()[1]
mod = inspect.getmodule(frame[0])
name = os.path.basename(mod.__file__)
excs = {"__init__.py", name}
if exceptions:
excs.update(exceptions)
modpath = mod.__name__
if not name.startswith("__init__.py"):
modpath = modpath.rsplit(".", 1)[0]
for f in os.listdir(os.path.dirname(mod.__file__)):
if f.endswith(".py") and f not in excs:
__import__(modpath + "." + f[:-3])
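A minimal usage sketch, mirroring how the job transforms above now register themselves; the package location is hypothetical:

# e.g. in a transforms package __init__.py (hypothetical location)
from taskgraph.util.python_path import import_sibling_modules

# Import every sibling .py module except common.py so that their
# @run_job_using-style registrations execute as a side effect.
import_sibling_modules(exceptions=("common.py",))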

@ -3,11 +3,11 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
import pprint
import collections
import voluptuous
import pprint
import re
import voluptuous
import taskgraph
@ -46,18 +46,26 @@ def optionally_keyed_by(*arguments):
schema = arguments[-1]
fields = arguments[:-1]
# build the nestable schema by generating schema = Any(schema,
# by-fld1, by-fld2, by-fld3) once for each field. So we don't allow
# infinite nesting, but one level of nesting for each field.
for _ in arguments:
options = [schema]
for field in fields:
options.append({"by-" + field: {str: schema}})
schema = voluptuous.Any(*options)
return schema
def validator(obj):
if isinstance(obj, dict) and len(obj) == 1:
k, v = list(obj.items())[0]
if k.startswith("by-") and k[len("by-") :] in fields:
res = {}
for kk, vv in v.items():
try:
res[kk] = validator(vv)
except voluptuous.Invalid as e:
e.prepend([k, kk])
raise
return res
return Schema(schema)(obj)
return validator
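The rewritten `optionally_keyed_by` returns a recursive validator instead of a pre-expanded `Any(...)`. A small usage sketch with invented field names:

from taskgraph.util.schema import Schema, optionally_keyed_by

my_schema = Schema(
    {"run-on-projects": optionally_keyed_by("platform", "project", [str])}
)

# Nested by-* keys are validated level by level; on failure the offending
# keys (e.g. ['by-platform', 'linux64']) are prepended to the error.
my_schema(
    {
        "run-on-projects": {
            "by-platform": {
                "linux64": {"by-project": {"try": [], "default": ["all"]}},
                "default": ["all"],
            }
        }
    }
)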
def resolve_keyed_by(item, field, item_name, **extra_values):
def resolve_keyed_by(
item, field, item_name, defer=None, enforce_single_match=True, **extra_values
):
"""
For values which can either accept a literal value, or be keyed by some
other attribute of the item, perform that lookup and replacement in-place
@ -96,6 +104,26 @@ def resolve_keyed_by(item, field, item_name, **extra_values):
cedar: ..
linux: 13
default: 12
Args:
item (dict): Object being evaluated.
field (str): Name of the key to perform evaluation on.
item_name (str): Used to generate useful error messages.
defer (list):
Allows evaluating a by-* entry at a later time. In the example
above it's possible that the project attribute hasn't been set yet,
in which case we'd want to stop before resolving that subkey and
then call this function again later. This can be accomplished by
setting `defer=["project"]` in this example.
enforce_single_match (bool):
If True (default), each task may only match a single arm of the
evaluation.
extra_values (kwargs):
If supplied, represent additional values available
for reference from by-<field>.
Returns:
dict: item which has also been modified in-place.
"""
# find the field, returning the item unchanged if anything goes wrong
container, subfield = item, field
@ -113,6 +141,8 @@ def resolve_keyed_by(item, field, item_name, **extra_values):
container[subfield] = evaluate_keyed_by(
value=container[subfield],
item_name=f"`{field}` in `{item_name}`",
defer=defer,
enforce_single_match=enforce_single_match,
attributes=dict(item, **extra_values),
)
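A brief usage sketch of the new `defer` and `enforce_single_match` parameters; the task fragment is invented:

from taskgraph.util.schema import resolve_keyed_by

task = {
    "name": "build-linux64",
    "tier": {"by-project": {"try": 2, "default": 1}},
}

# Resolve immediately against a known project...
resolve_keyed_by(task, "tier", task["name"], project="try")  # task["tier"] becomes 2

# ...or postpone a by-project arm until the attribute is known:
# resolve_keyed_by(task, "tier", task["name"], defer=["project"])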
@ -132,7 +162,7 @@ WHITELISTED_SCHEMA_IDENTIFIERS = [
def check_schema(schema):
identifier_re = re.compile("^[a-z][a-z0-9-]*$")
identifier_re = re.compile(r"^\$?[a-z][a-z0-9-]*$")
def whitelisted(path):
return any(f(path) for f in WHITELISTED_SCHEMA_IDENTIFIERS)
@ -182,17 +212,27 @@ class Schema(voluptuous.Schema):
in the process.
"""
def __init__(self, *args, **kwargs):
def __init__(self, *args, check=True, **kwargs):
super().__init__(*args, **kwargs)
check_schema(self)
self.check = check
if not taskgraph.fast and self.check:
check_schema(self)
def extend(self, *args, **kwargs):
schema = super().extend(*args, **kwargs)
check_schema(schema)
if self.check:
check_schema(schema)
# We want twice extend schema to be checked too.
schema.__class__ = Schema
return schema
def _compile(self, schema):
if taskgraph.fast:
return
return super()._compile(schema)
def __getitem__(self, item):
return self.schema[item]
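A short, hedged illustration of the new `check` flag and the `taskgraph.fast` shortcut; the key name is deliberately non-conforming and purely illustrative:

import taskgraph
from taskgraph.util.schema import Schema

# Style checking normally rejects keys that are not dashed-lowercase;
# check=False opts a schema out of that verification.
external_schema = Schema({"someExternalKey": str}, check=False)

# Setting taskgraph.fast skips schema compilation entirely, trading
# validation for faster graph generation.
taskgraph.fast = True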

@ -0,0 +1,40 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import re
SHELL_QUOTE_RE = re.compile(r"[\\\t\r\n \'\"#<>&|`(){}$;\*\?]")
def _quote(s):
"""Given a string, returns a version that can be used literally on a shell
command line, enclosing it with single quotes if necessary.
As a special case, if given an int, returns a string containing the int,
not enclosed in quotes.
"""
if type(s) == int:
return "%d" % s
# Empty strings need to be quoted to have any significance
if s and not SHELL_QUOTE_RE.search(s) and not s.startswith("~"):
return s
# Single quoted strings can contain any characters unescaped except the
# single quote itself, which can't even be escaped, so the string needs to
# be closed, an escaped single quote added, and reopened.
t = type(s)
return t("'%s'") % s.replace(t("'"), t("'\\''"))
def quote(*strings):
"""Given one or more strings, returns a quoted string that can be used
literally on a shell command line.
>>> quote('a', 'b')
"a b"
>>> quote('a b', 'c')
"'a b' c"
"""
return " ".join(_quote(s) for s in strings)

@ -3,16 +3,18 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import datetime
import functools
import requests
import logging
import os
import requests
import taskcluster_urls as liburls
from requests.packages.urllib3.util.retry import Retry
from taskgraph.task import Task
from taskgraph.util.memoize import memoize
from taskgraph.util import yaml
from taskgraph.util.memoize import memoize
logger = logging.getLogger(__name__)
@ -21,7 +23,7 @@ testing = False
# Default rootUrl to use if none is given in the environment; this should point
# to the production Taskcluster deployment used for CI.
PRODUCTION_TASKCLUSTER_ROOT_URL = "https://taskcluster.net"
PRODUCTION_TASKCLUSTER_ROOT_URL = None
# the maximum number of parallel Taskcluster API calls to make
CONCURRENCY = 50
@ -29,11 +31,13 @@ CONCURRENCY = 50
@memoize
def get_root_url(use_proxy):
"""Get the current TASKCLUSTER_ROOT_URL. When running in a task, this must
come from $TASKCLUSTER_ROOT_URL; when run on the command line, we apply a
defualt that points to the production deployment of Taskcluster. If use_proxy
is set, this attempts to get TASKCLUSTER_PROXY_URL instead, failing if it
is not set."""
"""Get the current TASKCLUSTER_ROOT_URL.
When running in a task, this must come from $TASKCLUSTER_ROOT_URL; when run
on the command line, a default may be provided that points to the
production deployment of Taskcluster. If use_proxy is set, this attempts to
get TASKCLUSTER_PROXY_URL instead, failing if it is not set.
"""
if use_proxy:
try:
return liburls.normalize_root_url(os.environ["TASKCLUSTER_PROXY_URL"])
@ -45,21 +49,27 @@ def get_root_url(use_proxy):
else:
raise RuntimeError("taskcluster-proxy is not enabled for this task")
if "TASKCLUSTER_ROOT_URL" not in os.environ:
if "TASK_ID" in os.environ:
raise RuntimeError(
"$TASKCLUSTER_ROOT_URL must be set when running in a task"
if "TASKCLUSTER_ROOT_URL" in os.environ:
logger.debug(
"Running in Taskcluster instance {}{}".format(
os.environ["TASKCLUSTER_ROOT_URL"],
" with taskcluster-proxy"
if "TASKCLUSTER_PROXY_URL" in os.environ
else "",
)
else:
logger.debug("Using default TASKCLUSTER_ROOT_URL (Firefox CI production)")
return liburls.normalize_root_url(PRODUCTION_TASKCLUSTER_ROOT_URL)
logger.debug(
"Running in Taskcluster instance {}{}".format(
os.environ["TASKCLUSTER_ROOT_URL"],
" with taskcluster-proxy" if "TASKCLUSTER_PROXY_URL" in os.environ else "",
)
)
return liburls.normalize_root_url(os.environ["TASKCLUSTER_ROOT_URL"])
return liburls.normalize_root_url(os.environ["TASKCLUSTER_ROOT_URL"])
if "TASK_ID" in os.environ:
raise RuntimeError("$TASKCLUSTER_ROOT_URL must be set when running in a task")
if PRODUCTION_TASKCLUSTER_ROOT_URL is None:
raise RuntimeError(
"Could not detect Taskcluster instance, set $TASKCLUSTER_ROOT_URL"
)
logger.debug("Using default TASKCLUSTER_ROOT_URL")
return liburls.normalize_root_url(PRODUCTION_TASKCLUSTER_ROOT_URL)
def requests_retry_session(
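With the hard-coded taskcluster.net default removed, callers outside a task must now point at an instance themselves. A minimal sketch; the URL is illustrative:

import os
from taskgraph.util.taskcluster import get_root_url

# Must be set before the first (memoized) call.
os.environ["TASKCLUSTER_ROOT_URL"] = "https://firefox-ci-tc.services.mozilla.com"
print(get_root_url(False))  # the normalized root URL of that instance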

@ -7,10 +7,7 @@ Tools for interacting with existing taskgraphs.
"""
from taskgraph.util.taskcluster import (
find_task_id,
get_artifact,
)
from taskgraph.util.taskcluster import find_task_id, get_artifact
def find_decision_task(parameters, graph_config):

@ -6,8 +6,8 @@
# more complicated or less precise and we lean on time delta here.
import re
import datetime
import re
PATTERN = re.compile(r"((?:\d+)?\.?\d+) *([a-z]+)")

@ -5,7 +5,7 @@
import os
import subprocess
from abc import ABC, abstractproperty, abstractmethod
from abc import ABC, abstractmethod, abstractproperty
from shutil import which
import requests
@ -76,7 +76,7 @@ class HgRepository(Repository):
tool = "hg"
def __init__(self, *args, **kwargs):
super(HgRepository, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._env["HGPLAIN"] = "1"
@property

@ -232,6 +232,34 @@ def verify_dependency_tiers(task, taskgraph, scratch_pad, graph_config, paramete
)
@verifications.add("full_task_graph")
def verify_toolchain_alias(task, taskgraph, scratch_pad, graph_config, parameters):
"""
This function verifies that toolchain aliases are not reused.
"""
if task is None:
return
attributes = task.attributes
if "toolchain-alias" in attributes:
keys = attributes["toolchain-alias"]
if not keys:
keys = []
elif isinstance(keys, str):
keys = [keys]
for key in keys:
if key in scratch_pad:
raise Exception(
"Duplicate toolchain-alias in tasks "
"`{}`and `{}`: {}".format(
task.label,
scratch_pad[key],
key,
)
)
else:
scratch_pad[key] = task.label
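The check above amounts to a uniqueness constraint over every advertised alias. A self-contained sketch with made-up labels:

scratch_pad = {}
for label, aliases in [
    ("toolchain-clang-14", ["clang", "clang-14"]),
    ("toolchain-clang-15", ["clang", "clang-15"]),
]:
    for key in aliases:
        if key in scratch_pad:
            # "clang" is claimed twice, so graph generation would fail here.
            raise Exception(
                f"Duplicate toolchain-alias in tasks `{label}` and `{scratch_pad[key]}`: {key}"
            )
        scratch_pad[key] = label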
@verifications.add("optimized_task_graph")
def verify_always_optimized(task, taskgraph, scratch_pad, graph_config, parameters):
"""