Bug 1759030 - Vendor `taskcluster-taskgraph` at `3.5.1`, r=ahochheiden

Differential Revision: https://phabricator.services.mozilla.com/D161056
ahochheiden 2022-11-04 14:14:56 +00:00
Parent 71b86860bf
Commit 55811f8006
24 changed files: 2217 additions and 1374 deletions


@@ -464,7 +464,7 @@ class PLURALS(LegacySource):
         selector = ctx.evaluate(self.selector)
         keys = ctx.plural_categories
         forms = [
-            FTL.TextElement(part.strip())
+            FTL.TextElement(part)
             for part in element.value.split(';')
         ]
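The functional change here is subtle: legacy plural strings are split on ';', and dropping strip() means whitespace around each form now survives into the resulting FTL.TextElement. A minimal sketch of the difference, using a hypothetical .properties-style plural value:

# Hypothetical legacy plural string; real values come from .properties files.
legacy_value = "one file; #1 files"

# Old behaviour: whitespace around each form was discarded.
old_forms = [part.strip() for part in legacy_value.split(';')]
assert old_forms == ["one file", "#1 files"]

# New behaviour: each form is kept verbatim, including the leading space.
new_forms = [part for part in legacy_value.split(';')]
assert new_forms == ["one file", " #1 files"]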

15
third_party/python/poetry.lock generated vendored

@@ -252,8 +252,8 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
 zipp = ">=0.5"

 [package.extras]
-testing = ["importlib-resources (>=1.3)", "pep517", "packaging"]
-docs = ["rst.linker", "sphinx"]
+docs = ["sphinx", "rst.linker"]
+testing = ["packaging", "pep517", "importlib-resources (>=1.3)"]

 [[package]]
 name = "iso8601"
@@ -634,7 +634,7 @@ test = ["pytest", "pytest-cov", "pytest-mock", "httmock", "mock", "setuptools-li

 [[package]]
 name = "taskcluster-taskgraph"
-version = "3.2.1"
+version = "3.5.1"
 description = "Build taskcluster taskgraphs"
 category = "main"
 optional = false
@@ -653,6 +653,9 @@ slugid = ">=2.0"
 taskcluster-urls = ">=11.0"
 voluptuous = ">=0.12.1"

+[package.extras]
+load-image = ["zstandard"]
+
 [[package]]
 name = "taskcluster-urls"
 version = "13.0.1"
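The new load-image extra makes zstandard an optional dependency: it is only pulled in when requested via pip install 'taskcluster-taskgraph[load-image]', presumably for taskgraph's image-loading utilities. A sketch of the usual pattern behind such an extra (illustrative names, not taskgraph's actual module layout):

def load_zstd_artifact(path):
    """Decompress a zstd artifact; illustrative, not taskgraph's real code."""
    try:
        import zstandard  # provided by the 'load-image' extra
    except ImportError as exc:
        raise RuntimeError(
            "install the extra first: pip install 'taskcluster-taskgraph[load-image]'"
        ) from exc
    with open(path, "rb") as fh:
        return zstandard.ZstdDecompressor().decompressobj().decompress(fh.read())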
@@ -757,7 +760,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.6"
-content-hash = "eb26337dadb86c9a4895059df8799e1d81eb31e85150dea8931c2207a0bef168"
+content-hash = "a12185410cebae037b0b16a9f8a3b99e1069c4f8d55157c48e15cc3f14233228"

 [metadata.files]
 aiohttp = [
@@ -1153,8 +1156,8 @@ taskcluster = [
     {file = "taskcluster-44.2.2.tar.gz", hash = "sha256:0266a6a901e1a2ec838984a7f24e7adb6d58f9f2e221a7f613388f8f23f786fc"},
 ]
 taskcluster-taskgraph = [
-    {file = "taskcluster-taskgraph-3.2.1.tar.gz", hash = "sha256:c638724f0d514a3fc2d6ba34ddd395cfe021312dcf78b01c789ec4c7bf068cf0"},
-    {file = "taskcluster_taskgraph-3.2.1-py3-none-any.whl", hash = "sha256:87498ce08c5d2bfe0fd0b1a860e3dc2b9eba4d7acb883e9e5c2b6f7f15281a34"},
+    {file = "taskcluster-taskgraph-3.5.1.tar.gz", hash = "sha256:e08b935175349ef8728ff5f19c7e9866a562256180f5580b291da3217cb5016c"},
+    {file = "taskcluster_taskgraph-3.5.1-py3-none-any.whl", hash = "sha256:dc56b87228fb8eb1ef611750202344817a8cf5d825c0dc7e2dcc0f8b2795cbcd"},
 ]
 taskcluster-urls = [
     {file = "taskcluster-urls-13.0.1.tar.gz", hash = "sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367"},
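Each entry in [metadata.files] records the SHA-256 digest of a published artifact, which poetry verifies at install time. A hedged sketch of reproducing one of the digests above from a downloaded file (the filename is an example, not a path in this tree):

import hashlib
from pathlib import Path

def artifact_digest(path):
    # Hash the raw bytes of the sdist/wheel exactly as uploaded to PyPI.
    return "sha256:" + hashlib.sha256(Path(path).read_bytes()).hexdigest()

# Expected to print the dc56b872... value recorded above for a pristine wheel:
print(artifact_digest("taskcluster_taskgraph-3.5.1-py3-none-any.whl"))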

98
third_party/python/requirements.in vendored

@@ -1,49 +1,49 @@
-appdirs==1.4.4
-attrs==19.2.0
-blessings==1.7
-cbor2==4.0.1
-colorama==0.4.5
-compare-locales==8.2.1
-cookies==2.2.1
-cram==0.7
-distro==1.4.0
-ecdsa==0.15
-esprima==4.0.1
-fluent.migrate==0.11
-fluent.syntax==0.18.1
-glean_parser==6.1.2
-# Pin importlib-metadata to a version compatible with poetry
-importlib-metadata==1.7.0
-jsmin==2.1.0
-json-e==2.7.0
-looseversion==1.0.1
-mozilla-repo-urls==0.1.0
-mozilla-version==0.3.4
-packaging==20.9
-pathspec==0.9.0
-pip==21.2.4
-pip-tools==5.5.0
-ply==3.10
-pyasn1==0.4.8
-pyasn1-modules==0.2.8
-pylru==1.0.9
-python-hglib==2.4
-pytoml==0.1.10
-pyyaml==5.4.1
-redo==2.0.3
-requests==2.25.1
-requests-unixsocket==0.2.0
-responses==0.10.6
-rsa==3.1.4
-sentry-sdk==0.14.3
-setuptools==51.2.0
-six==1.13.0
-slugid==2.0.0
-taskcluster==44.2.2
-taskcluster-taskgraph==3.2.1
-taskcluster-urls==13.0.1
-tqdm==4.62.3
-urllib3==1.26
-voluptuous==0.12.1
-wheel==0.37.0
-yamllint==1.23
+appdirs==1.4.4
+attrs==19.2.0
+blessings==1.7
+cbor2==4.0.1
+colorama==0.4.5
+compare-locales==8.2.1
+cookies==2.2.1
+cram==0.7
+distro==1.4.0
+ecdsa==0.15
+esprima==4.0.1
+fluent.migrate==0.11
+fluent.syntax==0.18.1
+glean_parser==6.1.2
+# Pin importlib-metadata to a version compatible with poetry
+importlib-metadata==1.7.0
+jsmin==2.1.0
+json-e==2.7.0
+looseversion==1.0.1
+mozilla-repo-urls==0.1.0
+mozilla-version==0.3.4
+packaging==20.9
+pathspec==0.9.0
+pip==21.2.4
+pip-tools==5.5.0
+ply==3.10
+pyasn1==0.4.8
+pyasn1-modules==0.2.8
+pylru==1.0.9
+python-hglib==2.4
+pytoml==0.1.10
+pyyaml==5.4.1
+redo==2.0.3
+requests==2.25.1
+requests-unixsocket==0.2.0
+responses==0.10.6
+rsa==3.1.4
+sentry-sdk==0.14.3
+setuptools==51.2.0
+six==1.13.0
+slugid==2.0.0
+taskcluster==44.2.2
+taskcluster-taskgraph==3.5.1
+taskcluster-urls==13.0.1
+tqdm==4.62.3
+urllib3==1.26
+voluptuous==0.12.1
+wheel==0.37.0
+yamllint==1.23
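requirements.in is the hand-maintained list of pins; the requirements.txt that follows is generated from it, with environment markers and per-artifact hashes attached. A small sketch (an assumed helper, not part of the build) of how one generated line is interpreted, using the packaging library pinned above:

from packaging.requirements import Requirement

# Simplified example line; real generated lines also carry --hash options,
# which pip (not packaging) enforces in hash-checking mode.
req = Requirement('taskcluster-taskgraph==3.5.1; python_version >= "3.6"')
print(req.name)               # taskcluster-taskgraph
print(str(req.specifier))     # ==3.5.1
print(req.marker.evaluate())  # True when the running interpreter satisfies it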

802
third_party/python/requirements.txt vendored

@@ -1,401 +1,401 @@
aiohttp==3.7.4.post0; python_version >= "3.6" \
--hash=sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5 \
--hash=sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8 \
--hash=sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95 \
--hash=sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290 \
--hash=sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f \
--hash=sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809 \
--hash=sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe \
--hash=sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287 \
--hash=sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc \
--hash=sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87 \
--hash=sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0 \
--hash=sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970 \
--hash=sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f \
--hash=sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde \
--hash=sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c \
--hash=sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8 \
--hash=sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f \
--hash=sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5 \
--hash=sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf \
--hash=sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df \
--hash=sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213 \
--hash=sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4 \
--hash=sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009 \
--hash=sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5 \
--hash=sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013 \
--hash=sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16 \
--hash=sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5 \
--hash=sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b \
--hash=sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd \
--hash=sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439 \
--hash=sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22 \
--hash=sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a \
--hash=sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb \
--hash=sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb \
--hash=sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9 \
--hash=sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe \
--hash=sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf
appdirs==1.4.4 \
--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 \
--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41
async-timeout==3.0.1; python_full_version >= "3.5.3" and python_version >= "3.6" \
--hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \
--hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3
attrs==19.2.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2 \
--hash=sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396
blessings==1.7; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:caad5211e7ba5afe04367cdd4cfc68fa886e2e08f6f35e76b7387d2109ccea6e \
--hash=sha256:b1fdd7e7a675295630f9ae71527a8ebc10bfefa236b3d6aa4932ee4462c17ba3 \
--hash=sha256:98e5854d805f50a5b58ac2333411b0482516a8210f23f43308baeb58d77c157d
cbor2==4.0.1 \
--hash=sha256:b0eb916c9ea226aa81e9091607737475d5b0e5c314fe8d5a87179fba449cd190 \
--hash=sha256:cee0d01e520563b5a73c72eace5c428bb68aefb1b3f7aee5d692d3af6a1e5172
certifi==2018.4.16 \
--hash=sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0 \
--hash=sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7
chardet==4.0.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" or python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" \
--hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 \
--hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa
click==7.1.2; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" \
--hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc \
--hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a
colorama==0.4.5; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da \
--hash=sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4
compare-locales==8.2.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0" and python_version < "4") \
--hash=sha256:470d50d96c68f8e147daa3d70f29a7b750adefea450c5fa07e0f666c8083d854 \
--hash=sha256:e6a1610151d357e74ee6c1f5e944f1868e449f83e478c84d92f7b86132f721d7
cookies==2.2.1 \
--hash=sha256:15bee753002dff684987b8df8c235288eb8d45f8191ae056254812dfd42c81d3 \
--hash=sha256:d6b698788cae4cfa4e62ef8643a9ca332b79bd96cb314294b864ae8d7eb3ee8e
cram==0.7 \
--hash=sha256:008e4e8b4d325cf040964b5f62460535b004a7bc816d54f8527a4d299edfe4a3 \
--hash=sha256:7da7445af2ce15b90aad5ec4792f857cef5786d71f14377e9eb994d8b8337f2f
diskcache==4.1.0 \
--hash=sha256:69b253a6ffe95bb4bafb483b97c24fca3c2c6c47b82e92b36486969a7e80d47d \
--hash=sha256:bcee5a59f9c264e2809e58d01be6569a3bbb1e36a1e0fb83f7ef9b2075f95ce0
distro==1.4.0 \
--hash=sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4 \
--hash=sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57
ecdsa==0.15; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.3.0") \
--hash=sha256:867ec9cf6df0b03addc8ef66b56359643cb5d0c1dc329df76ba7ecfe256c8061 \
--hash=sha256:8f12ac317f8a1318efa75757ef0a651abe12e51fc1af8838fb91079445227277
esprima==4.0.1 \
--hash=sha256:08db1a876d3c2910db9cfaeb83108193af5411fc3a3a66ebefacd390d21323ee
fluent.migrate==0.11 \
--hash=sha256:3b93fdba9cbc8702d160367ba3a0d5c120707fdde752af35aecf516ce80ed252
fluent.syntax==0.18.1 \
--hash=sha256:0e63679fa4f1b3042565220a5127b4bab842424f07d6a13c12299e3b3835486a \
--hash=sha256:3a55f5e605d1b029a65cc8b6492c86ec4608e15447e73db1495de11fd46c104f
giturlparse==0.10.0; python_version >= "3.6" \
--hash=sha256:04ba1a3a099c3093fa8d24a422913c6a9b2c2cd22bcffc939cf72e3e98f672d7 \
--hash=sha256:2595ab291d30717cda8474b874c9fd509f1b9802ad7f6968c36a45e4b13eb337
glean-parser==6.1.2 \
--hash=sha256:e801af6463b7e0ba79d97ddfc0a58d9d71121c93cea601417571e33fa8142270 \
--hash=sha256:12a0fecedc1144d77fa571e0422ff3fea4dbadc381d631bea800a6b2f58f4f7f
idna-ssl==1.1.0; python_version < "3.7" and python_version >= "3.6" \
--hash=sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c
idna==2.10; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" or python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.7" or python_version < "3.7" and python_version >= "3.6" and python_full_version >= "3.4.0" or python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \
--hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 \
--hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6
importlib-metadata==1.7.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070 \
--hash=sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83
iso8601==0.1.14; python_version <= "3.6" \
--hash=sha256:e7e1122f064d626e17d47cd5106bed2c620cb38fe464999e0ddae2b6d2de6004 \
--hash=sha256:8aafd56fa0290496c5edbb13c311f78fa3a241f0853540da09d9363eae3ebd79
jinja2==2.11.3; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" \
--hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \
--hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6
jsmin==2.1.0 \
--hash=sha256:5d07bf0251a4128e5e8e8eef603849b6b5741c337bff087731a248f9cc774f56
json-e==2.7.0 \
--hash=sha256:d8c1ec3f5bbc7728c3a504ebe58829f283c64eca230871e4eefe974b4cdaae4a
jsonschema==3.2.0 \
--hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \
--hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a
looseversion==1.0.1; python_version >= "3" \
--hash=sha256:a205beabd0ffd40488edb9ccb3a39134510fc7c0c2847a25079f559e59c004ac \
--hash=sha256:b339dfde67680e9c5c2e96673e52bee9f94d2f0e1b8f4cbfd86d32311e86b952
markupsafe==1.1.1; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" or python_full_version >= "3.5.0" \
--hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
--hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
--hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
--hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
--hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
--hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
--hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
--hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
--hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
--hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
--hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
--hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
--hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
--hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
--hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
--hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
--hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
--hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
--hash=sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5 \
--hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
--hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
--hash=sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f \
--hash=sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0 \
--hash=sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7 \
--hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
--hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
--hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
--hash=sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193 \
--hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
--hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
--hash=sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1 \
--hash=sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1 \
--hash=sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f \
--hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
--hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
--hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
--hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
--hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
--hash=sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2 \
--hash=sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032 \
--hash=sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b \
--hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
--hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
--hash=sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c \
--hash=sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb \
--hash=sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014 \
--hash=sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850 \
--hash=sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85 \
--hash=sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621 \
--hash=sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39 \
--hash=sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8 \
--hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b
mohawk==0.3.4 \
--hash=sha256:b3f85ffa93a5c7d2f9cc591246ef9f8ac4a9fa716bfd5bae0377699a2d89d78c \
--hash=sha256:e98b331d9fa9ece7b8be26094cbe2d57613ae882133cc755167268a984bc0ab3
mozilla-repo-urls==0.1.0 \
--hash=sha256:aa43ebcc4744b4cf20bf27f8dff885e82efee21a0219ba20f6bd99931cabd7b9 \
--hash=sha256:5978abd796ae2b51a66e571754f0c559050cb4a024f2bf401471fa7ac4afd54e
mozilla-version==0.3.4 \
--hash=sha256:3ed4deb7a6fb25c83a5346ef4de08ddff9b2ddc4d16dd8fafb4a84978cc71255 \
--hash=sha256:ce5741c2e7d12c30b53de9f79e30d6ac2a8bd4c93be711d30c7a7a08e32a094f
multidict==5.1.0; python_version >= "3.6" \
--hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \
--hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \
--hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \
--hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \
--hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \
--hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \
--hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \
--hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \
--hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \
--hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \
--hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \
--hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \
--hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \
--hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \
--hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \
--hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \
--hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \
--hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \
--hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \
--hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \
--hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \
--hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \
--hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \
--hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \
--hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \
--hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \
--hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \
--hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \
--hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \
--hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \
--hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \
--hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \
--hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \
--hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \
--hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 \
--hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \
--hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5
packaging==20.9; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a \
--hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5
pathspec==0.9.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \
--hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1
pip-tools==5.5.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:cb0108391366b3ef336185097b3c2c0f3fa115b15098dafbda5e78aef70ea114 \
--hash=sha256:10841c1e56c234d610d0466447685b9ea4ee4a2c274f858c0ef3c33d9bd0d985
pip==21.2.4; python_version >= "3.6" \
--hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 \
--hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b
ply==3.10 \
--hash=sha256:96e94af7dd7031d8d6dd6e2a8e0de593b511c211a86e28a9c9621c275ac8bacb
pyasn1-modules==0.2.8 \
--hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
--hash=sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199 \
--hash=sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405 \
--hash=sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb \
--hash=sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8 \
--hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 \
--hash=sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d \
--hash=sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45 \
--hash=sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4 \
--hash=sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811 \
--hash=sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed \
--hash=sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0 \
--hash=sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd
pyasn1==0.4.8 \
--hash=sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3 \
--hash=sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf \
--hash=sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00 \
--hash=sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8 \
--hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
--hash=sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86 \
--hash=sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7 \
--hash=sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576 \
--hash=sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12 \
--hash=sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2 \
--hash=sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359 \
--hash=sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776 \
--hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
pylru==1.0.9 \
--hash=sha256:71376192671f0ad1690b2a7427d39a29b1df994c8469a9b46b03ed7e28c0172c
pyparsing==2.4.7; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" \
--hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b \
--hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1
pyrsistent==0.16.0 \
--hash=sha256:28669905fe725965daa16184933676547c5bb40a5153055a8dee2a4bd7933ad3
python-hglib==2.4 \
--hash=sha256:693d6ed92a6566e78802c7a03c256cda33d08c63ad3f00fcfa11379b184b9462
pytoml==0.1.10 \
--hash=sha256:98399eabd927cd3e12457525315b6abbc5abf9a6f392ab578cbcec327f73890c
pyyaml==5.4.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.6.0") \
--hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \
--hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \
--hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \
--hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \
--hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \
--hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \
--hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \
--hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \
--hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \
--hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \
--hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \
--hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \
--hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \
--hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 \
--hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \
--hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \
--hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \
--hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \
--hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \
--hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \
--hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \
--hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \
--hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \
--hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \
--hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \
--hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \
--hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \
--hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \
--hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e
redo==2.0.3 \
--hash=sha256:36784bf8ae766e14f9db0e377ccfa02835d648321d2007b6ae0bf4fd612c0f94 \
--hash=sha256:71161cb0e928d824092a5f16203939bbc0867ce4c4685db263cf22c3ae7634a8
requests-unixsocket==0.2.0 \
--hash=sha256:9e5c1a20afc3cf786197ae59c79bcdb0e7565f218f27df5f891307ee8817c1ea \
--hash=sha256:014d07bfb66dc805a011a8b4b306cf4ec96d2eddb589f6b2b5765e626f0dc0cc
requests==2.25.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e \
--hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804
responses==0.10.6; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:97193c0183d63fba8cd3a041c75464e4b09ea0aff6328800d1546598567dde0b \
--hash=sha256:502d9c0c8008439cfcdef7e251f507fcfdd503b56e8c0c87c3c3e3393953f790
rsa==3.1.4 \
--hash=sha256:e2b0b05936c276b1edd2e1525553233b666df9e29b5c3ba223eed738277c82a0
sentry-sdk==0.14.3 \
--hash=sha256:bb90a4e19c7233a580715fc986cc44be2c48fc10b31e71580a2037e1c94b6950 \
--hash=sha256:23808d571d2461a4ce3784ec12bbee5bdb8c026c143fe79d36cef8a6d653e71f
setuptools==51.2.0; python_version >= "3.6" \
--hash=sha256:56948bf25c682e166cf2bfe7c1ad63e5745849b50d1ae7b0f8bff5decdcf34f2 \
--hash=sha256:7ef59b1790b3491f8d321f531eccc11517a07a4d7637e498465cd834d80d4c2c
six==1.13.0; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.2.0") \
--hash=sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd \
--hash=sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66
slugid==2.0.0 \
--hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c \
--hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297
taskcluster-taskgraph==3.2.1 \
--hash=sha256:87498ce08c5d2bfe0fd0b1a860e3dc2b9eba4d7acb883e9e5c2b6f7f15281a34 \
--hash=sha256:c638724f0d514a3fc2d6ba34ddd395cfe021312dcf78b01c789ec4c7bf068cf0
taskcluster-urls==13.0.1 \
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
--hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
--hash=sha256:f66dcbd6572a6216ab65949f0fa0b91f2df647918028436c384e6af5cd12ae2b
taskcluster==44.2.2 \
--hash=sha256:c1b0e82be25b1ed17e07c90b24a382634b2bfce273fdf2682d94568abe10716c \
--hash=sha256:846d73c597f0f47dd8525c85c8d9bc41111d5200b090690d3f16b2f57c56a2e1 \
--hash=sha256:0266a6a901e1a2ec838984a7f24e7adb6d58f9f2e221a7f613388f8f23f786fc
tqdm==4.62.3; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:8dd278a422499cd6b727e6ae4061c40b48fce8b76d1ccbf5d34fca9b7f925b0c \
--hash=sha256:d359de7217506c9851b7869f3708d8ee53ed70a1b8edbba4dbcb47442592920d
typing-extensions==3.10.0.0; python_version < "3.8" and python_version >= "3.6" or python_version >= "3.6" \
--hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \
--hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 \
--hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342
urllib3==1.26.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0" and python_version < "4") \
--hash=sha256:bad31cb622ceee0ab46c4c884cf61957def0ff2e644de0a7a093678844c9ccac \
--hash=sha256:4849f132941d68144df0a3785ccc4fe423430ba5db0108d045c8cadbc90f517a
voluptuous==0.12.1 \
--hash=sha256:8ace33fcf9e6b1f59406bfaf6b8ec7bcc44266a9f29080b4deb4fe6ff2492386 \
--hash=sha256:663572419281ddfaf4b4197fd4942d181630120fb39b333e3adad70aeb56444b
wheel==0.37.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \
--hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad
yamllint==1.23.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:0fa69bf8a86182b7fe14918bdd3a30354c869966bbc7cbfff176af71bda9c806 \
--hash=sha256:59f3ff77f44e7f46be6aecdb985830f73a1c51e290b7082a7d38c2ae1940f4a9
yarl==1.6.3; python_version >= "3.6" \
--hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \
--hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \
--hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \
--hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \
--hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \
--hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \
--hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \
--hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \
--hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \
--hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \
--hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \
--hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \
--hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \
--hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \
--hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \
--hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \
--hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \
--hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \
--hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \
--hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \
--hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 \
--hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \
--hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \
--hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \
--hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \
--hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \
--hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \
--hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \
--hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \
--hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \
--hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \
--hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \
--hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \
--hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \
--hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \
--hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \
--hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10
zipp==3.4.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" or python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.8" or python_full_version >= "3.5.0" and python_version < "3.8" and python_version >= "3.6" \
--hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 \
--hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76
aiohttp==3.7.4.post0; python_version >= "3.6" \
--hash=sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5 \
--hash=sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8 \
--hash=sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95 \
--hash=sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290 \
--hash=sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f \
--hash=sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809 \
--hash=sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe \
--hash=sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287 \
--hash=sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc \
--hash=sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87 \
--hash=sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0 \
--hash=sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970 \
--hash=sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f \
--hash=sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde \
--hash=sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c \
--hash=sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8 \
--hash=sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f \
--hash=sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5 \
--hash=sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf \
--hash=sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df \
--hash=sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213 \
--hash=sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4 \
--hash=sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009 \
--hash=sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5 \
--hash=sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013 \
--hash=sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16 \
--hash=sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5 \
--hash=sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b \
--hash=sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd \
--hash=sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439 \
--hash=sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22 \
--hash=sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a \
--hash=sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb \
--hash=sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb \
--hash=sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9 \
--hash=sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe \
--hash=sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf
appdirs==1.4.4 \
--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 \
--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41
async-timeout==3.0.1; python_full_version >= "3.5.3" and python_version >= "3.6" \
--hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \
--hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3
attrs==19.2.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2 \
--hash=sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396
blessings==1.7; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:caad5211e7ba5afe04367cdd4cfc68fa886e2e08f6f35e76b7387d2109ccea6e \
--hash=sha256:b1fdd7e7a675295630f9ae71527a8ebc10bfefa236b3d6aa4932ee4462c17ba3 \
--hash=sha256:98e5854d805f50a5b58ac2333411b0482516a8210f23f43308baeb58d77c157d
cbor2==4.0.1 \
--hash=sha256:b0eb916c9ea226aa81e9091607737475d5b0e5c314fe8d5a87179fba449cd190 \
--hash=sha256:cee0d01e520563b5a73c72eace5c428bb68aefb1b3f7aee5d692d3af6a1e5172
certifi==2018.4.16 \
--hash=sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0 \
--hash=sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7
chardet==4.0.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" or python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" \
--hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 \
--hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa
click==7.1.2; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" \
--hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc \
--hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a
colorama==0.4.5; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da \
--hash=sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4
compare-locales==8.2.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0" and python_version < "4") \
--hash=sha256:470d50d96c68f8e147daa3d70f29a7b750adefea450c5fa07e0f666c8083d854 \
--hash=sha256:e6a1610151d357e74ee6c1f5e944f1868e449f83e478c84d92f7b86132f721d7
cookies==2.2.1 \
--hash=sha256:15bee753002dff684987b8df8c235288eb8d45f8191ae056254812dfd42c81d3 \
--hash=sha256:d6b698788cae4cfa4e62ef8643a9ca332b79bd96cb314294b864ae8d7eb3ee8e
cram==0.7 \
--hash=sha256:008e4e8b4d325cf040964b5f62460535b004a7bc816d54f8527a4d299edfe4a3 \
--hash=sha256:7da7445af2ce15b90aad5ec4792f857cef5786d71f14377e9eb994d8b8337f2f
diskcache==4.1.0 \
--hash=sha256:69b253a6ffe95bb4bafb483b97c24fca3c2c6c47b82e92b36486969a7e80d47d \
--hash=sha256:bcee5a59f9c264e2809e58d01be6569a3bbb1e36a1e0fb83f7ef9b2075f95ce0
distro==1.4.0 \
--hash=sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4 \
--hash=sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57
ecdsa==0.15; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.3.0") \
--hash=sha256:867ec9cf6df0b03addc8ef66b56359643cb5d0c1dc329df76ba7ecfe256c8061 \
--hash=sha256:8f12ac317f8a1318efa75757ef0a651abe12e51fc1af8838fb91079445227277
esprima==4.0.1 \
--hash=sha256:08db1a876d3c2910db9cfaeb83108193af5411fc3a3a66ebefacd390d21323ee
fluent.migrate==0.11 \
--hash=sha256:3b93fdba9cbc8702d160367ba3a0d5c120707fdde752af35aecf516ce80ed252
fluent.syntax==0.18.1 \
--hash=sha256:0e63679fa4f1b3042565220a5127b4bab842424f07d6a13c12299e3b3835486a \
--hash=sha256:3a55f5e605d1b029a65cc8b6492c86ec4608e15447e73db1495de11fd46c104f
giturlparse==0.10.0; python_version >= "3.6" \
--hash=sha256:04ba1a3a099c3093fa8d24a422913c6a9b2c2cd22bcffc939cf72e3e98f672d7 \
--hash=sha256:2595ab291d30717cda8474b874c9fd509f1b9802ad7f6968c36a45e4b13eb337
glean-parser==6.1.2 \
--hash=sha256:e801af6463b7e0ba79d97ddfc0a58d9d71121c93cea601417571e33fa8142270 \
--hash=sha256:12a0fecedc1144d77fa571e0422ff3fea4dbadc381d631bea800a6b2f58f4f7f
idna-ssl==1.1.0; python_version < "3.7" and python_version >= "3.6" \
--hash=sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c
idna==2.10; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" or python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.7" or python_version < "3.7" and python_version >= "3.6" and python_full_version >= "3.4.0" or python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \
--hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 \
--hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6
importlib-metadata==1.7.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070 \
--hash=sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83
iso8601==0.1.14; python_version <= "3.6" \
--hash=sha256:e7e1122f064d626e17d47cd5106bed2c620cb38fe464999e0ddae2b6d2de6004 \
--hash=sha256:8aafd56fa0290496c5edbb13c311f78fa3a241f0853540da09d9363eae3ebd79
jinja2==2.11.3; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" \
--hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \
--hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6
jsmin==2.1.0 \
--hash=sha256:5d07bf0251a4128e5e8e8eef603849b6b5741c337bff087731a248f9cc774f56
json-e==2.7.0 \
--hash=sha256:d8c1ec3f5bbc7728c3a504ebe58829f283c64eca230871e4eefe974b4cdaae4a
jsonschema==3.2.0 \
--hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \
--hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a
looseversion==1.0.1; python_version >= "3" \
--hash=sha256:a205beabd0ffd40488edb9ccb3a39134510fc7c0c2847a25079f559e59c004ac \
--hash=sha256:b339dfde67680e9c5c2e96673e52bee9f94d2f0e1b8f4cbfd86d32311e86b952
markupsafe==1.1.1; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" or python_full_version >= "3.5.0" \
--hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
--hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
--hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
--hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
--hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
--hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
--hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
--hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
--hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
--hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
--hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
--hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
--hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
--hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
--hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
--hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
--hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
--hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
--hash=sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5 \
--hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
--hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
--hash=sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f \
--hash=sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0 \
--hash=sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7 \
--hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
--hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
--hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
--hash=sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193 \
--hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
--hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
--hash=sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1 \
--hash=sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1 \
--hash=sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f \
--hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
--hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
--hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
--hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
--hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
--hash=sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2 \
--hash=sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032 \
--hash=sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b \
--hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
--hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
--hash=sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c \
--hash=sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb \
--hash=sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014 \
--hash=sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850 \
--hash=sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85 \
--hash=sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621 \
--hash=sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39 \
--hash=sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8 \
--hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b
mohawk==0.3.4 \
--hash=sha256:b3f85ffa93a5c7d2f9cc591246ef9f8ac4a9fa716bfd5bae0377699a2d89d78c \
--hash=sha256:e98b331d9fa9ece7b8be26094cbe2d57613ae882133cc755167268a984bc0ab3
mozilla-repo-urls==0.1.0 \
--hash=sha256:aa43ebcc4744b4cf20bf27f8dff885e82efee21a0219ba20f6bd99931cabd7b9 \
--hash=sha256:5978abd796ae2b51a66e571754f0c559050cb4a024f2bf401471fa7ac4afd54e
mozilla-version==0.3.4 \
--hash=sha256:3ed4deb7a6fb25c83a5346ef4de08ddff9b2ddc4d16dd8fafb4a84978cc71255 \
--hash=sha256:ce5741c2e7d12c30b53de9f79e30d6ac2a8bd4c93be711d30c7a7a08e32a094f
multidict==5.1.0; python_version >= "3.6" \
--hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \
--hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \
--hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \
--hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \
--hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \
--hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \
--hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \
--hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \
--hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \
--hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \
--hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \
--hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \
--hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \
--hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \
--hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \
--hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \
--hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \
--hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \
--hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \
--hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \
--hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \
--hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \
--hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \
--hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \
--hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \
--hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \
--hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \
--hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \
--hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \
--hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \
--hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \
--hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \
--hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \
--hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \
--hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 \
--hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \
--hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5
packaging==20.9; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a \
--hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5
pathspec==0.9.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \
--hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1
pip-tools==5.5.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:cb0108391366b3ef336185097b3c2c0f3fa115b15098dafbda5e78aef70ea114 \
--hash=sha256:10841c1e56c234d610d0466447685b9ea4ee4a2c274f858c0ef3c33d9bd0d985
pip==21.2.4; python_version >= "3.6" \
--hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 \
--hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b
ply==3.10 \
--hash=sha256:96e94af7dd7031d8d6dd6e2a8e0de593b511c211a86e28a9c9621c275ac8bacb
pyasn1-modules==0.2.8 \
--hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
--hash=sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199 \
--hash=sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405 \
--hash=sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb \
--hash=sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8 \
--hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 \
--hash=sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d \
--hash=sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45 \
--hash=sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4 \
--hash=sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811 \
--hash=sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed \
--hash=sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0 \
--hash=sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd
pyasn1==0.4.8 \
--hash=sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3 \
--hash=sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf \
--hash=sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00 \
--hash=sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8 \
--hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
--hash=sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86 \
--hash=sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7 \
--hash=sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576 \
--hash=sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12 \
--hash=sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2 \
--hash=sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359 \
--hash=sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776 \
--hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
pylru==1.0.9 \
--hash=sha256:71376192671f0ad1690b2a7427d39a29b1df994c8469a9b46b03ed7e28c0172c
pyparsing==2.4.7; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" \
--hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b \
--hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1
pyrsistent==0.16.0 \
--hash=sha256:28669905fe725965daa16184933676547c5bb40a5153055a8dee2a4bd7933ad3
python-hglib==2.4 \
--hash=sha256:693d6ed92a6566e78802c7a03c256cda33d08c63ad3f00fcfa11379b184b9462
pytoml==0.1.10 \
--hash=sha256:98399eabd927cd3e12457525315b6abbc5abf9a6f392ab578cbcec327f73890c
pyyaml==5.4.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.6.0") \
--hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \
--hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \
--hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \
--hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \
--hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \
--hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \
--hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \
--hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \
--hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \
--hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \
--hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \
--hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \
--hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \
--hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 \
--hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \
--hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \
--hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \
--hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \
--hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \
--hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \
--hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \
--hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \
--hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \
--hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \
--hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \
--hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \
--hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \
--hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \
--hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e
redo==2.0.3 \
--hash=sha256:36784bf8ae766e14f9db0e377ccfa02835d648321d2007b6ae0bf4fd612c0f94 \
--hash=sha256:71161cb0e928d824092a5f16203939bbc0867ce4c4685db263cf22c3ae7634a8
requests-unixsocket==0.2.0 \
--hash=sha256:9e5c1a20afc3cf786197ae59c79bcdb0e7565f218f27df5f891307ee8817c1ea \
--hash=sha256:014d07bfb66dc805a011a8b4b306cf4ec96d2eddb589f6b2b5765e626f0dc0cc
requests==2.25.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e \
--hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804
responses==0.10.6; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:97193c0183d63fba8cd3a041c75464e4b09ea0aff6328800d1546598567dde0b \
--hash=sha256:502d9c0c8008439cfcdef7e251f507fcfdd503b56e8c0c87c3c3e3393953f790
rsa==3.1.4 \
--hash=sha256:e2b0b05936c276b1edd2e1525553233b666df9e29b5c3ba223eed738277c82a0
sentry-sdk==0.14.3 \
--hash=sha256:bb90a4e19c7233a580715fc986cc44be2c48fc10b31e71580a2037e1c94b6950 \
--hash=sha256:23808d571d2461a4ce3784ec12bbee5bdb8c026c143fe79d36cef8a6d653e71f
setuptools==51.2.0; python_version >= "3.6" \
--hash=sha256:56948bf25c682e166cf2bfe7c1ad63e5745849b50d1ae7b0f8bff5decdcf34f2 \
--hash=sha256:7ef59b1790b3491f8d321f531eccc11517a07a4d7637e498465cd834d80d4c2c
six==1.13.0; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.2.0") \
--hash=sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd \
--hash=sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66
slugid==2.0.0 \
--hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c \
--hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297
taskcluster-taskgraph==3.5.1 \
--hash=sha256:e08b935175349ef8728ff5f19c7e9866a562256180f5580b291da3217cb5016c \
--hash=sha256:dc56b87228fb8eb1ef611750202344817a8cf5d825c0dc7e2dcc0f8b2795cbcd
taskcluster-urls==13.0.1 \
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
--hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
--hash=sha256:f66dcbd6572a6216ab65949f0fa0b91f2df647918028436c384e6af5cd12ae2b
taskcluster==44.2.2 \
--hash=sha256:c1b0e82be25b1ed17e07c90b24a382634b2bfce273fdf2682d94568abe10716c \
--hash=sha256:846d73c597f0f47dd8525c85c8d9bc41111d5200b090690d3f16b2f57c56a2e1 \
--hash=sha256:0266a6a901e1a2ec838984a7f24e7adb6d58f9f2e221a7f613388f8f23f786fc
tqdm==4.62.3; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:8dd278a422499cd6b727e6ae4061c40b48fce8b76d1ccbf5d34fca9b7f925b0c \
--hash=sha256:d359de7217506c9851b7869f3708d8ee53ed70a1b8edbba4dbcb47442592920d
typing-extensions==3.10.0.0; python_version < "3.8" and python_version >= "3.6" or python_version >= "3.6" \
--hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \
--hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 \
--hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342
urllib3==1.26.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0" and python_version < "4") \
--hash=sha256:bad31cb622ceee0ab46c4c884cf61957def0ff2e644de0a7a093678844c9ccac \
--hash=sha256:4849f132941d68144df0a3785ccc4fe423430ba5db0108d045c8cadbc90f517a
voluptuous==0.12.1 \
--hash=sha256:8ace33fcf9e6b1f59406bfaf6b8ec7bcc44266a9f29080b4deb4fe6ff2492386 \
--hash=sha256:663572419281ddfaf4b4197fd4942d181630120fb39b333e3adad70aeb56444b
wheel==0.37.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
--hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \
--hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad
yamllint==1.23.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \
--hash=sha256:0fa69bf8a86182b7fe14918bdd3a30354c869966bbc7cbfff176af71bda9c806 \
--hash=sha256:59f3ff77f44e7f46be6aecdb985830f73a1c51e290b7082a7d38c2ae1940f4a9
yarl==1.6.3; python_version >= "3.6" \
--hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \
--hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \
--hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \
--hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \
--hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \
--hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \
--hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \
--hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \
--hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \
--hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \
--hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \
--hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \
--hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \
--hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \
--hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \
--hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \
--hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \
--hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \
--hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \
--hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \
--hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 \
--hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \
--hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \
--hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \
--hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \
--hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \
--hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \
--hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \
--hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \
--hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \
--hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \
--hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \
--hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \
--hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \
--hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \
--hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \
--hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10
zipp==3.4.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" or python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.8" or python_full_version >= "3.5.0" and python_version < "3.8" and python_version >= "3.6" \
--hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 \
--hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76

View file

@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: taskcluster-taskgraph
Version: 3.2.1
Version: 3.5.1
Summary: Build taskcluster taskgraphs
Home-page: https://github.com/taskcluster/taskgraph
License: UNKNOWN
@ -26,6 +26,8 @@ Requires-Dist: requests-unixsocket (>=0.2)
Requires-Dist: slugid (>=2.0)
Requires-Dist: taskcluster-urls (>=11.0)
Requires-Dist: voluptuous (>=0.12.1)
Provides-Extra: load-image
Requires-Dist: zstandard ; extra == 'load-image'
UNKNOWN

View file

@ -1,15 +1,16 @@
taskgraph/__init__.py,sha256=jwOtU7TkmU317LP_IsgIswpj2T1OPUXXgMRv4sIU7nE,707
taskgraph/config.py,sha256=MoFLjKPUViWYGALi_acWDVXZs7M8cy0zQpUKsJSlBMs,4411
taskgraph/create.py,sha256=1z2AyLvHMkZfDkmPy6um86HG9xTRhE0Sphnbpd-kuEg,5190
taskgraph/decision.py,sha256=X94bfSp6LyYkO7hpi4A0ytWSfHl9YtkRLNaJR8loAWQ,12758
taskgraph/docker.py,sha256=hsMIvRVXiqC8DIGD34WwQrC1JnjaYHSvVWq_lEeNQEE,7471
taskgraph/decision.py,sha256=ApfQeXumRH7uq55DLt7gjQCh_eKls6lPhnNaH2ZpR-0,12849
taskgraph/docker.py,sha256=dB282jKjfLnHwL73YSg1Eeqj-ojHQc676vEpWt4PjVw,7835
taskgraph/files_changed.py,sha256=W3_gEgUT-mVH9DaaU_8X6gYpftrqBU3kgveGbzPLziU,2793
taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
taskgraph/generator.py,sha256=ZfSb8dek6tQRxfpHbvQP2KMxXFzmhqwN821tOlNcvzo,15118
taskgraph/generator.py,sha256=tonQ3UvaZYRdpWOtmdQ5Mr4en1FRCUJvbvlbzfChluM,15590
taskgraph/graph.py,sha256=9tE3bSSBRHvRLgJzK4dTieGT3RrzQZdR1YbKizEhzlw,4667
taskgraph/main.py,sha256=E7dC1q14L4psrNfUe-PMC8QH4cYjsIs91I-aVmzeBaI,23551
taskgraph/main.py,sha256=rb7cwghT5U97kSpIho0KzXo4HSXp2Iw_jaL2A2Qrf18,23581
taskgraph/morph.py,sha256=8qxYdruEQkbHGqv7dh3e1OWhH9Y5i6bFUKzDMs-Ctnw,9625
taskgraph/parameters.py,sha256=8556WayG8J-3w_DZTjF--VKd7Czuaxng1Zl3Cvdz5eg,11644
taskgraph/optimize.py,sha256=NVshvkqRKr7SQvRdqz5CELmnIXeiODkDxlK0D9QMi9k,16487
taskgraph/parameters.py,sha256=CYaR9E6pFsysUcRahlFILplEy3unVwUu7scLhP03nQo,11824
taskgraph/target_tasks.py,sha256=41BIVwiATy8DCQujPduTtnFmgHlKOfw6RPGL4b20WO8,3324
taskgraph/task.py,sha256=QCrOzMaTsy5QHShKUo89XgjJVMl3cSZGZJPLuHCXItE,3132
taskgraph/taskgraph.py,sha256=tfj0ZMqjuwEQDET0W57EcP-_KBEbqkxJci9Z6DkeOEQ,2397
@ -25,22 +26,22 @@ taskgraph/loader/transform.py,sha256=olUBPjxk3eEIg25sduxlcyqhjoig4ts5kPlT_zs6g9g
taskgraph/optimize/__init__.py,sha256=Oqpq1RW8QzOcu7zaMlNQ3BHT9ws9e_93FWfCqzNcQps,123
taskgraph/optimize/base.py,sha256=WvoDNewyHG46IQbG3th-aau9OxSKegsYNfvdOEmunbA,18341
taskgraph/optimize/strategies.py,sha256=Y5fS-f_3xsQNfFjCXIwDxrwXBvyp4yZxdPVNh49c7XU,2381
taskgraph/run-task/fetch-content,sha256=uUoyua3OdIgynY5Q9K6EojBwuaM2zo2OiN9bmNS646Q,24291
taskgraph/run-task/fetch-content,sha256=z3kx-vxaaaAmfqW-JW7dPKIFpjnxdZiXMdpPj1jAG8M,29915
taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
taskgraph/run-task/robustcheckout.py,sha256=xc24zaBd6dyuoga1ace0M27jo14K4UXNwhqcbHutJ7U,28977
taskgraph/run-task/run-task,sha256=76p0Zo19a6f4NkwTq8s9y4Emt3YW6Q-VdTInlcqjPjo,46956
taskgraph/run-task/robustcheckout.py,sha256=tZi_FRGFhX27fspaUj2RGsMCmkwn8IfpRiSsPOrGfXQ,29802
taskgraph/run-task/run-task,sha256=zT83gWFaB0qBWdxCLxOVHiMdq1bmSmi90FjXjcegfpk,43584
taskgraph/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/transforms/base.py,sha256=N9ec4kw65V_J2KY4C4QRPlbIREbRDYwTlhClstYmOBU,5285
taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607
taskgraph/transforms/code_review.py,sha256=eE2xrDtdD_n3HT3caQ2HGAkPm6Uutdm4hDCpCoFjEps,707
taskgraph/transforms/docker_image.py,sha256=ADiOUB-Ngm9Y6uwzGDpQsDJ_-4w6-ZYwLCxQ-0b16E0,7567
taskgraph/transforms/fetch.py,sha256=jxJw7wlEh_WxAa1Bmy2WIHfpdvL79PDsKwC1DFymbBQ,9584
taskgraph/transforms/fetch.py,sha256=Q7Co4wdBKL6Tr3Uc-eitJ3NGgGUYmRXNLuC5m-59-M8,10443
taskgraph/transforms/release_notifications.py,sha256=jrb9CCT-z_etDf690T-AeCvdzIoVWBAeM_FGoW7FIzA,3305
taskgraph/transforms/task.py,sha256=kWic-qqvK8vEFxQwojRPxc42GAsdkxoV3HVcG1pdBxE,47942
taskgraph/transforms/job/__init__.py,sha256=GKYODycxov7u05owF_ZWgczd7WHi2yHTd8L5Ftvxge0,16929
taskgraph/transforms/job/common.py,sha256=onHnerPcmmvbSk0oHt8mvJmOo7AnjHQya0ombgMNLG8,7106
taskgraph/transforms/task.py,sha256=fBiSCyC0Lzd2GDSZ_QwhQ1RRebXLmkw4ZCPte9fwEL8,48212
taskgraph/transforms/job/__init__.py,sha256=ayAytoDmlmNvJNArJc-_nBz1Xuc191rZdbobUgp9hQA,17192
taskgraph/transforms/job/common.py,sha256=XtKSxUCwRYqpPgRTyLD_8JGRuJs2JYuR0RXpTarPdTE,6826
taskgraph/transforms/job/index_search.py,sha256=Ngh9FFu1bx2kHVTChW2vcrbnb3SzMneRHopXk18RfB4,1220
taskgraph/transforms/job/run_task.py,sha256=oRR-is7dRKRrSCY3WntmJ-pKK3wx9-BMJpY9qru2FWY,8654
taskgraph/transforms/job/run_task.py,sha256=z5DqgHmmHYEbKtnpMQqcMY6ksgCnnoB7CugH3Z41Gag,8610
taskgraph/transforms/job/toolchain.py,sha256=WWsj6L_db9rJxzo26TdEf_0jcrK4MCoHHJDzFBkSFpI,5978
taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/util/archive.py,sha256=nzYn8cQ3NfLAeV-2SuTNoeQ6hg8m40f6FQcSTyVIKwQ,2855
@ -66,9 +67,9 @@ taskgraph/util/vcs.py,sha256=i13idS8y9ooR216mnd1gksdjSgHBNlAZEdq7Xr-ROwE,18536
taskgraph/util/verify.py,sha256=YETuZVkwnfYe57GRPx2x_vedstgqdGiH46HLWAdcks8,8827
taskgraph/util/workertypes.py,sha256=5g2mgIbEKMzDpZNnmPMoMNyy7Wahi-jmWcV1amDAcPo,2341
taskgraph/util/yaml.py,sha256=hfKI_D8Q7dimq4_VvO3WEh8CJsTrsIMwN6set7HIQbY,990
taskcluster_taskgraph-3.2.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
taskcluster_taskgraph-3.2.1.dist-info/METADATA,sha256=ahNDmBrUgn48sWk5gx2bq4WMRmnUlDkC_E-wXC6Yglg,1050
taskcluster_taskgraph-3.2.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
taskcluster_taskgraph-3.2.1.dist-info/entry_points.txt,sha256=VoXNtZpN4LvyXYB1wq47AU9CO-DMYMJ0VktKxjugzbY,51
taskcluster_taskgraph-3.2.1.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
taskcluster_taskgraph-3.2.1.dist-info/RECORD,,
taskcluster_taskgraph-3.5.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
taskcluster_taskgraph-3.5.1.dist-info/METADATA,sha256=uy5bE9DFHpqImbRhKEVM6CSC1me3wjdCW76836B0rEc,1126
taskcluster_taskgraph-3.5.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
taskcluster_taskgraph-3.5.1.dist-info/entry_points.txt,sha256=VoXNtZpN4LvyXYB1wq47AU9CO-DMYMJ0VktKxjugzbY,51
taskcluster_taskgraph-3.5.1.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
taskcluster_taskgraph-3.5.1.dist-info/RECORD,,

View file

@ -188,9 +188,11 @@ def get_decision_parameters(graph_config, options):
parameters["filters"] = [
"target_tasks_method",
]
parameters["optimize_strategies"] = None
parameters["optimize_target_tasks"] = True
parameters["existing_tasks"] = {}
parameters["do_not_optimize"] = []
parameters["enable_always_target"] = True
parameters["build_number"] = 1
parameters["version"] = get_version(repo_path)
parameters["next_version"] = None
@ -224,13 +226,8 @@ def get_decision_parameters(graph_config, options):
# ..but can be overridden by the commit message: if it contains the special
# string "DONTBUILD" and this is an on-push decision task, then use the
# special 'nothing' target task method. (except on the toolchains project,
# where we ignore "DONTBUILD").
if (
"DONTBUILD" in commit_message
and options["tasks_for"] == "hg-push"
and project != "toolchains"
):
# special 'nothing' target task method.
if "DONTBUILD" in commit_message and options["tasks_for"] == "hg-push":
parameters["target_tasks_method"] = "nothing"
if options.get("optimize_target_tasks") is not None:

View file

@ -7,6 +7,12 @@ import json
import os
import tarfile
from io import BytesIO
from textwrap import dedent
try:
import zstandard as zstd
except ImportError as e:
zstd = e
from taskgraph.util import docker
from taskgraph.util.taskcluster import get_artifact_url, get_session
@ -115,7 +121,15 @@ def load_image(url, imageName=None, imageTag=None):
Returns an object with properties 'image', 'tag' and 'layer'.
"""
import zstandard as zstd
if isinstance(zstd, ImportError):
raise ImportError(
dedent(
"""
zstandard is not installed! Use `pip install taskcluster-taskgraph[load-image]`
to use this feature.
"""
)
) from zstd
# If imageName is given and we don't have an imageTag
# we parse out the imageTag from imageName, or default it to 'latest'
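Note on the hunk above: the `zstandard` import is attempted once at module load, and any ImportError is stashed and re-raised only when `load_image` is actually called, pointing users at the new `load-image` extra. A minimal sketch of this deferred-import pattern, using a hypothetical optional module `fastzip`:

try:
    import fastzip  # hypothetical optional dependency
except ImportError as e:
    fastzip = e

def unpack(blob):
    # Fail at call time rather than import time, with install guidance.
    if isinstance(fastzip, ImportError):
        raise ImportError(
            "fastzip is not installed! Use `pip install mypkg[unpack]` "
            "to use this feature."
        ) from fastzip
    return fastzip.decompress(blob)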

View file

@ -14,7 +14,7 @@ from .config import GraphConfig, load_graph_config
from .graph import Graph
from .morph import morph
from .optimize.base import optimize_task_graph
from .parameters import Parameters
from .parameters import parameters_loader
from .task import Task
from .taskgraph import TaskGraph
from .transforms.base import TransformConfig, TransformSequence
@ -249,9 +249,6 @@ class TaskGraphGenerator:
continue
def _run(self):
# Initial verifications that don't depend on any generation state.
verifications("initial")
logger.info("Loading graph configuration.")
graph_config = load_graph_config(self.root_dir)
@ -259,6 +256,9 @@ class TaskGraphGenerator:
graph_config.register()
# Initial verifications that don't depend on any generation state.
verifications("initial")
if callable(self._parameters):
parameters = self._parameters(graph_config)
else:
@ -360,11 +360,14 @@ class TaskGraphGenerator:
if t.attributes["kind"] == "docker-image"
}
# include all tasks with `always_target` set
always_target_tasks = {
t.label
for t in full_task_graph.tasks.values()
if t.attributes.get("always_target")
}
if parameters["enable_always_target"]:
always_target_tasks = {
t.label
for t in full_task_graph.tasks.values()
if t.attributes.get("always_target")
}
else:
always_target_tasks = set()
logger.info(
"Adding %d tasks with `always_target` attribute"
% (len(always_target_tasks) - len(always_target_tasks & target_tasks))
@ -383,6 +386,14 @@ class TaskGraphGenerator:
do_not_optimize = set(parameters.get("do_not_optimize", []))
if not parameters.get("optimize_target_tasks", True):
do_not_optimize = set(target_task_set.graph.nodes).union(do_not_optimize)
# this is used for testing experimental optimization strategies
strategies = os.environ.get(
"TASKGRAPH_OPTIMIZE_STRATEGIES", parameters.get("optimize_strategies")
)
if strategies:
strategies = find_object(strategies)
optimized_task_graph, label_to_taskid = optimize_task_graph(
target_task_graph,
requested_tasks,
@ -390,6 +401,7 @@ class TaskGraphGenerator:
do_not_optimize,
self._decision_task_id,
existing_tasks=existing_tasks,
strategy_override=strategies,
)
yield self.verify(
@ -428,7 +440,7 @@ def load_tasks_for_kind(parameters, kind, root_dir=None):
# make parameters read-write
parameters = dict(parameters)
parameters["target-kind"] = kind
parameters = Parameters(strict=False, **parameters)
parameters = parameters_loader(spec=None, strict=False, overrides=parameters)
tgg = TaskGraphGenerator(root_dir=root_dir, parameters=parameters)
return {
task.task["metadata"]["name"]: task
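Note on `optimize_strategies` above: the value (or the `TASKGRAPH_OPTIMIZE_STRATEGIES` environment variable, which takes precedence) is a dotted path resolved with `find_object` and handed to `optimize_task_graph` as `strategy_override`. A sketch of such an override module, assuming the resolved object is a mapping of strategy names to instances and that `OptimizationStrategy` is importable from `taskgraph.optimize.base`; the module name is hypothetical:

# my_strategies.py -- select with TASKGRAPH_OPTIMIZE_STRATEGIES=my_strategies.STRATEGIES
from taskgraph.optimize.base import OptimizationStrategy

class NeverRemove(OptimizationStrategy):
    """Keep every task; useful for measuring an un-optimized graph."""

    def should_remove_task(self, task, params, arg):
        return False

STRATEGIES = {"skip-unless-changed": NeverRemove()}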

View file

@ -47,7 +47,9 @@ def argument(*args, **kwargs):
def format_taskgraph_labels(taskgraph):
return "\n".join(
taskgraph.tasks[index].label for index in taskgraph.graph.visit_postorder()
sorted(
taskgraph.tasks[index].label for index in taskgraph.graph.visit_postorder()
)
)

471 third_party/python/taskcluster_taskgraph/taskgraph/optimize.py vendored Normal file
View file

@ -0,0 +1,471 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
The objective of optimization is to remove as many tasks from the graph as
possible, as efficiently as possible, thereby delivering useful results as
quickly as possible. For example, ideally if only a test script is modified in
a push, then the resulting graph contains only the corresponding test suite
task.
See ``taskcluster/docs/optimization.rst`` for more information.
"""
import logging
import os
from collections import defaultdict
from slugid import nice as slugid
from . import files_changed
from .graph import Graph
from .taskgraph import TaskGraph
from .util.parameterization import resolve_task_references
from .util.taskcluster import find_task_id
logger = logging.getLogger(__name__)
TOPSRCDIR = os.path.abspath(os.path.join(__file__, "../../../"))
def optimize_task_graph(
target_task_graph,
params,
do_not_optimize,
decision_task_id,
existing_tasks=None,
strategies=None,
):
"""
Perform task optimization, returning a taskgraph and a map from label to
assigned taskId, including replacement tasks.
"""
label_to_taskid = {}
if not existing_tasks:
existing_tasks = {}
# instantiate the strategies for this optimization process
if not strategies:
strategies = _make_default_strategies()
optimizations = _get_optimizations(target_task_graph, strategies)
removed_tasks = remove_tasks(
target_task_graph=target_task_graph,
optimizations=optimizations,
params=params,
do_not_optimize=do_not_optimize,
)
replaced_tasks = replace_tasks(
target_task_graph=target_task_graph,
optimizations=optimizations,
params=params,
do_not_optimize=do_not_optimize,
label_to_taskid=label_to_taskid,
existing_tasks=existing_tasks,
removed_tasks=removed_tasks,
)
return (
get_subgraph(
target_task_graph,
removed_tasks,
replaced_tasks,
label_to_taskid,
decision_task_id,
),
label_to_taskid,
)
def _make_default_strategies():
return {
"never": OptimizationStrategy(), # "never" is the default behavior
"index-search": IndexSearch(),
"skip-unless-changed": SkipUnlessChanged(),
}
def _get_optimizations(target_task_graph, strategies):
def optimizations(label):
task = target_task_graph.tasks[label]
if task.optimization:
opt_by, arg = list(task.optimization.items())[0]
return (opt_by, strategies[opt_by], arg)
else:
return ("never", strategies["never"], None)
return optimizations
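For illustration: given a hypothetical task label whose `optimization` attribute is `{"skip-unless-changed": ["docs/**"]}`, the closure resolves it as sketched below, while tasks without an optimization fall back to the no-op "never" strategy:

strategies = _make_default_strategies()
optimizations = _get_optimizations(target_task_graph, strategies)
opt_by, strategy, arg = optimizations("docs-generate")
# opt_by == "skip-unless-changed"
# strategy is strategies["skip-unless-changed"]
# arg == ["docs/**"]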
def _log_optimization(verb, opt_counts, opt_reasons=None):
if opt_reasons:
message = "optimize: {label} {action} because of {reason}"
for label, (action, reason) in opt_reasons.items():
logger.debug(message.format(label=label, action=action, reason=reason))
if opt_counts:
logger.info(
f"{verb.title()} "
+ ", ".join(f"{c} tasks by {b}" for b, c in sorted(opt_counts.items()))
+ " during optimization."
)
else:
logger.info(f"No tasks {verb} during optimization")
def remove_tasks(target_task_graph, params, optimizations, do_not_optimize):
"""
Implement the "Removing Tasks" phase, returning a set of task labels of all removed tasks.
"""
opt_counts = defaultdict(int)
opt_reasons = {}
removed = set()
dependents_of = target_task_graph.graph.reverse_links_dict()
tasks = target_task_graph.tasks
prune_candidates = set()
# Traverse graph so dependents (child nodes) are guaranteed to be processed
# first.
for label in target_task_graph.graph.visit_preorder():
# Dependents that can be pruned away (shouldn't cause this task to run).
# Only dependents that either:
# A) Explicitly reference this task in their 'if_dependencies' list, or
# B) Don't have an 'if_dependencies' attribute (i.e. are in 'prune_candidates'
# because they should be removed but have prune_deps themselves)
# should be considered.
prune_deps = {
l
for l in dependents_of[label]
if l in prune_candidates
if not tasks[l].if_dependencies or label in tasks[l].if_dependencies
}
def _keep(reason):
"""Mark a task as being kept in the graph. Also recursively removes
any dependents from `prune_candidates`, assuming they should be
kept because of this task.
"""
opt_reasons[label] = ("kept", reason)
# Removes dependents that were in 'prune_candidates' from a task
# that ended up being kept (and therefore the dependents should
# also be kept).
queue = list(prune_deps)
while queue:
l = queue.pop()
# If l is a prune_dep of multiple tasks it could be queued up
# multiple times. Guard against it being already removed.
if l not in prune_candidates:
continue
# If a task doesn't set 'if_dependencies' itself (rather it was
# added to 'prune_candidates' due to one of its dependents),
# then we shouldn't remove it.
if not tasks[l].if_dependencies:
continue
prune_candidates.remove(l)
queue.extend([r for r in dependents_of[l] if r in prune_candidates])
def _remove(reason):
"""Potentially mark a task as being removed from the graph. If the
task has dependents that can be pruned, add this task to
`prune_candidates` rather than removing it.
"""
if prune_deps:
# If there are prune_deps, we can't tell yet whether this task can be removed.
prune_candidates.add(label)
else:
opt_reasons[label] = ("removed", reason)
opt_counts[reason] += 1
removed.add(label)
# if we're not allowed to optimize, that's easy..
if label in do_not_optimize:
_keep("do not optimize")
continue
# If there are remaining tasks depending on this one, do not remove.
if any(
l for l in dependents_of[label] if l not in removed and l not in prune_deps
):
_keep("dependent tasks")
continue
# Call the optimization strategy.
task = tasks[label]
opt_by, opt, arg = optimizations(label)
if opt.should_remove_task(task, params, arg):
_remove(opt_by)
continue
# Some tasks should only run if their dependency was also run. Since we
# haven't processed dependencies yet, we add them to a list of
# candidate tasks for pruning.
if task.if_dependencies:
opt_reasons[label] = ("kept", opt_by)
prune_candidates.add(label)
else:
_keep(opt_by)
if prune_candidates:
reason = "if-dependencies pruning"
for label in prune_candidates:
# There's an edge case where a triangle graph can cause a
# dependency to stay in 'prune_candidates' when the dependent
# remains. Do a final check to ensure we don't create any bad
# edges.
dependents = any(
d
for d in dependents_of[label]
if d not in prune_candidates
if d not in removed
)
if dependents:
opt_reasons[label] = ("kept", "dependent tasks")
continue
removed.add(label)
opt_counts[reason] += 1
opt_reasons[label] = ("removed", reason)
_log_optimization("removed", opt_counts, opt_reasons)
return removed
def replace_tasks(
target_task_graph,
params,
optimizations,
do_not_optimize,
label_to_taskid,
removed_tasks,
existing_tasks,
):
"""
Implement the "Replacing Tasks" phase, returning a set of task labels of
all replaced tasks. The replacement taskIds are added to label_to_taskid as
a side-effect.
"""
opt_counts = defaultdict(int)
replaced = set()
links_dict = target_task_graph.graph.links_dict()
for label in target_task_graph.graph.visit_postorder():
# if we're not allowed to optimize, that's easy..
if label in do_not_optimize:
continue
# if this task depends on un-replaced, un-removed tasks, do not replace
if any(l not in replaced and l not in removed_tasks for l in links_dict[label]):
continue
# if the task already exists, that's an easy replacement
repl = existing_tasks.get(label)
if repl:
label_to_taskid[label] = repl
replaced.add(label)
opt_counts["existing_tasks"] += 1
continue
# call the optimization strategy
task = target_task_graph.tasks[label]
opt_by, opt, arg = optimizations(label)
repl = opt.should_replace_task(task, params, arg)
if repl:
if repl is True:
# True means remove this task; get_subgraph will catch any
# problems with removed tasks being depended on
removed_tasks.add(label)
else:
label_to_taskid[label] = repl
replaced.add(label)
opt_counts[opt_by] += 1
continue
_log_optimization("replaced", opt_counts)
return replaced
def get_subgraph(
target_task_graph,
removed_tasks,
replaced_tasks,
label_to_taskid,
decision_task_id,
):
"""
Return the subgraph of target_task_graph consisting only of
non-optimized tasks and edges between them.
To avoid losing track of taskIds for tasks optimized away, this method
simultaneously substitutes real taskIds for task labels in the graph, and
populates each task definition's `dependencies` key with the appropriate
taskIds. Task references are resolved in the process.
"""
# check for any dependency edges from included to removed tasks
bad_edges = [
(l, r, n)
for l, r, n in target_task_graph.graph.edges
if l not in removed_tasks and r in removed_tasks
]
if bad_edges:
probs = ", ".join(
f"{l} depends on {r} as {n} but it has been removed"
for l, r, n in bad_edges
)
raise Exception("Optimization error: " + probs)
# fill in label_to_taskid for anything not removed or replaced
assert replaced_tasks <= set(label_to_taskid)
for label in sorted(
target_task_graph.graph.nodes - removed_tasks - set(label_to_taskid)
):
label_to_taskid[label] = slugid()
# resolve labels to taskIds and populate task['dependencies']
tasks_by_taskid = {}
named_links_dict = target_task_graph.graph.named_links_dict()
omit = removed_tasks | replaced_tasks
for label, task in target_task_graph.tasks.items():
if label in omit:
continue
task.task_id = label_to_taskid[label]
named_task_dependencies = {
name: label_to_taskid[label]
for name, label in named_links_dict.get(label, {}).items()
}
# Add remaining soft dependencies
if task.soft_dependencies:
named_task_dependencies.update(
{
label: label_to_taskid[label]
for label in task.soft_dependencies
if label in label_to_taskid and label not in omit
}
)
task.task = resolve_task_references(
task.label,
task.task,
task_id=task.task_id,
decision_task_id=decision_task_id,
dependencies=named_task_dependencies,
)
deps = task.task.setdefault("dependencies", [])
deps.extend(sorted(named_task_dependencies.values()))
tasks_by_taskid[task.task_id] = task
# resolve edges to taskIds
edges_by_taskid = (
(label_to_taskid.get(left), label_to_taskid.get(right), name)
for (left, right, name) in target_task_graph.graph.edges
)
# ..and drop edges that are no longer entirely in the task graph
# (note that this omits edges to replaced tasks, but they are still in task.dependencies)
edges_by_taskid = {
(left, right, name)
for (left, right, name) in edges_by_taskid
if left in tasks_by_taskid and right in tasks_by_taskid
}
return TaskGraph(tasks_by_taskid, Graph(set(tasks_by_taskid), edges_by_taskid))
class OptimizationStrategy:
def should_remove_task(self, task, params, arg):
"""Determine whether to optimize this task by removing it. Returns
True to remove."""
return False
def should_replace_task(self, task, params, arg):
"""Determine whether to optimize this task by replacing it. Returns a
taskId to replace this task, True to replace with nothing, or False to
keep the task."""
return False
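A strategy is just a subclass overriding one or both hooks. A minimal illustrative example, not part of the vendored module (`project` is a standard parameter, per the schema elsewhere in this commit):

class SkipOnTry(OptimizationStrategy):
    """Remove the task on try pushes; the per-task `arg` is unused here."""

    def should_remove_task(self, task, params, arg):
        return params.get("project") == "try"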
class Either(OptimizationStrategy):
"""Given one or more optimization strategies, remove a task if any of them
says to, and replace with a task if any finds a replacement (preferring the
earliest). By default, each substrategy gets the same arg, but split_args
can return a list of args for each strategy, if desired."""
def __init__(self, *substrategies, **kwargs):
self.substrategies = substrategies
self.split_args = kwargs.pop("split_args", None)
if not self.split_args:
self.split_args = lambda arg: [arg] * len(substrategies)
if kwargs:
raise TypeError("unexpected keyword args")
def _for_substrategies(self, arg, fn):
for sub, arg in zip(self.substrategies, self.split_args(arg)):
rv = fn(sub, arg)
if rv:
return rv
return False
def should_remove_task(self, task, params, arg):
return self._for_substrategies(
arg, lambda sub, arg: sub.should_remove_task(task, params, arg)
)
def should_replace_task(self, task, params, arg):
return self._for_substrategies(
arg, lambda sub, arg: sub.should_replace_task(task, params, arg)
)
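For example, a composite that first looks for an indexed replacement and otherwise removes the task when no watched files changed could be assembled as follows; with the default `split_args`, both substrategies receive the same `arg`:

index_or_skip = Either(IndexSearch(), SkipUnlessChanged())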
class IndexSearch(OptimizationStrategy):
# A task with no dependencies remaining after optimization will be replaced
# if artifacts exist for the corresponding index_paths.
# Otherwise, we're in one of the following cases:
# - the task has un-optimized dependencies
# - the artifacts have expired
# - some changes altered the index_paths and new artifacts need to be
# created.
# In any of those cases, we need to run the task to create or refresh
# artifacts.
def should_replace_task(self, task, params, index_paths):
"Look for a task with one of the given index paths"
for index_path in index_paths:
try:
task_id = find_task_id(
index_path, use_proxy=bool(os.environ.get("TASK_ID"))
)
return task_id
except KeyError:
# 404 will end up here and go on to the next index path
pass
return False
class SkipUnlessChanged(OptimizationStrategy):
def should_remove_task(self, task, params, file_patterns):
if params.get("repository_type") != "hg":
raise RuntimeError(
"SkipUnlessChanged optimization only works with mercurial repositories"
)
# pushlog_id == -1 - this is the case when run from a cron.yml job
if params.get("pushlog_id") == -1:
return False
changed = files_changed.check(params, file_patterns)
if not changed:
logger.debug(
'no files found matching a pattern in `skip-unless-changed` for "{}"'.format(
task.label
)
)
return True
return False

View file

@ -37,6 +37,7 @@ base_schema = Schema(
Required("build_date"): int,
Required("build_number"): int,
Required("do_not_optimize"): [str],
Required("enable_always_target"): bool,
Required("existing_tasks"): {str: str},
Required("filters"): [str],
Required("head_ref"): str,
@ -46,6 +47,7 @@ base_schema = Schema(
Required("level"): str,
Required("moz_build_date"): str,
Required("next_version"): Any(str, None),
Required("optimize_strategies"): Any(str, None),
Required("optimize_target_tasks"): bool,
Required("owner"): str,
Required("project"): str,
@ -93,6 +95,7 @@ def _get_defaults(repo_root=None):
"build_date": int(time.time()),
"build_number": 1,
"do_not_optimize": [],
"enable_always_target": True,
"existing_tasks": {},
"filters": ["target_tasks_method"],
"head_ref": repo.branch or repo.head_rev,
@ -102,6 +105,7 @@ def _get_defaults(repo_root=None):
"level": "3",
"moz_build_date": datetime.now().strftime("%Y%m%d%H%M%S"),
"next_version": None,
"optimize_strategies": None,
"optimize_target_tasks": True,
"owner": "nobody@mozilla.com",
"project": project,

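Callers rarely need to pass the two new keys since `_get_defaults` seeds them, but both can be overridden explicitly. A small sketch, assuming the vendored package is importable:

from taskgraph.parameters import Parameters

params = Parameters(
    strict=False,
    enable_always_target=False,  # skip the `always_target` expansion
    optimize_strategies=None,    # keep the default strategy registry
)
assert params["enable_always_target"] is False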
View file

@ -16,6 +16,7 @@ import multiprocessing
import os
import pathlib
import random
import re
import stat
import subprocess
import sys
@ -31,6 +32,11 @@ try:
except ImportError:
zstandard = None
try:
import certifi
except ImportError:
certifi = None
CONCURRENCY = multiprocessing.cpu_count()
@ -46,13 +52,13 @@ class IntegrityError(Exception):
def ZstdCompressor(*args, **kwargs):
if not zstandard:
raise ValueError('zstandard Python package not available')
raise ValueError("zstandard Python package not available")
return zstandard.ZstdCompressor(*args, **kwargs)
def ZstdDecompressor(*args, **kwargs):
if not zstandard:
raise ValueError('zstandard Python package not available')
raise ValueError("zstandard Python package not available")
return zstandard.ZstdDecompressor(*args, **kwargs)
@ -67,7 +73,7 @@ def rename_after_close(fname, *args, **kwargs):
manager.
"""
path = pathlib.Path(fname)
tmp = path.with_name('%s.tmp' % path.name)
tmp = path.with_name("%s.tmp" % path.name)
try:
with tmp.open(*args, **kwargs) as fh:
yield fh
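Usage mirrors a plain `open()`; the write is atomic with respect to readers because the temporary file is renamed into place only after the context exits cleanly:

with rename_after_close("artifact.tar.zst", "wb") as fh:
    fh.write(b"payload")  # an interrupted write leaves only artifact.tar.zst.tmp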
@ -127,7 +133,9 @@ def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=
jitter = jitter or 0 # py35 barfs on the next line if jitter is None
if jitter > sleeptime:
# To prevent negative sleep times
raise Exception('jitter ({}) must be less than sleep time ({})'.format(jitter, sleeptime))
raise Exception(
"jitter ({}) must be less than sleep time ({})".format(jitter, sleeptime)
)
sleeptime_real = sleeptime
for _ in range(attempts):
@ -149,7 +157,9 @@ def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=
# Don't need to sleep the last time
if _ < attempts - 1:
log("sleeping for %.2fs (attempt %i/%i)" % (sleeptime_real, _ + 1, attempts))
log(
"sleeping for %.2fs (attempt %i/%i)" % (sleeptime_real, _ + 1, attempts)
)
time.sleep(sleeptime_real)
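`retrier` only paces the attempts; the caller supplies the try/except and decides which errors are retryable, as `download_to_path` does below. A minimal usage sketch with a hypothetical `flaky()` callable:

for _ in retrier(attempts=3, sleeptime=1, max_sleeptime=5):
    try:
        flaky()  # hypothetical operation that may raise OSError
        break
    except OSError:
        continue
else:
    raise Exception("Operation failed, no more retries!")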
@ -166,7 +176,7 @@ def stream_download(url, sha256=None, size=None, headers=None):
content, it should be streamed to a file or memory and only operated
on after the generator is exhausted without raising.
"""
log('Downloading %s' % url)
log("Downloading %s" % url)
headers = headers or []
h = hashlib.sha256()
@ -179,8 +189,10 @@ def stream_download(url, sha256=None, size=None, headers=None):
req_headers[key.strip()] = val.strip()
req = urllib.request.Request(url, None, req_headers)
with urllib.request.urlopen(req) as fh:
if not url.endswith('.gz') and fh.info().get('Content-Encoding') == 'gzip':
with urllib.request.urlopen(
req, cafile=certifi.where()
) if certifi else urllib.request.urlopen(req) as fh:
if not url.endswith(".gz") and fh.info().get("Content-Encoding") == "gzip":
fh = gzip.GzipFile(fileobj=fh)
while True:
@ -196,22 +208,26 @@ def stream_download(url, sha256=None, size=None, headers=None):
duration = time.time() - t0
digest = h.hexdigest()
log('%s resolved to %d bytes with sha256 %s in %.3fs' % (
url, length, digest, duration))
log(
"%s resolved to %d bytes with sha256 %s in %.3fs"
% (url, length, digest, duration)
)
if size:
if size == length:
log('Verified size of %s' % url)
log("Verified size of %s" % url)
else:
raise IntegrityError('size mismatch on %s: wanted %d; got %d' % (
url, size, length))
raise IntegrityError(
"size mismatch on %s: wanted %d; got %d" % (url, size, length)
)
if sha256:
if digest == sha256:
log('Verified sha256 integrity of %s' % url)
log("Verified sha256 integrity of %s" % url)
else:
raise IntegrityError('sha256 mismatch on %s: wanted %s; got %s' % (
url, sha256, digest))
raise IntegrityError(
"sha256 mismatch on %s: wanted %s; got %s" % (url, sha256, digest)
)
def download_to_path(url, path, sha256=None, size=None, headers=None):
@ -227,10 +243,12 @@ def download_to_path(url, path, sha256=None, size=None, headers=None):
for _ in retrier(attempts=5, sleeptime=60):
try:
log('Downloading %s to %s' % (url, path))
log("Downloading %s to %s" % (url, path))
with rename_after_close(path, 'wb') as fh:
for chunk in stream_download(url, sha256=sha256, size=size, headers=headers):
with rename_after_close(path, "wb") as fh:
for chunk in stream_download(
url, sha256=sha256, size=size, headers=headers
):
fh.write(chunk)
return
@ -243,65 +261,85 @@ def download_to_path(url, path, sha256=None, size=None, headers=None):
raise Exception("Download failed, no more retries!")
def gpg_verify_path(path: pathlib.Path, public_key_data: bytes,
signature_data: bytes):
def download_to_memory(url, sha256=None, size=None):
"""Download a URL to memory, possibly with verification."""
data = b""
for _ in retrier(attempts=5, sleeptime=60):
try:
log("Downloading %s" % (url))
for chunk in stream_download(url, sha256=sha256, size=size):
data += chunk
return data
except IntegrityError:
raise
except Exception as e:
log("Download failed: {}".format(e))
continue
raise Exception("Download failed, no more retries!")
def gpg_verify_path(path: pathlib.Path, public_key_data: bytes, signature_data: bytes):
"""Verify that a filesystem path verifies using GPG.
Takes a Path defining a file to verify. ``public_key_data`` contains
bytes with GPG public key data. ``signature_data`` contains a signed
GPG document to use with ``gpg --verify``.
"""
log('Validating GPG signature of %s' % path)
log('GPG key data:\n%s' % public_key_data.decode('ascii'))
log("Validating GPG signature of %s" % path)
log("GPG key data:\n%s" % public_key_data.decode("ascii"))
with tempfile.TemporaryDirectory() as td:
try:
# --batch since we're running unattended.
gpg_args = ['gpg', '--homedir', td, '--batch']
gpg_args = ["gpg", "--homedir", td, "--batch"]
log('Importing GPG key...')
subprocess.run(gpg_args + ['--import'],
input=public_key_data,
check=True)
log("Importing GPG key...")
subprocess.run(gpg_args + ["--import"], input=public_key_data, check=True)
log('Verifying GPG signature...')
subprocess.run(gpg_args + ['--verify', '-', '%s' % path],
input=signature_data,
check=True)
log("Verifying GPG signature...")
subprocess.run(
gpg_args + ["--verify", "-", "%s" % path],
input=signature_data,
check=True,
)
log('GPG signature verified!')
log("GPG signature verified!")
finally:
# There is a race between the agent self-terminating and
# shutil.rmtree() from the temporary directory cleanup that can
# lead to exceptions. Kill the agent before cleanup to prevent this.
env = dict(os.environ)
env['GNUPGHOME'] = td
subprocess.run(['gpgconf', '--kill', 'gpg-agent'], env=env)
env["GNUPGHOME"] = td
subprocess.run(["gpgconf", "--kill", "gpg-agent"], env=env)
def open_tar_stream(path: pathlib.Path):
"""Return a binary stream of the decompressed tar data for ``path``."""
if path.suffix == '.bz2':
return bz2.open(str(path), 'rb')
elif path.suffix == '.gz':
return gzip.open(str(path), 'rb')
elif path.suffix == '.xz':
return lzma.open(str(path), 'rb')
elif path.suffix == '.zst':
if path.suffix == ".bz2":
return bz2.open(str(path), "rb")
elif path.suffix == ".gz":
return gzip.open(str(path), "rb")
elif path.suffix == ".xz":
return lzma.open(str(path), "rb")
elif path.suffix == ".zst":
dctx = ZstdDecompressor()
return dctx.stream_reader(path.open('rb'))
elif path.suffix == '.tar':
return path.open('rb')
return dctx.stream_reader(path.open("rb"))
elif path.suffix == ".tar":
return path.open("rb")
else:
raise ValueError('unknown archive format for tar file: %s' % path)
raise ValueError("unknown archive format for tar file: %s" % path)
def archive_type(path: pathlib.Path):
"""Attempt to identify a path as an extractable archive."""
if path.suffixes[-2:-1] == ['.tar']:
return 'tar'
elif path.suffix == '.zip':
return 'zip'
if path.suffixes[-2:-1] == [".tar"]:
return "tar"
elif path.suffix == ".zip":
return "zip"
else:
return None
@ -313,12 +351,12 @@ def extract_archive(path, dest_dir, typ):
path = path.resolve()
dest_dir = dest_dir.resolve()
log('Extracting %s to %s' % (path, dest_dir))
log("Extracting %s to %s" % (path, dest_dir))
t0 = time.time()
# We pipe input to the decompressor program so that we can apply
# custom decompressors that the program may not know about.
if typ == 'tar':
if typ == "tar":
ifh = open_tar_stream(path)
# On Windows, the tar program doesn't support things like symbolic
# links, while Windows actually supports them. The tarfile module in
# Python does, so keep the tar program on Linux and only use tarfile on Windows (tarfile is
# the tar program on Linux, only use tarfile on Windows (tarfile is
# also not much slower on Windows, presumably because of the
# notoriously bad I/O).
if sys.platform == 'win32':
tar = tarfile.open(fileobj=ifh, mode='r|')
if sys.platform == "win32":
tar = tarfile.open(fileobj=ifh, mode="r|")
tar.extractall(str(dest_dir))
args = []
else:
args = ['tar', 'xf', '-']
args = ["tar", "xf", "-"]
pipe_stdin = True
elif typ == 'zip':
elif typ == "zip":
# unzip from stdin has wonky behavior. We don't use a pipe for it.
ifh = open(os.devnull, 'rb')
args = ['unzip', '-o', str(path)]
ifh = open(os.devnull, "rb")
args = ["unzip", "-o", str(path)]
pipe_stdin = False
else:
raise ValueError('unknown archive format: %s' % path)
raise ValueError("unknown archive format: %s" % path)
if args:
with ifh, subprocess.Popen(args, cwd=str(dest_dir), bufsize=0,
stdin=subprocess.PIPE) as p:
with ifh, subprocess.Popen(
args, cwd=str(dest_dir), bufsize=0, stdin=subprocess.PIPE
) as p:
while True:
if not pipe_stdin:
break
@ -355,46 +394,50 @@ def extract_archive(path, dest_dir, typ):
p.stdin.write(chunk)
if p.returncode:
raise Exception('%r exited %d' % (args, p.returncode))
raise Exception("%r exited %d" % (args, p.returncode))
log('%s extracted in %.3fs' % (path, time.time() - t0))
log("%s extracted in %.3fs" % (path, time.time() - t0))
def repack_archive(orig: pathlib.Path, dest: pathlib.Path,
strip_components=0, prefix=''):
def repack_archive(
orig: pathlib.Path, dest: pathlib.Path, strip_components=0, prefix=""
):
assert orig != dest
log('Repacking as %s' % dest)
log("Repacking as %s" % dest)
orig_typ = archive_type(orig)
typ = archive_type(dest)
if not orig_typ:
raise Exception('Archive type not supported for %s' % orig.name)
raise Exception("Archive type not supported for %s" % orig.name)
if not typ:
raise Exception('Archive type not supported for %s' % dest.name)
raise Exception("Archive type not supported for %s" % dest.name)
if dest.suffixes[-2:] != ['.tar', '.zst']:
raise Exception('Only producing .tar.zst archives is supported.')
if dest.suffixes[-2:] != [".tar", ".zst"]:
raise Exception("Only producing .tar.zst archives is supported.")
if strip_components or prefix:
def filter(name):
if strip_components:
stripped = '/'.join(name.split('/')[strip_components:])
stripped = "/".join(name.split("/")[strip_components:])
if not stripped:
raise Exception(
'Stripping %d components would remove files'
% strip_components)
"Stripping %d components would remove files" % strip_components
)
name = stripped
return prefix + name
else:
filter = None
with rename_after_close(dest, 'wb') as fh:
with rename_after_close(dest, "wb") as fh:
ctx = ZstdCompressor()
if orig_typ == 'zip':
assert typ == 'tar'
if orig_typ == "zip":
assert typ == "tar"
zip = zipfile.ZipFile(orig)
# Convert the zip stream to a tar on the fly.
with ctx.stream_writer(fh) as compressor, \
tarfile.open(fileobj=compressor, mode='w:') as tar:
with ctx.stream_writer(fh) as compressor, tarfile.open(
fileobj=compressor, mode="w:"
) as tar:
for zipinfo in zip.infolist():
if zipinfo.is_dir():
continue
@ -408,7 +451,8 @@ def repack_archive(orig: pathlib.Path, dest: pathlib.Path,
# care about accuracy, but rather about reproducibility,
# so we pick UTC.
time = datetime.datetime(
*zipinfo.date_time, tzinfo=datetime.timezone.utc)
*zipinfo.date_time, tzinfo=datetime.timezone.utc
)
tarinfo.mtime = time.timestamp()
# 0 is MS-DOS, 3 is UNIX. Only in the latter case do we
# get anything useful for the tar file mode.
@ -424,26 +468,35 @@ def repack_archive(orig: pathlib.Path, dest: pathlib.Path,
elif stat.S_ISREG(mode) or stat.S_IFMT(mode) == 0:
tar.addfile(tarinfo, zip.open(filename))
else:
raise Exception('Unsupported file mode %o'
% stat.S_IFMT(mode))
raise Exception("Unsupported file mode %o" % stat.S_IFMT(mode))
elif orig_typ == 'tar':
if typ == 'zip':
raise Exception('Repacking a tar to zip is not supported')
assert typ == 'tar'
elif orig_typ == "tar":
if typ == "zip":
raise Exception("Repacking a tar to zip is not supported")
assert typ == "tar"
ifh = open_tar_stream(orig)
if filter:
# To apply the filter, we need to open the tar stream and
# tweak it.
origtar = tarfile.open(fileobj=ifh, mode='r|')
with ctx.stream_writer(fh) as compressor, \
tarfile.open(fileobj=compressor, mode='w:') as tar:
origtar = tarfile.open(fileobj=ifh, mode="r|")
with ctx.stream_writer(fh) as compressor, tarfile.open(
fileobj=compressor,
mode="w:",
format=origtar.format,
) as tar:
for tarinfo in origtar:
if tarinfo.isdir():
continue
tarinfo.name = filter(tarinfo.name)
tar.addfile(tarinfo, origtar.extractfile(tarinfo))
if "path" in tarinfo.pax_headers:
tarinfo.pax_headers["path"] = filter(
tarinfo.pax_headers["path"]
)
if tarinfo.isfile():
tar.addfile(tarinfo, origtar.extractfile(tarinfo))
else:
tar.addfile(tarinfo)
else:
# We only change compression here. The tar stream is unchanged.
ctx.copy_stream(ifh, fh)
@ -457,7 +510,7 @@ def fetch_and_extract(url, dest_dir, extract=True, sha256=None, size=None):
the destination directory.
"""
basename = urllib.parse.urlparse(url).path.split('/')[-1]
basename = urllib.parse.urlparse(url).path.split("/")[-1]
dest_path = dest_dir / basename
download_to_path(url, dest_path, sha256=sha256, size=size)
@ -468,7 +521,7 @@ def fetch_and_extract(url, dest_dir, extract=True, sha256=None, size=None):
typ = archive_type(dest_path)
if typ:
extract_archive(dest_path, dest_dir, typ)
log('Removing %s' % dest_path)
log("Removing %s" % dest_path)
dest_path.unlink()
@ -484,37 +537,152 @@ def fetch_urls(downloads):
f.result()
def git_checkout_archive(dest_path: pathlib.Path, repo: str, commit: str,
prefix=None):
def _git_checkout_github_archive(
dest_path: pathlib.Path, repo: str, commit: str, prefix: str
):
"Use the GitHub archive endpoint to speed up cloning of GitHub repositories"
repo = repo.rstrip("/")
github_url = "{repo}/archive/{commit}.tar.gz".format(**locals())
with tempfile.TemporaryDirectory() as td:
temp_dir = pathlib.Path(td)
dl_dest = temp_dir / "archive.tar.gz"
download_to_path(github_url, dl_dest)
repack_archive(dl_dest, dest_path, strip_components=1, prefix=prefix + "/")
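The GitHub archive endpoint accepts a tag, branch, or commit sha, so the URL built above looks like the following (values are illustrative):

repo = "https://github.com/taskcluster/taskgraph"
commit = "3.5.1"
print("{repo}/archive/{commit}.tar.gz".format(repo=repo, commit=commit))
# https://github.com/taskcluster/taskgraph/archive/3.5.1.tar.gz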
def _github_submodule_required(repo: str, commit: str):
"Use the GitHub API to check whether the repository uses submodules"
url = "{repo}/blob/{commit}/.gitmodules".format(**locals())
try:
status_code = urllib.request.urlopen(url).getcode()
return status_code == 200
except Exception:
return False
def git_checkout_archive(
dest_path: pathlib.Path,
repo: str,
commit: str,
prefix=None,
ssh_key=None,
include_dot_git=False,
):
"""Produce an archive of the files comprising a Git checkout."""
dest_path.parent.mkdir(parents=True, exist_ok=True)
if dest_path.suffixes[-2:] != ['.tar', '.zst']:
raise Exception('Only producing .tar.zst archives is supported.')
if not prefix:
prefix = repo.rstrip("/").rsplit("/", 1)[-1]
if dest_path.suffixes[-2:] != [".tar", ".zst"]:
raise Exception("Only producing .tar.zst archives is supported.")
if repo.startswith("https://github.com/"):
if not include_dot_git and not _github_submodule_required(repo, commit):
log("Using github archive service to speed up archive creation")
# Always log sha1 info, either from commit or resolved from repo.
if re.match(r"^[a-fA-F0-9]{40}$", commit):
revision = commit
else:
ref_output = subprocess.check_output(
["git", "ls-remote", repo, "refs/heads/" + commit]
)
revision, _ = ref_output.decode().split(maxsplit=1)
log("Fetching revision {}".format(revision))
return _git_checkout_github_archive(dest_path, repo, commit, prefix)
with tempfile.TemporaryDirectory() as td:
temp_dir = pathlib.Path(td)
if not prefix:
prefix = repo.rstrip('/').rsplit('/', 1)[-1]
git_dir = temp_dir / prefix
# This could be faster with a shallow clone. However, Git requires a ref
# to initiate a clone. Since the commit-ish may not refer to a ref, we
# simply perform a full clone followed by a checkout.
print('cloning %s to %s' % (repo, git_dir))
subprocess.run(['git', 'clone', '--recurse-submodules', repo, str(git_dir)],
check=True)
print("cloning %s to %s" % (repo, git_dir))
subprocess.run(['git', 'checkout', '--recurse-submodules', commit],
cwd=str(git_dir), check=True)
env = os.environ.copy()
keypath = ""
if ssh_key:
taskcluster_secret_url = api(
os.environ.get("TASKCLUSTER_PROXY_URL"),
"secrets",
"v1",
"secret/{keypath}".format(keypath=ssh_key),
)
taskcluster_secret = b"".join(stream_download(taskcluster_secret_url))
taskcluster_secret = json.loads(taskcluster_secret)
sshkey = taskcluster_secret["secret"]["ssh_privkey"]
print('creating archive %s of commit %s' % (dest_path, commit))
proc = subprocess.Popen([
'tar', 'cf', '-', '--exclude=.git', '-C', str(temp_dir), prefix,
], stdout=subprocess.PIPE)
keypath = temp_dir.joinpath("ssh-key")
keypath.write_text(sshkey)
keypath.chmod(0o600)
with rename_after_close(dest_path, 'wb') as out:
env = {
"GIT_SSH_COMMAND": "ssh -o 'StrictHostKeyChecking no' -i {keypath}".format(
keypath=keypath
)
}
subprocess.run(["git", "clone", "-n", repo, str(git_dir)], check=True, env=env)
# Always use a detached head so that git prints out what it checked out.
subprocess.run(
["git", "checkout", "--detach", commit], cwd=str(git_dir), check=True
)
# When including the .git, we want --depth 1, but a direct clone would not
# necessarily be able to give us the right commit.
if include_dot_git:
initial_clone = git_dir.with_name(git_dir.name + ".orig")
git_dir.rename(initial_clone)
subprocess.run(
[
"git",
"clone",
"file://" + str(initial_clone),
str(git_dir),
"--depth",
"1",
],
check=True,
)
subprocess.run(
["git", "remote", "set-url", "origin", repo],
cwd=str(git_dir),
check=True,
)
# --depth 1 can induce more work on the server side, so only use it for
# submodule initialization when we want to keep the .git directory.
depth = ["--depth", "1"] if include_dot_git else []
subprocess.run(
["git", "submodule", "update", "--init"] + depth,
cwd=str(git_dir),
check=True,
)
if keypath:
os.remove(keypath)
print("creating archive %s of commit %s" % (dest_path, commit))
exclude_dot_git = [] if include_dot_git else ["--exclude=.git"]
proc = subprocess.Popen(
[
"tar",
"cf",
"-",
]
+ exclude_dot_git
+ [
"-C",
str(temp_dir),
prefix,
],
stdout=subprocess.PIPE,
)
with rename_after_close(dest_path, "wb") as out:
ctx = ZstdCompressor()
ctx.copy_stream(proc.stdout, out)
@ -525,8 +693,14 @@ def command_git_checkout_archive(args):
dest = pathlib.Path(args.dest)
try:
git_checkout_archive(dest, args.repo, args.commit,
prefix=args.path_prefix)
git_checkout_archive(
dest,
args.repo,
args.commit,
prefix=args.path_prefix,
ssh_key=args.ssh_key_secret,
include_dot_git=args.include_dot_git,
)
except Exception:
try:
dest.unlink()
@ -541,25 +715,26 @@ def command_static_url(args):
gpg_env_key = args.gpg_key_env
if bool(gpg_sig_url) != bool(gpg_env_key):
print('--gpg-sig-url and --gpg-key-env must both be defined')
print("--gpg-sig-url and --gpg-key-env must both be defined")
return 1
if gpg_sig_url:
gpg_signature = b''.join(stream_download(gpg_sig_url))
gpg_key = os.environb[gpg_env_key.encode('ascii')]
gpg_signature = b"".join(stream_download(gpg_sig_url))
gpg_key = os.environb[gpg_env_key.encode("ascii")]
dest = pathlib.Path(args.dest)
dest.parent.mkdir(parents=True, exist_ok=True)
basename = urllib.parse.urlparse(args.url).path.split('/')[-1]
if basename.endswith(''.join(dest.suffixes)):
basename = urllib.parse.urlparse(args.url).path.split("/")[-1]
if basename.endswith("".join(dest.suffixes)):
dl_dest = dest
else:
dl_dest = dest.parent / basename
try:
download_to_path(args.url, dl_dest, sha256=args.sha256, size=args.size,
headers=args.headers)
download_to_path(
args.url, dl_dest, sha256=args.sha256, size=args.size, headers=args.headers
)
if gpg_sig_url:
gpg_verify_path(dl_dest, gpg_key, gpg_signature)
@ -575,112 +750,150 @@ def command_static_url(args):
raise
if dl_dest != dest:
log('Removing %s' % dl_dest)
log("Removing %s" % dl_dest)
dl_dest.unlink()
def api(root_url, service, version, path):
# taskcluster-lib-urls is not available when this script runs, so
# simulate its behavior:
if root_url == 'https://taskcluster.net':
return 'https://{service}.taskcluster.net/{version}/{path}'.format(
service=service, version=version, path=path)
return '{root_url}/api/{service}/{version}/{path}'.format(
root_url=root_url, service=service, version=version, path=path)
return "{root_url}/api/{service}/{version}/{path}".format(
root_url=root_url, service=service, version=version, path=path
)
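For any non-legacy deployment the second branch applies, e.g. (root URL and task id are illustrative):

url = api(
    "https://firefox-ci-tc.services.mozilla.com",
    "queue",
    "v1",
    "task/abc123/artifacts/public/build.tar.zst",
)
# -> https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/abc123/artifacts/public/build.tar.zst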
def get_hash(fetch, root_url):
path = "task/{task}/artifacts/{artifact}".format(
task=fetch["task"], artifact="public/chain-of-trust.json"
)
url = api(root_url, "queue", "v1", path)
cot = json.loads(download_to_memory(url))
return cot["artifacts"][fetch["artifact"]]["sha256"]
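# The chain-of-trust.json read above maps artifact names to digests; a
# trimmed, illustrative sketch (real documents carry more fields):
#
#     {"artifacts": {"public/build/target.tar.zst": {"sha256": "3f5a..."}}}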
def command_task_artifacts(args):
start = time.monotonic()
fetches = json.loads(os.environ['MOZ_FETCHES'])
fetches = json.loads(os.environ["MOZ_FETCHES"])
downloads = []
for fetch in fetches:
extdir = pathlib.Path(args.dest)
if 'dest' in fetch:
extdir = extdir.joinpath(fetch['dest'])
if "dest" in fetch:
# Note: normpath doesn't like pathlib.Path in python 3.5
extdir = pathlib.Path(os.path.normpath(str(extdir.joinpath(fetch["dest"]))))
extdir.mkdir(parents=True, exist_ok=True)
root_url = os.environ['TASKCLUSTER_ROOT_URL']
if fetch['artifact'].startswith('public/'):
path = 'task/{task}/artifacts/{artifact}'.format(
task=fetch['task'], artifact=fetch['artifact'])
url = api(root_url, 'queue', 'v1', path)
root_url = os.environ["TASKCLUSTER_ROOT_URL"]
sha256 = None
if fetch.get("verify-hash"):
sha256 = get_hash(fetch, root_url)
if fetch["artifact"].startswith("public/"):
path = "task/{task}/artifacts/{artifact}".format(
task=fetch["task"], artifact=fetch["artifact"]
)
url = api(root_url, "queue", "v1", path)
else:
url = ('{proxy_url}/api/queue/v1/task/{task}/artifacts/{artifact}').format(
proxy_url=os.environ['TASKCLUSTER_PROXY_URL'],
task=fetch['task'],
artifact=fetch['artifact'])
downloads.append((url, extdir, fetch['extract']))
url = ("{proxy_url}/api/queue/v1/task/{task}/artifacts/{artifact}").format(
proxy_url=os.environ["TASKCLUSTER_PROXY_URL"],
task=fetch["task"],
artifact=fetch["artifact"],
)
downloads.append((url, extdir, fetch["extract"], sha256))
fetch_urls(downloads)
end = time.monotonic()
perfherder_data = {
'framework': {'name': 'build_metrics'},
'suites': [{
'name': 'fetch_content',
'value': end - start,
'lowerIsBetter': True,
'shouldAlert': False,
'subtests': [],
}],
"framework": {"name": "build_metrics"},
"suites": [
{
"name": "fetch_content",
"value": end - start,
"lowerIsBetter": True,
"shouldAlert": False,
"subtests": [],
}
],
}
print('PERFHERDER_DATA: {}'.format(json.dumps(perfherder_data)), file=sys.stderr)
print("PERFHERDER_DATA: {}".format(json.dumps(perfherder_data)), file=sys.stderr)
def main():
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(title='sub commands')
subparsers = parser.add_subparsers(title="sub commands")
git_checkout = subparsers.add_parser(
'git-checkout-archive',
help='Obtain an archive of files from a Git repository checkout')
"git-checkout-archive",
help="Obtain an archive of files from a Git repository checkout",
)
git_checkout.set_defaults(func=command_git_checkout_archive)
git_checkout.add_argument('--path-prefix',
help='Prefix for paths in produced archive')
git_checkout.add_argument('repo',
help='URL to Git repository to be cloned')
git_checkout.add_argument('commit',
help='Git commit to check out')
git_checkout.add_argument('dest',
help='Destination path of archive')
git_checkout.add_argument(
"--path-prefix", help="Prefix for paths in produced archive"
)
git_checkout.add_argument("repo", help="URL to Git repository to be cloned")
git_checkout.add_argument("commit", help="Git commit to check out")
git_checkout.add_argument("dest", help="Destination path of archive")
git_checkout.add_argument(
"--ssh-key-secret", help="The scope path of the ssh key to used for checkout"
)
git_checkout.add_argument(
"--include-dot-git", action="store_true", help="Include the .git directory"
)
url = subparsers.add_parser('static-url', help='Download a static URL')
url = subparsers.add_parser("static-url", help="Download a static URL")
url.set_defaults(func=command_static_url)
url.add_argument('--sha256', required=True,
help='SHA-256 of downloaded content')
url.add_argument('--size', required=True, type=int,
help='Size of downloaded content, in bytes')
url.add_argument('--gpg-sig-url',
help='URL containing signed GPG document validating '
'URL to fetch')
url.add_argument('--gpg-key-env',
help='Environment variable containing GPG key to validate')
url.add_argument('--strip-components', type=int, default=0,
help='Number of leading components to strip from file '
'names in the downloaded archive')
url.add_argument('--add-prefix', default='',
help='Prefix to add to file names in the downloaded '
'archive')
url.add_argument('-H', '--header', default=[], action='append', dest='headers',
help='Header to send as part of the request, can be passed '
'multiple times')
url.add_argument('url', help='URL to fetch')
url.add_argument('dest', help='Destination path')
url.add_argument("--sha256", required=True, help="SHA-256 of downloaded content")
url.add_argument(
"--size", required=True, type=int, help="Size of downloaded content, in bytes"
)
url.add_argument(
"--gpg-sig-url",
help="URL containing signed GPG document validating " "URL to fetch",
)
url.add_argument(
"--gpg-key-env", help="Environment variable containing GPG key to validate"
)
url.add_argument(
"--strip-components",
type=int,
default=0,
help="Number of leading components to strip from file "
"names in the downloaded archive",
)
url.add_argument(
"--add-prefix",
default="",
help="Prefix to add to file names in the downloaded " "archive",
)
url.add_argument(
"-H",
"--header",
default=[],
action="append",
dest="headers",
help="Header to send as part of the request, can be passed " "multiple times",
)
url.add_argument("url", help="URL to fetch")
url.add_argument("dest", help="Destination path")
artifacts = subparsers.add_parser('task-artifacts',
help='Fetch task artifacts')
artifacts = subparsers.add_parser("task-artifacts", help="Fetch task artifacts")
artifacts.set_defaults(func=command_task_artifacts)
artifacts.add_argument('-d', '--dest', default=os.environ.get('MOZ_FETCHES_DIR'),
help='Destination directory which will contain all '
'artifacts (defaults to $MOZ_FETCHES_DIR)')
artifacts.add_argument(
"-d",
"--dest",
default=os.environ.get("MOZ_FETCHES_DIR"),
help="Destination directory which will contain all "
"artifacts (defaults to $MOZ_FETCHES_DIR)",
)
args = parser.parse_args()
if not args.dest:
parser.error('no destination directory specified, either pass in --dest '
'or set $MOZ_FETCHES_DIR')
parser.error(
"no destination directory specified, either pass in --dest "
"or set $MOZ_FETCHES_DIR"
)
return args.func(args)
if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main())
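# Typical invocations, as a sketch (the installed script name and all paths
# are assumptions):
#
#     fetch-content task-artifacts --dest /builds/worker/fetches
#     fetch-content static-url --sha256 <hex> --size 1234 \
#         https://example.com/pkg.tar.zst /builds/worker/fetches/pkg.tar.zst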


@@ -9,6 +9,7 @@ from a source repo using best practices to ensure optimal clone
times and storage efficiency.
"""
from __future__ import absolute_import
import contextlib
import json
@@ -40,8 +41,8 @@ from mercurial import (
# Causes worker to purge caches on process exit and for task to retry.
EXIT_PURGE_CACHE = 72
testedwith = b'4.5 4.6 4.7 4.8 4.9 5.0 5.1 5.2 5.3 5.4 5.5'
minimumhgversion = b'4.5'
testedwith = b"4.5 4.6 4.7 4.8 4.9 5.0 5.1 5.2 5.3 5.4 5.5 5.6 5.7 5.8 5.9"
minimumhgversion = b"4.5"
cmdtable = {}
command = registrar.command(cmdtable)
@@ -49,41 +50,60 @@ command = registrar.command(cmdtable)
configtable = {}
configitem = registrar.configitem(configtable)
configitem(b'robustcheckout', b'retryjittermin', default=configitems.dynamicdefault)
configitem(b'robustcheckout', b'retryjittermax', default=configitems.dynamicdefault)
configitem(b"robustcheckout", b"retryjittermin", default=configitems.dynamicdefault)
configitem(b"robustcheckout", b"retryjittermax", default=configitems.dynamicdefault)
def getsparse():
from mercurial import sparse
return sparse
def peerlookup(remote, v):
# TRACKING hg46 4.6 added commandexecutor API.
if util.safehasattr(remote, 'commandexecutor'):
with remote.commandexecutor() as e:
return e.callcommand(b'lookup', {b'key': v}).result()
else:
return remote.lookup(v)
with remote.commandexecutor() as e:
return e.callcommand(b"lookup", {b"key": v}).result()
@command(b'robustcheckout', [
(b'', b'upstream', b'', b'URL of upstream repo to clone from'),
(b'r', b'revision', b'', b'Revision to check out'),
(b'b', b'branch', b'', b'Branch to check out'),
(b'', b'purge', False, b'Whether to purge the working directory'),
(b'', b'sharebase', b'', b'Directory where shared repos should be placed'),
(b'', b'networkattempts', 3, b'Maximum number of attempts for network '
b'operations'),
(b'', b'sparseprofile', b'', b'Sparse checkout profile to use (path in repo)'),
(b'U', b'noupdate', False, b'the clone will include an empty working directory\n'
b'(only a repository)'),
@command(
b"robustcheckout",
[
(b"", b"upstream", b"", b"URL of upstream repo to clone from"),
(b"r", b"revision", b"", b"Revision to check out"),
(b"b", b"branch", b"", b"Branch to check out"),
(b"", b"purge", False, b"Whether to purge the working directory"),
(b"", b"sharebase", b"", b"Directory where shared repos should be placed"),
(
b"",
b"networkattempts",
3,
b"Maximum number of attempts for network " b"operations",
),
(b"", b"sparseprofile", b"", b"Sparse checkout profile to use (path in repo)"),
(
b"U",
b"noupdate",
False,
b"the clone will include an empty working directory\n"
b"(only a repository)",
),
],
b'[OPTION]... URL DEST',
norepo=True)
def robustcheckout(ui, url, dest, upstream=None, revision=None, branch=None,
purge=False, sharebase=None, networkattempts=None,
sparseprofile=None, noupdate=False):
b"[OPTION]... URL DEST",
norepo=True,
)
def robustcheckout(
ui,
url,
dest,
upstream=None,
revision=None,
branch=None,
purge=False,
sharebase=None,
networkattempts=None,
sparseprofile=None,
noupdate=False,
):
"""Ensure a working copy has the specified revision checked out.
Repository data is automatically pooled into the common directory
@@ -115,21 +135,28 @@ def robustcheckout(ui, url, dest, upstream=None, revision=None, branch=None,
4.3 or newer and the ``sparse`` extension must be enabled.
"""
if not revision and not branch:
raise error.Abort(b'must specify one of --revision or --branch')
raise error.Abort(b"must specify one of --revision or --branch")
if revision and branch:
raise error.Abort(b'cannot specify both --revision and --branch')
raise error.Abort(b"cannot specify both --revision and --branch")
# Require revision to look like a SHA-1.
if revision:
if len(revision) < 12 or len(revision) > 40 or not re.match(b'^[a-f0-9]+$', revision):
raise error.Abort(b'--revision must be a SHA-1 fragment 12-40 '
b'characters long')
if (
len(revision) < 12
or len(revision) > 40
or not re.match(b"^[a-f0-9]+$", revision)
):
raise error.Abort(
b"--revision must be a SHA-1 fragment 12-40 " b"characters long"
)
sharebase = sharebase or ui.config(b'share', b'pool')
sharebase = sharebase or ui.config(b"share", b"pool")
if not sharebase:
raise error.Abort(b'share base directory not defined; refusing to operate',
hint=b'define share.pool config option or pass --sharebase')
raise error.Abort(
b"share base directory not defined; refusing to operate",
hint=b"define share.pool config option or pass --sharebase",
)
# Sparse profile support was added in Mercurial 4.3, where it was highly
# experimental. Because of the fragility of it, we only support sparse
@@ -139,16 +166,17 @@ def robustcheckout(ui, url, dest, upstream=None, revision=None, branch=None,
# fast if we can't satisfy the desired checkout request.
if sparseprofile:
try:
extensions.find(b'sparse')
extensions.find(b"sparse")
except KeyError:
raise error.Abort(b'sparse extension must be enabled to use '
b'--sparseprofile')
raise error.Abort(
b"sparse extension must be enabled to use " b"--sparseprofile"
)
ui.warn(b'(using Mercurial %s)\n' % util.version())
ui.warn(b"(using Mercurial %s)\n" % util.version())
# worker.backgroundclose only makes things faster if running anti-virus,
# which our automation doesn't. Disable it.
ui.setconfig(b'worker', b'backgroundclose', False)
ui.setconfig(b"worker", b"backgroundclose", False)
# By default the progress bar starts after 3s and updates every 0.1s. We
# change this so it shows and updates every 1.0s.
@@ -156,9 +184,9 @@ def robustcheckout(ui, url, dest, upstream=None, revision=None, branch=None,
# even if there is no known TTY.
# We make the config change here instead of in a config file because
# otherwise we're at the whim of whatever configs are used in automation.
ui.setconfig(b'progress', b'delay', 1.0)
ui.setconfig(b'progress', b'refresh', 1.0)
ui.setconfig(b'progress', b'assume-tty', True)
ui.setconfig(b"progress", b"delay", 1.0)
ui.setconfig(b"progress", b"refresh", 1.0)
ui.setconfig(b"progress", b"assume-tty", True)
sharebase = os.path.realpath(sharebase)
@@ -167,9 +195,21 @@ def robustcheckout(ui, url, dest, upstream=None, revision=None, branch=None,
start = time.time()
try:
return _docheckout(ui, url, dest, upstream, revision, branch, purge,
sharebase, optimes, behaviors, networkattempts,
sparse_profile=sparseprofile, noupdate=noupdate)
return _docheckout(
ui,
url,
dest,
upstream,
revision,
branch,
purge,
sharebase,
optimes,
behaviors,
networkattempts,
sparse_profile=sparseprofile,
noupdate=noupdate,
)
finally:
overall = time.time() - start
@@ -177,89 +217,118 @@ def robustcheckout(ui, url, dest, upstream=None, revision=None, branch=None,
# the various "flavors" of operations.
# ``overall`` is always the total operation time.
optimes.append(('overall', overall))
optimes.append(("overall", overall))
def record_op(name):
# If special behaviors due to "corrupt" storage occur, we vary the
# name to convey that.
if 'remove-store' in behaviors:
name += '_rmstore'
if 'remove-wdir' in behaviors:
name += '_rmwdir'
if "remove-store" in behaviors:
name += "_rmstore"
if "remove-wdir" in behaviors:
name += "_rmwdir"
optimes.append((name, overall))
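        # e.g. an "overall_pull" during which the shared store had to be
        # deleted is recorded as "overall_pull_rmstore".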
# We break out overall operations primarily by their network interaction
# We have variants within for working directory operations.
if 'clone' in behaviors and 'create-store' in behaviors:
record_op('overall_clone')
if "clone" in behaviors and "create-store" in behaviors:
record_op("overall_clone")
if 'sparse-update' in behaviors:
record_op('overall_clone_sparsecheckout')
if "sparse-update" in behaviors:
record_op("overall_clone_sparsecheckout")
else:
record_op('overall_clone_fullcheckout')
record_op("overall_clone_fullcheckout")
elif 'pull' in behaviors or 'clone' in behaviors:
record_op('overall_pull')
elif "pull" in behaviors or "clone" in behaviors:
record_op("overall_pull")
if 'sparse-update' in behaviors:
record_op('overall_pull_sparsecheckout')
if "sparse-update" in behaviors:
record_op("overall_pull_sparsecheckout")
else:
record_op('overall_pull_fullcheckout')
record_op("overall_pull_fullcheckout")
if 'empty-wdir' in behaviors:
record_op('overall_pull_emptywdir')
if "empty-wdir" in behaviors:
record_op("overall_pull_emptywdir")
else:
record_op('overall_pull_populatedwdir')
record_op("overall_pull_populatedwdir")
else:
record_op('overall_nopull')
record_op("overall_nopull")
if 'sparse-update' in behaviors:
record_op('overall_nopull_sparsecheckout')
if "sparse-update" in behaviors:
record_op("overall_nopull_sparsecheckout")
else:
record_op('overall_nopull_fullcheckout')
record_op("overall_nopull_fullcheckout")
if 'empty-wdir' in behaviors:
record_op('overall_nopull_emptywdir')
if "empty-wdir" in behaviors:
record_op("overall_nopull_emptywdir")
else:
record_op('overall_nopull_populatedwdir')
record_op("overall_nopull_populatedwdir")
server_url = urllibcompat.urlreq.urlparse(url).netloc
if 'TASKCLUSTER_INSTANCE_TYPE' in os.environ:
if "TASKCLUSTER_INSTANCE_TYPE" in os.environ:
perfherder = {
'framework': {
'name': 'vcs',
"framework": {
"name": "vcs",
},
'suites': [],
"suites": [],
}
for op, duration in optimes:
perfherder['suites'].append({
'name': op,
'value': duration,
'lowerIsBetter': True,
'shouldAlert': False,
'serverUrl': server_url.decode('utf-8'),
'hgVersion': util.version().decode('utf-8'),
'extraOptions': [os.environ['TASKCLUSTER_INSTANCE_TYPE']],
'subtests': [],
})
ui.write(b'PERFHERDER_DATA: %s\n' %
pycompat.bytestr(json.dumps(perfherder, sort_keys=True)))
perfherder["suites"].append(
{
"name": op,
"value": duration,
"lowerIsBetter": True,
"shouldAlert": False,
"serverUrl": server_url.decode("utf-8"),
"hgVersion": util.version().decode("utf-8"),
"extraOptions": [os.environ["TASKCLUSTER_INSTANCE_TYPE"]],
"subtests": [],
}
)
ui.write(
b"PERFHERDER_DATA: %s\n"
% pycompat.bytestr(json.dumps(perfherder, sort_keys=True))
)
def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
optimes, behaviors, networkattemptlimit, networkattempts=None,
sparse_profile=None, noupdate=False):
def _docheckout(
ui,
url,
dest,
upstream,
revision,
branch,
purge,
sharebase,
optimes,
behaviors,
networkattemptlimit,
networkattempts=None,
sparse_profile=None,
noupdate=False,
):
if not networkattempts:
networkattempts = [1]
def callself():
return _docheckout(ui, url, dest, upstream, revision, branch, purge,
sharebase, optimes, behaviors, networkattemptlimit,
networkattempts=networkattempts,
sparse_profile=sparse_profile,
noupdate=noupdate)
return _docheckout(
ui,
url,
dest,
upstream,
revision,
branch,
purge,
sharebase,
optimes,
behaviors,
networkattemptlimit,
networkattempts=networkattempts,
sparse_profile=sparse_profile,
noupdate=noupdate,
)
@contextlib.contextmanager
def timeit(op, behavior):
@@ -275,12 +344,11 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
elapsed = time.time() - start
if errored:
op += '_errored'
op += "_errored"
optimes.append((op, elapsed))
ui.write(b'ensuring %s@%s is available at %s\n' % (url, revision or branch,
dest))
ui.write(b"ensuring %s@%s is available at %s\n" % (url, revision or branch, dest))
# We assume that we're the only process on the machine touching the
# repository paths that we were told to use. This means our recovery
@@ -293,70 +361,75 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
destvfs = vfs.vfs(dest, audit=False, realpath=True)
def deletesharedstore(path=None):
storepath = path or destvfs.read(b'.hg/sharedpath').strip()
if storepath.endswith(b'.hg'):
storepath = path or destvfs.read(b".hg/sharedpath").strip()
if storepath.endswith(b".hg"):
storepath = os.path.dirname(storepath)
storevfs = vfs.vfs(storepath, audit=False)
storevfs.rmtree(forcibly=True)
if destvfs.exists() and not destvfs.exists(b'.hg'):
raise error.Abort(b'destination exists but no .hg directory')
if destvfs.exists() and not destvfs.exists(b".hg"):
raise error.Abort(b"destination exists but no .hg directory")
# Refuse to enable sparse checkouts on existing checkouts. The reasoning
# here is that another consumer of this repo may not be sparse aware. If we
# enabled sparse, we would lock them out.
if destvfs.exists() and sparse_profile and not destvfs.exists(b'.hg/sparse'):
raise error.Abort(b'cannot enable sparse profile on existing '
b'non-sparse checkout',
hint=b'use a separate working directory to use sparse')
if destvfs.exists() and sparse_profile and not destvfs.exists(b".hg/sparse"):
raise error.Abort(
b"cannot enable sparse profile on existing " b"non-sparse checkout",
hint=b"use a separate working directory to use sparse",
)
# And the other direction for symmetry.
if not sparse_profile and destvfs.exists(b'.hg/sparse'):
raise error.Abort(b'cannot use non-sparse checkout on existing sparse '
b'checkout',
hint=b'use a separate working directory to use sparse')
if not sparse_profile and destvfs.exists(b".hg/sparse"):
raise error.Abort(
b"cannot use non-sparse checkout on existing sparse " b"checkout",
hint=b"use a separate working directory to use sparse",
)
# Require checkouts to be tied to shared storage because efficiency.
if destvfs.exists(b'.hg') and not destvfs.exists(b'.hg/sharedpath'):
ui.warn(b'(destination is not shared; deleting)\n')
with timeit('remove_unshared_dest', 'remove-wdir'):
if destvfs.exists(b".hg") and not destvfs.exists(b".hg/sharedpath"):
ui.warn(b"(destination is not shared; deleting)\n")
with timeit("remove_unshared_dest", "remove-wdir"):
destvfs.rmtree(forcibly=True)
# Verify the shared path exists and is using modern pooled storage.
if destvfs.exists(b'.hg/sharedpath'):
storepath = destvfs.read(b'.hg/sharedpath').strip()
if destvfs.exists(b".hg/sharedpath"):
storepath = destvfs.read(b".hg/sharedpath").strip()
ui.write(b'(existing repository shared store: %s)\n' % storepath)
ui.write(b"(existing repository shared store: %s)\n" % storepath)
if not os.path.exists(storepath):
ui.warn(b'(shared store does not exist; deleting destination)\n')
with timeit('removed_missing_shared_store', 'remove-wdir'):
ui.warn(b"(shared store does not exist; deleting destination)\n")
with timeit("removed_missing_shared_store", "remove-wdir"):
destvfs.rmtree(forcibly=True)
elif not re.search(br'[a-f0-9]{40}/\.hg$', storepath.replace(b'\\', b'/')):
ui.warn(b'(shared store does not belong to pooled storage; '
b'deleting destination to improve efficiency)\n')
with timeit('remove_unpooled_store', 'remove-wdir'):
elif not re.search(b"[a-f0-9]{40}/\.hg$", storepath.replace(b"\\", b"/")):
ui.warn(
b"(shared store does not belong to pooled storage; "
b"deleting destination to improve efficiency)\n"
)
with timeit("remove_unpooled_store", "remove-wdir"):
destvfs.rmtree(forcibly=True)
if destvfs.isfileorlink(b'.hg/wlock'):
ui.warn(b'(dest has an active working directory lock; assuming it is '
b'left over from a previous process and that the destination '
b'is corrupt; deleting it just to be sure)\n')
with timeit('remove_locked_wdir', 'remove-wdir'):
if destvfs.isfileorlink(b".hg/wlock"):
ui.warn(
b"(dest has an active working directory lock; assuming it is "
b"left over from a previous process and that the destination "
b"is corrupt; deleting it just to be sure)\n"
)
with timeit("remove_locked_wdir", "remove-wdir"):
destvfs.rmtree(forcibly=True)
def handlerepoerror(e):
if pycompat.bytestr(e) == _(b'abandoned transaction found'):
ui.warn(b'(abandoned transaction found; trying to recover)\n')
if pycompat.bytestr(e) == _(b"abandoned transaction found"):
ui.warn(b"(abandoned transaction found; trying to recover)\n")
repo = hg.repository(ui, dest)
if not repo.recover():
ui.warn(b'(could not recover repo state; '
b'deleting shared store)\n')
with timeit('remove_unrecovered_shared_store', 'remove-store'):
ui.warn(b"(could not recover repo state; " b"deleting shared store)\n")
with timeit("remove_unrecovered_shared_store", "remove-store"):
deletesharedstore()
ui.warn(b'(attempting checkout from beginning)\n')
ui.warn(b"(attempting checkout from beginning)\n")
return callself()
raise
@@ -366,11 +439,14 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
def handlenetworkfailure():
if networkattempts[0] >= networkattemptlimit:
raise error.Abort(b'reached maximum number of network attempts; '
b'giving up\n')
raise error.Abort(
b"reached maximum number of network attempts; " b"giving up\n"
)
ui.warn(b'(retrying after network failure on attempt %d of %d)\n' %
(networkattempts[0], networkattemptlimit))
ui.warn(
b"(retrying after network failure on attempt %d of %d)\n"
% (networkattempts[0], networkattemptlimit)
)
# Do a backoff on retries to mitigate the thundering herd
        # problem. This is an exponential backoff with a multiplier
@@ -380,10 +456,10 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
# 2) 5.5 - 9.5
# 3) 11.5 - 15.5
backoff = (2 ** networkattempts[0] - 1) * 1.5
jittermin = ui.configint(b'robustcheckout', b'retryjittermin', 1000)
jittermax = ui.configint(b'robustcheckout', b'retryjittermax', 5000)
jittermin = ui.configint(b"robustcheckout", b"retryjittermin", 1000)
jittermax = ui.configint(b"robustcheckout", b"retryjittermax", 5000)
backoff += float(random.randint(jittermin, jittermax)) / 1000.0
ui.warn(b'(waiting %.2fs before retry)\n' % backoff)
ui.warn(b"(waiting %.2fs before retry)\n" % backoff)
time.sleep(backoff)
networkattempts[0] += 1
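        # Worked values for the backoff above, with the default 1.0-5.0s jitter:
        #   attempt 1: (2**1 - 1) * 1.5 = 1.5  -> sleeps 2.5 - 6.5s
        #   attempt 2: (2**2 - 1) * 1.5 = 4.5  -> sleeps 5.5 - 9.5s
        #   attempt 3: (2**3 - 1) * 1.5 = 10.5 -> sleeps 11.5 - 15.5s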
@@ -394,19 +470,19 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
Returns True if caller should call ``callself()`` to retry.
"""
if isinstance(e, error.Abort):
if e.args[0] == _(b'repository is unrelated'):
ui.warn(b'(repository is unrelated; deleting)\n')
if e.args[0] == _(b"repository is unrelated"):
ui.warn(b"(repository is unrelated; deleting)\n")
destvfs.rmtree(forcibly=True)
return True
elif e.args[0].startswith(_(b'stream ended unexpectedly')):
ui.warn(b'%s\n' % e.args[0])
elif e.args[0].startswith(_(b"stream ended unexpectedly")):
ui.warn(b"%s\n" % e.args[0])
# Will raise if failure limit reached.
handlenetworkfailure()
return True
# TODO test this branch
elif isinstance(e, error.ResponseError):
if e.args[0].startswith(_(b'unexpected response from remote server:')):
ui.warn(b'(unexpected response from remote server; retrying)\n')
if e.args[0].startswith(_(b"unexpected response from remote server:")):
ui.warn(b"(unexpected response from remote server; retrying)\n")
destvfs.rmtree(forcibly=True)
# Will raise if failure limit reached.
handlenetworkfailure()
@@ -415,20 +491,28 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
# Assume all SSL errors are due to the network, as Mercurial
# should convert non-transport errors like cert validation failures
# to error.Abort.
ui.warn(b'ssl error: %s\n' % e)
ui.warn(b"ssl error: %s\n" % pycompat.bytestr(str(e)))
handlenetworkfailure()
return True
elif isinstance(e, urllibcompat.urlerr.urlerror):
if isinstance(e.reason, socket.error):
ui.warn(b'socket error: %s\n' % pycompat.bytestr(e.reason))
ui.warn(b"socket error: %s\n" % pycompat.bytestr(str(e.reason)))
handlenetworkfailure()
return True
else:
ui.warn(b'unhandled URLError; reason type: %s; value: %s\n' % (
e.reason.__class__.__name__, e.reason))
ui.warn(
b"unhandled URLError; reason type: %s; value: %s\n"
% (
pycompat.bytestr(e.reason.__class__.__name__),
pycompat.bytestr(str(e.reason)),
)
)
else:
ui.warn(b'unhandled exception during network operation; type: %s; '
b'value: %s\n' % (e.__class__.__name__, e))
ui.warn(
b"unhandled exception during network operation; type: %s; "
b"value: %s\n"
% (pycompat.bytestr(e.__class__.__name__), pycompat.bytestr(str(e)))
)
return False
@@ -440,59 +524,69 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
try:
clonepeer = hg.peer(ui, {}, cloneurl)
rootnode = peerlookup(clonepeer, b'0')
rootnode = peerlookup(clonepeer, b"0")
except error.RepoLookupError:
raise error.Abort(b'unable to resolve root revision from clone '
b'source')
raise error.Abort(b"unable to resolve root revision from clone " b"source")
except (error.Abort, ssl.SSLError, urllibcompat.urlerr.urlerror) as e:
if handlepullerror(e):
return callself()
raise
if rootnode == nullid:
raise error.Abort(b'source repo appears to be empty')
raise error.Abort(b"source repo appears to be empty")
storepath = os.path.join(sharebase, hex(rootnode))
storevfs = vfs.vfs(storepath, audit=False)
if storevfs.isfileorlink(b'.hg/store/lock'):
ui.warn(b'(shared store has an active lock; assuming it is left '
b'over from a previous process and that the store is '
b'corrupt; deleting store and destination just to be '
b'sure)\n')
if storevfs.isfileorlink(b".hg/store/lock"):
ui.warn(
b"(shared store has an active lock; assuming it is left "
b"over from a previous process and that the store is "
b"corrupt; deleting store and destination just to be "
b"sure)\n"
)
if destvfs.exists():
with timeit('remove_dest_active_lock', 'remove-wdir'):
with timeit("remove_dest_active_lock", "remove-wdir"):
destvfs.rmtree(forcibly=True)
with timeit('remove_shared_store_active_lock', 'remove-store'):
with timeit("remove_shared_store_active_lock", "remove-store"):
storevfs.rmtree(forcibly=True)
if storevfs.exists() and not storevfs.exists(b'.hg/requires'):
ui.warn(b'(shared store missing requires file; this is a really '
b'odd failure; deleting store and destination)\n')
if storevfs.exists() and not storevfs.exists(b".hg/requires"):
ui.warn(
b"(shared store missing requires file; this is a really "
b"odd failure; deleting store and destination)\n"
)
if destvfs.exists():
with timeit('remove_dest_no_requires', 'remove-wdir'):
with timeit("remove_dest_no_requires", "remove-wdir"):
destvfs.rmtree(forcibly=True)
with timeit('remove_shared_store_no_requires', 'remove-store'):
with timeit("remove_shared_store_no_requires", "remove-store"):
storevfs.rmtree(forcibly=True)
if storevfs.exists(b'.hg/requires'):
requires = set(storevfs.read(b'.hg/requires').splitlines())
if storevfs.exists(b".hg/requires"):
requires = set(storevfs.read(b".hg/requires").splitlines())
# "share-safe" (enabled by default as of hg 6.1) moved most
# requirements to a new file, so we need to look there as well to avoid
# deleting and re-cloning each time
if b"share-safe" in requires:
requires |= set(storevfs.read(b".hg/store/requires").splitlines())
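        # Illustratively (hedged; exact requirement sets vary by Mercurial
        # version):
        #   .hg/requires       -> {"share-safe", ...}
        #   .hg/store/requires -> {"dotencode", "fncache", "generaldelta", ...}
        # hence the union before the dotencode/fncache check below.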
# FUTURE when we require generaldelta, this is where we can check
# for that.
required = {b'dotencode', b'fncache'}
required = {b"dotencode", b"fncache"}
missing = required - requires
if missing:
ui.warn(b'(shared store missing requirements: %s; deleting '
b'store and destination to ensure optimal behavior)\n' %
b', '.join(sorted(missing)))
ui.warn(
b"(shared store missing requirements: %s; deleting "
b"store and destination to ensure optimal behavior)\n"
% b", ".join(sorted(missing))
)
if destvfs.exists():
with timeit('remove_dest_missing_requires', 'remove-wdir'):
with timeit("remove_dest_missing_requires", "remove-wdir"):
destvfs.rmtree(forcibly=True)
with timeit('remove_shared_store_missing_requires', 'remove-store'):
with timeit("remove_shared_store_missing_requires", "remove-store"):
storevfs.rmtree(forcibly=True)
created = False
@@ -500,7 +594,7 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
if not destvfs.exists():
# Ensure parent directories of destination exist.
# Mercurial 3.8 removed ensuredirs and made makedirs race safe.
if util.safehasattr(util, 'ensuredirs'):
if util.safehasattr(util, "ensuredirs"):
makedirs = util.ensuredirs
else:
makedirs = util.makedirs
@@ -509,17 +603,23 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
makedirs(sharebase, notindexed=True)
if upstream:
ui.write(b'(cloning from upstream repo %s)\n' % upstream)
ui.write(b"(cloning from upstream repo %s)\n" % upstream)
if not storevfs.exists():
behaviors.add(b'create-store')
behaviors.add(b"create-store")
try:
with timeit('clone', 'clone'):
shareopts = {b'pool': sharebase, b'mode': b'identity'}
res = hg.clone(ui, {}, clonepeer, dest=dest, update=False,
shareopts=shareopts,
stream=True)
with timeit("clone", "clone"):
shareopts = {b"pool": sharebase, b"mode": b"identity"}
res = hg.clone(
ui,
{},
clonepeer,
dest=dest,
update=False,
shareopts=shareopts,
stream=True,
)
except (error.Abort, ssl.SSLError, urllibcompat.urlerr.urlerror) as e:
if handlepullerror(e):
return callself()
@@ -527,18 +627,18 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
except error.RepoError as e:
return handlerepoerror(e)
except error.RevlogError as e:
ui.warn(b'(repo corruption: %s; deleting shared store)\n' % e)
with timeit('remove_shared_store_revlogerror', 'remote-store'):
ui.warn(b"(repo corruption: %s; deleting shared store)\n" % e)
with timeit("remove_shared_store_revlogerror", "remote-store"):
deletesharedstore()
return callself()
# TODO retry here.
if res is None:
raise error.Abort(b'clone failed')
raise error.Abort(b"clone failed")
# Verify it is using shared pool storage.
if not destvfs.exists(b'.hg/sharedpath'):
raise error.Abort(b'clone did not create a shared repo')
if not destvfs.exists(b".hg/sharedpath"):
raise error.Abort(b"clone did not create a shared repo")
created = True
@@ -559,15 +659,16 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
if ctx:
if not ctx.hex().startswith(revision):
raise error.Abort(b'--revision argument is ambiguous',
hint=b'must be the first 12+ characters of a '
b'SHA-1 fragment')
raise error.Abort(
b"--revision argument is ambiguous",
hint=b"must be the first 12+ characters of a " b"SHA-1 fragment",
)
checkoutrevision = ctx.hex()
havewantedrev = True
if not havewantedrev:
ui.write(b'(pulling to obtain %s)\n' % (revision or branch,))
ui.write(b"(pulling to obtain %s)\n" % (revision or branch,))
remote = None
try:
@@ -575,17 +676,18 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
pullrevs = [peerlookup(remote, revision or branch)]
checkoutrevision = hex(pullrevs[0])
if branch:
ui.warn(b'(remote resolved %s to %s; '
b'result is not deterministic)\n' %
(branch, checkoutrevision))
ui.warn(
b"(remote resolved %s to %s; "
b"result is not deterministic)\n" % (branch, checkoutrevision)
)
if checkoutrevision in repo:
ui.warn(b'(revision already present locally; not pulling)\n')
ui.warn(b"(revision already present locally; not pulling)\n")
else:
with timeit('pull', 'pull'):
with timeit("pull", "pull"):
pullop = exchange.pull(repo, remote, heads=pullrevs)
if not pullop.rheads:
raise error.Abort(b'unable to pull requested revision')
raise error.Abort(b"unable to pull requested revision")
except (error.Abort, ssl.SSLError, urllibcompat.urlerr.urlerror) as e:
if handlepullerror(e):
return callself()
@@ -593,7 +695,7 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
except error.RepoError as e:
return handlerepoerror(e)
except error.RevlogError as e:
ui.warn(b'(repo corruption: %s; deleting shared store)\n' % e)
ui.warn(b"(repo corruption: %s; deleting shared store)\n" % e)
deletesharedstore()
return callself()
finally:
@@ -605,47 +707,46 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
# Avoid any working directory manipulations if `-U`/`--noupdate` was passed
if noupdate:
ui.write(b'(skipping update since `-U` was passed)\n')
ui.write(b"(skipping update since `-U` was passed)\n")
return None
# Purge if requested. We purge before update because this way we're
# guaranteed to not have conflicts on `hg update`.
if purge and not created:
ui.write(b'(purging working directory)\n')
purgeext = extensions.find(b'purge')
ui.write(b"(purging working directory)\n")
purge = getattr(commands, "purge", None)
if not purge:
purge = extensions.find(b"purge").purge
# Mercurial 4.3 doesn't purge files outside the sparse checkout.
# See https://bz.mercurial-scm.org/show_bug.cgi?id=5626. Force
# purging by monkeypatching the sparse matcher.
try:
old_sparse_fn = getattr(repo.dirstate, '_sparsematchfn', None)
old_sparse_fn = getattr(repo.dirstate, "_sparsematchfn", None)
if old_sparse_fn is not None:
# TRACKING hg50
# Arguments passed to `matchmod.always` were unused and have been removed
if util.versiontuple(n=2) >= (5, 0):
repo.dirstate._sparsematchfn = lambda: matchmod.always()
else:
repo.dirstate._sparsematchfn = lambda: matchmod.always(repo.root, '')
repo.dirstate._sparsematchfn = lambda: matchmod.always()
with timeit('purge', 'purge'):
if purgeext.purge(ui, repo, all=True, abort_on_err=True,
# The function expects all arguments to be
# defined.
**{'print': None,
'print0': None,
'dirs': None,
'files': None}):
raise error.Abort(b'error purging')
with timeit("purge", "purge"):
if purge(
ui,
repo,
all=True,
abort_on_err=True,
# The function expects all arguments to be
# defined.
**{"print": None, "print0": None, "dirs": None, "files": None}
):
raise error.Abort(b"error purging")
finally:
if old_sparse_fn is not None:
repo.dirstate._sparsematchfn = old_sparse_fn
# Update the working directory.
if repo[b'.'].node() == nullid:
behaviors.add('empty-wdir')
if repo[b"."].node() == nullid:
behaviors.add("empty-wdir")
else:
behaviors.add('populated-wdir')
behaviors.add("populated-wdir")
if sparse_profile:
sparsemod = getsparse()
@@ -655,58 +756,70 @@ def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
try:
repo.filectx(sparse_profile, changeid=checkoutrevision).data()
except error.ManifestLookupError:
raise error.Abort(b'sparse profile %s does not exist at revision '
b'%s' % (sparse_profile, checkoutrevision))
raise error.Abort(
b"sparse profile %s does not exist at revision "
b"%s" % (sparse_profile, checkoutrevision)
)
# TRACKING hg48 - parseconfig takes `action` param
if util.versiontuple(n=2) >= (4, 8):
old_config = sparsemod.parseconfig(repo.ui, repo.vfs.tryread(b'sparse'), b'sparse')
else:
old_config = sparsemod.parseconfig(repo.ui, repo.vfs.tryread(b'sparse'))
old_config = sparsemod.parseconfig(
repo.ui, repo.vfs.tryread(b"sparse"), b"sparse"
)
old_includes, old_excludes, old_profiles = old_config
if old_profiles == {sparse_profile} and not old_includes and not \
old_excludes:
ui.write(b'(sparse profile %s already set; no need to update '
b'sparse config)\n' % sparse_profile)
if old_profiles == {sparse_profile} and not old_includes and not old_excludes:
ui.write(
b"(sparse profile %s already set; no need to update "
b"sparse config)\n" % sparse_profile
)
else:
if old_includes or old_excludes or old_profiles:
ui.write(b'(replacing existing sparse config with profile '
b'%s)\n' % sparse_profile)
ui.write(
b"(replacing existing sparse config with profile "
b"%s)\n" % sparse_profile
)
else:
ui.write(b'(setting sparse config to profile %s)\n' %
sparse_profile)
ui.write(b"(setting sparse config to profile %s)\n" % sparse_profile)
# If doing an incremental update, this will perform two updates:
# one to change the sparse profile and another to update to the new
# revision. This is not desired. But there's not a good API in
# Mercurial to do this as one operation.
with repo.wlock(), timeit('sparse_update_config',
'sparse-update-config'):
fcounts = map(len, sparsemod._updateconfigandrefreshwdir(
repo, [], [], [sparse_profile], force=True))
with repo.wlock(), repo.dirstate.parentchange(), timeit(
"sparse_update_config", "sparse-update-config"
):
# pylint --py3k: W1636
fcounts = list(
map(
len,
sparsemod._updateconfigandrefreshwdir(
repo, [], [], [sparse_profile], force=True
),
)
)
repo.ui.status(b'%d files added, %d files dropped, '
b'%d files conflicting\n' % tuple(fcounts))
repo.ui.status(
b"%d files added, %d files dropped, "
b"%d files conflicting\n" % tuple(fcounts)
)
ui.write(b'(sparse refresh complete)\n')
ui.write(b"(sparse refresh complete)\n")
op = 'update_sparse' if sparse_profile else 'update'
behavior = 'update-sparse' if sparse_profile else 'update'
op = "update_sparse" if sparse_profile else "update"
behavior = "update-sparse" if sparse_profile else "update"
with timeit(op, behavior):
if commands.update(ui, repo, rev=checkoutrevision, clean=True):
raise error.Abort(b'error updating')
raise error.Abort(b"error updating")
ui.write(b'updated to %s\n' % checkoutrevision)
ui.write(b"updated to %s\n" % checkoutrevision)
return None
def extsetup(ui):
# Ensure required extensions are loaded.
for ext in (b'purge', b'share'):
for ext in (b"purge", b"share"):
try:
extensions.find(ext)
except KeyError:

Diff not shown because of its size.


@@ -157,6 +157,10 @@ def make_task(config, jobs):
"tier": 1,
}
if job.get("secret", None):
task["scopes"] = ["secrets:get:" + job.get("secret")]
task["worker"]["taskcluster-proxy"] = True
if not taskgraph.fast:
cache_name = task["label"].replace(f"{config.kind}-", "", 1)
@@ -282,8 +286,14 @@ def create_fetch_url_task(config, name, fetch):
schema={
Required("repo"): str,
Required("revision"): str,
Optional("include-dot-git"): bool,
Optional("artifact-name"): str,
Optional("path-prefix"): str,
# ssh-key is a taskcluster secret path (e.g. project/civet/github-deploy-key)
# In the secret dictionary, the key should be specified as
# "ssh_privkey": "-----BEGIN OPENSSH PRIVATE KEY-----\nkfksnb3jc..."
# n.b. The OpenSSH private key file format requires a newline at the end of the file.
Optional("ssh-key"): str,
},
)
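# An illustrative kind definition exercising the new fields (repo, revision,
# and secret path are hypothetical):
#
#     fetch:
#         type: git
#         repo: https://github.com/example/private-repo
#         revision: 0123456789abcdef0123456789abcdef01234567
#         include-dot-git: true
#         ssh-key: project/example/github-deploy-key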
def create_git_fetch_task(config, name, fetch):
@@ -307,8 +317,19 @@ def create_git_fetch_task(config, name, fetch):
"/builds/worker/artifacts/%s" % artifact_name,
]
ssh_key = fetch.get("ssh-key")
if ssh_key:
args.append("--ssh-key-secret")
args.append(ssh_key)
digest_data = [fetch["revision"], path_prefix, artifact_name]
if fetch.get("include-dot-git", False):
args.append("--include-dot-git")
digest_data.append(".git")
return {
"command": args,
"artifact_name": artifact_name,
"digest_data": [fetch["revision"], path_prefix, artifact_name],
"digest_data": digest_data,
"secret": ssh_key,
}


@@ -79,6 +79,7 @@ job_description_schema = Schema(
Required("artifact"): str,
Optional("dest"): str,
Optional("extract"): bool,
Optional("verify-hash"): bool,
},
],
},
@@ -298,10 +299,12 @@ def use_fetches(config, jobs):
path = artifact
dest = None
extract = True
verify_hash = False
else:
path = artifact["artifact"]
dest = artifact.get("dest")
extract = artifact.get("extract", True)
verify_hash = artifact.get("verify-hash", False)
fetch = {
"artifact": f"{prefix}/{path}",
@@ -310,6 +313,8 @@ }
}
if dest is not None:
fetch["dest"] = dest
if verify_hash:
fetch["verify-hash"] = verify_hash
job_fetches.append(fetch)
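            # e.g. an artifact entry in a job's `fetches` that opts in
            # (illustrative):
            #
            #     {"artifact": "target.tar.zst", "extract": True,
            #      "verify-hash": True}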
job_artifact_prefixes = {


@@ -191,11 +191,6 @@ def support_vcs_checkout(config, job, taskdesc, repo_configs, sparse=False):
if repo_config.ssh_secret_name:
taskdesc["scopes"].append(f"secrets:get:{repo_config.ssh_secret_name}")
if any(repo_config.type == "hg" for repo_config in repo_configs.values()):
# Give task access to hgfingerprint secret so it can pin the certificate
# for hg.mozilla.org.
taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgfingerprint")
# only some worker platforms have taskcluster-proxy enabled
if job["worker"]["implementation"] in ("docker-worker",):
taskdesc["worker"]["taskcluster-proxy"] = True


@@ -157,7 +157,6 @@ def docker_worker_run_task(config, job, taskdesc):
if isinstance(run_command, str) or isinstance(run_command, dict):
exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")]
run_command = exec_cmd + [run_command]
command.append("--fetch-hgfingerprint")
if run["run-as-root"]:
command.extend(("--user", "root", "--group", "root"))
command.append("--")


@@ -268,6 +268,7 @@ def verify_index(config, index):
Required("loopback-audio"): bool,
Required("docker-in-docker"): bool, # (aka 'dind')
Required("privileged"): bool,
Required("disable-seccomp"): bool,
# Paths to Docker volumes.
#
# For in-tree Docker images, volumes can be parsed from Dockerfile.
@@ -406,6 +407,10 @@ def build_docker_worker_payload(config, task, task_def):
capabilities["privileged"] = True
task_def["scopes"].append("docker-worker:capability:privileged")
if worker.get("disable-seccomp"):
capabilities["disableSeccomp"] = True
task_def["scopes"].append("docker-worker:capability:disableSeccomp")
task_def["payload"] = payload = {
"image": image,
"env": worker["env"],
@@ -831,6 +836,7 @@ def set_defaults(config, tasks):
worker.setdefault("loopback-audio", False)
worker.setdefault("docker-in-docker", False)
worker.setdefault("privileged", False)
worker.setdefault("disable-seccomp", False)
worker.setdefault("volumes", [])
worker.setdefault("env", {})
if "caches" in worker:
@@ -992,7 +998,7 @@ def build_task(config, tasks):
branch_rev = get_branch_rev(config)
if config.params["tasks_for"] == "github-pull-request":
if config.params["tasks_for"].startswith("github-pull-request"):
# In the past we used `project` for this, but that ends up being
# set to the repository name of the _head_ repo, which is not correct
# (and causes scope issues) if it doesn't match the name of the