Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1266624: Apply exponential backoff for mozharness download. r=dustin
MozReview-Commit-ID: Ip4MjVJFwT6

--HG--
extra : rebase_source : 62a0c16da8804df0dfcb8d194b6bbc0f1d74b8f8
Parent: 0d8088d742
Commit: 5d5f326c05
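The commit message names the technique: wrap the download in a retry loop whose sleep interval doubles after each failed attempt. As a minimal, generic sketch of that pattern in shell, the helper below takes the maximum attempt count and the command to run; the retry_with_backoff name and the commented example invocation are hypothetical illustrations, not code from this commit.

#!/bin/bash
# Hypothetical helper: run a command up to $1 times, doubling the sleep
# between attempts, starting at 1 second. Returns 0 as soon as the command
# succeeds, or 1 after the last failed attempt.
retry_with_backoff() {
    local max_attempts=$1; shift
    local timeout=1
    local attempt=0

    while [[ $attempt -lt $max_attempts ]]; do
        if "$@"; then
            return 0
        fi
        echo "attempt $((attempt + 1)) failed, retrying in ${timeout}s..." >&2
        sleep $timeout
        timeout=$((timeout * 2))
        attempt=$((attempt + 1))
    done
    return 1
}

# Example use (illustrative only):
# retry_with_backoff 10 curl --fail -o mozharness.zip -L "$MOZHARNESS_URL"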
@@ -25,12 +25,38 @@ if [[ -z ${MOZHARNESS_URL} ]]; then fail "MOZHARNESS_URL is not set"; fi
 if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
 if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
 
-# Unzip the mozharness ZIP file created by the build task
-if ! curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
-    fail "failed to download mozharness zip"
-fi
-rm -rf mozharness
-unzip -q mozharness.zip
+# Download mozharness with exponential backoff
+# curl already applies exponential backoff, but not for all
+# failed cases, apparently, as we keep getting failed downloads
+# with 404 code.
+download_mozharness() {
+    local max_attempts=10
+    local timeout=1
+    local attempt=0
+
+    echo "Downloading mozharness"
+
+    while [[ $attempt < $max_attempts ]]; do
+        if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
+            rm -rf mozharness
+            if unzip -q mozharness.zip; then
+                return 0
+            else
+                echo "error unzipping mozharness.zip" >&2
+            fi
+        else
+            echo "failed to download mozharness zip" >&2
+        fi
+        echo "Download failed, retrying in $timeout seconds..." >&2
+        sleep $timeout
+        timeout=$((timeout*2))
+        attempt=$((attempt+1))
+    done
+
+    fail "Failed to download and unzip mozharness"
+}
+
+download_mozharness
 rm mozharness.zip
 
 # For telemetry purposes, the build process wants information about the
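The comment added in both files refers to curl's built-in retry behaviour: with --retry, curl re-attempts only failures it considers transient (timeouts and most 5xx responses), while a 404 is treated as permanent, so the transfer fails once and curl returns. The short demonstration below is hypothetical and not part of the commit; the URL is illustrative and assumed to return 404.

# Hypothetical demonstration: against a URL that answers 404, --retry never
# re-attempts the transfer, and --fail turns the HTTP error into a non-zero
# exit code (22), which is what the shell-level loop above reacts to.
curl --fail --retry 10 -sS -o /dev/null -L "https://example.com/does-not-exist.zip"
echo "curl exit code: $?"   # expect 22 (HTTP page not retrieved)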
@@ -69,15 +69,39 @@ cleanup() {
 }
 trap cleanup EXIT INT
 
+# Download mozharness with exponential backoff
+# curl already applies exponential backoff, but not for all
+# failed cases, apparently, as we keep getting failed downloads
+# with 404 code.
+download_mozharness() {
+    local max_attempts=10
+    local timeout=1
+    local attempt=0
+
+    echo "Downloading mozharness"
+
+    while [[ $attempt < $max_attempts ]]; do
+        if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
+            rm -rf mozharness
+            if unzip -q mozharness.zip; then
+                return 0
+            fi
+            echo "error unzipping mozharness.zip" >&2
+        else
+            echo "failed to download mozharness zip" >&2
+        fi
+        echo "Download failed, retrying in $timeout seconds..." >&2
+        sleep $timeout
+        timeout=$((timeout*2))
+        attempt=$((attempt+1))
+    done
+
+    fail "Failed to download and unzip mozharness"
+}
+
 # Download mozharness if we're told to.
 if [ ${MOZHARNESS_URL} ]; then
-    if ! curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
-        fail "failed to download mozharness zip"
-    fi
-    rm -rf mozharness
-    if ! unzip -q mozharness.zip; then
-        fail "error unzipping mozharness.zip"
-    fi
+    download_mozharness
     rm mozharness.zip
 
     if ! [ -d mozharness ]; then
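With max_attempts=10 and an initial timeout of 1 second, the worst-case wait before the function gives up is 1 + 2 + 4 + ... + 512 = 1023 seconds, a little over 17 minutes, on top of curl's own --retry delays. The small sketch below simply prints that schedule; it is an illustration, not part of the commit.

#!/bin/bash
# Print the back-off schedule used by download_mozharness: 10 attempts,
# sleep doubling from 1 second.
timeout=1
total=0
for attempt in $(seq 1 10); do
    echo "attempt $attempt: sleep ${timeout}s"
    total=$((total + timeout))
    timeout=$((timeout * 2))
done
echo "worst-case total back-off: ${total}s"   # 1+2+4+...+512 = 1023 seconds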