Bug 1147977 - Add additional timeouts and retries to curl downloads in tc-vcs r=jonasfj

--HG--
extra : rebase_source : b58c3363566d90d43b8cc4df080e778d04996b12
jlal@mozilla.com 2015-03-26 14:27:17 -07:00
Parent ef57da5f55
Commit 08df206531
3 changed files with 42 additions and 1 deletion

View file

@@ -10,6 +10,7 @@ COPY mozharness_configs /home/worker/mozharness_configs
COPY buildprops.json /home/worker/buildprops.json
ADD https://s3-us-west-2.amazonaws.com/test-caching/packages/linux64-stackwalk /usr/local/bin/linux64-minidump_stackwalk
ADD https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /home/worker/bin/buildbot_step
COPY tc-vcs-config.yml /etc/taskcluster-vcs.yml
# Run test setup script

View file

@@ -1 +1 @@
0.2.9
0.2.10

View file

@@ -0,0 +1,40 @@
# Default configuration used by the tc-vcs tools; these values can be
# overridden by passing the config you wish to use on the command line...
git: git
hg: hg
repoCache:
# Repo URL to clone when running repo init...
repoUrl: https://git.mozilla.org/external/google/gerrit/git-repo.git
# Version of repo to utilize...
repoRevision: master
# The root where all downloaded cache files are stored on the local machine...
cacheDir: '{{env.HOME}}/.tc-vcs-repo/'
# Name/prefix used as part of the base URL.
cacheName: sources/{{name}}.tar.gz
# Command used to upload the tarball
uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
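# (--data-binary @'{{source}}' streams the tarball verbatim as the PUT body;
# unlike -d, it performs no URL-encoding or newline stripping.)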
# Large HTTP GET requests are often slower through node's built-in HTTP
# layer, so we delegate fetching to a curl subprocess...
get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
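# (--connect-timeout caps connection setup at 30 seconds; --speed-limit makes
# curl fail any transfer that averages under 500000 bytes/second over its
# speed-time window, which defaults to 30 seconds.)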
# Used to create clone tarball
compress: tar -czf {{dest}} {{source}}
# All cache URLs use tar + gz; this is the command used to extract the files
# downloaded by the "get" command.
extract: tar -x -z -C {{dest}} -f {{source}}
cloneCache:
# The root where all downloaded cache files are stored on the local machine...
cacheDir: '{{env.HOME}}/.tc-vcs/'
# Command used to upload the tarball
uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
# Large HTTP GET requests are often slower through node's built-in HTTP
# layer, so we delegate fetching to a curl subprocess...
get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
# Used to create clone tarball
compress: tar -czf {{dest}} {{source}}
# All cache URLs use tar + gz; this is the command used to extract the files
# downloaded by the "get" command.
extract: tar -x -z --strip-components 1 -C {{dest}} -f {{source}}
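# (--strip-components 1 drops the tarball's top-level directory so the clone
# contents land directly in {{dest}}.)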
# Name/prefix used as part of the base URL.
cacheName: clones/{{name}}.tar.gz
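
For illustration, once tc-vcs substitutes {{dest}} and {{url}}, the cloneCache "get" template expands to an ordinary curl invocation along these lines (the path and URL are hypothetical placeholders, not values from this commit):

    curl --connect-timeout 30 --speed-limit 500000 -L \
      -o /home/worker/.tc-vcs/clones/example.tar.gz \
      'https://example.com/clones/example.tar.gz'

If the connection is not established within 30 seconds, or the transfer speed stays below 500000 bytes/second, curl exits with a non-zero status; per the commit message, the retry logic around these downloads lives in tc-vcs itself rather than in the command template.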