Bug 1382729: allow uploading docs at all levels; r=gps

The upload now uses MOZ_SCM_LEVEL to determine which secret to read and which
bucket to upload to, so it can potentially run at any SCM level.
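For context, the selection boils down to a small mapping from the SCM level to a bucket name and a per-level secret path. A minimal sketch, using the bucket names and secret path from the patch below; defaulting to level 1 when MOZ_SCM_LEVEL is unset is an assumption for local runs, not part of the patch:

import os

# Pick the S3 bucket and Taskcluster secret from the push's SCM level.
# Falling back to '1' when MOZ_SCM_LEVEL is unset is assumed for local runs.
level = os.environ.get('MOZ_SCM_LEVEL', '1')
bucket = {
    '1': 'gecko-docs.mozilla.org-l1',
    '2': 'gecko-docs.mozilla.org-l2',
    '3': 'gecko-docs.mozilla.org',
}[level]
secret = 'project/releng/gecko/build/level-{}/gecko-docs-upload'.format(level)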

This also modifies task descriptions to allow {level} in scopes, and updates
try syntax to allow `-j doc-upload` even though run-on-projects says it doesn't
run on try by default.
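The {level} substitution is plain str.format applied to each scope by the build_task transform. A rough illustration using the scope string from the patch below; the literal level value here is only for demonstration (the real transform uses config.params['level']):

# In the real transform the level comes from config.params['level'].
level = '1'
scopes = ['secrets:get:project/releng/gecko/build/level-{level}/gecko-docs-upload']
scopes = [s.format(level=level) for s in scopes]
# scopes is now ['secrets:get:project/releng/gecko/build/level-1/gecko-docs-upload']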

MozReview-Commit-ID: Dm27TGPa7IM

--HG--
extra : rebase_source : f1131abc8cd639251e085c8ebf776827a6b831ed
extra : amend_source : b2b0cb253c7f6e90fdd710c2c788877411bd9e1d
Dustin J. Mitchell 2017-08-08 19:13:05 +00:00
Parent 2a133d756a
Commit 64c81d4d30
5 changed files: 29 additions and 20 deletions

View file

@@ -40,13 +40,12 @@ doc-upload:
     worker:
         docker-image: {in-tree: "lint"}
         max-run-time: 1800
+        taskcluster-proxy: true
     run:
         using: run-task
-        command: >
-            cd /home/worker/checkouts/gecko &&
-            ./mach doc-upload --bucket gecko-docs.mozilla.org --region us-west-2
+        command: cd /home/worker/checkouts/gecko && ./mach doc-upload
     scopes:
-        - secrets:get:project/releng/gecko/build/level-3/gecko-docs-upload
+        - secrets:get:project/releng/gecko/build/level-{level}/gecko-docs-upload
     when:
         files-changed:
             - '**/*.py'

View file

@@ -34,6 +34,8 @@ def common_setup(config, job, taskdesc):
     if run['checkout']:
         support_vcs_checkout(config, job, taskdesc)
 
+    taskdesc['worker'].setdefault('env', {})['MOZ_SCM_LEVEL'] = config.params['level']
+
 
 @run_job_using("docker-worker", "run-task", schema=run_task_schema)
 def docker_worker_run_task(config, job, taskdesc):

View file

@@ -57,7 +57,9 @@ task_description_schema = Schema({
     Optional('routes'): [basestring],
 
     # custom scopes for this task; any scopes required for the worker will be
-    # added automatically
+    # added automatically. The following parameters will be substituted in each
+    # scope:
+    #  {level} -- the scm level of this push
     Optional('scopes'): [basestring],
 
     # Tags
@@ -950,11 +952,12 @@ def add_index_routes(config, tasks):
 @transforms.add
 def build_task(config, tasks):
     for task in tasks:
-        worker_type = task['worker-type'].format(level=str(config.params['level']))
+        level = str(config.params['level'])
+        worker_type = task['worker-type'].format(level=level)
         provisioner_id, worker_type = worker_type.split('/', 1)
 
         routes = task.get('routes', [])
-        scopes = task.get('scopes', [])
+        scopes = [s.format(level=level) for s in task.get('scopes', [])]
 
         # set up extra
         extra = task.get('extra', {})

View file

@@ -595,8 +595,8 @@ class TryOptionSyntax(object):
             # Beware the subtle distinction between [] and None for self.jobs and self.platforms.
             # They will be [] if there was no try syntax, and None if try syntax was detected but
             # they remained unspecified.
-            if self.jobs and job_try_name not in self.jobs:
-                return False
+            if self.jobs:
+                return job_try_name in self.jobs
             elif not self.jobs and 'build' in task.dependencies:
                 # We exclude tasks with build dependencies from the default set of jobs because
                 # they will schedule their builds even if they end up optimized away. This means

View file

@@ -112,36 +112,41 @@ class Documentation(MachCommandBase):
     @Command('doc-upload', category='devenv',
              description='Generate and upload documentation from the tree.')
-    @CommandArgument('--bucket', required=True,
-                     help='Target S3 bucket.')
-    @CommandArgument('--region', required=True,
-                     help='Region containing target S3 bucket.')
     @CommandArgument('what', nargs='*', metavar='DIRECTORY [, DIRECTORY]',
                      help='Path(s) to documentation to build and upload.')
-    def upload_docs(self, bucket, region, what=None):
+    def upload_docs(self, what=None):
         self._activate_virtualenv()
         self.virtualenv_manager.install_pip_package('boto3==1.4.4')
 
         outdir = os.path.join(self.topobjdir, 'docs')
         self.build_docs(what=what, outdir=outdir, format='html')
-        self.s3_upload(os.path.join(outdir, 'html', 'Mozilla_Source_Tree_Docs'), bucket, region)
+        self.s3_upload(os.path.join(outdir, 'html', 'Mozilla_Source_Tree_Docs'))
 
-    def s3_upload(self, root, bucket, region):
+    def s3_upload(self, root):
         """Upload the contents of outdir recursively to S3"""
         import boto3
         import mimetypes
         import requests
 
-        # Get the credentials from the TC secrets service. Note that these are
-        # only available to level-3 pushes.
+        region = 'us-west-2'
+        level = os.environ.get('MOZ_SCM_LEVEL', '1')
+        bucket = {
+            '1': 'gecko-docs.mozilla.org-l1',
+            '2': 'gecko-docs.mozilla.org-l2',
+            '3': 'gecko-docs.mozilla.org',
+        }[level]
+        secrets_url = 'http://taskcluster/secrets/v1/secret/'
+        secrets_url += 'project/releng/gecko/build/level-{}/gecko-docs-upload'.format(level)
+
+        # Get the credentials from the TC secrets service. Note that these
+        # differ per SCM level
         if 'TASK_ID' in os.environ:
             print("Using AWS credentials from the secrets service")
             session = requests.Session()
-            secrets_url = 'http://taskcluster/secrets/repo:hg.mozilla.org/mozilla-central/gecko-docs-upload'
             res = session.get(secrets_url)
             res.raise_for_status()
-            secret = res.json()
+            secret = res.json()['secret']
             session = boto3.session.Session(
                 aws_access_key_id=secret['AWS_ACCESS_KEY_ID'],
                 aws_secret_access_key=secret['AWS_SECRET_ACCESS_KEY'],