# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
from __future__ import absolute_import, print_function, unicode_literals
|
2013-11-21 00:37:22 +04:00
|
|
|
|
|
|
|
import os
|
2015-11-10 23:37:38 +03:00
|
|
|
import sys
|
2013-11-21 00:37:22 +04:00
|
|
|
|
|
|
|
from mach.decorators import (
|
2015-11-10 23:37:38 +03:00
|
|
|
Command,
|
2013-11-21 00:37:22 +04:00
|
|
|
CommandArgument,
|
|
|
|
CommandProvider,
|
|
|
|
)
|
|
|
|
|
2016-02-10 13:04:50 +03:00
|
|
|
import mozhttpd
|
2015-11-10 23:37:38 +03:00
|
|
|
|
2013-11-21 00:37:22 +04:00
|
|
|
from mozbuild.base import MachCommandBase
|
|
|
|
|
|
|
|
|
|
|
|
@CommandProvider
class Documentation(MachCommandBase):
    """Helps manage in-tree documentation."""

    @Command('doc', category='devenv',
             description='Generate and display documentation from the tree.')
    @CommandArgument('what', nargs='*', metavar='DIRECTORY [, DIRECTORY]',
                     help='Path(s) to documentation to build and display.')
    @CommandArgument('--format', default='html',
                     help='Documentation format to write.')
    @CommandArgument('--outdir', default=None, metavar='DESTINATION',
                     help='Where to write output.')
    @CommandArgument('--archive', action='store_true',
                     help='Write a gzipped tarball of generated docs')
    @CommandArgument('--no-open', dest='auto_open', default=True,
                     action='store_false',
                     help="Don't automatically open HTML docs in a browser.")
    @CommandArgument('--http', const=':6666', metavar='ADDRESS', nargs='?',
                     help='Serve documentation on an HTTP server, '
                          'e.g. ":6666".')
    def build_docs(self, what=None, format=None, outdir=None, auto_open=True,
                   http=None, archive=False):
        """Build Sphinx documentation for each path in *what*.

        Output for each project lands in <outdir>/<format>/<project>.
        On any build failure the collected errors are reported through
        die() and its exit code is returned; otherwise the generated
        index may be opened in a browser, and/or the output tree served
        over HTTP when --http is given.
        """
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('sphinx_rtd_theme==0.1.6')

        # Imported only after the virtualenv is activated so the
        # in-virtualenv packages are the ones resolved.
        import sphinx
        import webbrowser
        import moztreedocs

        if not outdir:
            outdir = os.path.join(self.topobjdir, 'docs')
        if not what:
            what = [os.path.join(self.topsrcdir, 'tools')]

        format_outdir = os.path.join(outdir, format)

        generated = []
        failed = []
        for path in what:
            path = os.path.normpath(os.path.abspath(path))
            docdir = self._find_doc_dir(path)

            if not docdir:
                failed.append((path, 'could not find docs at this location'))
                continue

            # Find the project name to use as a namespace within `outdir`.
            project = self._find_project_name(docdir)
            savedir = os.path.join(format_outdir, project)

            args = [
                'sphinx',
                '-b', format,
                docdir,
                savedir,
            ]
            result = sphinx.build_main(args)
            if result != 0:
                failed.append((path, 'sphinx return code %d' % result))
            else:
                generated.append(savedir)

            if archive:
                archive_path = os.path.join(outdir, '%s.tar.gz' % project)
                moztreedocs.create_tarball(archive_path, savedir)
                print('Archived to %s' % archive_path)

            # Don't auto-open a file:// URL when we are about to serve
            # the same docs over HTTP.
            index_path = os.path.join(savedir, 'index.html')
            if not http and auto_open and os.path.isfile(index_path):
                webbrowser.open(index_path)

        if generated:
            print('\nGenerated documentation:\n%s\n' % '\n'.join(generated))

        if failed:
            failed = ['%s: %s' % (f[0], f[1]) for f in failed]
            return die('failed to generate documentation:\n%s' % '\n'.join(failed))

        if http is not None:
            # Accept "host:port" where host may be empty (e.g. ":6666").
            # The former `len(addr) != 2` check was dead code -- a
            # malformed address raised ValueError in split()/int() before
            # it was ever reached; catch that instead so the user sees
            # the intended error message.
            host, _, port = http.partition(':')
            try:
                addr = (host, int(port))
            except ValueError:
                return die('invalid address: %s' % http)

            httpd = mozhttpd.MozHttpd(host=addr[0], port=addr[1],
                                      docroot=format_outdir)
            print('listening on %s:%d' % addr)
            httpd.start(block=True)

    def _find_project_name(self, path):
        """Return the Sphinx ``project`` declared in <path>/conf.py, with
        spaces replaced by underscores so it is usable as a directory name."""
        import imp
        path = os.path.join(path, 'conf.py')
        with open(path, 'r') as fh:
            conf = imp.load_module('doc_conf', fh, path,
                                   ('.py', 'r', imp.PY_SOURCE))

        return conf.project.replace(' ', '_')

    def _find_doc_dir(self, path):
        """Return the subdirectory of *path* containing a Sphinx config
        (``doc`` or ``docs`` holding a conf.py), or None if none exists."""
        search_dirs = ('doc', 'docs')
        for d in search_dirs:
            p = os.path.join(path, d)
            if os.path.isfile(os.path.join(p, 'conf.py')):
                return p
        return None

    @Command('doc-upload', category='devenv',
             description='Generate and upload documentation from the tree.')
    @CommandArgument('what', nargs='*', metavar='DIRECTORY [, DIRECTORY]',
                     help='Path(s) to documentation to build and upload.')
    def upload_docs(self, what=None):
        """Build HTML docs into the objdir, then push them to S3."""
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('boto3==1.4.4')

        outdir = os.path.join(self.topobjdir, 'docs')
        self.build_docs(what=what, outdir=outdir, format='html')

        self.s3_upload(os.path.join(outdir, 'html', 'Mozilla_Source_Tree_Docs'))

    def s3_upload(self, root):
        """Upload the contents of *root* recursively to S3.

        The destination bucket is chosen by MOZ_SCM_LEVEL. Credentials
        come from the Taskcluster secrets service when running in
        automation (TASK_ID set), otherwise from the local AWS config.
        """
        import boto3
        import mimetypes
        import requests

        region = 'us-west-2'
        level = os.environ.get('MOZ_SCM_LEVEL', '1')
        bucket = {
            '1': 'gecko-docs.mozilla.org-l1',
            '2': 'gecko-docs.mozilla.org-l2',
            '3': 'gecko-docs.mozilla.org',
        }[level]
        secrets_url = 'http://taskcluster/secrets/v1/secret/'
        secrets_url += 'project/releng/gecko/build/level-{}/gecko-docs-upload'.format(level)

        # Get the credentials from the TC secrets service. Note that these
        # differ per SCM level.
        if 'TASK_ID' in os.environ:
            print("Using AWS credentials from the secrets service")
            session = requests.Session()
            res = session.get(secrets_url)
            res.raise_for_status()
            secret = res.json()['secret']
            session = boto3.session.Session(
                aws_access_key_id=secret['AWS_ACCESS_KEY_ID'],
                aws_secret_access_key=secret['AWS_SECRET_ACCESS_KEY'],
                region_name=region)
        else:
            print("Trying to use your AWS credentials..")
            session = boto3.session.Session(region_name=region)
        s3 = session.client('s3')

        # Capture the cwd *before* entering the try block; if getcwd()
        # itself failed inside the try, the finally clause would hit an
        # unbound name.
        old_cwd = os.getcwd()
        try:
            os.chdir(root)

            for dirpath, dirnames, filenames in os.walk('.'):
                if dirpath == '.':
                    # Ignore hidden and build-support entries in the root
                    # directory; pruning dirnames in place stops os.walk
                    # from descending into them.
                    bad = [d for d in dirnames
                           if d.startswith('.') or d in ('_venv', '_staging')]
                    for b in bad:
                        dirnames.remove(b)
                for filename in filenames:
                    # Strip the leading './' so S3 keys are relative.
                    pathname = os.path.join(dirpath, filename)[2:]
                    content_type, content_encoding = mimetypes.guess_type(pathname)
                    extra_args = {}
                    if content_type:
                        extra_args['ContentType'] = content_type
                    if content_encoding:
                        extra_args['ContentEncoding'] = content_encoding
                    print('uploading', pathname)
                    s3.upload_file(pathname, bucket, pathname, ExtraArgs=extra_args)
        finally:
            os.chdir(old_cwd)
|
2015-11-10 23:37:38 +03:00
|
|
|
|
|
|
|
def die(msg, exit_code=1):
    """Report *msg* on stderr, prefixed with the program name, and hand
    back *exit_code* so the caller can propagate it as a command result."""
    sys.stderr.write('%s: %s\n' % (sys.argv[0], msg))
    return exit_code
|