Bug 1382729: Upload documentation to S3 automatically; r=gps

This uses credentials stored in the Taskcluster secret service. The task should
only run on mozilla-central to avoid confusion between branches.

MozReview-Commit-ID: 31XfTg0sCht
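
For context on the credential flow: inside a task, the Taskcluster proxy exposes
the secrets service at http://taskcluster/, so the task can fetch the secret over
plain HTTP without baking any credentials into the tree. A minimal standalone
sketch of that fetch, using the secret URL from this patch (the payload being
wrapped under a 'secret' key follows the Taskcluster secrets API; treat the exact
response shape as an assumption):

    import requests

    # Inside a task, the Taskcluster proxy routes this hostname to the
    # secrets service, attaching the task's scopes.
    SECRET_URL = ('http://taskcluster/secrets/'
                  'repo:hg.mozilla.org/mozilla-central/gecko-docs-upload')

    res = requests.get(SECRET_URL)
    res.raise_for_status()

    # The secrets API returns the stored payload under a 'secret' property.
    secret = res.json()['secret']
    aws_key = secret['AWS_ACCESS_KEY_ID']
    aws_secret_key = secret['AWS_SECRET_ACCESS_KEY']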
Author: Dustin J. Mitchell
Date: 2017-07-28 18:32:04 +00:00
parent 4dfc1be95e
commit 228f6911be

2 changed files with 90 additions and 1 deletion

@@ -110,6 +110,69 @@ class Documentation(MachCommandBase):
        if os.path.isfile(os.path.join(p, 'conf.py')):
            return p

    @Command('doc-upload', category='devenv',
             description='Generate and upload documentation from the tree.')
    @CommandArgument('--bucket', required=True,
                     help='Target S3 bucket.')
    @CommandArgument('--region', required=True,
                     help='Region containing target S3 bucket.')
    @CommandArgument('what', nargs='*', metavar='DIRECTORY [, DIRECTORY]',
                     help='Path(s) to documentation to build and upload.')
    def upload_docs(self, bucket, region, what=None):
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('boto3==1.4.4')

        outdir = os.path.join(self.topobjdir, 'docs')
        self.build_docs(what=what, outdir=outdir, format='html')

        self.s3_upload(os.path.join(outdir, 'html', 'Mozilla_Source_Tree_Docs'),
                       bucket, region)
    def s3_upload(self, root, bucket, region):
        """Upload the contents of root recursively to S3."""
        import boto3
        import mimetypes
        import requests

        # Get the credentials from the TC secrets service.  Note that these
        # are only available to level-3 pushes.
        if 'TASK_ID' in os.environ:
            print("Using AWS credentials from the secrets service")
            session = requests.Session()
            secrets_url = ('http://taskcluster/secrets/'
                           'repo:hg.mozilla.org/mozilla-central/gecko-docs-upload')
            res = session.get(secrets_url)
            res.raise_for_status()
            # The secrets service wraps the stored payload in a 'secret'
            # property of the JSON response.
            secret = res.json()['secret']
            session = boto3.session.Session(
                aws_access_key_id=secret['AWS_ACCESS_KEY_ID'],
                aws_secret_access_key=secret['AWS_SECRET_ACCESS_KEY'],
                region_name=region)
        else:
            print("Trying to use your AWS credentials..")
            session = boto3.session.Session(region_name=region)
        s3 = session.client('s3')
        # Fetch the current directory before entering the try block, so the
        # finally clause cannot reference an unassigned name.
        old_cwd = os.getcwd()
        os.chdir(root)
        try:
            for dir, dirs, filenames in os.walk('.'):
                if dir == '.':
                    # ignore a few things in the root directory
                    bad = [d for d in dirs
                           if d.startswith('.') or d in ('_venv', '_staging')]
                    for b in bad:
                        dirs.remove(b)

                for filename in filenames:
                    pathname = os.path.join(dir, filename)[2:]  # strip './'
                    content_type, content_encoding = mimetypes.guess_type(pathname)
                    extra_args = {}
                    if content_type:
                        extra_args['ContentType'] = content_type
                    if content_encoding:
                        extra_args['ContentEncoding'] = content_encoding
                    print('uploading', pathname)
                    s3.upload_file(pathname, bucket, pathname, ExtraArgs=extra_args)
        finally:
            os.chdir(old_cwd)

def die(msg, exit_code=1):
    msg = '%s: %s' % (sys.argv[0], msg)
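
For reference, the new command would be invoked as follows; the bucket and
region values here are placeholders, not values taken from this commit:

    ./mach doc-upload --bucket <bucket> --region <region> [DIRECTORY ...]

Run outside of automation (no TASK_ID in the environment), the command falls
back to whatever ambient AWS credentials boto3 can find, per the else branch
above.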