Bug 1637845 - Apply 'black' to the vendor subdirectory r=glob

Differential Revision: https://phabricator.services.mozilla.com/D75896

Depends on D75693
Tom Ritter 2020-06-10 14:40:02 +00:00
Parent 47fe961b00
Commit 680ca25e0e
7 changed files with 681 additions and 461 deletions

View file

@@ -16,85 +16,134 @@ from mach.decorators import (
from mozbuild.base import MachCommandBase
@CommandProvider
class Vendor(MachCommandBase):
"""Vendor third-party dependencies into the source repository."""
@Command('vendor', category='misc',
description='Vendor third-party dependencies into the source repository.')
@Command(
"vendor",
category="misc",
description="Vendor third-party dependencies into the source repository.",
)
def vendor(self):
self._sub_mach(['help', 'vendor'])
self._sub_mach(["help", "vendor"])
return 1
@SubCommand('vendor', 'rust',
description='Vendor rust crates from crates.io into third_party/rust')
@CommandArgument('--ignore-modified', action='store_true',
help='Ignore modified files in current checkout',
default=False)
@SubCommand(
"vendor",
"rust",
description="Vendor rust crates from crates.io into third_party/rust",
)
@CommandArgument(
'--build-peers-said-large-imports-were-ok', action='store_true',
help=('Permit overly-large files to be added to the repository. '
'To get permission to set this, raise a question in the #build '
'channel at https://chat.mozilla.org.'),
default=False)
"--ignore-modified",
action="store_true",
help="Ignore modified files in current checkout",
default=False,
)
@CommandArgument(
"--build-peers-said-large-imports-were-ok",
action="store_true",
help=(
"Permit overly-large files to be added to the repository. "
"To get permission to set this, raise a question in the #build "
"channel at https://chat.mozilla.org."
),
default=False,
)
def vendor_rust(self, **kwargs):
from mozbuild.vendor_rust import VendorRust
vendor_command = self._spawn(VendorRust)
vendor_command.vendor(**kwargs)
@SubCommand('vendor', 'aom',
description='Vendor av1 video codec reference implementation into the '
'source repository.')
@CommandArgument('-r', '--revision',
help='Repository tag or commit to update to.')
@CommandArgument('--repo',
help='Repository url to pull a snapshot from. '
'Supports github and googlesource.')
@CommandArgument('--ignore-modified', action='store_true',
help='Ignore modified files in current checkout',
default=False)
@SubCommand(
"vendor",
"aom",
description="Vendor av1 video codec reference implementation into the "
"source repository.",
)
@CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
@CommandArgument(
"--repo",
help="Repository url to pull a snapshot from. "
"Supports github and googlesource.",
)
@CommandArgument(
"--ignore-modified",
action="store_true",
help="Ignore modified files in current checkout",
default=False,
)
def vendor_aom(self, **kwargs):
from mozbuild.vendor_aom import VendorAOM
vendor_command = self._spawn(VendorAOM)
vendor_command.vendor(**kwargs)
@SubCommand('vendor', 'dav1d',
description='Vendor dav1d implementation of AV1 into the source repository.')
@CommandArgument('-r', '--revision',
help='Repository tag or commit to update to.')
@CommandArgument('--repo',
help='Repository url to pull a snapshot from. Supports gitlab.')
@CommandArgument('--ignore-modified', action='store_true',
help='Ignore modified files in current checkout',
default=False)
@SubCommand(
"vendor",
"dav1d",
description="Vendor dav1d implementation of AV1 into the source repository.",
)
@CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
@CommandArgument(
"--repo", help="Repository url to pull a snapshot from. Supports gitlab."
)
@CommandArgument(
"--ignore-modified",
action="store_true",
help="Ignore modified files in current checkout",
default=False,
)
def vendor_dav1d(self, **kwargs):
from mozbuild.vendor_dav1d import VendorDav1d
vendor_command = self._spawn(VendorDav1d)
vendor_command.vendor(**kwargs)
@SubCommand('vendor', 'python',
description='Vendor Python packages from pypi.org into third_party/python')
@CommandArgument('--with-windows-wheel', action='store_true',
help='Vendor a wheel for Windows along with the source package',
default=False)
@CommandArgument('packages', default=None, nargs='*',
help='Packages to vendor. If omitted, packages and their dependencies '
'defined in Pipfile.lock will be vendored. If Pipfile has been modified, '
'then Pipfile.lock will be regenerated. Note that transient dependencies '
'may be updated when running this command.')
@SubCommand(
"vendor",
"python",
description="Vendor Python packages from pypi.org into third_party/python",
)
@CommandArgument(
"--with-windows-wheel",
action="store_true",
help="Vendor a wheel for Windows along with the source package",
default=False,
)
@CommandArgument(
"packages",
default=None,
nargs="*",
help="Packages to vendor. If omitted, packages and their dependencies "
"defined in Pipfile.lock will be vendored. If Pipfile has been modified, "
"then Pipfile.lock will be regenerated. Note that transient dependencies "
"may be updated when running this command.",
)
def vendor_python(self, **kwargs):
from mozbuild.vendor_python import VendorPython
vendor_command = self._spawn(VendorPython)
vendor_command.vendor(**kwargs)
@SubCommand('vendor', 'manifest',
description='Vendor externally hosted repositories into this '
'repository.')
@CommandArgument('files', nargs='+',
help='Manifest files to work on')
@CommandArgumentGroup('verify')
@CommandArgument('--verify', '-v', action='store_true', group='verify',
required=True, help='Verify manifest')
@SubCommand(
"vendor",
"manifest",
description="Vendor externally hosted repositories into this " "repository.",
)
@CommandArgument("files", nargs="+", help="Manifest files to work on")
@CommandArgumentGroup("verify")
@CommandArgument(
"--verify",
"-v",
action="store_true",
group="verify",
required=True,
help="Verify manifest",
)
def vendor_manifest(self, files, verify):
from mozbuild.vendor_manifest import verify_manifests
verify_manifests(files)
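
For reference, a minimal sketch of how a further subcommand would follow the same decorator pattern; the `example` target and the `vendor_example`/`VendorExample` names are hypothetical, not part of this commit:

@SubCommand(
    "vendor",
    "example",
    description="Vendor the hypothetical example library.",
)
@CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
def vendor_example(self, **kwargs):
    # Hypothetical helper class, mirroring VendorAOM/VendorDav1d above.
    from mozbuild.vendor_example import VendorExample

    vendor_command = self._spawn(VendorExample)
    vendor_command.vendor(**kwargs)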

python/mozbuild/mozbuild/vendor/moz_yaml.py (vendored)
View file

@@ -15,36 +15,44 @@ import re
import sys
HERE = os.path.abspath(os.path.dirname(__file__))
lib_path = os.path.join(HERE, '..', '..', '..', 'third_party', 'python')
sys.path.append(os.path.join(lib_path, 'voluptuous'))
sys.path.append(os.path.join(lib_path, 'pyyaml', 'lib'))
lib_path = os.path.join(HERE, "..", "..", "..", "third_party", "python")
sys.path.append(os.path.join(lib_path, "voluptuous"))
sys.path.append(os.path.join(lib_path, "pyyaml", "lib"))
import voluptuous
import yaml
from voluptuous import (All, FqdnUrl, Length, Match, Msg, Required, Schema,
Unique, )
from voluptuous import (
All,
FqdnUrl,
Length,
Match,
Msg,
Required,
Schema,
Unique,
)
from yaml.error import MarkedYAMLError
# TODO ensure this matches the approved list of licenses
VALID_LICENSES = [
# Standard Licenses (as per https://spdx.org/licenses/)
'Apache-2.0',
'BSD-2-Clause',
'BSD-3-Clause-Clear',
'GPL-3.0',
'ISC',
'ICU',
'LGPL-2.1',
'LGPL-3.0',
'MIT',
'MPL-1.1',
'MPL-2.0',
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause-Clear",
"GPL-3.0",
"ISC",
"ICU",
"LGPL-2.1",
"LGPL-3.0",
"MIT",
"MPL-1.1",
"MPL-2.0",
# Unique Licenses
'ACE', # http://www.cs.wustl.edu/~schmidt/ACE-copying.html
'Anti-Grain-Geometry', # http://www.antigrain.com/license/index.html
'JPNIC', # https://www.nic.ad.jp/ja/idn/idnkit/download/index.html
'Khronos', # https://www.khronos.org/openmaxdl
'Unicode', # http://www.unicode.org/copyright.html
"ACE", # http://www.cs.wustl.edu/~schmidt/ACE-copying.html
"Anti-Grain-Geometry", # http://www.antigrain.com/license/index.html
"JPNIC", # https://www.nic.ad.jp/ja/idn/idnkit/download/index.html
"Khronos", # https://www.khronos.org/openmaxdl
"Unicode", # http://www.unicode.org/copyright.html
]
"""
@@ -151,8 +159,8 @@ vendoring:
- another script
"""
RE_SECTION = re.compile(r'^(\S[^:]*):').search
RE_FIELD = re.compile(r'^\s\s([^:]+):\s+(\S+)$').search
RE_SECTION = re.compile(r"^(\S[^:]*):").search
RE_FIELD = re.compile(r"^\s\s([^:]+):\s+(\S+)$").search
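
As a quick sketch of what these two patterns match (sample strings hypothetical):

assert RE_SECTION("origin:").group(1) == "origin"
assert RE_FIELD("  release: v1.2.3").groups() == ("release", "v1.2.3")
assert RE_SECTION("  indented: no") is None  # sections must start at column 0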
class VerifyError(Exception):
@@ -161,7 +169,7 @@ class VerifyError(Exception):
self.error = error
def __str__(self):
return '%s: %s' % (self.filename, self.error)
return "%s: %s" % (self.filename, self.error)
def load_moz_yaml(filename, verify=True, require_license_file=True):
@@ -169,12 +177,11 @@ def load_moz_yaml(filename, verify=True, require_license_file=True):
# Load and parse YAML.
try:
with open(filename, 'r') as f:
with open(filename, "r") as f:
manifest = yaml.safe_load(f)
except IOError as e:
if e.errno == errno.ENOENT:
raise VerifyError(filename,
'Failed to find manifest: %s' % filename)
raise VerifyError(filename, "Failed to find manifest: %s" % filename)
raise
except MarkedYAMLError as e:
raise VerifyError(filename, e)
@@ -183,18 +190,17 @@ def load_moz_yaml(filename, verify=True, require_license_file=True):
return manifest
# Verify schema.
if 'schema' not in manifest:
if "schema" not in manifest:
raise VerifyError(filename, 'Missing manifest "schema"')
if manifest['schema'] == 1:
if manifest["schema"] == 1:
schema = _schema_1()
schema_additional = _schema_1_additional
else:
raise VerifyError(filename, 'Unsupported manifest schema')
raise VerifyError(filename, "Unsupported manifest schema")
try:
schema(manifest)
schema_additional(filename, manifest,
require_license_file=require_license_file)
schema_additional(filename, manifest, require_license_file=require_license_file)
except (voluptuous.Error, ValueError) as e:
raise VerifyError(filename, e)
@@ -221,86 +227,92 @@ def update_moz_yaml(filename, release, revision, verify=True, write=True):
m = RE_FIELD(line)
if m:
(name, value) = m.groups()
if section == 'origin' and name == 'release':
line = ' release: %s\n' % release
if section == "origin" and name == "release":
line = " release: %s\n" % release
found_release = True
elif section == 'vendoring' and name == 'revision':
line = ' revision: %s\n' % revision
elif section == "vendoring" and name == "revision":
line = " revision: %s\n" % revision
found_revision = True
lines.append(line)
if not (found_release and found_revision):
raise ValueError('Failed to find origin:release and '
'vendoring:revision')
raise ValueError("Failed to find origin:release and " "vendoring:revision")
if write:
with open(filename, 'w') as f:
with open(filename, "w") as f:
f.writelines(lines)
def _schema_1():
"""Returns Voluptuous Schema object."""
return Schema({
Required('schema'): 1,
Required('bugzilla'): {
Required('product'): All(str, Length(min=1)),
Required('component'): All(str, Length(min=1)),
return Schema(
{
Required("schema"): 1,
Required("bugzilla"): {
Required("product"): All(str, Length(min=1)),
Required("component"): All(str, Length(min=1)),
},
'origin': {
Required('name'): All(str, Length(min=1)),
Required('description'): All(str, Length(min=1)),
Required('url'): FqdnUrl(),
Required('license'): Msg(License(), msg='Unsupported License'),
Required('release'): All(str, Length(min=1)),
"origin": {
Required("name"): All(str, Length(min=1)),
Required("description"): All(str, Length(min=1)),
Required("url"): FqdnUrl(),
Required("license"): Msg(License(), msg="Unsupported License"),
Required("release"): All(str, Length(min=1)),
},
'vendoring': {
Required('url'): FqdnUrl(),
Required('revision'): Match(r'^[a-fA-F0-9]{12,40}$'),
'patches': Unique([str]),
'keep': Unique([str]),
'exclude': Unique([str]),
'include': Unique([str]),
'run_after': Unique([str]),
"vendoring": {
Required("url"): FqdnUrl(),
Required("revision"): Match(r"^[a-fA-F0-9]{12,40}$"),
"patches": Unique([str]),
"keep": Unique([str]),
"exclude": Unique([str]),
"include": Unique([str]),
"run_after": Unique([str]),
},
})
}
)
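
A minimal sketch of a manifest dict that this schema accepts; every value below is hypothetical:

manifest = {
    "schema": 1,
    "bugzilla": {"product": "Core", "component": "Audio/Video"},
    "origin": {
        "name": "example",
        "description": "An example library",
        "url": "https://example.com/",
        "license": "MIT",
        "release": "v1.0.0",
    },
    "vendoring": {
        "url": "https://example.com/example",
        "revision": "0123456789abcdef",  # 12-40 hex characters
    },
}
_schema_1()(manifest)  # raises voluptuous.Error if anything is off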
def _schema_1_additional(filename, manifest, require_license_file=True):
"""Additional schema/validity checks"""
# LICENSE file must exist.
if require_license_file and 'origin' in manifest:
files = [f.lower() for f in os.listdir(os.path.dirname(filename))
if f.lower().startswith('license')]
if not ('license' in files
or 'license.txt' in files
or 'license.rst' in files
or 'license.html' in files
or 'license.md' in files):
license = manifest['origin']['license']
if require_license_file and "origin" in manifest:
files = [
f.lower()
for f in os.listdir(os.path.dirname(filename))
if f.lower().startswith("license")
]
if not (
"license" in files
or "license.txt" in files
or "license.rst" in files
or "license.html" in files
or "license.md" in files
):
license = manifest["origin"]["license"]
if isinstance(license, list):
license = '/'.join(license)
raise ValueError('Failed to find %s LICENSE file' % license)
license = "/".join(license)
raise ValueError("Failed to find %s LICENSE file" % license)
# Cannot vendor without an origin.
if 'vendoring' in manifest and 'origin' not in manifest:
if "vendoring" in manifest and "origin" not in manifest:
raise ValueError('"vendoring" requires an "origin"')
# Check for a simple YAML file
with open(filename, 'r') as f:
with open(filename, "r") as f:
has_schema = False
for line in f.readlines():
m = RE_SECTION(line)
if m:
if m.group(1) == 'schema':
if m.group(1) == "schema":
has_schema = True
break
if not has_schema:
raise ValueError('Not simple YAML')
raise ValueError("Not simple YAML")
# Verify YAML can be updated.
if 'vendor' in manifest:
update_moz_yaml(filename, '', '', verify=False, write=True)
if "vendor" in manifest:
update_moz_yaml(filename, "", "", verify=False, write=True)
class License(object):
@@ -311,11 +323,11 @@ class License(object):
if isinstance(values, str):
values = [values]
elif not isinstance(values, list):
raise ValueError('Must be string or list')
raise ValueError("Must be string or list")
for v in values:
if v not in VALID_LICENSES:
raise ValueError('Bad License')
raise ValueError("Bad License")
return values
def __repr__(self):
return 'License'
return "License"

python/mozbuild/mozbuild/vendor/vendor_aom.py (vendored)
View file

@@ -5,9 +5,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import logging
from mozbuild.base import (
MozbuildObject,
)
from mozbuild.base import MozbuildObject
import mozfile
import mozpack.path as mozpath
import os
@@ -20,43 +18,47 @@ from urllib.parse import urlparse
class VendorAOM(MozbuildObject):
def upstream_snapshot(self, revision):
'''Construct a url for a tarball snapshot of the given revision.'''
if 'googlesource' in self.repo_url:
return mozpath.join(self.repo_url, '+archive', revision + '.tar.gz')
elif 'github' in self.repo_url:
return mozpath.join(self.repo_url, 'archive', revision + '.tar.gz')
"""Construct a url for a tarball snapshot of the given revision."""
if "googlesource" in self.repo_url:
return mozpath.join(self.repo_url, "+archive", revision + ".tar.gz")
elif "github" in self.repo_url:
return mozpath.join(self.repo_url, "archive", revision + ".tar.gz")
else:
raise ValueError('Unknown git host, no snapshot lookup method')
raise ValueError("Unknown git host, no snapshot lookup method")
def upstream_commit(self, revision):
'''Convert a revision to a git commit and timestamp.
"""Convert a revision to a git commit and timestamp.
Ask the upstream repo to convert the requested revision to
a git commit id and timestamp, so we can be precise in
what we're vendoring.'''
if 'googlesource' in self.repo_url:
what we're vendoring."""
if "googlesource" in self.repo_url:
return self.upstream_googlesource_commit(revision)
elif 'github' in self.repo_url:
elif "github" in self.repo_url:
return self.upstream_github_commit(revision)
else:
raise ValueError('Unknown git host, no commit lookup method')
raise ValueError("Unknown git host, no commit lookup method")
def upstream_validate(self, url):
'''Validate repository urls to make sure we can handle them.'''
"""Validate repository urls to make sure we can handle them."""
host = urlparse(url).netloc
valid_domains = ('googlesource.com', 'github.com')
valid_domains = ("googlesource.com", "github.com")
if not any(filter(lambda domain: domain in host, valid_domains)):
self.log(logging.ERROR, 'upstream_url', {},
'''Unsupported git host %s; cannot fetch snapshots.
self.log(
logging.ERROR,
"upstream_url",
{},
"""Unsupported git host %s; cannot fetch snapshots.
Please set a repository url with --repo on either googlesource or github.''' % host)
Please set a repository url with --repo on either googlesource or github."""
% host,
)
sys.exit(1)
def upstream_googlesource_commit(self, revision):
'''Query gitiles for a git commit and timestamp.'''
url = mozpath.join(self.repo_url, '+', revision + '?format=JSON')
self.log(logging.INFO, 'fetch', {'url': url},
'Fetching commit id from {url}')
"""Query gitiles for a git commit and timestamp."""
url = mozpath.join(self.repo_url, "+", revision + "?format=JSON")
self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
req = requests.get(url)
req.raise_for_status()
try:
@@ -66,80 +68,84 @@ Please set a repository url with --repo on either googlesource or github.''' % h
# at the beginning of the json response. Work around this.
# https://bugs.chromium.org/p/chromium/issues/detail?id=718550
import json
info = json.loads(req.text[4:])
return (info['commit'], info['committer']['time'])
return (info["commit"], info["committer"]["time"])
def upstream_github_commit(self, revision):
'''Query the github api for a git commit id and timestamp.'''
github_api = 'https://api.github.com/'
"""Query the github api for a git commit id and timestamp."""
github_api = "https://api.github.com/"
repo = urlparse(self.repo_url).path[1:]
url = mozpath.join(github_api, 'repos', repo, 'commits', revision)
self.log(logging.INFO, 'fetch', {'url': url},
'Fetching commit id from {url}')
url = mozpath.join(github_api, "repos", repo, "commits", revision)
self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
req = requests.get(url)
req.raise_for_status()
info = req.json()
return (info['sha'], info['commit']['committer']['date'])
return (info["sha"], info["commit"]["committer"]["date"])
def fetch_and_unpack(self, revision, target):
'''Fetch and unpack upstream source'''
"""Fetch and unpack upstream source"""
url = self.upstream_snapshot(revision)
self.log(logging.INFO, 'fetch', {'url': url}, 'Fetching {url}')
prefix = 'aom-' + revision
filename = prefix + '.tar.gz'
with open(filename, 'wb') as f:
self.log(logging.INFO, "fetch", {"url": url}, "Fetching {url}")
prefix = "aom-" + revision
filename = prefix + ".tar.gz"
with open(filename, "wb") as f:
req = requests.get(url, stream=True)
for data in req.iter_content(4096):
f.write(data)
tar = tarfile.open(filename)
bad_paths = filter(lambda name: name.startswith('/') or '..' in name,
tar.getnames())
# Materialize as a list so it can be tested and indexed below; a bare
# filter object would be consumed by any() and cannot be subscripted.
bad_paths = [
name for name in tar.getnames() if name.startswith("/") or ".." in name
]
if any(bad_paths):
raise Exception("Tar archive contains non-local paths,"
"e.g. '%s'" % bad_paths[0])
self.log(logging.INFO, 'rm_vendor_dir', {}, 'rm -rf %s' % target)
raise Exception(
"Tar archive contains non-local paths, e.g. '%s'" % bad_paths[0]
)
self.log(logging.INFO, "rm_vendor_dir", {}, "rm -rf %s" % target)
mozfile.remove(target)
self.log(logging.INFO, 'unpack', {}, 'Unpacking upstream files.')
self.log(logging.INFO, "unpack", {}, "Unpacking upstream files.")
tar.extractall(target)
# Github puts everything properly down a directory; move it up.
if all(map(lambda name: name.startswith(prefix), tar.getnames())):
tardir = mozpath.join(target, prefix)
os.system('mv %s/* %s/.* %s' % (tardir, tardir, target))
os.system("mv %s/* %s/.* %s" % (tardir, tardir, target))
os.rmdir(tardir)
# Remove the tarball.
mozfile.remove(filename)
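
The path check above guards the extraction against absolute and parent-relative member names; a tiny sketch with hypothetical names:

is_unsafe = lambda name: name.startswith("/") or ".." in name
assert is_unsafe("/etc/passwd") and is_unsafe("aom-abc/../../x")
assert not is_unsafe("aom-abc123/README.md")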
def update_readme(self, revision, timestamp, target):
filename = mozpath.join(target, 'README_MOZILLA')
filename = mozpath.join(target, "README_MOZILLA")
with open(filename) as f:
readme = f.read()
prefix = 'The git commit ID used was'
prefix = "The git commit ID used was"
if prefix in readme:
new_readme = re.sub(prefix + ' [v\.a-f0-9]+.*$',
prefix + ' %s (%s).' % (revision, timestamp),
readme)
new_readme = re.sub(
prefix + " [v\.a-f0-9]+.*$",
prefix + " %s (%s)." % (revision, timestamp),
readme,
)
else:
new_readme = '%s\n\n%s %s.' % (readme, prefix, revision)
new_readme = "%s\n\n%s %s." % (readme, prefix, revision)
prefix = 'The last update was pulled from'
new_readme = re.sub(prefix + ' https*://.*',
prefix + ' %s' % self.repo_url,
new_readme)
prefix = "The last update was pulled from"
new_readme = re.sub(
prefix + " https*://.*", prefix + " %s" % self.repo_url, new_readme
)
if readme != new_readme:
with open(filename, 'w') as f:
with open(filename, "w") as f:
f.write(new_readme)
def clean_upstream(self, target):
'''Remove files we don't want to import.'''
mozfile.remove(mozpath.join(target, '.gitattributes'))
mozfile.remove(mozpath.join(target, '.gitignore'))
mozfile.remove(mozpath.join(target, 'build', '.gitattributes'))
mozfile.remove(mozpath.join(target, 'build', '.gitignore'))
"""Remove files we don't want to import."""
mozfile.remove(mozpath.join(target, ".gitattributes"))
mozfile.remove(mozpath.join(target, ".gitignore"))
mozfile.remove(mozpath.join(target, "build", ".gitattributes"))
mozfile.remove(mozpath.join(target, "build", ".gitignore"))
def generate_sources(self, target):
'''
"""
Run the library's native build system to update ours.
Invoke configure for each supported platform to generate
@@ -147,28 +153,37 @@ Please set a repository url with --repo on either googlesource or github.''' % h
makefile to obtain a list of source files, writing
these out in the appropriate format for our build
system to use.
'''
config_dir = mozpath.join(target, 'config')
self.log(logging.INFO, 'rm_confg_dir', {}, 'rm -rf %s' % config_dir)
"""
config_dir = mozpath.join(target, "config")
self.log(logging.INFO, "rm_confg_dir", {}, "rm -rf %s" % config_dir)
mozfile.remove(config_dir)
self.run_process(args=['./generate_sources_mozbuild.sh'],
cwd=target, log_name='generate_sources')
self.run_process(
args=["./generate_sources_mozbuild.sh"],
cwd=target,
log_name="generate_sources",
)
def check_modified_files(self):
'''
"""
Ensure that there aren't any uncommitted changes to files
in the working copy, since we're going to change some state
on the user.
'''
modified = self.repository.get_changed_files('M')
"""
modified = self.repository.get_changed_files("M")
if modified:
self.log(logging.ERROR, 'modified_files', {},
'''You have uncommitted changes to the following files:
self.log(
logging.ERROR,
"modified_files",
{},
"""You have uncommitted changes to the following files:
{files}
Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
'''.format(files='\n'.join(sorted(modified))))
""".format(
files="\n".join(sorted(modified))
),
)
sys.exit(1)
def vendor(self, revision, repo, ignore_modified=False):
@@ -178,30 +193,35 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
if not ignore_modified:
self.check_modified_files()
if not revision:
revision = 'master'
revision = "master"
if repo:
self.repo_url = repo
else:
self.repo_url = 'https://aomedia.googlesource.com/aom/'
self.repo_url = "https://aomedia.googlesource.com/aom/"
self.upstream_validate(self.repo_url)
commit, timestamp = self.upstream_commit(revision)
vendor_dir = mozpath.join(self.topsrcdir, 'third_party/aom')
vendor_dir = mozpath.join(self.topsrcdir, "third_party/aom")
self.fetch_and_unpack(commit, vendor_dir)
self.log(logging.INFO, 'clean_upstream', {},
'''Removing unnecessary files.''')
self.log(logging.INFO, "clean_upstream", {}, """Removing unnecessary files.""")
self.clean_upstream(vendor_dir)
glue_dir = mozpath.join(self.topsrcdir, 'media/libaom')
self.log(logging.INFO, 'generate_sources', {},
'''Generating build files...''')
glue_dir = mozpath.join(self.topsrcdir, "media/libaom")
self.log(logging.INFO, "generate_sources", {}, """Generating build files...""")
self.generate_sources(glue_dir)
self.log(logging.INFO, 'update_readme', {},
'''Updating README_MOZILLA.''')
self.log(logging.INFO, "update_readme", {}, """Updating README_MOZILLA.""")
self.update_readme(commit, timestamp, glue_dir)
self.log(logging.INFO, 'add_remove_files', {},
'''Registering changes with version control.''')
self.log(
logging.INFO,
"add_remove_files",
{},
"""Registering changes with version control.""",
)
self.repository.add_remove_files(vendor_dir, glue_dir)
self.repository.add_remove_files(glue_dir)
self.log(logging.INFO, 'done', {'revision': revision},
'''Update to aom version '{revision}' ready to commit.''')
self.log(
logging.INFO,
"done",
{"revision": revision},
"""Update to aom version '{revision}' ready to commit.""",
)

View file

@@ -5,9 +5,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import logging
from mozbuild.base import (
MozbuildObject,
)
from mozbuild.base import MozbuildObject
import mozfile
import mozpack.path as mozpath
import os
@@ -20,121 +18,136 @@ from urllib.parse import urlparse
class VendorDav1d(MozbuildObject):
def upstream_snapshot(self, revision):
'''Construct a url for a tarball snapshot of the given revision.'''
if 'code.videolan.org' in self.repo_url:
return mozpath.join(self.repo_url, '-', 'archive', revision + '.tar.gz')
"""Construct a url for a tarball snapshot of the given revision."""
if "code.videolan.org" in self.repo_url:
return mozpath.join(self.repo_url, "-", "archive", revision + ".tar.gz")
else:
raise ValueError('Unknown git host, no snapshot lookup method')
raise ValueError("Unknown git host, no snapshot lookup method")
def upstream_commit(self, revision):
'''Convert a revision to a git commit and timestamp.
"""Convert a revision to a git commit and timestamp.
Ask the upstream repo to convert the requested revision to
a git commit id and timestamp, so we can be precise in
what we're vendoring.'''
if 'code.videolan.org' in self.repo_url:
what we're vendoring."""
if "code.videolan.org" in self.repo_url:
return self.upstream_gitlab_commit(revision)
else:
raise ValueError('Unknown git host, no commit lookup method')
raise ValueError("Unknown git host, no commit lookup method")
def upstream_validate(self, url):
'''Validate repository urls to make sure we can handle them.'''
"""Validate repository urls to make sure we can handle them."""
host = urlparse(url).netloc
valid_domains = ('code.videolan.org')
valid_domains = "code.videolan.org"
if not any(filter(lambda domain: domain in host, valid_domains)):
self.log(logging.ERROR, 'upstream_url', {},
'''Unsupported git host %s; cannot fetch snapshots.
self.log(
logging.ERROR,
"upstream_url",
{},
"""Unsupported git host %s; cannot fetch snapshots.
Please set a repository url with --repo on either googlesource or github.''' % host)
Please set a repository url with --repo on either googlesource or github."""
% host,
)
sys.exit(1)
def upstream_gitlab_commit(self, revision):
'''Query the github api for a git commit id and timestamp.'''
gitlab_api = 'https://code.videolan.org/api/v4/projects/videolan%2Fdav1d/repository/commits' # noqa
"""Query the github api for a git commit id and timestamp."""
gitlab_api = "https://code.videolan.org/api/v4/projects/videolan%2Fdav1d/repository/commits" # noqa
url = mozpath.join(gitlab_api, revision)
self.log(logging.INFO, 'fetch', {'url': url},
'Fetching commit id from {url}')
self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
req = requests.get(url)
req.raise_for_status()
info = req.json()
return (info['id'], info['committed_date'])
return (info["id"], info["committed_date"])
def fetch_and_unpack(self, revision, target):
'''Fetch and unpack upstream source'''
"""Fetch and unpack upstream source"""
url = self.upstream_snapshot(revision)
self.log(logging.INFO, 'fetch', {'url': url}, 'Fetching {url}')
prefix = 'dav1d-' + revision
filename = prefix + '.tar.gz'
with open(filename, 'wb') as f:
self.log(logging.INFO, "fetch", {"url": url}, "Fetching {url}")
prefix = "dav1d-" + revision
filename = prefix + ".tar.gz"
with open(filename, "wb") as f:
req = requests.get(url, stream=True)
for data in req.iter_content(4096):
f.write(data)
tar = tarfile.open(filename)
bad_paths = filter(lambda name: name.startswith('/') or '..' in name,
tar.getnames())
# Materialize as a list so it can be tested and indexed below; a bare
# filter object would be consumed by any() and cannot be subscripted.
bad_paths = [
name for name in tar.getnames() if name.startswith("/") or ".." in name
]
if any(bad_paths):
raise Exception("Tar archive contains non-local paths,"
"e.g. '%s'" % bad_paths[0])
self.log(logging.INFO, 'rm_vendor_dir', {}, 'rm -rf %s' % target)
raise Exception(
"Tar archive contains non-local paths, e.g. '%s'" % bad_paths[0]
)
self.log(logging.INFO, "rm_vendor_dir", {}, "rm -rf %s" % target)
mozfile.remove(target)
self.log(logging.INFO, 'unpack', {}, 'Unpacking upstream files.')
self.log(logging.INFO, "unpack", {}, "Unpacking upstream files.")
tar.extractall(target)
# Github puts everything properly down a directory; move it up.
if all(map(lambda name: name.startswith(prefix), tar.getnames())):
tardir = mozpath.join(target, prefix)
os.system('mv %s/* %s/.* %s' % (tardir, tardir, target))
os.system("mv %s/* %s/.* %s" % (tardir, tardir, target))
os.rmdir(tardir)
# Remove the tarball.
mozfile.remove(filename)
def update_yaml(self, revision, timestamp, target):
filename = mozpath.join(target, 'moz.yaml')
filename = mozpath.join(target, "moz.yaml")
with open(filename) as f:
yaml = f.read()
prefix = ' release: commit'
prefix = " release: commit"
if prefix in yaml:
new_yaml = re.sub(prefix + ' [v\.a-f0-9]+.*$',
prefix + ' %s (%s).' % (revision, timestamp),
yaml, flags=re.MULTILINE)
new_yaml = re.sub(
prefix + " [v\.a-f0-9]+.*$",
prefix + " %s (%s)." % (revision, timestamp),
yaml,
flags=re.MULTILINE,
)
else:
new_yaml = '%s\n\n%s %s.' % (yaml, prefix, revision)
new_yaml = "%s\n\n%s %s." % (yaml, prefix, revision)
if yaml != new_yaml:
with open(filename, 'w') as f:
with open(filename, "w") as f:
f.write(new_yaml)
def update_vcs_version(self, revision, vendor_dir, glue_dir):
src_filename = mozpath.join(vendor_dir, 'include/vcs_version.h.in')
dst_filename = mozpath.join(glue_dir, 'vcs_version.h')
src_filename = mozpath.join(vendor_dir, "include/vcs_version.h.in")
dst_filename = mozpath.join(glue_dir, "vcs_version.h")
with open(src_filename) as f:
vcs_version_in = f.read()
vcs_version = vcs_version_in.replace('@VCS_TAG@', revision)
with open(dst_filename, 'w') as f:
vcs_version = vcs_version_in.replace("@VCS_TAG@", revision)
with open(dst_filename, "w") as f:
f.write(vcs_version)
def clean_upstream(self, target):
'''Remove files we don't want to import.'''
mozfile.remove(mozpath.join(target, '.gitattributes'))
mozfile.remove(mozpath.join(target, '.gitignore'))
mozfile.remove(mozpath.join(target, 'build', '.gitattributes'))
mozfile.remove(mozpath.join(target, 'build', '.gitignore'))
"""Remove files we don't want to import."""
mozfile.remove(mozpath.join(target, ".gitattributes"))
mozfile.remove(mozpath.join(target, ".gitignore"))
mozfile.remove(mozpath.join(target, "build", ".gitattributes"))
mozfile.remove(mozpath.join(target, "build", ".gitignore"))
def check_modified_files(self):
'''
"""
Ensure that there aren't any uncommitted changes to files
in the working copy, since we're going to change some state
on the user.
'''
modified = self.repository.get_changed_files('M')
"""
modified = self.repository.get_changed_files("M")
if modified:
self.log(logging.ERROR, 'modified_files', {},
'''You have uncommitted changes to the following files:
self.log(
logging.ERROR,
"modified_files",
{},
"""You have uncommitted changes to the following files:
{files}
Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
'''.format(files='\n'.join(sorted(modified))))
""".format(
files="\n".join(sorted(modified))
),
)
sys.exit(1)
def vendor(self, revision, repo, ignore_modified=False):
@@ -144,29 +157,34 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
if not ignore_modified:
self.check_modified_files()
if not revision:
revision = 'master'
revision = "master"
if repo:
self.repo_url = repo
else:
self.repo_url = 'https://code.videolan.org/videolan/dav1d'
self.repo_url = "https://code.videolan.org/videolan/dav1d"
self.upstream_validate(self.repo_url)
commit, timestamp = self.upstream_commit(revision)
vendor_dir = mozpath.join(self.topsrcdir, 'third_party/dav1d')
vendor_dir = mozpath.join(self.topsrcdir, "third_party/dav1d")
self.fetch_and_unpack(commit, vendor_dir)
self.log(logging.INFO, 'clean_upstream', {},
'''Removing unnecessary files.''')
self.log(logging.INFO, "clean_upstream", {}, """Removing unnecessary files.""")
self.clean_upstream(vendor_dir)
glue_dir = mozpath.join(self.topsrcdir, 'media/libdav1d')
self.log(logging.INFO, 'update_moz.yaml', {},
'''Updating moz.yaml.''')
glue_dir = mozpath.join(self.topsrcdir, "media/libdav1d")
self.log(logging.INFO, "update_moz.yaml", {}, """Updating moz.yaml.""")
self.update_yaml(commit, timestamp, glue_dir)
self.log(logging.INFO, 'update_vcs_version', {},
'''Updating vcs_version.h.''')
self.log(logging.INFO, "update_vcs_version", {}, """Updating vcs_version.h.""")
self.update_vcs_version(commit, vendor_dir, glue_dir)
self.log(logging.INFO, 'add_remove_files', {},
'''Registering changes with version control.''')
self.log(
logging.INFO,
"add_remove_files",
{},
"""Registering changes with version control.""",
)
self.repository.add_remove_files(vendor_dir, glue_dir)
self.log(logging.INFO, 'done', {'revision': revision},
'''Update to dav1d version '{revision}' ready to commit.''')
self.log(
logging.INFO,
"done",
{"revision": revision},
"""Update to dav1d version '{revision}' ready to commit.""",
)

View file

@@ -14,7 +14,7 @@ def verify_manifests(files):
for fn in files:
try:
moz_yaml.load_moz_yaml(fn)
print('%s: OK' % fn)
print("%s: OK" % fn)
except moz_yaml.VerifyError as e:
success = False
print(e)
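
In practice this runs via the `mach vendor manifest --verify` subcommand above; called directly from Python it amounts to (manifest path hypothetical):

verify_manifests(["media/libdav1d/moz.yaml"])  # prints "<file>: OK" or the VerifyError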

View file

@@ -16,29 +16,30 @@ from mozpack.files import FileFinder
class VendorPython(MozbuildObject):
def vendor(self, packages=None, with_windows_wheel=False):
self.populate_logger()
self.log_manager.enable_unstructured()
vendor_dir = mozpath.join(
self.topsrcdir, os.path.join('third_party', 'python'))
vendor_dir = mozpath.join(self.topsrcdir, os.path.join("third_party", "python"))
packages = packages or []
if with_windows_wheel and len(packages) != 1:
raise Exception('--with-windows-wheel is only supported for a single package!')
raise Exception(
"--with-windows-wheel is only supported for a single package!"
)
self._activate_virtualenv()
pip_compile = os.path.join(self.virtualenv_manager.bin_path, 'pip-compile')
pip_compile = os.path.join(self.virtualenv_manager.bin_path, "pip-compile")
if not os.path.exists(pip_compile):
path = os.path.normpath(os.path.join(
self.topsrcdir, 'third_party', 'python', 'pip-tools'))
path = os.path.normpath(
os.path.join(self.topsrcdir, "third_party", "python", "pip-tools")
)
self.virtualenv_manager.install_pip_package(path, vendored=True)
spec = os.path.join(vendor_dir, 'requirements.in')
requirements = os.path.join(vendor_dir, 'requirements.txt')
spec = os.path.join(vendor_dir, "requirements.in")
requirements = os.path.join(vendor_dir, "requirements.txt")
with TemporaryDirectory() as spec_dir:
tmpspec = 'requirements-mach-vendor-python.in'
tmpspec = "requirements-mach-vendor-python.in"
tmpspec_absolute = os.path.join(spec_dir, tmpspec)
shutil.copyfile(spec, tmpspec_absolute)
self._update_packages(tmpspec_absolute, packages)
@@ -48,39 +49,56 @@ class VendorPython(MozbuildObject):
[
pip_compile,
tmpspec,
'--no-header',
'--no-index',
'--output-file', requirements,
'--generate-hashes'
"--no-header",
"--no-index",
"--output-file",
requirements,
"--generate-hashes",
],
# Run pip-compile from within the temporary directory so that the "via"
# annotations don't have the non-deterministic temporary path in them.
cwd=spec_dir)
cwd=spec_dir,
)
with TemporaryDirectory() as tmp:
# use requirements.txt to download archived source distributions of all packages
self.virtualenv_manager._run_pip([
'download',
'-r', requirements,
'--no-deps',
'--dest', tmp,
'--no-binary', ':all:',
'--disable-pip-version-check'])
self.virtualenv_manager._run_pip(
[
"download",
"-r",
requirements,
"--no-deps",
"--dest",
tmp,
"--no-binary",
":all:",
"--disable-pip-version-check",
]
)
if with_windows_wheel:
# This is hardcoded to CPython 2.7 for win64, which is good
# enough for what we need currently. If we need psutil for Python 3
# in the future that could be added here as well.
self.virtualenv_manager._run_pip([
'download',
'--dest', tmp,
'--no-deps',
'--only-binary', ':all:',
'--platform', 'win_amd64',
'--implementation', 'cp',
'--python-version', '27',
'--abi', 'none',
'--disable-pip-version-check',
packages[0]])
self.virtualenv_manager._run_pip(
[
"download",
"--dest",
tmp,
"--no-deps",
"--only-binary",
":all:",
"--platform",
"win_amd64",
"--implementation",
"cp",
"--python-version",
"27",
"--abi",
"none",
"--disable-pip-version-check",
packages[0],
]
)
self._extract(tmp, vendor_dir)
shutil.copyfile(tmpspec_absolute, spec)
@@ -88,59 +106,61 @@ class VendorPython(MozbuildObject):
def _update_packages(self, spec, packages):
for package in packages:
if not all(package.partition('==')):
raise Exception('Package {} must be in the format name==version'.format(package))
if not all(package.partition("==")):
raise Exception(
"Package {} must be in the format name==version".format(package)
)
requirements = {}
with open(spec, 'r') as f:
with open(spec, "r") as f:
comments = []
for line in f.readlines():
line = line.strip()
if not line or line.startswith('#'):
if not line or line.startswith("#"):
comments.append(line)
continue
name, version = line.split('==')
name, version = line.split("==")
requirements[name] = version, comments
comments = []
for package in packages:
name, version = package.split('==')
name, version = package.split("==")
requirements[name] = version, []
with open(spec, 'w') as f:
with open(spec, "w") as f:
for name, (version, comments) in sorted(requirements.items()):
if comments:
f.write('{}\n'.format('\n'.join(comments)))
f.write('{}=={}\n'.format(name, version))
f.write("{}\n".format("\n".join(comments)))
f.write("{}=={}\n".format(name, version))
def _extract(self, src, dest):
"""extract source distribution into vendor directory"""
finder = FileFinder(src)
for path, _ in finder.find('*'):
for path, _ in finder.find("*"):
base, ext = os.path.splitext(path)
if ext == '.whl':
if ext == ".whl":
# Wheels would extract into a directory with the name of the package, but
# we want the platform signifiers, minus the version number.
# Wheel filenames look like:
# {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}
bits = base.split('-')
bits = base.split("-")
# Remove the version number.
bits.pop(1)
target = os.path.join(dest, '-'.join(bits))
target = os.path.join(dest, "-".join(bits))
mozfile.remove(target) # remove existing version of vendored package
os.mkdir(target)
mozfile.extract(os.path.join(finder.base, path), target)
else:
# packages extract into package-version directory name and we strip the version
tld = mozfile.extract(os.path.join(finder.base, path), dest)[0]
target = os.path.join(dest, tld.rpartition('-')[0])
target = os.path.join(dest, tld.rpartition("-")[0])
mozfile.remove(target) # remove existing version of vendored package
mozfile.move(tld, target)
# If any files inside the vendored package were symlinks, turn them into normal files
# because hg.mozilla.org forbids symlinks in the repository.
link_finder = FileFinder(target)
for _, f in link_finder.find('**'):
for _, f in link_finder.find("**"):
if os.path.islink(f.path):
link_target = os.path.realpath(f.path)
os.unlink(f.path)
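
A sketch of the wheel-name handling above, using a hypothetical wheel filename:

base = "psutil-5.7.0-cp27-none-win_amd64"  # filename minus the .whl extension
bits = base.split("-")
bits.pop(1)  # drop the version number
assert "-".join(bits) == "psutil-cp27-none-win_amd64"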

python/mozbuild/mozbuild/vendor/vendor_rust.py (vendored)
View file

@@ -24,7 +24,7 @@ from mozbuild.base import (
)
CARGO_CONFIG_TEMPLATE = '''\
CARGO_CONFIG_TEMPLATE = """\
# This file contains vendoring instructions for cargo.
# It was generated by `mach vendor rust`.
# Please do not edit.
@@ -51,97 +51,118 @@ directory = "{directory}"
#filter substitution
[source."@REPLACE_NAME@"]
directory = "@top_srcdir@/@VENDORED_DIRECTORY@"
'''
"""
CARGO_LOCK_NOTICE = '''
CARGO_LOCK_NOTICE = """
NOTE: `cargo vendor` may have made changes to your Cargo.lock. To restore your
Cargo.lock to the HEAD version, run `git checkout -- Cargo.lock` or
`hg revert Cargo.lock`.
'''
"""
class VendorRust(MozbuildObject):
def get_cargo_path(self):
try:
return self.substs['CARGO']
return self.substs["CARGO"]
except (BuildEnvironmentNotFoundException, KeyError):
# Default if this tree isn't configured.
from mozfile import which
cargo = which('cargo')
cargo = which("cargo")
if not cargo:
raise OSError(errno.ENOENT, "Could not find 'cargo' on your $PATH.")
return cargo
def check_cargo_version(self, cargo):
'''
"""
Ensure that cargo is new enough. cargo 1.37 added support
for the vendor command.
'''
out = subprocess.check_output([cargo, '--version']).splitlines()[0].decode('UTF-8')
if not out.startswith('cargo'):
"""
out = (
subprocess.check_output([cargo, "--version"])
.splitlines()[0]
.decode("UTF-8")
)
if not out.startswith("cargo"):
return False
return LooseVersion(out.split()[1]) >= '1.37'
return LooseVersion(out.split()[1]) >= "1.37"
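
A sketch of the parse-and-compare step, assuming a typical first line of `cargo --version` output:

from distutils.version import LooseVersion

out = "cargo 1.41.0 (626f0f40e 2019-12-03)"  # hypothetical output
assert out.startswith("cargo")
assert LooseVersion(out.split()[1]) >= "1.37"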
def check_modified_files(self):
'''
"""
Ensure that there aren't any uncommitted changes to files
in the working copy, since we're going to change some state
on the user. Allow changes to Cargo.{toml,lock} since that's
likely to be a common use case.
'''
modified = [f for f in self.repository.get_changed_files(
'M') if os.path.basename(f) not in ('Cargo.toml', 'Cargo.lock')]
"""
modified = [
f
for f in self.repository.get_changed_files("M")
if os.path.basename(f) not in ("Cargo.toml", "Cargo.lock")
]
if modified:
self.log(logging.ERROR, 'modified_files', {},
'''You have uncommitted changes to the following files:
self.log(
logging.ERROR,
"modified_files",
{},
"""You have uncommitted changes to the following files:
{files}
Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
'''.format(files='\n'.join(sorted(modified))))
""".format(
files="\n".join(sorted(modified))
),
)
sys.exit(1)
def check_openssl(self):
'''
"""
Set environment flags for building with openssl.
MacOS doesn't include openssl, but the openssl-sys crate used by
mach-vendor expects one of the system. It's common to have one
installed in /usr/local/opt/openssl by homebrew, but custom link
flags are necessary to build against it.
'''
"""
test_paths = ['/usr/include', '/usr/local/include']
if any([os.path.exists(os.path.join(path, 'openssl/ssl.h')) for path in test_paths]):
test_paths = ["/usr/include", "/usr/local/include"]
if any(
[os.path.exists(os.path.join(path, "openssl/ssl.h")) for path in test_paths]
):
# Assume we can use one of these system headers.
return None
if os.path.exists('/usr/local/opt/openssl/include/openssl/ssl.h'):
if os.path.exists("/usr/local/opt/openssl/include/openssl/ssl.h"):
# Found a likely homebrew install.
self.log(logging.INFO, 'openssl', {},
'Using OpenSSL in /usr/local/opt/openssl')
self.log(
logging.INFO, "openssl", {}, "Using OpenSSL in /usr/local/opt/openssl"
)
return {
'OPENSSL_INCLUDE_DIR': '/usr/local/opt/openssl/include',
'OPENSSL_LIB_DIR': '/usr/local/opt/openssl/lib',
"OPENSSL_INCLUDE_DIR": "/usr/local/opt/openssl/include",
"OPENSSL_LIB_DIR": "/usr/local/opt/openssl/lib",
}
self.log(logging.ERROR, 'openssl', {}, "OpenSSL not found!")
self.log(logging.ERROR, "openssl", {}, "OpenSSL not found!")
return None
def _ensure_cargo(self):
'''
"""
Ensures all the necessary cargo bits are installed.
Returns the path to cargo if successful, None otherwise.
'''
"""
cargo = self.get_cargo_path()
if not self.check_cargo_version(cargo):
self.log(logging.ERROR, 'cargo_version', {},
'Cargo >= 1.37 required (install Rust 1.37 or newer)')
self.log(
logging.ERROR,
"cargo_version",
{},
"Cargo >= 1.37 required (install Rust 1.37 or newer)",
)
return None
else:
self.log(logging.DEBUG, 'cargo_version', {}, 'cargo is new enough')
self.log(logging.DEBUG, "cargo_version", {}, "cargo is new enough")
return cargo
@@ -160,30 +181,30 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
# Licenses for code used at runtime. Please see the above comment before
# adding anything to this list.
RUNTIME_LICENSE_WHITELIST = [
'Apache-2.0',
'Apache-2.0 WITH LLVM-exception',
"Apache-2.0",
"Apache-2.0 WITH LLVM-exception",
# BSD-2-Clause and BSD-3-Clause are ok, but packages using them
# must be added to the appropriate section of about:licenses.
# To encourage people to remember to do that, we do not whitelist
# the licenses themselves, and we require the packages to be added
# to RUNTIME_LICENSE_PACKAGE_WHITELIST below.
'CC0-1.0',
'ISC',
'MIT',
'MPL-2.0',
'Unlicense',
'Zlib',
"CC0-1.0",
"ISC",
"MIT",
"MPL-2.0",
"Unlicense",
"Zlib",
]
# Licenses for code used at build time (e.g. code generators). Please see the above
# comments before adding anything to this list.
BUILDTIME_LICENSE_WHITELIST = {
'BSD-3-Clause': [
'bindgen',
'fuchsia-zircon',
'fuchsia-zircon-sys',
'fuchsia-cprng',
'glsl',
"BSD-3-Clause": [
"bindgen",
"fuchsia-zircon",
"fuchsia-zircon-sys",
"fuchsia-cprng",
"glsl",
]
}
@@ -195,7 +216,6 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
'cloudabi',
'Inflector',
'mach',
'qlog',
],
'BSD-3-Clause': [
]
@@ -211,10 +231,10 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
# somebody competent to review licensing minutiae.
RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST = {
# MIT
'deque': '6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb',
"deque": "6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb",
# we're whitelisting this fuchsia crate because it doesn't get built in the final
# product but has a license-file that needs ignoring
'fuchsia-cprng': '03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b',
"fuchsia-cprng": "03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b",
}
@staticmethod
@@ -236,10 +256,10 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
we will abort if that is detected. We'll handle `/` and OR as
equivalent and approve if any is in our approved list."""
if re.search(r'\s+AND', license_string):
if re.search(r"\s+AND", license_string):
return False
license_list = re.split(r'\s*/\s*|\s+OR\s+', license_string)
license_list = re.split(r"\s*/\s*|\s+OR\s+", license_string)
for license in license_list:
if license in VendorRust.RUNTIME_LICENSE_WHITELIST:
return True
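
A sketch of the `/`/OR splitting implemented here (license strings hypothetical):

import re

assert re.split(r"\s*/\s*|\s+OR\s+", "MIT / Apache-2.0") == ["MIT", "Apache-2.0"]
assert re.split(r"\s*/\s*|\s+OR\s+", "MIT OR Zlib") == ["MIT", "Zlib"]
assert re.search(r"\s+AND", "Apache-2.0 AND MIT")  # conjunctions are rejected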
@@ -252,59 +272,95 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
LICENSE_FILE_LINE_RE = re.compile(r'\s*license[-_]file\s*=\s*"([^"]+)"')
def verify_acceptable_license(package, license):
self.log(logging.DEBUG, 'package_license', {},
'has license {}'.format(license))
self.log(
logging.DEBUG, "package_license", {}, "has license {}".format(license)
)
if not self.runtime_license(package, license):
if license not in self.BUILDTIME_LICENSE_WHITELIST:
self.log(logging.ERROR, 'package_license_error', {},
'''Package {} has a non-approved license: {}.
self.log(
logging.ERROR,
"package_license_error",
{},
"""Package {} has a non-approved license: {}.
Please request license review on the package's license. If the package's license
is approved, please add it to the whitelist of suitable licenses.
'''.format(package, license))
""".format(
package, license
),
)
return False
elif package not in self.BUILDTIME_LICENSE_WHITELIST[license]:
self.log(logging.ERROR, 'package_license_error', {},
'''Package {} has a license that is approved for build-time dependencies: {}
self.log(
logging.ERROR,
"package_license_error",
{},
"""Package {} has a license that is approved for build-time dependencies: {}
but the package itself is not whitelisted as being a build-time only package.
If your package is build-time only, please add it to the whitelist of build-time
only packages. Otherwise, you need to request license review on the package's license.
If the package's license is approved, please add it to the whitelist of suitable licenses.
'''.format(package, license))
""".format(
package, license
),
)
return False
def check_package(package):
self.log(logging.DEBUG, 'package_check', {},
'Checking license for {}'.format(package))
self.log(
logging.DEBUG,
"package_check",
{},
"Checking license for {}".format(package),
)
toml_file = os.path.join(vendor_dir, package, 'Cargo.toml')
toml_file = os.path.join(vendor_dir, package, "Cargo.toml")
# pytoml is not sophisticated enough to parse Cargo.toml files
# with [target.'cfg(...)'.dependencies sections, so we resort
# to scanning individual lines.
with io.open(toml_file, 'r', encoding='utf-8') as f:
license_lines = [l for l in f if l.strip().startswith('license')]
with io.open(toml_file, "r", encoding="utf-8") as f:
license_lines = [l for l in f if l.strip().startswith("license")]
license_matches = list(
filter(lambda x: x, [LICENSE_LINE_RE.match(l) for l in license_lines]))
filter(
lambda x: x, [LICENSE_LINE_RE.match(l) for l in license_lines]
)
)
license_file_matches = list(
filter(lambda x: x, [LICENSE_FILE_LINE_RE.match(l) for l in license_lines]))
filter(
lambda x: x,
[LICENSE_FILE_LINE_RE.match(l) for l in license_lines],
)
)
# License information is optional for crates to provide, but
# we require it.
if not license_matches and not license_file_matches:
self.log(logging.ERROR, 'package_no_license', {},
'package {} does not provide a license'.format(package))
self.log(
logging.ERROR,
"package_no_license",
{},
"package {} does not provide a license".format(package),
)
return False
# The Cargo.toml spec suggests that crates should either have
# `license` or `license-file`, but not both. We might as well
# be defensive about that, though.
if len(license_matches) > 1 or len(license_file_matches) > 1 or \
license_matches and license_file_matches:
self.log(logging.ERROR, 'package_many_licenses', {},
'package {} provides too many licenses'.format(package))
if (
len(license_matches) > 1
or len(license_file_matches) > 1
or license_matches
and license_file_matches
):
self.log(
logging.ERROR,
"package_many_licenses",
{},
"package {} provides too many licenses".format(package),
)
return False
if license_matches:
@@ -312,29 +368,48 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
verify_acceptable_license(package, license)
else:
license_file = license_file_matches[0].group(1)
self.log(logging.DEBUG, 'package_license_file', {},
'has license-file {}'.format(license_file))
self.log(
logging.DEBUG,
"package_license_file",
{},
"has license-file {}".format(license_file),
)
if package not in self.RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST:
self.log(logging.ERROR, 'package_license_file_unknown', {},
'''Package {} has an unreviewed license file: {}.
self.log(
logging.ERROR,
"package_license_file_unknown",
{},
"""Package {} has an unreviewed license file: {}.
Please request review on the provided license; if approved, the package can be added
to the whitelist of packages whose licenses are suitable.
'''.format(package, license_file))
""".format(
package, license_file
),
)
return False
approved_hash = self.RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST[package]
license_contents = open(os.path.join(
vendor_dir, package, license_file), 'r').read()
current_hash = hashlib.sha256(license_contents.encode('UTF-8')).hexdigest()
license_contents = open(
os.path.join(vendor_dir, package, license_file), "r"
).read()
current_hash = hashlib.sha256(
license_contents.encode("UTF-8")
).hexdigest()
if current_hash != approved_hash:
self.log(logging.ERROR, 'package_license_file_mismatch', {},
'''Package {} has changed its license file: {} (hash {}).
self.log(
logging.ERROR,
"package_license_file_mismatch",
{},
"""Package {} has changed its license file: {} (hash {}).
Please request review on the provided license; if approved, please update the
license file's hash.
'''.format(package, license_file, current_hash))
""".format(
package, license_file, current_hash
),
)
return False
return True
@@ -342,12 +417,16 @@ license file's hash.
# Force all of the packages to be checked for license information
# before reducing via `all`, so all license issues are found in a
# single `mach vendor rust` invocation.
results = [check_package(p) for p in os.listdir(vendor_dir)
if os.path.isdir(os.path.join(vendor_dir, p))]
results = [
check_package(p)
for p in os.listdir(vendor_dir)
if os.path.isdir(os.path.join(vendor_dir, p))
]
return all(results)
def vendor(self, ignore_modified=False,
build_peers_said_large_imports_were_ok=False):
def vendor(
self, ignore_modified=False, build_peers_said_large_imports_were_ok=False
):
self.populate_logger()
self.log_manager.enable_unstructured()
if not ignore_modified:
@@ -357,24 +436,25 @@ license file's hash.
if not cargo:
return
relative_vendor_dir = 'third_party/rust'
relative_vendor_dir = "third_party/rust"
vendor_dir = mozpath.join(self.topsrcdir, relative_vendor_dir)
# We use check_call instead of mozprocess to ensure errors are displayed.
# We do an |update -p| here to regenerate the Cargo.lock file with minimal
# changes. See bug 1324462
subprocess.check_call([cargo, 'update', '-p', 'gkrust'], cwd=self.topsrcdir)
subprocess.check_call([cargo, "update", "-p", "gkrust"], cwd=self.topsrcdir)
output = subprocess.check_output([cargo, 'vendor', vendor_dir],
stderr=subprocess.STDOUT,
cwd=self.topsrcdir).decode('UTF-8')
output = subprocess.check_output(
[cargo, "vendor", vendor_dir], stderr=subprocess.STDOUT, cwd=self.topsrcdir
).decode("UTF-8")
# Get the snippet of configuration that cargo vendor outputs, and
# update .cargo/config with it.
# XXX(bug 1576765): Hopefully do something better after
# https://github.com/rust-lang/cargo/issues/7280 is addressed.
config = '\n'.join(dropwhile(lambda l: not l.startswith('['),
output.splitlines()))
config = "\n".join(
dropwhile(lambda l: not l.startswith("["), output.splitlines())
)
# The config is toml, parse it as such.
config = pytoml.loads(config)
@@ -382,30 +462,33 @@ license file's hash.
# For each replace-with, extract their configuration and update the
# corresponding directory to be relative to topsrcdir.
replaces = {
v['replace-with']
for v in config['source'].values()
if 'replace-with' in v
v["replace-with"] for v in config["source"].values() if "replace-with" in v
}
# We only really expect one replace-with
if len(replaces) != 1:
self.log(
logging.ERROR, 'vendor_failed', {},
'''cargo vendor didn't output a unique replace-with. Found: %s.''' % replaces)
logging.ERROR,
"vendor_failed",
{},
"""cargo vendor didn't output a unique replace-with. Found: %s."""
% replaces,
)
sys.exit(1)
replace_name = replaces.pop()
replace = config['source'].pop(replace_name)
replace['directory'] = mozpath.relpath(
mozpath.normsep(os.path.normcase(replace['directory'])),
replace = config["source"].pop(replace_name)
replace["directory"] = mozpath.relpath(
mozpath.normsep(os.path.normcase(replace["directory"])),
mozpath.normsep(os.path.normcase(self.topsrcdir)),
)
# Introduce some determinism for the output.
def recursive_sort(obj):
if isinstance(obj, dict):
return OrderedDict(sorted(
(k, recursive_sort(v)) for k, v in obj.items()))
return OrderedDict(
sorted((k, recursive_sort(v)) for k, v in obj.items())
)
if isinstance(obj, list):
return [recursive_sort(o) for o in obj]
return obj
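
For instance (input hypothetical), dict keys are ordered recursively while list order is preserved:

recursive_sort({"b": [2, 1], "a": {"d": 4, "c": 3}})
# -> OrderedDict([("a", OrderedDict([("c", 3), ("d", 4)])), ("b", [2, 1])])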
@@ -420,23 +503,30 @@ license file's hash.
if isinstance(data, dict):
for k, v in data.items():
if all(isinstance(v2, dict) for v2 in v.values()):
dump = dump.replace('[%s]' % k, '')
dump = dump.replace("[%s]" % k, "")
return dump.strip()
cargo_config = os.path.join(self.topsrcdir, '.cargo', 'config.in')
with open(cargo_config, 'w') as fh:
fh.write(CARGO_CONFIG_TEMPLATE.format(
cargo_config = os.path.join(self.topsrcdir, ".cargo", "config.in")
with open(cargo_config, "w") as fh:
fh.write(
CARGO_CONFIG_TEMPLATE.format(
config=toml_dump(config),
replace_name=replace_name,
directory=replace['directory'],
))
directory=replace["directory"],
)
)
if not self._check_licenses(vendor_dir):
self.log(
logging.ERROR, 'license_check_failed', {},
'''The changes from `mach vendor rust` will NOT be added to version control.
logging.ERROR,
"license_check_failed",
{},
"""The changes from `mach vendor rust` will NOT be added to version control.
{notice}'''.format(notice=CARGO_LOCK_NOTICE))
{notice}""".format(
notice=CARGO_LOCK_NOTICE
),
)
self.repository.clean_directory(vendor_dir)
sys.exit(1)
@@ -446,7 +536,7 @@ license file's hash.
FILESIZE_LIMIT = 100 * 1024
large_files = set()
cumulative_added_size = 0
for f in self.repository.get_changed_files('A'):
for f in self.repository.get_changed_files("A"):
path = mozpath.join(self.topsrcdir, f)
size = os.stat(path).st_size
cumulative_added_size += size
@@ -456,8 +546,11 @@ license file's hash.
# Forcefully complain about large files being added, as history has
# shown that large-ish files typically are not needed.
if large_files and not build_peers_said_large_imports_were_ok:
self.log(logging.ERROR, 'filesize_check', {},
'''The following files exceed the filesize limit of {size}:
self.log(
logging.ERROR,
"filesize_check",
{},
"""The following files exceed the filesize limit of {size}:
{files}
@@ -467,8 +560,12 @@ adding.
The changes from `mach vendor rust` will NOT be added to version control.
{notice}'''.format(files='\n'.join(sorted(large_files)), size=FILESIZE_LIMIT,
notice=CARGO_LOCK_NOTICE))
{notice}""".format(
files="\n".join(sorted(large_files)),
size=FILESIZE_LIMIT,
notice=CARGO_LOCK_NOTICE,
),
)
self.repository.forget_add_remove_files(vendor_dir)
self.repository.clean_directory(vendor_dir)
sys.exit(1)
@@ -477,12 +574,16 @@ The changes from `mach vendor rust` will NOT be added to version control.
# drops from time to time (e.g. importing features into m-c).
SIZE_WARN_THRESHOLD = 5 * 1024 * 1024
if cumulative_added_size >= SIZE_WARN_THRESHOLD:
self.log(logging.WARN, 'filesize_check', {},
'''Your changes add {size} bytes of added files.
self.log(
logging.WARN,
"filesize_check",
{},
"""Your changes add {size} bytes of added files.
Please consider finding ways to reduce the size of the vendored packages.
For instance, check the vendored packages for unusually large test or
benchmark files that don't need to be published to crates.io and submit
a pull request upstream to ignore those files when publishing.'''.format(
size=cumulative_added_size)
a pull request upstream to ignore those files when publishing.""".format(
size=cumulative_added_size
),
)