Backed out 2 changesets (bug 1543247) for build bustages. CLOSED TREE

Backed out changeset feb726e4f15d (bug 1543247)
Backed out changeset 4b3619d89abd (bug 1543247)
Razvan Maries 2019-05-04 03:10:55 +03:00
Parent 399a45f767
Commit 12bcfbb334
8 changed files with 245 additions and 6100 deletions

View file

@@ -129,9 +129,8 @@ _OPT\.OBJ/
# Ignore tox generated dir
.tox/
# Ignore ESLint and other tools' node_modules.
# Ignore ESLint node_modules
^node_modules/
^tools/browsertime/node_modules/
^tools/lint/eslint/eslint-plugin-mozilla/node_modules/
# Ignore talos virtualenv and tp5n files.

View file

@@ -61,7 +61,6 @@ MACH_MODULES = [
'testing/web-platform/mach_commands.py',
'testing/xpcshell/mach_commands.py',
'toolkit/components/telemetry/tests/marionette/mach_commands.py',
'tools/browsertime/mach_commands.py',
'tools/compare-locales/mach_commands.py',
'tools/docs/mach_commands.py',
'tools/lint/mach_commands.py',

View file

@@ -1,233 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''
Fetch and cache artifacts from URLs.
This module manages fetching artifacts from URLs and purging old
artifacts using a simple Least Recently Used cache.
This module requires certain modules be importable from the ambient Python
environment. Consumers will need to arrange this themselves.
The bulk of the complexity is in managing and persisting several caches. If
we found a Python LRU cache that pickled cleanly, we could remove a lot of
this code! Sadly, I found no such candidate implementations, so we pickle
pylru caches manually.
None of the instances (or the underlying caches) are safe for concurrent use.
A future need, perhaps.
'''
from __future__ import absolute_import, print_function, unicode_literals
import binascii
import hashlib
import logging
import os
import urlparse
from mozbuild.util import (
mkdir,
)
import mozpack.path as mozpath
from dlmanager import (
DownloadManager,
PersistLimit,
)
# Minimum number of downloaded artifacts to keep. Each artifact can be very large,
# so don't make this too large!
MIN_CACHED_ARTIFACTS = 6
# Maximum size of the downloaded artifacts to keep in cache, in bytes (1GiB).
MAX_CACHED_ARTIFACTS_SIZE = 1024 * 1024 * 1024
class ArtifactPersistLimit(PersistLimit):
'''Handle persistence for a cache of artifacts.
When instantiating a DownloadManager, it starts by filling the
PersistLimit instance it's given with register_dir_content.
In practice, this registers all the files already in the cache directory.
After a download finishes, the newly downloaded file is registered, and the
oldest files registered to the PersistLimit instance are removed depending
on the size and file limits it's configured for.
This is all good, but there are a few tweaks we want here:
- We have pickle files in the cache directory that we don't want purged.
- Files that were just downloaded in the same session shouldn't be purged.
(if for some reason we end up downloading more than the default max size,
we don't want the files to be purged)
To achieve this, this subclass of PersistLimit inhibits the register_file
method for pickle files and tracks what files were downloaded in the same
session to avoid removing them.
The register_file method may be used to register cache matches too, so that
later sessions know they were freshly used.
'''
def __init__(self, log=None):
super(ArtifactPersistLimit, self).__init__(
size_limit=MAX_CACHED_ARTIFACTS_SIZE,
file_limit=MIN_CACHED_ARTIFACTS)
self._log = log
self._registering_dir = False
self._downloaded_now = set()
def log(self, *args, **kwargs):
if self._log:
self._log(*args, **kwargs)
def register_file(self, path):
if path.endswith('.pickle') or \
path.endswith('.checksum') or \
os.path.basename(path) == '.metadata_never_index':
return
if not self._registering_dir:
# Touch the file so that subsequent calls to a mach artifact
# command know it was recently used. While remove_old_files
# is based on access time, in various cases, the access time is not
# updated when just reading the file, so we force an update.
try:
os.utime(path, None)
except OSError:
pass
self._downloaded_now.add(path)
super(ArtifactPersistLimit, self).register_file(path)
def register_dir_content(self, directory, pattern="*"):
self._registering_dir = True
super(ArtifactPersistLimit, self).register_dir_content(
directory, pattern)
self._registering_dir = False
def remove_old_files(self):
from dlmanager import fs
files = sorted(self.files, key=lambda f: f.stat.st_atime)
kept = []
while len(files) > self.file_limit and \
self._files_size >= self.size_limit:
f = files.pop(0)
if f.path in self._downloaded_now:
kept.append(f)
continue
try:
fs.remove(f.path)
except WindowsError:
# For some reason, on automation, we can't remove those files.
# So for now, ignore the error.
kept.append(f)
continue
self.log(
logging.INFO,
'artifact',
{'filename': f.path},
'Purged artifact {filename}')
self._files_size -= f.stat.st_size
self.files = files + kept
def remove_all(self):
from dlmanager import fs
for f in self.files:
fs.remove(f.path)
self._files_size = 0
self.files = []
class ArtifactCache(object):
'''Fetch artifacts from URLs and purge least recently used artifacts from disk.'''
def __init__(self, cache_dir, log=None, skip_cache=False):
mkdir(cache_dir, not_indexed=True)
self._cache_dir = cache_dir
self._log = log
self._skip_cache = skip_cache
self._persist_limit = ArtifactPersistLimit(log)
self._download_manager = DownloadManager(
self._cache_dir, persist_limit=self._persist_limit)
self._last_dl_update = -1
def log(self, *args, **kwargs):
if self._log:
self._log(*args, **kwargs)
def fetch(self, url, force=False):
fname = os.path.basename(url)
try:
# Use the file name from the url if it looks like a hash digest.
if len(fname) not in (32, 40, 56, 64, 96, 128):
raise TypeError()
binascii.unhexlify(fname)
except TypeError:
# We download to a temporary name like HASH[:16]-basename to
# differentiate among URLs with the same basenames. We used to then
# extract the build ID from the downloaded artifact and use it to make a
# human readable unique name, but extracting build IDs is time consuming
# (especially on Mac OS X, where we must mount a large DMG file).
hash = hashlib.sha256(url).hexdigest()[:16]
# Strip query string and fragments.
basename = os.path.basename(urlparse.urlparse(url).path)
fname = hash + '-' + basename
path = os.path.abspath(mozpath.join(self._cache_dir, fname))
if self._skip_cache and os.path.exists(path):
self.log(
logging.INFO,
'artifact',
{'path': path},
'Skipping cache: removing cached downloaded artifact {path}')
os.remove(path)
self.log(
logging.INFO,
'artifact',
{'path': path},
'Downloading to temporary location {path}')
try:
dl = self._download_manager.download(url, fname)
def download_progress(dl, bytes_so_far, total_size):
if not total_size:
return
percent = (float(bytes_so_far) / total_size) * 100
now = int(percent / 5)
if now == self._last_dl_update:
return
self._last_dl_update = now
self.log(logging.INFO, 'artifact',
{'bytes_so_far': bytes_so_far,
'total_size': total_size,
'percent': percent},
'Downloading... {percent:02.1f} %')
if dl:
dl.set_progress(download_progress)
dl.wait()
else:
# Avoid the file being removed if it was in the cache already.
path = os.path.join(self._cache_dir, fname)
self._persist_limit.register_file(path)
self.log(
logging.INFO,
'artifact',
{'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
'Downloaded artifact to {path}')
return os.path.abspath(mozpath.join(self._cache_dir, fname))
finally:
# Cancel any background downloads in progress.
self._download_manager.cancel()
def clear_cache(self):
if self._skip_cache:
self.log(
logging.INFO,
'artifact',
{},
'Skipping cache: ignoring clear_cache!')
return
self._persist_limit.remove_all()
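
For orientation, a minimal usage sketch of the `ArtifactCache` class deleted above; it is not part of the tree. The cache path, URL, and logger are hypothetical, and it assumes the deleted module and its dependencies (`dlmanager`, `mozbuild`, `mozpack`) are importable from the ambient Python environment, as the module docstring requires.

```
# A minimal sketch, assuming mozbuild.artifact_cache (deleted above) is importable;
# the cache path, URL, and logger below are hypothetical.
from mozbuild.artifact_cache import ArtifactCache


def log(level, action, params, format_str):
    # Same (level, action, params, format string) shape the module's log() calls use.
    print(format_str.format(**params))


cache = ArtifactCache('/tmp/artifact-cache', log=log, skip_cache=False)
# fetch() returns the absolute path of the cached (or freshly downloaded) file;
# artifacts beyond the size/file limits are purged on the way out.
local_path = cache.fetch('https://example.com/artifacts/target.zip')
print(local_path)
```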

View file

@@ -26,6 +26,14 @@ This module performs the following steps:
extract relevant files from Mac OS X DMG files into a friendly archive format
so we don't have to mount DMG files frequently.
The bulk of the complexity is in managing and persisting several caches. If
we found a Python LRU cache that pickled cleanly, we could remove a lot of
this code! Sadly, I found no such candidate implementations, so we pickle
pylru caches manually.
None of the instances (or the underlying caches) are safe for concurrent use.
A future need, perhaps.
This module requires certain modules be importable from the ambient Python
environment. |mach artifact| ensures these modules are available, but other
consumers will need to arrange this themselves.
@@ -34,9 +42,11 @@ consumers will need to arrange this themselves.
from __future__ import absolute_import, print_function, unicode_literals
import binascii
import collections
import functools
import glob
import hashlib
import logging
import operator
import os
@@ -58,7 +68,6 @@ from taskgraph.util.taskcluster import (
list_artifacts,
)
from mozbuild.artifact_cache import ArtifactCache
from mozbuild.artifact_builds import JOB_CHOICES
from mozbuild.util import (
ensureParentDir,
@@ -76,6 +85,10 @@ from mozpack.mozjar import (
)
from mozpack.packager.unpack import UnpackFinder
import mozpack.path as mozpath
from dlmanager import (
DownloadManager,
PersistLimit,
)
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 # Number of candidate pushheads to cache per parent changeset.
@@ -88,6 +101,13 @@ NUM_REVISIONS_TO_QUERY = 500
MAX_CACHED_TASKS = 400 # Number of pushheads to cache Task Cluster task data for.
# Minimum number of downloaded artifacts to keep. Each artifact can be very large,
# so don't make this too large!
MIN_CACHED_ARTIFACTS = 6
# Maximum size of the downloaded artifacts to keep in cache, in bytes (1GiB).
MAX_CACHED_ARTIFACTS_SIZE = 1024 * 1024 * 1024
# Downloaded artifacts are cached, and a subset of their contents extracted for
# easy installation. This is most noticeable on Mac OS X: since mounting and
# copying from DMG files is very slow, we extract the desired binaries to a
@@ -738,6 +758,179 @@ class TaskCache(CacheManager):
return taskId, list_artifacts(taskId)
class ArtifactPersistLimit(PersistLimit):
'''Handle persistence for an artifacts cache
When instantiating a DownloadManager, it starts by filling the
PersistLimit instance it's given with register_dir_content.
In practice, this registers all the files already in the cache directory.
After a download finishes, the newly downloaded file is registered, and the
oldest files registered to the PersistLimit instance are removed depending
on the size and file limits it's configured for.
This is all good, but there are a few tweaks we want here:
- We have pickle files in the cache directory that we don't want purged.
- Files that were just downloaded in the same session shouldn't be purged.
(if for some reason we end up downloading more than the default max size,
we don't want the files to be purged)
To achieve this, this subclass of PersistLimit inhibits the register_file
method for pickle files and tracks what files were downloaded in the same
session to avoid removing them.
The register_file method may be used to register cache matches too, so that
later sessions know they were freshly used.
'''
def __init__(self, log=None):
super(ArtifactPersistLimit, self).__init__(
size_limit=MAX_CACHED_ARTIFACTS_SIZE,
file_limit=MIN_CACHED_ARTIFACTS)
self._log = log
self._registering_dir = False
self._downloaded_now = set()
def log(self, *args, **kwargs):
if self._log:
self._log(*args, **kwargs)
def register_file(self, path):
if path.endswith('.pickle') or \
os.path.basename(path) == '.metadata_never_index':
return
if not self._registering_dir:
# Touch the file so that subsequent calls to a mach artifact
# command know it was recently used. While remove_old_files
# is based on access time, in various cases, the access time is not
# updated when just reading the file, so we force an update.
try:
os.utime(path, None)
except OSError:
pass
self._downloaded_now.add(path)
super(ArtifactPersistLimit, self).register_file(path)
def register_dir_content(self, directory, pattern="*"):
self._registering_dir = True
super(ArtifactPersistLimit, self).register_dir_content(
directory, pattern)
self._registering_dir = False
def remove_old_files(self):
from dlmanager import fs
files = sorted(self.files, key=lambda f: f.stat.st_atime)
kept = []
while len(files) > self.file_limit and \
self._files_size >= self.size_limit:
f = files.pop(0)
if f.path in self._downloaded_now:
kept.append(f)
continue
try:
fs.remove(f.path)
except WindowsError:
# For some reason, on automation, we can't remove those files.
# So for now, ignore the error.
kept.append(f)
continue
self.log(logging.INFO, 'artifact',
{'filename': f.path},
'Purged artifact {filename}')
self._files_size -= f.stat.st_size
self.files = files + kept
def remove_all(self):
from dlmanager import fs
for f in self.files:
fs.remove(f.path)
self._files_size = 0
self.files = []
class ArtifactCache(object):
'''Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk.'''
def __init__(self, cache_dir, log=None, skip_cache=False):
mkdir(cache_dir, not_indexed=True)
self._cache_dir = cache_dir
self._log = log
self._skip_cache = skip_cache
self._persist_limit = ArtifactPersistLimit(log)
self._download_manager = DownloadManager(
self._cache_dir, persist_limit=self._persist_limit)
self._last_dl_update = -1
def log(self, *args, **kwargs):
if self._log:
self._log(*args, **kwargs)
def fetch(self, url, force=False):
fname = os.path.basename(url)
try:
# Use the file name from the url if it looks like a hash digest.
if len(fname) not in (32, 40, 56, 64, 96, 128):
raise TypeError()
binascii.unhexlify(fname)
except TypeError:
# We download to a temporary name like HASH[:16]-basename to
# differentiate among URLs with the same basenames. We used to then
# extract the build ID from the downloaded artifact and use it to make a
# human readable unique name, but extracting build IDs is time consuming
# (especially on Mac OS X, where we must mount a large DMG file).
hash = hashlib.sha256(url).hexdigest()[:16]
# Strip query string and fragments.
basename = os.path.basename(urlparse.urlparse(url).path)
fname = hash + '-' + basename
path = os.path.abspath(mozpath.join(self._cache_dir, fname))
if self._skip_cache and os.path.exists(path):
self.log(logging.INFO, 'artifact',
{'path': path},
'Skipping cache: removing cached downloaded artifact {path}')
os.remove(path)
self.log(logging.INFO, 'artifact',
{'path': path},
'Downloading to temporary location {path}')
try:
dl = self._download_manager.download(url, fname)
def download_progress(dl, bytes_so_far, total_size):
if not total_size:
return
percent = (float(bytes_so_far) / total_size) * 100
now = int(percent / 5)
if now == self._last_dl_update:
return
self._last_dl_update = now
self.log(logging.INFO, 'artifact',
{'bytes_so_far': bytes_so_far, 'total_size': total_size, 'percent': percent},
'Downloading... {percent:02.1f} %')
if dl:
dl.set_progress(download_progress)
dl.wait()
else:
# Avoid the file being removed if it was in the cache already.
path = os.path.join(self._cache_dir, fname)
self._persist_limit.register_file(path)
self.log(logging.INFO, 'artifact',
{'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
'Downloaded artifact to {path}')
return os.path.abspath(mozpath.join(self._cache_dir, fname))
finally:
# Cancel any background downloads in progress.
self._download_manager.cancel()
def clear_cache(self):
if self._skip_cache:
self.log(logging.INFO, 'artifact',
{},
'Skipping cache: ignoring clear_cache!')
return
self._persist_limit.remove_all()
class Artifacts(object):
'''Maintain state to efficiently fetch build artifacts from a Firefox tree.'''
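
The `fetch()` method above names cache entries by basename when the basename already looks like a hex digest, and otherwise prefixes a 16-character SHA-256 of the full URL so that different URLs sharing a basename do not collide. A standalone sketch of that naming scheme (Python 2, to match the module; the URL is hypothetical):

```
# Standalone sketch of the cache filename scheme used by ArtifactCache.fetch above.
import binascii
import hashlib
import os
import urlparse


def cache_filename(url):
    fname = os.path.basename(url)
    try:
        # Keep the basename as-is only if it looks like a hex digest of a
        # known length (MD5 through SHA-512).
        if len(fname) not in (32, 40, 56, 64, 96, 128):
            raise TypeError()
        binascii.unhexlify(fname)
        return fname
    except TypeError:
        # Otherwise prefix a short hash of the full URL; strip query strings
        # and fragments from the basename.
        prefix = hashlib.sha256(url).hexdigest()[:16]
        basename = os.path.basename(urlparse.urlparse(url).path)
        return prefix + '-' + basename


print(cache_filename('https://example.com/builds/target.dmg?token=123'))
# e.g. '<16 hex chars>-target.dmg'
```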

View file

@@ -1,371 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
r'''Make it easy to install and run [browsertime](https://github.com/sitespeedio/browsertime).
Browsertime is a harness for running performance tests, similar to
Mozilla's Raptor testing framework. Browsertime is written in Node.js
and uses Selenium WebDriver to drive multiple browsers including
Chrome, Chrome for Android, Firefox, and (pending the resolution of
[Bug 1525126](https://bugzilla.mozilla.org/show_bug.cgi?id=1525126)
and similar tickets) Firefox for Android and GeckoView-based vehicles.
Right now a custom version of browsertime and the underlying
geckodriver binary are needed to support GeckoView-based vehicles;
this module accommodates those in-progress custom versions.
To get started, run
```
./mach browsertime --setup [--clobber]
```
This will populate `tools/browsertime/node_modules`.
To invoke browsertime, run
```
./mach browsertime [ARGS]
```
All arguments are passed through to browsertime.
'''
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import logging
import os
import sys
from mach.decorators import CommandArgument, CommandProvider, Command
from mozbuild.base import MachCommandBase
import mozpack.path as mozpath
BROWSERTIME_ROOT = os.path.dirname(__file__)
def host_platform():
is_64bits = sys.maxsize > 2**32
if sys.platform.startswith('win'):
if is_64bits:
return 'win64'
elif sys.platform.startswith('linux'):
if is_64bits:
return 'linux64'
elif sys.platform.startswith('darwin'):
return 'darwin'
raise ValueError('sys.platform is not yet supported: {}'.format(sys.platform))
# Map from `host_platform()` to a `fetch`-like syntax.
host_fetches = {
'darwin': {
'ffmpeg': {
'type': 'static-url',
'url': 'https://ffmpeg.zeranoe.com/builds/macos64/static/ffmpeg-4.1.1-macos64-static.zip', # noqa
# An extension to `fetch` syntax.
'path': 'ffmpeg-4.1.1-macos64-static',
},
'ImageMagick': {
'type': 'static-url',
# It's sad that the macOS URLs don't include version numbers. If
# ImageMagick is released frequently, we'll need to be more
# accommodating of multiple versions here.
'url': 'https://imagemagick.org/download/binaries/ImageMagick-x86_64-apple-darwin17.7.0.tar.gz', # noqa
# An extension to `fetch` syntax.
'path': 'ImageMagick-7.0.8',
},
},
'linux64': {
'ffmpeg': {
'type': 'static-url',
'url': 'https://www.johnvansickle.com/ffmpeg/old-releases/ffmpeg-4.0.3-64bit-static.tar.xz', # noqa
# An extension to `fetch` syntax.
'path': 'ffmpeg-4.0.3-64bit-static',
},
# TODO: install a static ImageMagick. These binaries are not
# statically linked, so they will (mostly) fail at runtime due to
# missing dependencies. For now we require folks to install
# ImageMagick globally with their package manager of choice.
'ImageMagick': {
'type': 'static-url',
'url': 'https://imagemagick.org/download/binaries/ImageMagick-x86_64-pc-linux-gnu.tar.gz', # noqa
# An extension to `fetch` syntax.
'path': 'ImageMagick-6.9.2',
},
},
'win64': {
'ffmpeg': {
'type': 'static-url',
'url': 'https://ffmpeg.zeranoe.com/builds/win64/static/ffmpeg-4.1.1-win64-static.zip', # noqa
# An extension to `fetch` syntax.
'path': 'ffmpeg-4.1.1-win64-static',
},
'ImageMagick': {
'type': 'static-url',
# 'url': 'https://imagemagick.org/download/binaries/ImageMagick-7.0.8-39-portable-Q16-x64.zip', # noqa
# imagemagick.org doesn't keep old versions; the mirror below does.
'url': 'https://ftp.icm.edu.pl/packages/ImageMagick/binaries/ImageMagick-7.0.8-39-portable-Q16-x64.zip', # noqa
# An extension to `fetch` syntax.
'path': 'ImageMagick-7.0.8',
},
},
}
@CommandProvider
class MachBrowsertime(MachCommandBase):
@property
def artifact_cache_path(self):
r'''Downloaded artifacts will be kept here.'''
# The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
return mozpath.join(self._mach_context.state_dir, 'cache', 'browsertime')
@property
def state_path(self):
r'''Unpacked artifacts will be kept here.'''
# The convention is $MOZBUILD_STATE_PATH/$FEATURE.
return mozpath.join(self._mach_context.state_dir, 'browsertime')
def setup(self, should_clobber=False):
r'''Install browsertime and visualmetrics.py requirements.'''
from mozbuild.action.tooltool import unpack_file
from mozbuild.artifact_cache import ArtifactCache
sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint', 'eslint'))
import setup_helper
# Download the visualmetrics.py requirements.
artifact_cache = ArtifactCache(self.artifact_cache_path,
log=self.log, skip_cache=False)
fetches = host_fetches[host_platform()]
for tool, fetch in sorted(fetches.items()):
archive = artifact_cache.fetch(fetch['url'])
# TODO: assert type, verify sha256 (and size?).
if fetch.get('unpack', True):
cwd = os.getcwd()
try:
os.chdir(self.state_path)
self.log(
logging.INFO,
'browsertime',
{'path': archive},
'Unpacking temporary location {path}')
unpack_file(archive)
finally:
os.chdir(cwd)
# Install the browsertime Node.js requirements.
if not setup_helper.check_node_executables_valid():
return 1
self.log(
logging.INFO,
'browsertime',
{'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
'Installing browsertime node module from {package_json}')
status = setup_helper.package_setup(
BROWSERTIME_ROOT,
'browsertime',
should_clobber=should_clobber)
if status:
return status
return self.check()
@property
def node_path(self):
from mozbuild.nodeutil import find_node_executable
node, _ = find_node_executable()
return os.path.abspath(node)
def node(self, args):
r'''Invoke node (interactively) with the given arguments.'''
return self.run_process(
[self.node_path] + args,
append_env=self.append_env(),
pass_thru=True, # Allow user to run Node interactively.
ensure_exit_code=False, # Don't throw on non-zero exit code.
cwd=mozpath.join(self.topsrcdir))
@property
def package_path(self):
r'''The path to the `browsertime` directory.
Override the default with the `BROWSERTIME` environment variable.'''
override = os.environ.get('BROWSERTIME', None)
if override:
return override
return mozpath.join(BROWSERTIME_ROOT, 'node_modules', 'browsertime')
@property
def browsertime_path(self):
'''The path to the `browsertime.js` script.'''
# On Windows, invoking `node_modules/.bin/browsertime{.cmd}`
# doesn't work when invoked as an argument to our specific
# binary. Since we want our version of node, invoke the
# actual script directly.
return mozpath.join(
self.package_path,
'bin',
'browsertime.js')
@property
def visualmetrics_path(self):
'''The path to the `visualmetrics.py` script.'''
return mozpath.join(
self.package_path,
'vendor',
'visualmetrics.py')
def append_env(self, append_path=True):
fetches = host_fetches[host_platform()]
# Ensure that bare `ffmpeg` and ImageMagick commands
# {`convert`,`compare`,`mogrify`} are found. The `visualmetrics.py`
# script doesn't take these as configuration, so we do this (for now).
# We should update the script itself to accept this configuration.
path = os.environ.get('PATH', '').split(os.pathsep) if append_path else []
path_to_ffmpeg = mozpath.join(
self.state_path,
fetches['ffmpeg']['path'])
path_to_imagemagick = mozpath.join(
self.state_path,
fetches['ImageMagick']['path'])
path = [
path_to_ffmpeg if host_platform().startswith('linux') else mozpath.join(path_to_ffmpeg, 'bin'), # noqa
self.state_path if host_platform().startswith('win') else mozpath.join(path_to_imagemagick, 'bin'), # noqa
] + path
# Ensure that bare `node` and `npm` in scripts, including post-install
# scripts, find the binary we're invoking with. Without this, it's
# easy for compiled extensions to get mismatched versions of the Node.js
# extension API.
node_dir = os.path.dirname(self.node_path)
path = [node_dir] + path
# Ensure that `/usr/bin/env python` in `visualmetrics.py` finds our
# virtualenv Python.
path = [os.path.dirname(self.virtualenv_manager.python_path)] + path
return {
# See https://imagemagick.org/script/download.php. Harmless on other platforms.
'LD_LIBRARY_PATH': mozpath.join(path_to_imagemagick, 'lib'),
'DYLD_LIBRARY_PATH': mozpath.join(path_to_imagemagick, 'lib'),
'MAGICK_HOME': path_to_imagemagick,
'PATH': os.pathsep.join(path),
}
def _activate_virtualenv(self, *args, **kwargs):
MachCommandBase._activate_virtualenv(self, *args, **kwargs)
try:
self.virtualenv_manager.install_pip_package('Pillow==6.0.0')
except Exception:
print('Could not install Pillow from pip.')
return 1
try:
self.virtualenv_manager.install_pip_package('pyssim==0.4')
except Exception:
print('Could not install pyssim from pip.')
return 1
def check(self):
r'''Run `visualmetrics.py --check`.'''
self._activate_virtualenv()
args = ['--check']
status = self.run_process(
[self.virtualenv_manager.python_path, self.visualmetrics_path] + args,
# For --check, don't allow user's path to interfere with
# path testing except on Linux, where ImageMagick needs to
# be installed manually.
append_env=self.append_env(append_path=host_platform().startswith('linux')),
pass_thru=True,
ensure_exit_code=False, # Don't throw on non-zero exit code.
cwd=mozpath.join(self.topsrcdir))
sys.stdout.flush()
sys.stderr.flush()
if status:
return status
# Avoid logging the command (and, on Windows, the environment).
self.log_manager.terminal_handler.setLevel(logging.CRITICAL)
print('browsertime version:', end=' ')
sys.stdout.flush()
sys.stderr.flush()
return self.node([self.browsertime_path] + ['--version'])
@Command('browsertime', category='testing',
description='Run [browsertime](https://github.com/sitespeedio/browsertime) '
'performance tests.')
@CommandArgument('--verbose', action='store_true',
help='Verbose output for what commands the build is running.')
@CommandArgument('--setup', default=False, action='store_true')
@CommandArgument('--clobber', default=False, action='store_true')
@CommandArgument('--skip-cache', action='store_true',
help='Skip all local caches to force re-fetching remote artifacts.',
default=False)
@CommandArgument('--check', default=False, action='store_true')
@CommandArgument('args', nargs=argparse.REMAINDER)
def browsertime(self, args, verbose=False,
setup=False, clobber=False, skip_cache=False,
check=False):
self._set_log_level(True)
if setup:
return self.setup(should_clobber=clobber)
if check:
return self.check()
self._activate_virtualenv()
return self.node([self.browsertime_path] + args)
@Command('visualmetrics', category='testing',
description='Run visualmetrics.py')
@CommandArgument('video')
@CommandArgument('args', nargs=argparse.REMAINDER)
def visualmetrics(self, video, args):
self._set_log_level(True)
self._activate_virtualenv()
# Turn '/path/to/video/1.mp4' into '/path/to/video' and '1'.
d, base = os.path.split(video)
index, _ = os.path.splitext(base)
# TODO: write a '--logfile' as well.
args = ['--dir', # Images are written to `/path/to/video/images` (following browsertime).
mozpath.join(d, 'images', index),
'--video',
video,
'--orange',
'--perceptual',
'--force',
'--renderignore',
'5',
'--json',
'--viewport',
'-q',
'75',
'-vvvv']
return self.run_process(
[self.visualmetrics_path] + args,
append_env=self.append_env(),
pass_thru=True,
ensure_exit_code=False, # Don't throw on non-zero exit code.
cwd=mozpath.join(self.topsrcdir))
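
As a quick illustration of the path handling in `mach visualmetrics` above, using the hypothetical path from the comment's own example:

```
# '/path/to/video/1.mp4' -> directory, index, and the browsertime-style images dir.
import os

video = '/path/to/video/1.mp4'
d, base = os.path.split(video)      # '/path/to/video', '1.mp4'
index, _ = os.path.splitext(base)   # '1'
print(os.path.join(d, 'images', index))  # /path/to/video/images/1
```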

5405 tools/browsertime/package-lock.json (generated)

Diff not shown because of its large size.

View file

@@ -1,12 +0,0 @@
{
"name": "mozilla-central-tools-browsertime",
"description": "This package file is for node modules used in mozilla-central/tools/browsertime",
"repository": {},
"license": "MPL-2.0",
"dependencies": {},
"devDependencies": {
"browsertime": "https://github.com/ncalexan/browsertime/tarball/4f8738e2225b3298556308c0d8c916d3b22e9741"
},
"notes(private)": "We don't want to publish to npm, so this is marked as private",
"private": true
}

View file

@@ -71,18 +71,7 @@ def eslint_setup(should_clobber=False):
guide you through an interactive wizard helping you configure
eslint for optimal use on Mozilla projects.
"""
package_setup(get_project_root(), 'eslint', should_clobber=should_clobber)
def package_setup(package_root, package_name, should_clobber=False):
"""Ensure `package_name` at `package_root` is installed.
This populates `package_root/node_modules`.
"""
orig_project_root = get_project_root()
orig_cwd = os.getcwd()
try:
set_project_root(package_root)
sys.path.append(os.path.dirname(__file__))
# npm sometimes fails to respect cwd when it is run using check_call so
@@ -92,7 +81,7 @@ def package_setup(package_root, package_name, should_clobber=False):
if should_clobber:
node_modules_path = os.path.join(project_root, "node_modules")
print("Clobbering %s..." % node_modules_path)
print("Clobbering node_modules...")
if sys.platform.startswith('win') and have_winrm():
process = subprocess.Popen(['winrm', '-rf', node_modules_path])
process.wait()
@@ -128,17 +117,8 @@ def package_setup(package_root, package_name, should_clobber=False):
cmd.insert(0, node_path)
cmd.extend(extra_parameters)
# Ensure that bare `node` and `npm` in scripts, including post-install scripts, find the
# binary we're invoking with. Without this, it's easy for compiled extensions to get
# mismatched versions of the Node.js extension API.
path = os.environ.get('PATH', '').split(os.pathsep)
node_dir = os.path.dirname(node_path)
if node_dir not in path:
path = [node_dir] + path
print("Installing %s for mach using \"%s\"..." % (package_name, " ".join(cmd)))
result = call_process(package_name, cmd, append_env={'PATH': os.pathsep.join(path)})
print("Installing eslint for mach using \"%s\"..." % (" ".join(cmd)))
result = call_process("eslint", cmd)
if npm_is_older_version:
shutil.move(package_lock_json_tmp_path, package_lock_json_path)
@@ -146,23 +126,18 @@ def package_setup(package_root, package_name, should_clobber=False):
if not result:
return 1
bin_path = os.path.join(get_project_root(), "node_modules", ".bin", package_name)
eslint_path = os.path.join(get_project_root(), "node_modules", ".bin", "eslint")
print("\n%s installed successfully!" % package_name)
print("\nNOTE: Your local %s binary is at %s\n" % (package_name, bin_path))
print("\nESLint and approved plugins installed successfully!")
print("\nNOTE: Your local eslint binary is at %s\n" % eslint_path)
finally:
set_project_root(orig_project_root)
os.chdir(orig_cwd)
def call_process(name, cmd, cwd=None, append_env={}):
env = dict(os.environ)
env.update(append_env)
def call_process(name, cmd, cwd=None):
try:
with open(os.devnull, "w") as fnull:
subprocess.check_call(cmd, cwd=cwd, stdout=fnull, env=env)
subprocess.check_call(cmd, cwd=cwd, stdout=fnull)
except subprocess.CalledProcessError:
if cwd:
print("\nError installing %s in the %s folder, aborting." % (name, cwd))