# Source: gecko-dev/build/mach_bootstrap.py
# (GitHub blame-view chrome removed: blame-ignore banner, line/size counters,
# and localized UI labels.)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, print_function, unicode_literals
import math
import os
import platform
import shutil
import site
import sys
# Python 2/3 compatibility shim: make a MetaPathFinder base class (and, on
# Python 2, the `builtins` module) available under the same names on both
# interpreter major versions.
if sys.version_info[0] < 3:
    import __builtin__ as builtins

    # importlib.abc.MetaPathFinder does not exist on Python 2; provide a
    # minimal stand-in so subclasses can be defined unconditionally.
    class MetaPathFinder(object):
        pass

else:
    from importlib.abc import MetaPathFinder

from types import ModuleType
# Message shown the first time mach needs to create its shared state
# directory; ``{userdir}`` is substituted with the chosen path before
# printing (see populate_context in bootstrap()).
STATE_DIR_FIRST_RUN = """
mach and the build system store shared state in a common directory on the
filesystem. The following directory will be created:
{userdir}
If you would like to use a different directory, hit CTRL+c and set the
MOZBUILD_STATE_PATH environment variable to the directory you would like to
use and re-run mach. For this change to take effect forever, you'll likely
want to export this environment variable from your shell's init scripts.
Press ENTER/RETURN to continue or CTRL+c to abort.
""".lstrip()
# Individual files providing mach commands, relative to the top source
# directory.  Each is loaded by bootstrap() via
# driver.load_commands_from_file(); missing files are tolerated for sparse
# checkouts and spidermonkey tarballs.
# (Fix: stray blame-view commit messages pasted inside the list literal have
# been removed; they made the file syntactically invalid.)
MACH_MODULES = [
    "build/valgrind/mach_commands.py",
    "devtools/shared/css/generated/mach_commands.py",
    "dom/bindings/mach_commands.py",
    "js/src/devtools/rootAnalysis/mach_commands.py",
    "layout/tools/reftest/mach_commands.py",
    "mobile/android/mach_commands.py",
    "python/mach/mach/commands/commandinfo.py",
    "python/mach/mach/commands/settings.py",
    "python/mach_commands.py",
    "python/mozboot/mozboot/mach_commands.py",
    "python/mozbuild/mozbuild/artifact_commands.py",
    "python/mozbuild/mozbuild/backend/mach_commands.py",
    "python/mozbuild/mozbuild/build_commands.py",
    "python/mozbuild/mozbuild/code_analysis/mach_commands.py",
    "python/mozbuild/mozbuild/compilation/codecomplete.py",
    "python/mozbuild/mozbuild/frontend/mach_commands.py",
    "python/mozbuild/mozbuild/vendor/mach_commands.py",
    "python/mozbuild/mozbuild/mach_commands.py",
    "python/mozperftest/mozperftest/mach_commands.py",
    "python/mozrelease/mozrelease/mach_commands.py",
    "remote/mach_commands.py",
    "security/manager/tools/mach_commands.py",
    "taskcluster/mach_commands.py",
    "testing/awsy/mach_commands.py",
    "testing/condprofile/mach_commands.py",
    "testing/firefox-ui/mach_commands.py",
    "testing/geckodriver/mach_commands.py",
    "testing/mach_commands.py",
    "testing/marionette/mach_commands.py",
    "testing/mochitest/mach_commands.py",
    "testing/mozharness/mach_commands.py",
    "testing/raptor/mach_commands.py",
    "testing/talos/mach_commands.py",
    "testing/tps/mach_commands.py",
    "testing/web-platform/mach_commands.py",
    "testing/xpcshell/mach_commands.py",
    "toolkit/components/telemetry/tests/marionette/mach_commands.py",
    "tools/browsertime/mach_commands.py",
    "tools/compare-locales/mach_commands.py",
    "tools/lint/mach_commands.py",
    "tools/mach_commands.py",
    "tools/moztreedocs/mach_commands.py",
    "tools/phabricator/mach_commands.py",
    "tools/power/mach_commands.py",
    "tools/tryselect/mach_commands.py",
    "tools/vcs/mach_commands.py",
]
# Command categories registered with the mach driver in bootstrap().
# ``priority`` controls display order in ``mach help`` (higher first);
# the "disabled" category (priority 0) is hidden unless -v is given.
# (Fix: the "release" description read "Commands for used in release
# automation." — grammatically broken user-facing help text.)
CATEGORIES = {
    "build": {
        "short": "Build Commands",
        "long": "Interact with the build system",
        "priority": 80,
    },
    "post-build": {
        "short": "Post-build Commands",
        "long": "Common actions performed after completing a build.",
        "priority": 70,
    },
    "testing": {
        "short": "Testing",
        "long": "Run tests.",
        "priority": 60,
    },
    "ci": {
        "short": "CI",
        "long": "Taskcluster commands",
        "priority": 59,
    },
    "devenv": {
        "short": "Development Environment",
        "long": "Set up and configure your development environment.",
        "priority": 50,
    },
    "build-dev": {
        "short": "Low-level Build System Interaction",
        "long": "Interact with specific parts of the build system.",
        "priority": 20,
    },
    "misc": {
        "short": "Potpourri",
        "long": "Potent potables and assorted snacks.",
        "priority": 10,
    },
    "release": {
        "short": "Release automation",
        "long": "Commands used in release automation.",
        "priority": 5,
    },
    "disabled": {
        "short": "Disabled",
        "long": "The disabled commands are hidden by default. Use -v to display them. "
        "These commands are unavailable for your current context, "
        'run "mach <command>" to see why.',
        "priority": 0,
    },
}
# Bug 1356101: the mach search path below is derived at runtime from
# build/virtualenv_packages.txt rather than a hand-maintained SEARCH_PATHS
# list, so the two can no longer drift out of sync.
def search_path(mozilla_dir, packages_txt):
    """Yield sys.path entries described by a virtualenv packages file.

    ``packages_txt`` is a path, relative to ``mozilla_dir``, to a file of
    ``action:package`` lines.  Lines whose first non-blank character is
    ``#`` are comments.  ``pth`` entries yield the named directory (joined
    to ``mozilla_dir``); ``packages.txt`` entries recurse into the
    referenced file.  Other actions are ignored.

    (Fix: stray blame-view commit messages pasted inside the function body
    have been removed; they made the file syntactically invalid.)
    """
    with open(os.path.join(mozilla_dir, packages_txt)) as f:
        packages = [
            line.strip().split(":", maxsplit=1)
            for line in f
            if not line.lstrip().startswith("#")
        ]

    def handle_package(action, package):
        # Expand one manifest entry into zero or more path entries.
        if action == "packages.txt":
            for p in search_path(mozilla_dir, package):
                yield os.path.join(mozilla_dir, p)
        if action == "pth":
            yield os.path.join(mozilla_dir, package)

    for current_action, current_package in packages:
        for path in handle_package(current_action, current_package):
            yield path
# (Bug 1356101, see note above search_path: search path derived from
# build/virtualenv_packages.txt.)
def mach_sys_path(mozilla_dir):
    """Return the list of sys.path entries mach needs, for this checkout.

    Entries come from build/mach_virtualenv_packages.txt and are rooted at
    ``mozilla_dir`` (already-absolute entries are left untouched by
    os.path.join).
    """
    entries = []
    for relative in search_path(mozilla_dir, "build/mach_virtualenv_packages.txt"):
        entries.append(os.path.join(mozilla_dir, relative))
    return entries
# Platform-specific guidance printed by bootstrap() when the running
# Python interpreter is too old (see the version check there).
INSTALL_PYTHON_GUIDANCE_LINUX = """
See https://firefox-source-docs.mozilla.org/setup/linux_build.html#installingpython
for guidance on how to install Python on your system.
""".strip()

INSTALL_PYTHON_GUIDANCE_OSX = """
See https://firefox-source-docs.mozilla.org/setup/macos_build.html
for guidance on how to prepare your system to build Firefox. Perhaps
you need to update Xcode, or install Python using brew?
""".strip()

INSTALL_PYTHON_GUIDANCE_MOZILLABUILD = """
Python is provided by MozillaBuild; ensure your MozillaBuild
installation is up to date.
See https://firefox-source-docs.mozilla.org/setup/windows_build.html#install-mozillabuild
for details.
""".strip()

# Fallback guidance for platforms not covered above.
INSTALL_PYTHON_GUIDANCE_OTHER = """
We do not have specific instructions for your platform on how to
install Python. You may find Pyenv (https://github.com/pyenv/pyenv)
helpful, if your system package manager does not provide a way to
install a recent enough Python 3.
""".strip()
def bootstrap(topsrcdir):
    """Prepare the process environment and return a configured Mach driver.

    ``topsrcdir`` is the path to the source checkout.  This adjusts
    sys.path, file-descriptor limits and environment variables, then builds
    a ``mach.main.Mach`` instance with categories and command modules
    registered.  Exits the process (sys.exit) when the interpreter is too
    old, or — from the pre-dispatch handler — when a testing command is run
    against a --disable-tests build.
    """
    # Ensure we are running Python 3.6+. We run this check as soon as
    # possible to avoid a cryptic import/usage error.
    if sys.version_info < (3, 6):
        print("Python 3.6+ is required to run mach.")
        print("You are running Python", platform.python_version())
        if sys.platform.startswith("linux"):
            print(INSTALL_PYTHON_GUIDANCE_LINUX)
        elif sys.platform.startswith("darwin"):
            print(INSTALL_PYTHON_GUIDANCE_OSX)
        elif "MOZILLABUILD" in os.environ:
            print(INSTALL_PYTHON_GUIDANCE_MOZILLABUILD)
        else:
            print(INSTALL_PYTHON_GUIDANCE_OTHER)
        sys.exit(1)

    # This directory was deleted in bug 1666345, but there may be some ignored
    # files here. We can safely just delete it for the user so they don't have
    # to clean the repo themselves.
    deleted_dir = os.path.join(topsrcdir, "third_party", "python", "psutil")
    if os.path.exists(deleted_dir):
        shutil.rmtree(deleted_dir, ignore_errors=True)

    if sys.prefix == sys.base_prefix:
        # We are not in a virtualenv. Remove global site packages
        # from sys.path.
        site_paths = set(site.getsitepackages() + [site.getusersitepackages()])
        sys.path = [path for path in sys.path if path not in site_paths]

    # Make the in-tree packages importable before importing mach itself.
    sys.path[0:0] = mach_sys_path(topsrcdir)
    import mach.base
    import mach.main
    from mach.util import setenv
    from mozboot.util import get_state_dir

    # Set a reasonable limit to the number of open files.
    #
    # Some linux systems set `ulimit -n` to a very high number, which works
    # well for systems that run servers, but this setting causes performance
    # problems when programs close file descriptors before forking, like
    # Python's `subprocess.Popen(..., close_fds=True)` (close_fds=True is the
    # default in Python 3), or Rust's stdlib. In some cases, Firefox does the
    # same thing when spawning processes. We would prefer to lower this limit
    # to avoid such performance problems; processes spawned by `mach` will
    # inherit the limit set here.
    #
    # The Firefox build defaults the soft limit to 1024, except for builds that
    # do LTO, where the soft limit is 8192. We're going to default to the
    # latter, since people do occasionally do LTO builds on their local
    # machines, and requiring them to discover another magical setting after
    # setting up an LTO build in the first place doesn't seem good.
    #
    # This code mimics the code in taskcluster/scripts/run-task.
    try:
        import resource

        # Keep the hard limit the same, though, allowing processes to change
        # their soft limit if they need to (Firefox does, for instance).
        (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
        # Permit people to override our default limit if necessary via
        # MOZ_LIMIT_NOFILE, which is the same variable `run-task` uses.
        limit = os.environ.get("MOZ_LIMIT_NOFILE")
        if limit:
            limit = int(limit)
        else:
            # If no explicit limit is given, use our default if it's less than
            # the current soft limit. For instance, the default on macOS is
            # 256, so we'd pick that rather than our default.
            limit = min(soft, 8192)
        # Now apply the limit, if it's different from the original one.
        if limit != soft:
            resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))
    except ImportError:
        # The resource module is UNIX only.
        pass

    def resolve_repository():
        # Return a repository object for this checkout, or None when the
        # path is not a recognized repo or the VCS tool is missing.
        import mozversioncontrol

        try:
            # This API doesn't respect the vcs binary choices from configure.
            # If we ever need to use the VCS binary here, consider something
            # more robust.
            return mozversioncontrol.get_repository_object(path=topsrcdir)
        except (mozversioncontrol.InvalidRepoPath, mozversioncontrol.MissingVCSTool):
            return None

    def pre_dispatch_handler(context, handler, args):
        # If --disable-tests flag was enabled in the mozconfig used to compile
        # the build, tests will be disabled. Instead of trying to run
        # nonexistent tests then reporting a failure, this will prevent mach
        # from progressing beyond this point.
        if handler.category == "testing" and not handler.ok_if_tests_disabled:
            from mozbuild.base import BuildEnvironmentNotFoundException

            try:
                from mozbuild.base import MozbuildObject

                # all environments should have an instance of build object.
                build = MozbuildObject.from_environment()
                if build is not None and hasattr(build, "mozconfig"):
                    ac_options = build.mozconfig["configure_args"]
                    if ac_options and "--disable-tests" in ac_options:
                        print(
                            "Tests have been disabled by mozconfig with the flag "
                            + '"ac_add_options --disable-tests".\n'
                            + "Remove the flag, and re-compile to enable tests."
                        )
                        sys.exit(1)
            except BuildEnvironmentNotFoundException:
                # likely automation environment, so do nothing.
                pass

    def post_dispatch_handler(
        context, handler, instance, success, start_time, end_time, depth, args
    ):
        """Perform global operations after command dispatch.

        For now, we will use this to handle build system telemetry.
        """
        # Don't finalize telemetry data if this mach command was invoked as part of
        # another mach command.
        if depth != 1:
            return

        _finalize_telemetry_glean(
            context.telemetry, handler.name == "bootstrap", success
        )

    def populate_context(key=None):
        # Lazily resolve context values requested by the mach driver.
        if key is None:
            return
        if key == "state_dir":
            state_dir = get_state_dir()
            if state_dir == os.environ.get("MOZBUILD_STATE_PATH"):
                # User chose the location explicitly: create it silently.
                if not os.path.exists(state_dir):
                    print(
                        "Creating global state directory from environment variable: %s"
                        % state_dir
                    )
                    os.makedirs(state_dir, mode=0o770)
            else:
                if not os.path.exists(state_dir):
                    # First run: prompt interactively, except in automation.
                    if not os.environ.get("MOZ_AUTOMATION"):
                        print(STATE_DIR_FIRST_RUN.format(userdir=state_dir))
                        try:
                            sys.stdin.readline()
                        except KeyboardInterrupt:
                            sys.exit(1)
                    print("\nCreating default state directory: %s" % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            return state_dir
        if key == "local_state_dir":
            return get_state_dir(srcdir=True)
        if key == "topdir":
            return topsrcdir
        if key == "pre_dispatch_handler":
            return pre_dispatch_handler
        if key == "post_dispatch_handler":
            return post_dispatch_handler
        if key == "repository":
            return resolve_repository()
        raise AttributeError(key)

    # Note which process is top-level so that recursive mach invocations can avoid writing
    # telemetry data.
    if "MACH_MAIN_PID" not in os.environ:
        setenv("MACH_MAIN_PID", str(os.getpid()))

    driver = mach.main.Mach(os.getcwd())
    driver.populate_context_handler = populate_context

    if not driver.settings_paths:
        # default global machrc location
        driver.settings_paths.append(get_state_dir())
    # always load local repository configuration
    driver.settings_paths.append(topsrcdir)

    for category, meta in CATEGORIES.items():
        driver.define_category(category, meta["short"], meta["long"], meta["priority"])

    # Sparse checkouts may not have all mach_commands.py files. Ignore
    # errors from missing files. Same for spidermonkey tarballs.
    repo = resolve_repository()
    missing_ok = (
        repo is not None and repo.sparse_checkout_present()
    ) or os.path.exists(os.path.join(topsrcdir, "INSTALL"))

    for path in MACH_MODULES:
        try:
            driver.load_commands_from_file(os.path.join(topsrcdir, path))
        except mach.base.MissingFileError:
            if not missing_ok:
                raise

    return driver
def _finalize_telemetry_glean(telemetry, is_bootstrap, success):
    """Submit telemetry collected by Glean.

    Finalizes some metrics (command success state and duration, system
    information) and requests Glean to send the collected data.
    """
    from mach.telemetry import MACH_METRICS_PATH
    from mozbuild.telemetry import (
        get_cpu_brand,
        get_distro_and_version,
        get_psutil_stats,
        get_shell_info,
    )

    metrics = telemetry.metrics(MACH_METRICS_PATH)

    # Command outcome: stop the duration timer and record success/failure.
    metrics.mach.duration.stop()
    metrics.mach.success.set(success)

    # System information.
    system = metrics.mach.system

    brand = get_cpu_brand()
    if brand:
        system.cpu_brand.set(brand)

    distro_name, distro_version = get_distro_and_version()
    system.distro.set(distro_name)
    system.distro_version.set(distro_version)

    in_vscode_terminal, over_ssh = get_shell_info()
    system.vscode_terminal.set(in_vscode_terminal)
    system.ssh_connection.set(over_ssh)

    has_psutil, logical, physical, total_memory = get_psutil_stats()
    if has_psutil:
        # psutil may not be available (we allow `mach create-mach-environment`
        # to fail to install it).
        system.logical_cores.add(logical)
        system.physical_cores.add(physical)
        if total_memory is not None:
            # Record memory rounded up to whole GiB.
            gib = int(math.ceil(float(total_memory) / (1024 * 1024 * 1024)))
            system.memory.accumulate(gib)

    telemetry.submit(is_bootstrap)
# Hook import such that .pyc/.pyo files without a corresponding .py file in
# the source directory are essentially ignored. See further below for details
# and caveats.
# Objdirs outside the source directory are ignored because in most cases, if
# a .pyc/.pyo file exists there, a .py file will be next to it anyways.
class ImportHook(object):
    """Python 2 ``__import__`` wrapper that ignores orphaned .pyc/.pyo files.

    When a compiled file under the source directory has no corresponding
    .py source next to it, the compiled file is removed and the import is
    retried, so that a module of the same name elsewhere on the path can
    be picked up instead.
    """

    def __init__(self, original_import):
        self._original_import = original_import
        # Assume the source directory is the parent directory of the one
        # containing this file.
        parent = os.path.dirname(os.path.dirname(__file__))
        self._source_dir = os.path.normcase(os.path.abspath(parent)) + os.sep
        self._modules = set()

    def __call__(self, name, globals=None, locals=None, fromlist=None, level=-1):
        # Python 3 rejects the old Python 2 default of level=-1.
        if sys.version_info[0] >= 3 and level < 0:
            level = 0
        # name might be a relative import. Instead of figuring out what that
        # resolves to, which is complex, just rely on the real import.
        # Since we don't know the full module name, we can't check sys.modules,
        # so we need to keep track of which modules we've already seen to avoid
        # to stat() them again when they are imported multiple times.
        module = self._original_import(name, globals, locals, fromlist, level)
        # Some tests replace modules in sys.modules with non-module instances.
        if not isinstance(module, ModuleType):
            return module
        if module.__name__ in self._modules:
            return module
        self._modules.add(module.__name__)
        # Builtin modules don't have a __file__ attribute.
        if not getattr(module, "__file__", None):
            return module
        # Note: module.__file__ is not always absolute.
        path = os.path.normcase(os.path.abspath(module.__file__))
        # Note: we could avoid normcase and abspath above for non pyc/pyo
        # files, but those are actually rare, so it doesn't really matter.
        # Only compiled files inside our source directory are of interest;
        # objdirs elsewhere normally keep a .py next to each .pyc/.pyo.
        if not path.endswith((".pyc", ".pyo")) or not path.startswith(
            self._source_dir
        ):
            return module
        # If there is no .py corresponding to the .pyc/.pyo module we're
        # loading, remove the .pyc/.pyo file, and reload the module.
        # Since we already loaded the .pyc/.pyo module, if it had side
        # effects, they will have happened already, and loading the module
        # with the same name, from another directory may have the same side
        # effects (or different ones). We assume it's not a problem for the
        # python modules under our source directory (either because it
        # doesn't happen or because it doesn't matter).
        if os.path.exists(module.__file__[:-1]):
            return module
        if os.path.exists(module.__file__):
            os.remove(module.__file__)
        del sys.modules[module.__name__]
        return self(name, globals, locals, fromlist, level)
# Hook import such that .pyc/.pyo files without a corresponding .py file in
# the source directory are essentially ignored. See further below for details
# and caveats.
# Objdirs outside the source directory are ignored because in most cases, if
# a .pyc/.pyo file exists there, a .py file will be next to it anyways.
class FinderHook(MetaPathFinder):
    """Meta-path finder wrapper that ignores orphaned .pyc/.pyo files.

    Delegates to the wrapped finder class, but if the resolved spec points
    at a compiled file under the source directory with no corresponding
    .py source, the compiled file is removed and resolution is retried.
    """

    def __init__(self, klass):
        # Assume the source directory is the parent directory of the one
        # containing this file.
        parent = os.path.dirname(os.path.dirname(__file__))
        self._source_dir = os.path.normcase(os.path.abspath(parent)) + os.sep
        self.finder_class = klass

    def find_spec(self, full_name, paths=None, target=None):
        spec = self.finder_class.find_spec(full_name, paths, target)
        # Some modules don't have an origin.
        if spec is None or spec.origin is None:
            return spec
        # Normalize the origin path.
        # Note: we could avoid normcase and abspath for non pyc/pyo files,
        # but those are actually rare, so it doesn't really matter.
        origin = os.path.normcase(os.path.abspath(spec.origin))
        # Only compiled files inside our source directory are of interest;
        # objdirs elsewhere normally keep a .py next to each .pyc/.pyo.
        if not origin.endswith((".pyc", ".pyo")) or not origin.startswith(
            self._source_dir
        ):
            return spec
        # If there is no .py corresponding to the .pyc/.pyo module we're
        # resolving, remove the .pyc/.pyo file, and try again.
        source_file = spec.origin[:-1]
        if not os.path.exists(source_file):
            if os.path.exists(spec.origin):
                os.remove(spec.origin)
            spec = self.finder_class.find_spec(full_name, paths, target)
        return spec
# Additional hook for python >= 3.8's importlib.metadata.
class MetadataHook(FinderHook):
    """FinderHook variant for python >= 3.8's importlib.metadata.

    Distribution metadata lookups are passed straight through to the
    wrapped finder; only module specs need the .pyc/.pyo filtering that
    FinderHook provides.
    """

    def find_distributions(self, *args, **kwargs):
        delegate = self.finder_class.find_distributions
        return delegate(*args, **kwargs)
def hook(finder):
    """Wrap a meta-path finder with the appropriate hook class.

    Finders exposing both ``find_spec`` and ``find_distributions`` get a
    MetadataHook, those with only ``find_spec`` get a FinderHook, and
    anything else is returned unwrapped.
    """
    if not hasattr(finder, "find_spec"):
        return finder
    if hasattr(finder, "find_distributions"):
        return MetadataHook(finder)
    return FinderHook(finder)
# Install our hook. This can be deleted when the Python 3 migration is complete.
if sys.version_info[0] < 3:
    # Python 2: no usable meta-path finder machinery to wrap, so replace the
    # builtin __import__ with our filtering wrapper instead.
    builtins.__import__ = ImportHook(builtins.__import__)
else:
    # Python 3: wrap every registered meta-path finder; hook() returns
    # finders without a find_spec method unchanged.
    sys.meta_path = [hook(c) for c in sys.meta_path]