Revert of Add a script to merge the PGC files (patchset #6 id:120001 of https://codereview.chromium.org/2575543002/ )
Reason for revert:
Breaks the PGO bots.
Original issue's description:
> Add a script to merge the PGC files.
>
> Workaround for a flakiness in pgomgr.exe where it can run out of address
> space while trying to merge all the PGC files at the same time.
>
> This will also allow uploading the PGD file before running the optimize step.
>
> BUG=674956
>
> Review-Url: https://codereview.chromium.org/2575543002
> Cr-Commit-Position: refs/heads/master@{#448332}
> Committed: 6c18b1e861
TBR=scottmg@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=674956
Review-Url: https://codereview.chromium.org/2680683002
Cr-Original-Commit-Position: refs/heads/master@{#448561}
Cr-Mirrored-From: https://chromium.googlesource.com/chromium/src
Cr-Mirrored-Commit: 6c2f8b4bd17f67d03b3e4d85401a0eaa68597afa
Parent: 7968040976
Commit: a5b79d9869
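Condensed to its core, the change being reverted batches the pgomgr.exe /merge invocations so that only a handful of PGC files are merged per call, instead of merging everything at once. A minimal sketch of that batching idea is shown here; the helper name, default batch size, and arguments are illustrative only, and the full deleted script follows below.

# Minimal sketch of the batching workaround, not the CL itself; the helper
# name and default batch size are illustrative.
import subprocess

def merge_in_batches(pgomgr_path, pgc_files, pgd_path, batch_size=20):
  """Merge |pgc_files| into |pgd_path| a few files at a time."""
  for i in range(0, len(pgc_files), batch_size):
    batch = pgc_files[i:i + batch_size]
    # One pgomgr.exe /merge call per batch keeps its address-space usage low.
    subprocess.check_call([pgomgr_path, '/merge'] + batch + [pgd_path])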
@@ -1,116 +0,0 @@
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Merge the PGC files generated during the profiling step to the PGD database.

This is required to work around a flakiness in pgomgr.exe where it can run out
of address space while trying to merge all the PGC files at the same time.
"""

import glob
import json
import optparse
import os
import subprocess
import sys


def find_pgomgr(chrome_checkout_dir, target_cpu):
  """Find pgomgr.exe."""
  if target_cpu not in ('x86', 'x64'):
    raise Exception('target_cpu should be x86 or x64.')

  win_toolchain_json_file = os.path.join(chrome_checkout_dir, 'build',
                                         'win_toolchain.json')
  if not os.path.exists(win_toolchain_json_file):
    raise Exception('The toolchain JSON file is missing.')
  with open(win_toolchain_json_file) as temp_f:
    toolchain_data = json.load(temp_f)
  if not os.path.isdir(toolchain_data['path']):
    raise Exception('The toolchain JSON file is invalid.')

  pgomgr_dir = os.path.join(toolchain_data['path'], 'VC', 'bin')
  if target_cpu == 'x64':
    pgomgr_dir = os.path.join(pgomgr_dir, 'amd64')

  pgomgr_path = os.path.join(pgomgr_dir, 'pgomgr.exe')
  if not os.path.exists(pgomgr_path):
    raise Exception('pgomgr.exe is missing from %s.' % pgomgr_dir)

  return pgomgr_path


def main():
  parser = optparse.OptionParser(usage='%prog [options]')
  parser.add_option('--checkout-dir', help='The Chrome checkout directory.')
  parser.add_option('--target-cpu', help='The target\'s bitness.')
  parser.add_option('--build-dir', help='Chrome build directory.')
  parser.add_option('--binary-name', help='The binary for which the PGC files '
                    'should be merged, without extension.')
  options, _ = parser.parse_args()

  if not options.checkout_dir:
    parser.error('--checkout-dir is required')
  if not options.target_cpu:
    parser.error('--target-cpu is required')
  if not options.build_dir:
    parser.error('--build-dir is required')
  if not options.binary_name:
    parser.error('--binary-name is required')

  # Start by finding pgomgr.exe.
  pgomgr_path = find_pgomgr(options.checkout_dir, options.target_cpu)

  pgc_files = glob.glob(os.path.join(options.build_dir,
                                     '%s*.pgc' % options.binary_name))

  # Number of PGC files that should be merged in each iteration. Merging the
  # files one by one is really slow, but merging more than 10 at a time doesn't
  # really seem to reduce the total time any further.
  #
  # Number of pgc merged per iteration | Time (in min)
  #                  1                 |     27.2
  #                 10                 |     12.8
  #                 20                 |     12.0
  #                 30                 |     11.5
  #                 40                 |     11.4
  #                 50                 |     11.5
  #                 60                 |     11.6
  #                 70                 |     11.6
  #                 80                 |     11.7
  #
  # TODO(sebmarchand): Measure the memory usage of pgomgr.exe to see how it
  # gets affected by the number of pgc files.
  pgc_per_iter = 20

  def _split_in_chunks(items, chunk_size):
    """Split |items| in chunks of size |chunk_size|.

    Source: http://stackoverflow.com/a/312464
    """
    for i in xrange(0, len(items), chunk_size):
      yield items[i:i + chunk_size]

  for chunk in _split_in_chunks(pgc_files, pgc_per_iter):
    merge_command = [
        pgomgr_path,
        '/merge'
    ]
    for pgc_file in chunk:
      merge_command.append(os.path.join(options.build_dir, pgc_file))

    merge_command.append(
        os.path.join(options.build_dir, '%s.pgd' % options.binary_name))
    proc = subprocess.Popen(merge_command, stdout=subprocess.PIPE)
    stdout, _ = proc.communicate()
    if proc.returncode != 0:
      raise Exception('Error while trying to merge the PGC files:\n%s' % stdout)


if __name__ == '__main__':
  sys.exit(main())
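For context, an invocation of the deleted script would have looked roughly like the following sketch; the script file name, checkout path, build directory, and binary name are illustrative assumptions, not values taken from the CL or the bot configuration.

# Hypothetical invocation; assumes the file above is saved as merge_pgc_files.py
# and that the paths below exist on the builder.
import subprocess

subprocess.check_call([
    'python', 'merge_pgc_files.py',
    '--checkout-dir', 'C:\\chromium\\src',
    '--target-cpu', 'x64',
    '--build-dir', 'C:\\chromium\\src\\out\\Release',
    '--binary-name', 'chrome',
])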