Bug 1622687 - Fixed files in toolkit/crashreporter to make it flake8 compliant. r=froydnj

Changed .flake8 file to include the changes for flake8 compliance

Differential Revision: https://phabricator.services.mozilla.com/D68691

--HG--
extra : moz-landing-system : lando
This commit is contained in:
sumagnadas 2020-03-30 16:10:03 +00:00
Parent 21d3595b1e
Commit dc9c74f709
3 changed files with 102 additions and 43 deletions

View file

@ -51,8 +51,6 @@ exclude =
testing/web-platform,
toolkit/components/featuregates,
toolkit/content/tests/chrome/file_about_networking_wsh.py,
toolkit/crashreporter/tools/symbolstore.py,
toolkit/crashreporter/tools/unit-symbolstore.py,
toolkit/library/build/dependentlibs.py,
toolkit/locales/generate_update_locale.py,
toolkit/mozapps,

View file

@ -31,13 +31,9 @@ import os
import re
import shutil
import textwrap
import fnmatch
import subprocess
import time
import ctypes
import urlparse
import concurrent.futures
import multiprocessing
from optparse import OptionParser
@ -55,6 +51,7 @@ from mozpack.manifests import (
# Utility classes
class VCSFileInfo:
""" A base class for version-controlled file information. Ensures that the
following attributes are generated only once (successfully):
@ -136,10 +133,12 @@ class VCSFileInfo:
#
rootRegex = re.compile(r'^\S+?:/+(?:[^\s/]*@)?(\S+)$')
def read_output(*args):
    """Run *args* as a child process and return its captured stdout.

    Trailing whitespace (including the final newline) is stripped from
    the captured output before it is returned.
    """
    proc = subprocess.Popen(args=args, stdout=subprocess.PIPE)
    captured, _ = proc.communicate()
    return captured.rstrip()
class HGRepoInfo:
def __init__(self, path):
self.path = path
@ -169,8 +168,8 @@ class HGRepoInfo:
if cleanroot is None:
print(textwrap.dedent("""\
Could not determine repo info for %s. This is either not a clone of the web-based
repository, or you have not specified MOZ_SOURCE_REPO, or the clone is corrupt.""") % path,
sys.stderr)
repository, or you have not specified MOZ_SOURCE_REPO, or the clone is corrupt.""")
% path, sys.stderr)
sys.exit(1)
self.rev = rev
self.root = root
@ -179,6 +178,7 @@ class HGRepoInfo:
def GetFileInfo(self, file):
return HGFileInfo(file, self)
class HGFileInfo(VCSFileInfo):
def __init__(self, file, repo):
VCSFileInfo.__init__(self, file)
@ -199,12 +199,14 @@ class HGFileInfo(VCSFileInfo):
return "hg:%s:%s:%s" % (self.clean_root, self.file, self.revision)
return self.file
class GitRepoInfo:
"""
Info about a local git repository. Does not currently
support discovering info about a git clone, the info must be
provided out-of-band.
"""
def __init__(self, path, rev, root):
self.path = path
cleanroot = None
@ -217,7 +219,8 @@ class GitRepoInfo:
if cleanroot is None:
print(textwrap.dedent("""\
Could not determine repo info for %s (%s). This is either not a clone of a web-based
repository, or you have not specified MOZ_SOURCE_REPO, or the clone is corrupt.""") % (path, root),
repository, or you have not specified MOZ_SOURCE_REPO, or the clone is corrupt.""")
% (path, root),
file=sys.stderr)
sys.exit(1)
self.rev = rev
@ -226,6 +229,7 @@ class GitRepoInfo:
def GetFileInfo(self, file):
return GitFileInfo(file, self)
class GitFileInfo(VCSFileInfo):
def __init__(self, file, repo):
VCSFileInfo.__init__(self, file)
@ -248,6 +252,7 @@ class GitFileInfo(VCSFileInfo):
# Utility functions
# A cache of files for which VCS info has already been determined. Used to
# prevent extra filesystem activity or process launching.
vcsFileInfoCache = {}
@ -266,8 +271,8 @@ if platform.system() == 'Windows':
result = path
ctypes.windll.kernel32.SetErrorMode(ctypes.c_uint(1))
if not isinstance(path, unicode):
path = unicode(path, sys.getfilesystemencoding())
if not isinstance(path, str):
path = str(path, sys.getfilesystemencoding())
handle = ctypes.windll.kernel32.CreateFileW(path,
# GENERIC_READ
0x80000000,
@ -300,12 +305,14 @@ else:
# Just use the os.path version otherwise.
realpath = os.path.realpath
def IsInDir(file, dir):
    """Return True if *file* is located underneath *dir*.

    The comparison is a case-insensitive absolute-path prefix test: the
    lower() handles win32+vc8, where the source filenames come out all
    lowercase but the srcdir can be mixed case.
    """
    normalized_file = os.path.abspath(file).lower()
    normalized_dir = os.path.abspath(dir).lower()
    return normalized_file.startswith(normalized_dir)
def GetVCSFilenameFromSrcdir(file, srcdir):
if srcdir not in Dumper.srcdirRepoInfo:
# Not in cache, so find it and cache it
@ -316,6 +323,7 @@ def GetVCSFilenameFromSrcdir(file, srcdir):
return None
return Dumper.srcdirRepoInfo[srcdir].GetFileInfo(file)
def GetVCSFilename(file, srcdirs):
"""Given a full path to a file, and the top source directory,
look for version control information about this file, and return
@ -351,6 +359,7 @@ def GetVCSFilename(file, srcdirs):
# we want forward slashes on win32 paths
return (file.replace("\\", "/"), root)
def validate_install_manifests(install_manifest_args):
args = []
for arg in install_manifest_args:
@ -373,6 +382,7 @@ def validate_install_manifests(install_manifest_args):
args.append((manifest, destination))
return args
def make_file_mapping(install_manifests):
file_mapping = {}
for manifest, destination in install_manifests:
@ -386,6 +396,7 @@ def make_file_mapping(install_manifests):
file_mapping[abs_dest] = realpath(src.path)
return file_mapping
@memoize
def get_generated_file_s3_path(filename, rel_path, bucket):
"""Given a filename, return a path formatted similarly to
@ -402,16 +413,27 @@ def GetPlatformSpecificDumper(**kwargs):
'Linux': Dumper_Linux,
'Darwin': Dumper_Mac}[buildconfig.substs['OS_ARCH']](**kwargs)
def SourceIndex(fileStream, outputPath, vcs_root):
    """Takes a list of files, writes info to a data block in a .stream file"""
    # NOTE(review): this span contains BOTH the pre- and post-linewrap
    # versions of several write() calls (an artifact of the diff this text
    # was transcribed from): the SRCSRV ini header, the SRCSRVVERCTRL
    # variables block and the fileStream payload are each emitted twice
    # below.  Only one copy of each should survive -- confirm against the
    # upstream file before relying on this transcript.
    # Creates a .pdb.stream file in the mozilla\objdir to be used for source indexing
    # Create the srcsrv data block that indexes the pdb file
    result = True
    pdbStreamFile = open(outputPath, "w")
    pdbStreamFile.write('''SRCSRV: ini ------------------------------------------------\r\nVERSION=2\r\nINDEXVERSION=2\r\nVERCTRL=http\r\nSRCSRV: variables ------------------------------------------\r\nHGSERVER=''')
    pdbStreamFile.write(
        '''SRCSRV: ini ------------------------------------------------\r\nVERSION=2\r
INDEXVERSION=2\r
VERCTRL=http\r\nSRCSRV: variables ------------------------------------------\r
HGSERVER=''')
    pdbStreamFile.write(vcs_root)
    pdbStreamFile.write('''\r\nSRCSRVVERCTRL=http\r\nHTTP_EXTRACT_TARGET=%hgserver%/raw-file/%var3%/%var2%\r\nSRCSRVTRG=%http_extract_target%\r\nSRCSRV: source files ---------------------------------------\r\n''')
    pdbStreamFile.write(fileStream)  # can't do string interpolation because the source server also uses this and so there are % in the above
    pdbStreamFile.write(
        '''\r\nSRCSRVVERCTRL=http\r\nHTTP_EXTRACT_TARGET=%hgserver%/raw-file/%var3%/%var2%\r
SRCSRVTRG=%http_extract_target%\r
SRCSRV: source files ---------------------------------------\r\n''')
    pdbStreamFile.write(
        fileStream)
    # can't do string interpolation because the source server also uses this
    # so there are % in the above
    pdbStreamFile.write("SRCSRV: end ------------------------------------------------\r\n\n")
    pdbStreamFile.close()
    return result
@ -540,7 +562,7 @@ class Dumper:
rel_path))
try:
os.makedirs(os.path.dirname(full_path))
except OSError: # already exists
except OSError: # already exists
pass
f = open(full_path, "w")
f.write(module_line)
@ -557,18 +579,22 @@ class Dumper:
if self.vcsinfo:
gen_path = self.generated_files.get(filename)
if gen_path and self.s3_bucket:
filename = get_generated_file_s3_path(filename, gen_path, self.s3_bucket)
filename = get_generated_file_s3_path(
filename, gen_path, self.s3_bucket)
rootname = ''
else:
(filename, rootname) = GetVCSFilename(filename, self.srcdirs)
# sets vcs_root in case the loop through files were to end on an empty rootname
# sets vcs_root in case the loop through files were to end
# on an empty rootname
if vcs_root is None:
if rootname:
vcs_root = rootname
if rootname:
vcs_root = rootname
# gather up files with hg for indexing
if filename.startswith("hg"):
(ver, checkout, source_file, revision) = filename.split(":", 3)
sourceFileStream += sourcepath + "*" + source_file + '*' + revision + "\r\n"
(ver, checkout,
source_file, revision) = filename.split(":", 3)
sourceFileStream += sourcepath + "*" + source_file
sourceFileStream += '*' + revision + "\r\n"
f.write("FILE %s %s\n" % (index, filename))
elif line.startswith("INFO CODE_ID "):
# INFO CODE_ID code_id code_file
@ -638,7 +664,7 @@ class Dumper:
if 'asan' not in perfherder_extra_options.lower():
print('PERFHERDER_DATA: %s' % json.dumps(perfherder_data),
file=sys.stderr)
file=sys.stderr)
elapsed = time.time() - t_start
print('Finished processing %s in %.2fs' % (file, elapsed),
@ -647,6 +673,7 @@ class Dumper:
# Platform-specific subclasses. For the most part, these just have
# logic to determine what files to extract symbols from.
def locate_pdb(path):
'''Given a path to a binary, attempt to locate the matching pdb file with simple heuristics:
* Look for a pdb file with the same base name next to the binary
@ -667,6 +694,7 @@ def locate_pdb(path):
return pdb
return None
class Dumper_Win32(Dumper):
fixedFilenameCaseCache = {}
@ -678,9 +706,9 @@ class Dumper_Win32(Dumper):
return True
return False
def CopyDebug(self, file, debug_file, guid, code_file, code_id):
file = locate_pdb(file)
def compress(path):
compressed_file = path[:-1] + '_'
# ignore makecab's output
@ -744,7 +772,7 @@ class Dumper_Win32(Dumper):
else:
cmd = [pdbstr]
subprocess.call(cmd + ["-w", "-p:" + os.path.basename(debug_file),
"-i:" + os.path.basename(streamFilename), "-s:srcsrv"],
"-i:" + os.path.basename(streamFilename), "-s:srcsrv"],
cwd=os.path.dirname(stream_output_path))
# clean up all the .stream files when done
os.remove(stream_output_path)
@ -753,6 +781,7 @@ class Dumper_Win32(Dumper):
class Dumper_Linux(Dumper):
objcopy = os.environ['OBJCOPY'] if 'OBJCOPY' in os.environ else 'objcopy'
def ShouldProcess(self, file):
"""This function will allow processing of files that are
executable, or end with the .so extension, and additionally
@ -786,13 +815,14 @@ class Dumper_Linux(Dumper):
if os.path.isfile(file_dbg):
os.unlink(file_dbg)
class Dumper_Solaris(Dumper):
def RunFileCommand(self, file):
"""Utility function, returns the output of file(1)"""
try:
output = os.popen("file " + file).read()
return output.split('\t')[1];
except:
return output.split('\t')[1]
except Exception:
return ""
def ShouldProcess(self, file):
@ -804,6 +834,7 @@ class Dumper_Solaris(Dumper):
return self.RunFileCommand(file).startswith("ELF")
return False
class Dumper_Mac(Dumper):
def ShouldProcess(self, file):
"""This function will allow processing of files that are
@ -911,7 +942,7 @@ class Dumper_Mac(Dumper):
guid,
os.path.basename(dsymbundle) + ".tar.bz2")
full_path = os.path.abspath(os.path.join(self.symbol_path,
rel_path))
rel_path))
success = subprocess.call(["tar", "cjf", full_path, os.path.basename(dsymbundle)],
cwd=os.path.dirname(dsymbundle),
stdout=open(os.devnull, 'w'), stderr=subprocess.STDOUT)
@ -919,14 +950,19 @@ class Dumper_Mac(Dumper):
print(rel_path)
# Entry point if called as a standalone program
def main():
parser = OptionParser(usage="usage: %prog [options] <dump_syms binary> <symbol store path> <debug info files>")
parser = OptionParser(
usage="usage: %prog [options] <dump_syms binary> <symbol store path> <debug info files>")
parser.add_option("-c", "--copy",
action="store_true", dest="copy_debug", default=False,
help="Copy debug info files into the same directory structure as symbol files")
help="""Copy debug info files into the same directory structure
as symbol files""")
parser.add_option("-a", "--archs",
action="store", dest="archs",
help="Run dump_syms -a <arch> for each space separated cpu architecture in ARCHS (only on OS X)")
help="""Run dump_syms -a <arch> for each space separated cpu architecture
in ARCHS (only on OS X)""")
parser.add_option("-s", "--srcdir",
action="append", dest="srcdir", default=[],
help="Use SRCDIR to determine relative paths to source files")
@ -935,7 +971,8 @@ def main():
help="Try to retrieve VCS info for each FILE listed in the output")
parser.add_option("-i", "--source-index",
action="store_true", dest="srcsrv", default=False,
help="Add source index information to debug files, making them suitable for use in a source server.")
help="""Add source index information to debug files, making them suitable
for use in a source server.""")
parser.add_option("--install-manifest",
action="append", dest="install_manifests",
default=[],
@ -948,7 +985,7 @@ to canonical locations in the source repository. Specify
help="Count static initializers")
(options, args) = parser.parse_args()
#check to see if the pdbstr.exe exists
# check to see if the pdbstr.exe exists
if options.srcsrv:
if 'PDBSTR' not in buildconfig.substs:
print("pdbstr was not found by configure.\n", file=sys.stderr)
@ -981,6 +1018,7 @@ to canonical locations in the source repository. Specify
dumper.Process(args[2], options.count_ctors)
# run main if run directly
if __name__ == "__main__":
main()

45
toolkit/crashreporter/tools/unit-symbolstore.py Executable file → Normal file
View file

@ -3,11 +3,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import concurrent.futures
import mock
import mozunit
import os
import platform
import shutil
import struct
import subprocess
@ -26,23 +24,30 @@ from symbolstore import realpath
# Some simple functions to mock out files that the platform-specific dumpers will accept.
# dump_syms itself will not be run (we mock that call out), but we can't override
# the ShouldProcessFile method since we actually want to test that.
def write_elf(filename):
    """Write a minimal fake ELF header to *filename* (52 bytes total).

    Emits the 0x7f 'E' 'L' 'F' magic followed by class/data/version bytes
    of 1 and 45 bytes of zero padding -- just enough for file-type sniffing.
    """
    # The original left the file handle unclosed (open(...).write(...));
    # a context manager guarantees the handle is closed on every runtime.
    with open(filename, "wb") as f:
        f.write(struct.pack("<7B45x", 0x7f, ord("E"), ord("L"), ord("F"), 1, 1, 1))
def write_macho(filename):
    """Write a minimal fake Mach-O header to *filename* (32 bytes total).

    Emits the little-endian 0xfeedface magic followed by 28 bytes of zero
    padding -- just enough for file-type sniffing.
    """
    # The original left the file handle unclosed (open(...).write(...));
    # a context manager guarantees the handle is closed on every runtime.
    with open(filename, "wb") as f:
        f.write(struct.pack("<I28x", 0xfeedface))
def write_dll(filename):
    """Write a fake DLL (dummy contents) plus a matching fake .pdb next to it.

    The .pdb gets the same base name as *filename* with a ``.pdb`` extension.
    """
    # The original left both file handles unclosed (open(...).write(...));
    # context managers guarantee the handles are closed on every runtime.
    with open(filename, "w") as f:
        f.write("aaa")
    # write out a fake PDB too
    with open(os.path.splitext(filename)[0] + ".pdb", "w") as f:
        f.write("aaa")
def target_platform():
    """Return the build's target OS from the buildconfig substitutions."""
    substitutions = buildconfig.substs
    return substitutions['OS_TARGET']
def host_platform():
    """Return the build host's OS/architecture from the buildconfig substitutions."""
    substitutions = buildconfig.substs
    return substitutions['HOST_OS_ARCH']
writer = {'WINNT': write_dll,
'Linux': write_elf,
'Sunos5': write_elf,
@ -55,13 +60,16 @@ file_output = [{'WINNT': "bogus data",
'Linux': "ELF executable",
'Darwin': "Mach-O executable"}[target_platform()]]
def add_extension(files):
    """Return a new list with the module-level ``extension`` suffix appended
    to every entry of *files*.
    """
    return [name + extension for name in files]
class HelperMixin(object):
"""
Test that passing filenames to exclude from processing works.
"""
def setUp(self):
self.test_dir = tempfile.mkdtemp()
if not self.test_dir.endswith(os.sep):
@ -88,7 +96,7 @@ class HelperMixin(object):
def make_file(self, path):
self.make_dirs(path)
with open(path, 'wb') as f:
with open(path, 'wb'):
pass
def add_test_files(self, files):
@ -113,6 +121,7 @@ class TestCopyDebug(HelperMixin, unittest.TestCase):
self.stdouts = []
self.mock_popen = patch("subprocess.Popen").start()
stdout_iter = self.next_mock_stdout()
def next_popen(*args, **kwargs):
m = mock.MagicMock()
# Get the iterators over whatever output was provided.
@ -145,14 +154,17 @@ class TestCopyDebug(HelperMixin, unittest.TestCase):
per file.
"""
copied = []
def mock_copy_debug(filename, debug_file, guid, code_file, code_id):
copied.append(filename[len(self.symbol_dir):] if filename.startswith(self.symbol_dir) else filename)
copied.append(filename[len(self.symbol_dir):]
if filename.startswith(self.symbol_dir) else filename)
self.add_test_files(add_extension(["foo"]))
# Windows doesn't call file(1) to figure out if the file should be processed.
if target_platform() != 'WINNT':
self.stdouts.append(file_output)
self.stdouts.append(mock_dump_syms("X" * 33, add_extension(["foo"])[0]))
self.stdouts.append(mock_dump_syms("Y" * 33, add_extension(["foo"])[0]))
def mock_dsymutil(args, **kwargs):
filename = args[-1]
os.makedirs(filename + ".dSYM")
@ -177,6 +189,7 @@ class TestCopyDebug(HelperMixin, unittest.TestCase):
code_id = 'abc123'
self.stdouts.append(mock_dump_syms('X' * 33, 'foo.pdb',
['INFO CODE_ID %s %s' % (code_id, code_file)]))
def mock_compress(args, **kwargs):
filename = args[-1]
open(filename, 'w').write('stuff')
@ -186,7 +199,9 @@ class TestCopyDebug(HelperMixin, unittest.TestCase):
symbol_path=self.symbol_dir,
copy_debug=True)
d.Process(test_file)
self.assertTrue(os.path.isfile(os.path.join(self.symbol_dir, code_file, code_id, code_file[:-1] + '_')))
self.assertTrue(os.path.isfile(os.path.join(self.symbol_dir,
code_file, code_id, code_file[:-1] + '_')))
class TestGetVCSFilename(HelperMixin, unittest.TestCase):
def setUp(self):
@ -236,7 +251,8 @@ class TestGetVCSFilename(HelperMixin, unittest.TestCase):
# SHA-512 of a zero-byte file
EMPTY_SHA512 = 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'
EMPTY_SHA512 = 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff'
EMPTY_SHA512 += '8318d2877eec2f63b931bd47417a81a538327af927da3e'
class TestGeneratedFilePath(HelperMixin, unittest.TestCase):
@ -250,7 +266,7 @@ class TestGeneratedFilePath(HelperMixin, unittest.TestCase):
# Make an empty generated file
g = os.path.join(self.test_dir, 'generated')
rel_path = 'a/b/generated'
with open(g, 'wb') as f:
with open(g, 'wb'):
pass
expected = 's3:bucket:{}/{}:'.format(EMPTY_SHA512,
rel_path)
@ -312,6 +328,7 @@ if target_platform() == 'WINNT':
# And mock the call to pdbstr to capture the srcsrv stream data.
global srcsrv_stream
srcsrv_stream = None
def mock_pdbstr(args, cwd="", **kwargs):
for arg in args:
if arg.startswith("-i:"):
@ -329,10 +346,12 @@ if target_platform() == 'WINNT':
d.CopyDebug = lambda *args: True
d.Process(os.path.join(self.test_dir, test_files[0]))
self.assertNotEqual(srcsrv_stream, None)
hgserver = [x.rstrip() for x in srcsrv_stream.splitlines() if x.startswith("HGSERVER=")]
hgserver = [x.rstrip() for x in srcsrv_stream.splitlines()
if x.startswith("HGSERVER=")]
self.assertEqual(len(hgserver), 1)
self.assertEqual(hgserver[0].split("=")[1], "http://example.com/repo")
class TestInstallManifest(HelperMixin, unittest.TestCase):
def setUp(self):
HelperMixin.setUp(self)
@ -398,9 +417,10 @@ class TestInstallManifest(HelperMixin, unittest.TestCase):
'''
Test that a bad manifest argument gives an error.
'''
with self.assertRaises(ValueError) as e:
with self.assertRaises(ValueError):
symbolstore.validate_install_manifests(['foo'])
class TestFileMapping(HelperMixin, unittest.TestCase):
def setUp(self):
HelperMixin.setUp(self)
@ -432,13 +452,14 @@ class TestFileMapping(HelperMixin, unittest.TestCase):
'..', '..', o))
# mock the dump_syms output
file_id = ("X" * 33, 'somefile')
def mk_output(files):
return iter(
[
'MODULE os x86 %s %s\n' % file_id
] +
[
'FILE %d %s\n' % (i,s) for i, s in enumerate(files)
'FILE %d %s\n' % (i, s) for i, s in enumerate(files)
] +
[
'PUBLIC xyz 123\n'
@ -457,6 +478,7 @@ class TestFileMapping(HelperMixin, unittest.TestCase):
file_id[1], file_id[0], file_id[1] + '.sym')
self.assertEqual(open(symbol_file, 'r').read(), expected_output)
class TestFunctional(HelperMixin, unittest.TestCase):
'''Functional tests of symbolstore.py, calling it with a real
dump_syms binary and passing in a real binary to dump symbols from.
@ -465,6 +487,7 @@ class TestFunctional(HelperMixin, unittest.TestCase):
don't use the actual process pool like buildsymbols does, this tests
that the way symbolstore.py gets called in buildsymbols works.
'''
def setUp(self):
HelperMixin.setUp(self)
self.skip_test = False
@ -494,7 +517,6 @@ class TestFunctional(HelperMixin, unittest.TestCase):
self.target_bin = os.path.join(buildconfig.topobjdir,
'dist', 'bin', 'firefox-bin')
def tearDown(self):
HelperMixin.tearDown(self)
@ -523,6 +545,7 @@ class TestFunctional(HelperMixin, unittest.TestCase):
self.assertTrue(os.path.isfile(symbol_file))
symlines = open(symbol_file, 'r').readlines()
file_lines = [l for l in symlines if l.startswith('FILE')]
def check_hg_path(lines, match):
match_lines = [l for l in file_lines if match in l]
self.assertTrue(len(match_lines) >= 1,