Compute a digest of each report file, and nuke reports that have the same
digest as a file already added to the index.  This is a workaround for the
analyzer analyzing the same file more than once, which can happen when
building multiple targets.


git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@49903 91177308-0d34-0410-b5e6-96231b3b80d8
Ted Kremenek 2008-04-18 15:09:30 +00:00
Parent: d5e2d87d53
Commit: 57cf446d34
1 changed file with 31 additions and 0 deletions


@@ -115,16 +115,47 @@ sub SetHtmlEnv {
  $ENV{'CCC_ANALYZER_HTML'} = $Dir;
}
##----------------------------------------------------------------------------##
# ComputeDigest - Compute a digest of the specified file.
##----------------------------------------------------------------------------##
sub ComputeDigest {
  my $FName = shift;
  die "Cannot read $FName" if (! -r $FName);
  my $Result = `sha1sum -b $FName`;
  my @Output = split /\s+/,$Result;
  die "Bad output from sha1sum" if (scalar(@Output) != 2);
  return $Output[0];
}
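##----------------------------------------------------------------------------##
# ComputeDigestInProcess - A minimal sketch (not part of this change) of the
#  same computation done in-process with the core Digest::SHA module, which
#  avoids depending on an external 'sha1sum' binary that not every platform
#  ships.  The helper name is hypothetical, for illustration only.
##----------------------------------------------------------------------------##
use Digest::SHA;
sub ComputeDigestInProcess {
  my $FName = shift;
  die "Cannot read $FName" if (! -r $FName);
  # Read the file in binary mode ("b") and return its SHA-1 hex digest.
  return Digest::SHA->new(1)->addfile($FName, "b")->hexdigest;
}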
##----------------------------------------------------------------------------##
# ScanFile - Scan a report file for various identifying attributes.
##----------------------------------------------------------------------------##
# Sometimes a source file is scanned more than once, and thus produces
# multiple error reports. We use a cache to solve this problem.
my %AlreadyScanned;
sub ScanFile {
  my $Index = shift;
  my $Dir = shift;
  my $FName = shift;
  # Compute a digest for the report file.  Determine if we have already
  # scanned a file that looks just like it.
  my $digest = ComputeDigest("$Dir/$FName");
  if (defined($AlreadyScanned{$digest})) {
    # Redundant file.  Remove it.
    `rm -f $Dir/$FName`;
    return;
  }
  $AlreadyScanned{$digest} = 1;
  open(IN, "$Dir/$FName") or die "$Prog: Cannot open '$Dir/$FName'\n";
  my $BugDesc = "";