- adding support for sfextlog type for managed cluster extension logs

Jason Gilbertson 2022-07-31 15:59:10 -04:00
Parent 04902297b4
Commit 2ab3af8852
12 changed files: 232 additions and 64 deletions

.vscode/launch.json (vendored)
View File

@@ -66,7 +66,7 @@
"console": "internalConsole",
},
{
"name": "NET 5 Launch (console)",
"name": "NET 6 Launch (console)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",

View File

@@ -4,13 +4,26 @@
- fix issue where a trace file was wrongly excluded: the file was within the specified time range by the ticks in its file name, but its last modified date was outside the range
- update newtonsoft.json from 11.0.2 -> 13.0.1 for all .net versions due to security vulnerability
- fix /? help not displaying by partially reverting ead7b71e
- add support for sfextlog type for managed cluster extension logs
## 7/1/2022
- update newtonsoft.json from 11.0.2 -> 13.0.1 due to security vulnerability
- remove CollectSFDataGui references from project
## 6/2/2022
- modify log.open for file exception
## 4/15/2022
- add version to always be logged at completion
## 4/7/2022
- add support for tls 1.3
## 03/22/2022
- add explicit reference to microsoft.identity.client 4.42.0 for jarvis error

View File

@@ -244,7 +244,7 @@ collectsfdata.exe -type trace -s "https://sflogsxxxxxxxxxxxxx.blob.core.windows.
```json
{
"GatherType": "[counter|setup|trace|table]", // choose one
"GatherType": "[counter|setup|sfextlog|trace|table]", // choose one
"SasKey": "[account sas uri|service sas uri|sas uri connection string]",
"StartTimeStamp": null,
"EndTimeStamp": null,
@@ -258,7 +258,7 @@ collectsfdata.exe -type trace -s "https://sflogsxxxxxxxxxxxxx.blob.core.windows.
```json
{
"GatherType": "[counter|setup|trace|table]", // choose one
"GatherType": "[counter|setup|sfextlog|trace|table]", // choose one
"SasKey": "[account sas uri|service sas uri|sas uri connection string]",
"StartTimeStamp": null,
"EndTimeStamp": null,

View File

@@ -50,6 +50,7 @@ Options:
counter
trace
exception
sfextlog
table
setup
any
@@ -128,6 +129,8 @@ To use a default configuration file without having to specify on the command lin
- **GatherType** - required. string. options: counter, exception, table, trace, any
- **counter** - 'counter' will enumerate service fabric performance counter (.blg) blobs from 'fabriccounters*' container.
- **exception** - 'exception' will enumerate service fabric fabric crash dumps (.dmp) blobs from 'fabriccrashdumps*' container.
- **setup** - 'setup' will enumerate service fabric fabric deployer setup (.trace) blobs from 'fabriclogs*' container.
- **sfextlog** - 'sfextlog' will enumerate managed service fabric node extension logs (.log) blobs from 'vmextlog*' container (see the example command after this list).
- **table** - 'table' will enumerate service fabric events from blob tables 'fabriclogs*'
- **trace** - 'trace' will enumerate service fabric diagnostic logs (.dtr) zip blobs from 'fabriclogs*'
- **any** - 'any' without other filters will enumerate all containers for blobs matching criteria.
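
For reference, a hedged command-line sketch for the new gather type: the `-type` and `-s` switches are the ones shown in the example command earlier in this document, and the SAS value is a placeholder in the same style as the configuration samples below.

```text
collectsfdata.exe -type sfextlog -s "[account sas uri|service sas uri|sas uri connection string]"
```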
@@ -303,7 +306,7 @@ user managed identity: 3080722d-0cf6-4552-8e45-c5ccbc3d091f
{
"ContainerFilter": "",
"DeleteCache": true,
"GatherType": "[counter|exception|trace|table|any]",
"GatherType": "[counter|exception|setup|sfextlog|trace|table|any]",
"LogDebug": 4,
"CacheLocation": "<%fast drive path with 100 GB free%>",
"SasKey": "[account sas uri|service sas uri|sas uri connection string]",
@@ -321,7 +324,7 @@ user managed identity: 3080722d-0cf6-4552-8e45-c5ccbc3d091f
{
"ContainerFilter": "",
"DeleteCache": true,
"GatherType": "[counter|exception|trace|table|any]",
"GatherType": "[counter|exception|setup|sfextlog|trace|table|any]",
"LogDebug": 4,
"CacheLocation": "<%fast drive path with 100 GB free%>",
"SasKey": "[account sas uri|service sas uri|sas uri connection string]",
@@ -342,7 +345,7 @@ user managed identity: 3080722d-0cf6-4552-8e45-c5ccbc3d091f
{
"ContainerFilter": "",
"DeleteCache": true,
"GatherType": "[counter|exception|trace|table|any]",
"GatherType": "[counter|exception|setup|sfextlog|trace|table|any]",
"LogDebug": 4,
"CacheLocation": "<%fast drive path with 100 GB free%>",
"SasKey": "[account sas uri|service sas uri|sas uri connection string]",

View File

@@ -325,14 +325,14 @@ namespace CollectSFData.Azure
Directory.CreateDirectory(Path.GetDirectoryName(fileObject.FileUri));
}
((CloudBlockBlob)blob).DownloadToFileAsync(fileObject.FileUri, FileMode.Create, null, blobRequestOptions, null).Wait();
((CloudBlob)blob).DownloadToFileAsync(fileObject.FileUri, FileMode.Create, null, blobRequestOptions, null).Wait();
};
}
else
{
fileObject.DownloadAction = () =>
{
((CloudBlockBlob)blob).DownloadToStreamAsync(fileObject.Stream.Get(), null, blobRequestOptions, null).Wait();
((CloudBlob)blob).DownloadToStreamAsync(fileObject.Stream.Get(), null, blobRequestOptions, null).Wait();
};
}
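
The cast change above (CloudBlockBlob to CloudBlob) reads as the piece that lets the downloader handle extension .log blobs regardless of the underlying blob kind. A minimal sketch of the idea, assuming the legacy Azure storage blob client that the CloudBlockBlob/CloudBlob types above come from:

```csharp
using System.IO;
using Microsoft.Azure.Storage.Blob; // assumption; the older Microsoft.WindowsAzure.Storage.Blob namespace exposes the same types

// minimal sketch, not the project's actual download path:
// CloudBlob is the shared base class of CloudBlockBlob, CloudPageBlob and CloudAppendBlob,
// so downloading through the base type works for any blob kind, whereas a hard cast to
// CloudBlockBlob throws InvalidCastException when the listed blob is not a block blob.
public static class BlobDownloadSketch
{
    public static void Download(CloudBlob blob, string localPath)
    {
        blob.DownloadToFileAsync(localPath, FileMode.Create).Wait();
    }
}
```

Whether managed cluster extension logs are actually stored as non-block blobs is an assumption here; the base-type download is safe either way.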

View File

@@ -179,8 +179,7 @@ namespace CollectSFData
if (!string.IsNullOrEmpty(clusterId))
{
// 's-' in prefix may not always be correct
containerPrefix += "s-" + clusterId;
containerPrefix += "-" + clusterId;
}
tablePrefix = containerPrefix + clusterId?.Replace("-", "");
@@ -468,6 +467,11 @@ namespace CollectSFData
}
break;
case FileTypesEnum.sfextlog:
files = Directory.GetFiles(Config.CacheLocation, $"*{Constants.LogExtension}", SearchOption.AllDirectories).ToList();
break;
default:
Log.Warning($"configured filetype:{Config.FileType} not valid for cache upload. returning.");

View File

@@ -424,6 +424,7 @@ namespace CollectSFData.Common
$"{newLine}counter" +
$"{newLine}trace" +
$"{newLine}exception" +
$"{newLine}sfextlog" +
$"{newLine}table" +
$"{newLine}setup" +
$"{newLine}any",

View File

@@ -31,6 +31,7 @@ namespace CollectSFData.Common
public const string InstanceMetaDataRestUri = "http://169.254.169.254/metadata/instance?api-version=2017-08-01";
public const string JsonExtension = ".json";
public const string KustoUrlPattern = "https://(?<ingest>ingest-){1}(?<clusterName>.+?)\\.(?<location>.+?){0,1}\\.(?<domainName>.+?)/(?<databaseName>.+?){1}(/|$)";
public const string LogExtension = ".log";
public const string ManagementAzureCom = "https://management.azure.com";
public const int MaxCsvTransmitBytes = 1024 * 1024 * 100;
public const int MaxJsonTransmitBytes = 1024 * 1024 * 25;

View File

@@ -223,6 +223,20 @@ namespace CollectSFData.DataFile
return PopulateCollection<CsvExceptionRecord>(fileObject);
}
public FileObjectCollection FormatExtensionFile(FileObject fileObject)
{
return FormatLogFile<LogExtensionRecord>(fileObject);
}
public FileObjectCollection FormatLogFile<T>(FileObject fileObject) where T : ITraceRecord, new()
{
Log.Debug($"enter:{fileObject.FileUri}");
// handles sfextlog file format
// [3104:5] 2022-07-29T14:37:47.147:637947022671478520 [INFO] HandlerHeartbeatWriter - Heartbeat: Ready: New .settings configuration found version 1. Applying config...
string newEventPattern = @"^\[\d+:\d+\] [0-9]{2,4}(-|/)[0-9]{1,2}(-|/)[0-9]{1,2}(-|T)[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{1,3}";
return FormatRecord<T>(fileObject, newEventPattern);
}
public FileObjectCollection FormatSetupFile(FileObject fileObject)
{
return FormatTraceFile<CsvSetupRecord>(fileObject);
@@ -237,47 +251,9 @@ namespace CollectSFData.DataFile
public FileObjectCollection FormatTraceFile<T>(FileObject fileObject) where T : ITraceRecord, new()
{
Log.Debug($"enter:{fileObject.FileUri}");
IList<IRecord> records = new List<IRecord>();
// handles dtr, setup, and deployer file timestamp formats
string newEventPattern = @"^[0-9]{2,4}(-|/)[0-9]{1,2}(-|/)[0-9]{1,2}(-| )[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}";
Regex regex = new Regex(newEventPattern, RegexOptions.Compiled);
string record = string.Empty;
try
{
foreach (string tempLine in fileObject.Stream.ReadLine())
{
if (regex.IsMatch(tempLine))
{
// new record, write old record
if (record.Length > 0)
{
records.Add(new T().Populate(fileObject, record, _config.ResourceUri));
}
record = string.Empty;
}
record += tempLine;
}
// last record
if (record.Length > 0)
{
records.Add(new T().Populate(fileObject, record, _config.ResourceUri));
}
Log.Debug($"finished format:{fileObject.FileUri}");
fileObject.Stream.ResetPosition();
fileObject.Stream.Write(records);
return PopulateCollection<T>(fileObject);
}
catch (Exception e)
{
Log.Exception($"file:{fileObject.FileUri} exception:{e}");
return new FileObjectCollection() { fileObject };
}
return FormatRecord<T>(fileObject, newEventPattern);
}
public FileObjectCollection PopulateCollection<T>(FileObject fileObject) where T : IRecord
@@ -409,6 +385,14 @@ namespace CollectSFData.DataFile
return FormatTableFile(fileObject);
}
break;
}
case FileDataTypesEnum.sfextlog:
{
if (fileObject.FileExtensionType.Equals(FileExtensionTypesEnum.log))
{
return FormatExtensionFile(fileObject);
}
break;
}
default:
@@ -736,5 +720,48 @@ namespace CollectSFData.DataFile
traceSession.Dispose();
return true;
}
private FileObjectCollection FormatRecord<T>(FileObject fileObject, string newEventPattern) where T : ITraceRecord, new()
{
IList<IRecord> records = new List<IRecord>();
Regex regex = new Regex(newEventPattern, RegexOptions.Compiled);
string record = string.Empty;
try
{
foreach (string tempLine in fileObject.Stream.ReadLine())
{
if (regex.IsMatch(tempLine))
{
// new record, write old record
if (record.Length > 0)
{
records.Add(new T().Populate(fileObject, record, _config.ResourceUri));
}
record = string.Empty;
}
record += tempLine;
}
// last record
if (record.Length > 0)
{
records.Add(new T().Populate(fileObject, record, _config.ResourceUri));
}
Log.Debug($"finished format:{fileObject.FileUri}");
fileObject.Stream.ResetPosition();
fileObject.Stream.Write(records);
return PopulateCollection<T>(fileObject);
}
catch (Exception e)
{
Log.Exception($"file:{fileObject.FileUri} exception:{e}");
return new FileObjectCollection() { fileObject };
}
}
}
}
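
As a worked check of the sfextlog new-event pattern used in FormatLogFile<T> above, a small self-contained sketch; the first sample line is the one from the code comment (abridged), the second is a hypothetical continuation line:

```csharp
using System;
using System.Text.RegularExpressions;

public static class SfExtLogPatternCheck
{
    public static void Main()
    {
        // pattern copied from FormatLogFile<T>: a new record starts with "[pid:tid] timestamp"
        string newEventPattern = @"^\[\d+:\d+\] [0-9]{2,4}(-|/)[0-9]{1,2}(-|/)[0-9]{1,2}(-|T)[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{1,3}";

        string newRecordLine = "[3104:5] 2022-07-29T14:37:47.147:637947022671478520 [INFO] HandlerHeartbeatWriter - Heartbeat: Ready";
        string continuationLine = "    New .settings configuration found version 1. Applying config...";

        // FormatRecord<T> uses exactly this match to decide where one record ends and the next begins
        Console.WriteLine(Regex.IsMatch(newRecordLine, newEventPattern));    // True
        Console.WriteLine(Regex.IsMatch(continuationLine, newEventPattern)); // False
    }
}
```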

View File

@@ -186,12 +186,37 @@ namespace CollectSFData.DataFile
// standalone nodename should be surrounded by /
_nodePattern = $@"(/|\.)(?<nodeName>[^/^\.]+?)(/|\.)({_fileDataTypesPattern}|[^/]+?\.dmp)(/|\.|_|$)";
if (Regex.IsMatch(fileUri, _nodePattern, RegexOptions.IgnoreCase))
switch (FileDataType)
{
case FileDataTypesEnum.table:
return;
case FileDataTypesEnum.fabriccrashdumps:
case FileDataTypesEnum.counter:
case FileDataTypesEnum.sfextlog:
NodeName = Path.GetFileName(Path.GetDirectoryName(fileUri));
break;
case FileDataTypesEnum.fabric:
case FileDataTypesEnum.lease:
case FileDataTypesEnum.bootstrap:
case FileDataTypesEnum.fabricdeployer:
case FileDataTypesEnum.fabricsetup:
NodeName = Path.GetFileName(Path.GetDirectoryName(Path.GetDirectoryName(fileUri)));
break;
default:
break;
}
if (string.IsNullOrEmpty(NodeName) && Regex.IsMatch(fileUri, _nodePattern, RegexOptions.IgnoreCase))
{
Match match = Regex.Match(fileUri, _nodePattern, RegexOptions.IgnoreCase);
NodeName = match.Groups["nodeName"].Value;
Log.Debug($"node name: {NodeName}");
}
if (string.IsNullOrEmpty(NodeName))
{
Log.Error($"unable to determine nodename:{fileUri} using pattern {_nodePattern}");
}
}
private string ExtractProperties(string fileUri)
@@ -199,16 +224,8 @@
if (!string.IsNullOrEmpty(fileUri))
{
fileUri = FileManager.NormalizePath(fileUri);
ExtractNodeName(fileUri);
FileDataType = FileTypes.MapFileDataTypeUri(fileUri);
if (string.IsNullOrEmpty(NodeName))
{
if (FileDataType != FileDataTypesEnum.table)
{
Log.Error($"unable to determine nodename:{fileUri} using pattern {_nodePattern}");
}
}
ExtractNodeName(fileUri);
}
if (!string.IsNullOrEmpty(BaseUri) & Uri.IsWellFormedUriString(fileUri, UriKind.Relative))
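
A minimal sketch of the new node-name resolution for sfextlog files; the relative path below is hypothetical, and the rule taken from the switch above is simply "use the immediate parent directory as the node name":

```csharp
using System;
using System.IO;

public static class NodeNameSketch
{
    public static void Main()
    {
        // hypothetical relative uri for a managed cluster extension log blob
        string fileUri = "vmextlog/_nodetype_0/ServiceFabricExtensionHandler.log";

        // for sfextlog (as well as counter and crash dump) files the parent directory name is used;
        // the regex fallback in ExtractNodeName only runs if this yields nothing
        string nodeName = Path.GetFileName(Path.GetDirectoryName(fileUri));
        Console.WriteLine(nodeName); // _nodetype_0
    }
}
```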

View File

@@ -22,6 +22,7 @@ namespace CollectSFData.DataFile
bootstrap,
data,
fabriccrashdumps,
sfextlog,
unknown
}
@@ -34,6 +35,7 @@ namespace CollectSFData.DataFile
dtr,
etl,
json,
log,
trace,
zip
}
@@ -44,6 +46,7 @@ namespace CollectSFData.DataFile
any,
counter,
exception,
sfextlog,
setup,
trace,
table
@@ -92,6 +95,10 @@ namespace CollectSFData.DataFile
// using default fabricsetup / fabricdeployer
extension = FileDataTypesEnum.fabricsetup;
}
else if (fileUri.EndsWith(Constants.LogExtension))
{
extension = FileDataTypesEnum.sfextlog;
}
else if (fileUri.EndsWith(Constants.ZipExtension))
{
// todo: implement
@@ -186,6 +193,10 @@ namespace CollectSFData.DataFile
knownPrefix = FileTypesKnownUrisPrefix.fabriccrashdump;
break;
case FileTypesEnum.sfextlog:
knownPrefix = FileTypesKnownUrisPrefix.extension;
break;
case FileTypesEnum.setup:
case FileTypesEnum.table:
case FileTypesEnum.trace:
@@ -237,6 +248,11 @@ namespace CollectSFData.DataFile
fileTypesEnum = FileTypesEnum.table;
break;
}
case FileDataTypesEnum.sfextlog:
{
fileTypesEnum = FileTypesEnum.sfextlog;
break;
}
default:
{
fileTypesEnum = FileTypesEnum.any;
@@ -328,6 +344,12 @@ namespace CollectSFData.DataFile
break;
}
case Constants.LogExtension:
{
extension = FileExtensionTypesEnum.log;
break;
}
case Constants.SetupExtension:
{
extension = FileExtensionTypesEnum.trace;
@@ -355,9 +377,10 @@ namespace CollectSFData.DataFile
public class FileTypesKnownUrisPrefix
{
public static string any = "";
public static string fabriccounter = "fabriccounter";
public static string fabriccrashdump = "fabriccrashdump";
public static string fabriclog = "fabriclog";
public static string extension = "vmextlog";
public static string fabriccounter = "fabriccounters";
public static string fabriccrashdump = "fabriccrashdumps";
public static string fabriclog = "fabriclogs";
public static string unknown = "unknown";
}
}

View File

@@ -0,0 +1,79 @@
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
using System;
using System.Text.RegularExpressions;
namespace CollectSFData.DataFile
{
[Serializable]
public class LogExtensionRecord : ITraceRecord
{
private const int _fieldCount = 4;
private const string _pidPattern = @"\[(?<pid>\d+?):\d+?\]";
private const string _timePattern = @"(?<time>[0-9]{2,4}(-|/)[0-9]{1,2}(-|/)[0-9]{1,2}(-|T)[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{1,3}):\d+?";
private const string _levelPattern = @"\[(?<level>\w+?)\]";
private const string _typePattern = @"(\s+?-\s+?){0,1}(?<type>\w+?|)";
private string _eventPattern = $@"^{_pidPattern} {_timePattern} {_levelPattern} {_typePattern} - (?<text>.+)";
public string FileType { get; set; }
public string Level { get; set; }
public string NodeName { get; set; }
public int PID { get; set; }
public string RelativeUri { get; set; }
public string ResourceUri { get; set; }
public string Text { get; set; }
public int TID { get; set; }
public DateTime Timestamp { get; set; }
public string Type { get; set; }
public LogExtensionRecord()
{
}
public LogExtensionRecord(string traceRecord, FileObject fileObject, string resourceUri = null)
{
Populate(fileObject, traceRecord, resourceUri);
}
public IRecord Populate(FileObject fileObject, string traceRecord, string resourceUri = null)
{
// format for csv compliance
// kusto conforms to csv standards. service fabric trace (csv file) does not
// [3104:5] 2022-07-29T14:38:52.233:637947023322330619 [INFO] Utility - Starting process sc.exe with arguments create FabricInstallerSvc binPath="\"C:\Program Files\Microsoft Service Fabric\FabricInstallerService.Code\FabricInstallerService.exe\"" DisplayName="Service Fabric Installer Service" ..
Match matchResult = Regex.Match(traceRecord, _eventPattern);
if (matchResult.Success)
{
Timestamp = Convert.ToDateTime(matchResult.Groups["time"].Value);
Level = matchResult.Groups["level"].Value;
PID = Convert.ToInt32(matchResult.Groups["pid"].Value);
Type = matchResult.Groups["type"].Value;
Text = matchResult.Groups["text"].Value.Replace("\"", "'").TrimEnd('\r', '\n');
NodeName = fileObject.NodeName;
FileType = fileObject.FileDataType.ToString();
RelativeUri = fileObject.RelativeUri;
ResourceUri = resourceUri;
}
return this;
}
public override string ToString()
{
return $"{Timestamp:o},{Level},{PID},{Type},{Text},{NodeName},{FileType},{RelativeUri},{ResourceUri}{Environment.NewLine}";
}
}
}
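
Finally, a worked example of the record parsing above; the composite pattern is reconstructed inline from the private field fragments so the sketch stays self-contained, and the input is an abridged version of the line quoted in the Populate comment:

```csharp
using System;
using System.Text.RegularExpressions;

public static class LogExtensionRecordParseCheck
{
    public static void Main()
    {
        // composite of _pidPattern, _timePattern, _levelPattern and _typePattern above
        string eventPattern =
            @"^\[(?<pid>\d+?):\d+?\] " +
            @"(?<time>[0-9]{2,4}(-|/)[0-9]{1,2}(-|/)[0-9]{1,2}(-|T)[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{1,3}):\d+? " +
            @"\[(?<level>\w+?)\] " +
            @"(\s+?-\s+?){0,1}(?<type>\w+?|) - (?<text>.+)";

        string line = "[3104:5] 2022-07-29T14:38:52.233:637947023322330619 [INFO] Utility - Starting process sc.exe";

        Match m = Regex.Match(line, eventPattern);
        Console.WriteLine(m.Groups["pid"].Value);   // 3104
        Console.WriteLine(m.Groups["time"].Value);  // 2022-07-29T14:38:52.233
        Console.WriteLine(m.Groups["level"].Value); // INFO
        Console.WriteLine(m.Groups["type"].Value);  // Utility
        Console.WriteLine(m.Groups["text"].Value);  // Starting process sc.exe
    }
}
```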