Merge pull request #175 from jagilber/development

Development feature build with localPath functionality
This commit is contained in:
dbucce 2024-07-29 09:22:30 -06:00 коммит произвёл GitHub
Родитель 0211f6efc3 72da758681
Коммит e2c33e46e2
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: B5690EEEBB952194
37 изменённых файлов: 7394 добавлений и 1175 удалений

3
.gitattributes поставляемый Normal file
Просмотреть файл

@ -0,0 +1,3 @@
# fix incorrect Roff programming language tag in github repo
*.man -linguist-language
#*.man linguist-language=xml

72
.vscode/launch.json поставляемый
Просмотреть файл

@ -5,9 +5,74 @@
"version": "0.2.0",
"configurations": [
{
"name": ".NET 8 LTS Launch with Args (console)",
"name": ".NET462 Launch with Args (console)",
"type": "clr",
"request": "launch",
"requireExactSource": false,
"program": "${workspaceFolder}/src/bin/Debug/net462/CollectSFData.exe",
"args": [
//"${input:args}"
"-type",
"trace",
"-krt",
"1",
"-t",
"1",
// "-l",
// "1"
],
"cwd": "${workspaceFolder}/src/bin/Debug/net8.0",
"stopAtEntry": false,
"console": "internalConsole",
},
{
"name": ".NET48 Launch with Args (console)",
"type": "clr",
"request": "launch",
"requireExactSource": false,
"program": "${workspaceFolder}/src/bin/Debug/net48/CollectSFData.exe",
"args": [
//"${input:args}"
"-type",
"trace",
"-krt",
"1",
"-t",
"1",
// "-l",
// "1"
],
"cwd": "${workspaceFolder}/src/bin/Debug/net8.0",
"stopAtEntry": false,
"console": "internalConsole",
},
{
"name": ".NET 8 Launch with Args (console)",
"type": "coreclr",
"request": "launch",
"requireExactSource": false,
//"preLaunchTask": "build",
"program": "${workspaceFolder}/src/bin/Debug/net8.0/CollectSFData.dll",
"args": [
//"${input:args}"
"-type",
"trace",
"-krt",
"1",
"-t",
"1",
// "-l",
// "1"
],
"cwd": "${workspaceFolder}/src/bin/Debug/net8.0",
"stopAtEntry": false,
"console": "internalConsole",
},
{
"name": ".NET 8 Build / Launch with Args (console)",
"type": "coreclr",
"request": "launch",
"requireExactSource": false,
"preLaunchTask": "build",
"program": "${workspaceFolder}/src/bin/Debug/net8.0/CollectSFData.dll",
"args": [
@ -22,9 +87,10 @@
"console": "internalConsole",
},
{
"name": ".NET 6 LTS Launch with Args (console)",
"name": ".NET 6 Build / Launch with Args (console)",
"type": "coreclr",
"request": "launch",
"requireExactSource": false,
"preLaunchTask": "build",
"program": "${workspaceFolder}/src/bin/Debug/net6.0/CollectSFData.dll",
"args": [
@ -41,6 +107,7 @@
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"requireExactSource": false,
"program": "${workspaceFolder}/src/bin/Debug/net6.0/CollectSFData.dll",
"args": [],
"cwd": "${workspaceFolder}/src/bin/Debug/net6.0",
@ -52,6 +119,7 @@
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"requireExactSource": false,
"program": "${workspaceFolder}/src/bin/Debug/net8.0/CollectSFData.dll",
"args": [],
"cwd": "${workspaceFolder}/src/bin/Debug/net8.0",

4
.vscode/settings.json поставляемый
Просмотреть файл

@ -1,5 +1,7 @@
{
"dotnet-test-explorer.testProjectPath": "**/*Test.csproj",
"dotnet.defaultSolution": "src\\CollectSFData.sln",
"accessibility.signals.sounds.volume": 0
"accessibility.signalOptions": {
"volume": 0
}
}

Просмотреть файл

@ -1,12 +1,37 @@
# Change log
## 06/03/2024
- add configuration option to specify / disable utility update check in days 'CheckForUpdates' #25
- default 30 days
- 0 to disable
- remove unused RelogBlg function #26
- add collectsfdata.options.json to all lib\net* frameworks #24
- script updates for kusto myfreecluster
## 04/29/2024
- remove unused Azure.Security.KeyVault.Certificates from nuget.config
- set System.Memory explicitly to version 4.5.5 in nuget.config
- bump Microsoft.Identity.Client from 4.56.0 to 4.60.1 in nuget.config due to security vulnerability
- bump Azure.Identity from 1.10.2 to 1.11.0 in nuget.config due to security vulnerability
- add Service Fabric 10.1 manifests
## 03/24/2024
- add sign stage
- build
- sign
- release nupkg to internal feed if master
## 03/20/2024
- modify azure pipelines for ado
- add stages and jobs for build and release
- add signing
- add publish
- set publish on merge to master
- add stages and jobs for build and release
- add signing
- add publish
- set publish on merge to master
- only add net framework / core versions to zip for release
## 03/16/2024

Просмотреть файл

@ -140,6 +140,10 @@ Options:
type collectsfdata.exe -save default.json to create a default file.
if collectsfdata.options.json exists, it will be used for configuration.
-cf|--containerFilter [string] string / regex to filter container names
-dp|--databasePersistence [bool] default false to create a volatile database. a value of true will persist
the database on a given path in your container.
-dpp|--databasePersistencePath [string] path where you want your database to be persisted in for local ingestion.
path must be in the format: "@'C:\\kustodata\\MyDatabaseName\\md',@'C:\\kustodata\\MyDatabaseName\\data'"
-dc|--deleteCache [bool] delete downloaded blobs from local disk at end of execution.
-to|--stop [DateTime] end time range to collect data to. default is now.
example: "03/22/2023 18:24:01 -04:00"
@ -170,6 +174,7 @@ Options:
service fabric 6.5+ dtr files are compliant.
-kim|--kustoUseIngestMessage [bool] for kusto ingestion message tracking.
-l|--list [bool] list files instead of downloading
-lp|--localPath [string] path to original files and indicates local ingestion e.g. 'C:\Perfcounters\Output'
-lac|--logAnalyticsCreate [bool] create new log analytics workspace.
requires LogAnalyticsWorkspaceName, AzureResourceGroup,
AzureResourceGroupLocation, and AzureSubscriptionId

Просмотреть файл

@ -16,7 +16,7 @@ variables:
project_feed: Tools/CollectServiceFabricData
build_configuration: debug
system.debug: true
publish_release: $[eq(variables['Build.SourceBranch'], 'refs/heads/master')]
publish_release: true #$[eq(variables['Build.SourceBranch'], 'refs/heads/master')]
project_root: .\src
start_time: $[format('{0}-{1:yyyy}{1:MM}{1:dd}-{1:HH}{1:mm}{1:ss}', variables['project_name'], pipeline.startTime)]
artifacts_drive: Z
@ -69,6 +69,8 @@ stages:
inputs:
targetType: 'inline'
script: |
write-host "dotnet list `"$env:project_root\$env:project_name\$env:project_name.csproj`" package --include-transitive"
dotnet list "$env:project_root\$env:project_name\$env:project_name.csproj" package --include-transitive
write-host "dotnet build `"$env:project_root\$env:project_name\$env:project_name.csproj`" -v detailed -c $env:build_configuration"
dotnet build "$env:project_root\$env:project_name\$env:project_name.csproj" -v detailed -c $env:build_configuration
errorActionPreference: 'continue'
@ -234,11 +236,10 @@ stages:
script: |
$nupkg = @(get-childItem -recurse "$($env:release_path)\*.nupkg")[0].FullName
$nupkgZip = "$nupkg.zip"
$nupkgUnsigned = "$nupkg".replace(".nupkg", ".unsigned.nupkg")
write-host "nupkg=$nupkg"
$nupkgDir = "$($env:release_stage_path)"
del "$nupkgDir\CodeSignSummary*.md" -force
ren $nupkg $nuPkgUnsigned
del $nupkg -force
del $nupkgZip -force
write-host "compress-archive -path $nupkgDir\* -destinationPath $nupkgZip -force"
compress-archive -path $nupkgDir\* -destinationPath $nupkgZip -force
@ -281,11 +282,11 @@ stages:
VerboseLogin: false
- task: PowerShell@2
displayName: 'create zip with signed files'
displayName: 'create zip with signed .net version files'
inputs:
targetType: 'inline'
script: |
$source = "$($env:release_path)\\*"
$source = "$($env:release_path)\\net*"
$destination = "$($env:release_stage_path)\\$($env:project_name).zip"
write-host "compress-archive -path $source -destinationPath $destination -force"
compress-archive -path $source -destinationPath $destination -force

Просмотреть файл

@ -14,7 +14,7 @@ pool:
variables:
project_name: CollectSFData
esrp_signing_connection: 'collectsfdata esrp signing connection'
project_feed: Tools/CollectServiceFabricData
project_feed: Tools/CollectServiceFabricData_Public
build_configuration: release
system.debug: false
publish_release: $[eq(variables['Build.SourceBranch'], 'refs/heads/master')]
@ -70,6 +70,8 @@ stages:
inputs:
targetType: 'inline'
script: |
write-host "dotnet list `"$env:project_root\$env:project_name\$env:project_name.csproj`" package --include-transitive"
dotnet list "$env:project_root\$env:project_name\$env:project_name.csproj" package --include-transitive
write-host "dotnet build `"$env:project_root\$env:project_name\$env:project_name.csproj`" -v detailed -c $env:build_configuration"
dotnet build "$env:project_root\$env:project_name\$env:project_name.csproj" -v detailed -c $env:build_configuration
errorActionPreference: 'continue'
@ -235,11 +237,10 @@ stages:
script: |
$nupkg = @(get-childItem -recurse "$($env:release_path)\*.nupkg")[0].FullName
$nupkgZip = "$nupkg.zip"
$nupkgUnsigned = "$nupkg".replace(".nupkg", ".unsigned.nupkg")
write-host "nupkg=$nupkg"
$nupkgDir = "$($env:release_stage_path)"
del "$nupkgDir\CodeSignSummary*.md" -force
ren $nupkg $nuPkgUnsigned
del $nupkg -force
del $nupkgZip -force
write-host "compress-archive -path $nupkgDir\* -destinationPath $nupkgZip -force"
compress-archive -path $nupkgDir\* -destinationPath $nupkgZip -force
@ -282,11 +283,11 @@ stages:
VerboseLogin: false
- task: PowerShell@2
displayName: 'create zip with signed files'
displayName: 'create zip with signed .net version files'
inputs:
targetType: 'inline'
script: |
$source = "$($env:release_path)\\*"
$source = "$($env:release_path)\\net*"
$destination = "$($env:release_stage_path)\\$($env:project_name).zip"
write-host "compress-archive -path $source -destinationPath $destination -force"
compress-archive -path $source -destinationPath $destination -force

Просмотреть файл

@ -6,8 +6,9 @@
"AzureResourceGroup": null,
"AzureResourceGroupLocation": null,
"AzureSubscriptionId": null,
"AzureTenantId": null,
"AzureTenantId": "common",
"CacheLocation": null,
"CheckForUpdates": 30,
"ContainerFilter": null,
"DeleteCache": false,
"EndTimeStamp": null,

Просмотреть файл

@ -103,8 +103,9 @@
"type": "string",
"title": "AzureTenantId",
"description": "azure tenant id guid for certain non-default azure operations",
"default": null,
"default": "common",
"examples": [
"common",
null,
"123456578-1234-1234-1234-123456789012"
],
@ -122,6 +123,30 @@
],
"pattern": "^(.+)$"
},
"CheckForUpdates": {
"$id": "#/properties/CheckForUpdates",
"type": "integer",
"title": "CheckForUpdates",
"description": "0-disabled, else check for new utility updates every n days",
"default": 30,
"enum": [
0,
1,
2,
3,
4,
5
],
"examples": [
0,
1,
2,
3,
4,
5
],
"pattern": "^(\\d{0,3})$"
},
"ContainerFilter": {
"$id": "#/properties/ContainerFilter",
"type": "string",
@ -134,6 +159,29 @@
],
"pattern": "^(.+)$"
},
"DatabasePersistence": {
"$id": "#/properties/DatabasePersistence",
"type": "boolean",
"title": "DatabasePersistence",
"description": "true to create a local persistent database.",
"default": false,
"examples": [
false,
true
]
},
"DatabasePersistencePath": {
"$id": "#/properties/DatabasePersistencePath",
"type": "string",
"title": "DatabasePersistencePath",
"description": "local device database path if a local kusto url is provided.",
"default": null,
"examples": [
null,
"@'C:\\kustodata\\MyDatabaseName\\md',@'C:\\kustodata\\MyDatabaseName\\data'"
],
"pattern": "^(.+)$"
},
"DeleteCache": {
"$id": "#/properties/DeleteCache",
"type": "boolean",
@ -320,6 +368,18 @@
true
]
},
"LocalPath": {
"$id": "#/properties/LocalPath",
"type": "string",
"title": "LocalPath",
"description": "local device path containing source trace files for ingestion.",
"default": null,
"examples": [
null,
"c:\\kustodata\\dbs\\"
],
"pattern": "^(.+)$"
},
"LogAnalyticsCreate": {
"$id": "#/properties/LogAnalyticsCreate",
"type": "boolean",

Просмотреть файл

@ -38,6 +38,10 @@ Options:
type collectsfdata.exe -save default.json to create a default file.
if collectsfdata.options.json exists, it will be used for configuration.
-cf|--containerFilter [string] string / regex to filter container names
-dp|--databasePersistence [bool] default false to create a volatile database. a value of true will persist
the database on a given path in your container.
-dpp|--databasePersistencePath [string] path where you want your database to be persisted in for local ingestion.
path must be in the format: "@'C:\\kustodata\\MyDatabaseName\\md',@'C:\\kustodata\\MyDatabaseName\\data'"
-dc|--deleteCache [bool] delete downloaded blobs from local disk at end of execution.
-to|--stop [DateTime] end time range to collect data to. default is now.
example: "06/01/2021 09:01:23 -04:00"
@ -68,6 +72,7 @@ Options:
service fabric 6.5+ dtr files are compliant.
-kim|--kustoUseIngestMessage [bool] for kusto ingestion message tracking.
-l|--list [bool] list files instead of downloading
-lp|--localPath [string] path to original files and indicates local ingestion e.g. 'C:\Perfcounters\Output'
-lac|--logAnalyticsCreate [bool] create new log analytics workspace.
requires LogAnalyticsWorkspaceName, AzureResourceGroup,
AzureResourceGroupLocation, and AzureSubscriptionId
@ -122,6 +127,8 @@ To use a default configuration file without having to specify on the command lin
- **CacheLocation** - required. string. path to blob download location. this path depending on configuration may need to have many GB free and should be premium / fast ssd disk for best performance. **NOTE:** this path should be as short as possible as downloaded file names lengths are close to MAX_PATH.
- **ContainerFilter** - optional. string / regex. default null. if populated, pattern will be used to filter which containers are enumerated for blob download.
- **DatabasePersistence** - bool. default false. to create a volatile local database. a value of true will persist the database at a given path in your container.
- **DatabasePersistencePath** - string. path where you want your database to be persisted in for local ingestion. path must be in the format: '@'c:\...\..',@'c:\...\..''
- **DeleteCache** - bool. default false. if true, blobs downloaded from storage account into 'cacheLocation' will be deleted at end after successful formatting and ingestion.
- **EndTimeStamp** - datetime string. default is now. example format: "10/31/2018 22:00:00 +00:00".
- **EtwManifestsCache** - required. string. default is ./manifests. local path where manifest (.man) files are located for parsing .etl files to .csv.
@ -135,6 +142,7 @@ To use a default configuration file without having to specify on the command lin
- **trace** - 'trace' will enumerate service fabric diagnostic logs (.dtr) zip blobs from 'fabriclogs*'
- **any** - 'any' without other filters will enumerate all containers for blobs matching criteria.
- **List** - bool. default false. if true, lists the blobs meeting all criteria for download but does not download the file.
- **LocalPath** - string. local path to original files and indicates local ingestion e.g. 'C:\Perfcounters\Output'
- **LogDebug** - int. default 4. if > 0, logs additional 'debug' output to console for troubleshooting. 0-disabled, 1-exception, 2-error, 3-warning, 4-info, 5-debug.
- **LogFile** - optional. string. default null. if populated with file and path, will log all console output to specified file. file is recreated every execution if exists. can optionally specify .net datetime format specifier inside '<>'. example: collectsfdata-\<yyyy-MM-dd-HH-mm-ss\>.log.
- **NodeFilter** - optional. string / regex. if populated uses client side searching for blobs after enumeration before download.

Просмотреть файл

@ -372,7 +372,6 @@ Binaries are signed.
| | Azure.Core.dll
| | Azure.Data.Tables.dll
| | Azure.Identity.dll
| | Azure.Security.KeyVault.Certificates.dll
| | Azure.Security.KeyVault.Keys.dll
| | Azure.Security.KeyVault.Secrets.dll
| | Azure.Storage.Blobs.dll
@ -526,7 +525,6 @@ Binaries are signed.
| | Azure.Core.dll
| | Azure.Data.Tables.dll
| | Azure.Identity.dll
| | Azure.Security.KeyVault.Certificates.dll
| | Azure.Security.KeyVault.Keys.dll
| | Azure.Security.KeyVault.Secrets.dll
| | Azure.Storage.Blobs.dll
@ -556,7 +554,6 @@ Binaries are signed.
| | System.Buffers.dll
| | System.CodeDom.dll
| | System.Collections.Immutable.dll
| | System.Diagnostics.DiagnosticSource.dll
| | System.Fabric.Strings.dll
| | System.IO.FileSystem.AccessControl.dll
| | System.IO.Hashing.dll
@ -583,7 +580,6 @@ Binaries are signed.
| | Azure.Core.dll
| | Azure.Data.Tables.dll
| | Azure.Identity.dll
| | Azure.Security.KeyVault.Certificates.dll
| | Azure.Security.KeyVault.Keys.dll
| | Azure.Security.KeyVault.Secrets.dll
| | Azure.Storage.Blobs.dll
@ -615,7 +611,6 @@ Binaries are signed.
| | Sf.Tx.Windows.pdb
| | System.CodeDom.dll
| | System.Collections.Immutable.dll
| | System.Diagnostics.DiagnosticSource.dll
| | System.Fabric.Strings.dll
| | System.IO.Hashing.dll
| | System.Memory.Data.dll
@ -646,7 +641,6 @@ Binaries are signed.
| Azure.Core.dll
| Azure.Data.Tables.dll
| Azure.Identity.dll
| Azure.Security.KeyVault.Certificates.dll
| Azure.Security.KeyVault.Keys.dll
| Azure.Security.KeyVault.Secrets.dll
| Azure.Storage.Blobs.dll
@ -696,3 +690,22 @@ Binaries are signed.
\---net7.0
System.Security.Cryptography.ProtectedData.dll
```
### NuGet Package Dependencies
Project 'CollectSFDataDll' has the following package references
> Azure.Data.Tables 12.8.1
> Azure.Identity 1.11.0
> Azure.Security.KeyVault.Keys 4.5.0
> Azure.Security.KeyVault.Secrets 4.5.0
> Azure.Storage.Blobs 12.18.0
> Azure.Storage.Queues 12.16.0
> Microsoft.Azure.Kusto.Data 11.3.4
> Microsoft.Extensions.CommandLineUtils 1.1.1
> Microsoft.Identity.Client 4.60.1
> Microsoft.Identity.Client.Extensions.Msal 4.60.1
> Newtonsoft.Json 13.0.3
> System.CodeDom 4.7.0
> System.Diagnostics.DiagnosticSource 7.0.2
> System.Memory 4.5.5
> System.Reactive 4.0.0

Просмотреть файл

@ -131,4 +131,12 @@
| invoke TypeAndTextContains("not an error","FabricDCA.WindowsFabric_ServiceFabricEtlFile","Unable to decode ETW event. The event will be skipped.")
| invoke TypeAndTextContains("not an error","General.RSMessage","RSMessage::GetActivityId Missing activity id header")
| invoke TypeAndTextContains("file not found not usually an error.","General.File@File.GetAttributes","GetAttributes failed with the following error 0x80070002 for the path:")
| invoke TypeAndTextContains("only issue if using docker.","FabricDeployer.FabricDeployer","Error invoking PowerShell script")
| invoke TypeAndTextContains("only issue if using docker.","FabricDeployer.FabricDeployer","Failed to get nat mac address")
| invoke TypeAndTextContains("only issue if using docker.","FabricDeployer.FabricDeployer","Failed to unblock access to wireserver")
| invoke TypeAndTextContains("only issue if using docker.","Hosting.DockerProcessManager","Failed to load vmcompute.dll")
| invoke TypeAndTextContains("only issue if using docker.","Hosting.DockerProcessManager","OnContainerServiceStarted: ErrorCode=FABRIC_E_SERVICE_DOES_NOT_EXIST")
| invoke TypeAndTextContains("only issue if using docker.","Hosting.DockerProcessManager","StopDockerNtService: ShutdownDockerService() returned with ErrorCode=FABRIC_E_SERVICE_DOES_NOT_EXIST.")
| invoke TypeAndTextContains("only issue if using docker.","Hosting.ContainerHelper","Container Log Root not found at:")
| invoke TypeAndTextContains("only issue if using docker.","Hosting.ContainerActivator","OnContainerServiceStarted: ErrorCode=FABRIC_E_SERVICE_DOES_NOT_EXIST")
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -70,6 +70,14 @@
symbol="CHANNEL_Microsoft_ServiceFabric_Lease_Audit"
type="Admin"
/>
<channel
chid="Telemetry"
enabled="true"
isolation="Application"
name="Microsoft-ServiceFabric-Lease/Telemetry"
symbol="CHANNEL_Microsoft_ServiceFabric_Lease_Telemetry"
type="Operational"
/>
</channels>
<events>
<event

Просмотреть файл

@ -1,7 +1,7 @@
{
"manifests": [
"Microsoft-WindowsFabric-Events_10.0.1816.9590.man",
"Microsoft-WindowsFabric-KtlEvents_10.0.1816.9590.man",
"Microsoft-WindowsFabric-LeaseEvents_10.0.1816.9590.man"
"Microsoft-WindowsFabric-Events_10.1.1951.9590.man",
"Microsoft-WindowsFabric-KtlEvents_10.1.1951.9590.man",
"Microsoft-WindowsFabric-LeaseEvents_10.1.1951.9590.man"
]
}

Просмотреть файл

@ -108,8 +108,8 @@ function main() {
}
function build-configuration($configuration) {
write-host "dotnet list $csproj package"
dotnet list $csproj package
write-host "dotnet list $csproj package --include-transitive"
dotnet list $csproj package --include-transitive
write-host "dotnet build $csproj -c $configuration" -ForegroundColor Magenta
dotnet build $csproj -c $configuration

Просмотреть файл

@ -1,5 +1,5 @@
<#
script to export kusto functions in .csl format using kusto-rest.ps1
script to export kusto functions in .csl format using kusto-rest.ps1
#>
[cmdletbinding()]
param(
@ -7,10 +7,13 @@ param(
[string]$kustoCluster = '',
[Parameter(Mandatory = $true)]
[string]$kustoDatabase = '',
[string[]]$exclusions = @('sfrplog','cluster','rc-perf'),
[string[]]$exclusions = @('sfrplog', 'cluster', 'rc-perf', 'armprod'),
[switch]$test,
[switch]$force,
[string]$kustoDir = "$psscriptroot\..\kusto\functions"
[string]$kustoDir = "$psscriptroot\..\kusto\functions",
[string]$clientId,
[string]$clientSecret,
[string]$tenantId
)
$ErrorActionPreference = 'continue'
@ -20,55 +23,61 @@ $scriptSuccess = [collections.arraylist]::new()
function main() {
$error.clear()
if (!$kusto -or $force) {
. .\kusto-rest.ps1 -cluster $kustoCluster -database $kustoDatabase
$kustoParams = @{
cluster = $kustoCluster
database = $kustoDatabase
}
if ($clientId) { $kustoParams.clientId = $clientId }
if ($clientSecret) { $kustoParams.clientSecret = $clientSecret }
if ($tenantId) { $kustoParams.tenantId = $tenantId }
if (!$kusto -or $force) {
. .\kusto-rest.ps1 @kustoParams
}
$kusto.Exec('.show functions')
foreach ($function in $kusto.ResultTable) {
export-function $function
}
if ($scriptSuccess) {
$scriptSuccess | out-string
Write-host "the above scripts executed successfully:" -ForegroundColor Green
}
if ($scriptErrors) {
$scriptErrors | out-string
Write-Warning "the above scripts need to be executed manually:"
}
write-host 'finished'
}
function export-function($function)
{
function export-function($function) {
write-host "exporting $($function.Name)"
$functionScript = ".create-or-alter function with (docstring = `"$($function.DocString)`", folder = `"$($function.Folder)`")`r`n $($function.Name)$($function.Parameters) $($function.Body)"
Write-Verbose $functionScript
$fileName = "$kustoDir\$($function.Folder)\$($function.Name).csl"
$fileDirectory = [io.path]::GetDirectoryName($fileName)
foreach($exclusion in $exclusions)
{
if($fileName.Contains($exclusion))
{
foreach ($exclusion in $exclusions) {
if ($fileName.Contains($exclusion)) {
write-warning "skipping exclusion $($function.Name)"
return
}
}
if(!(test-path $fileDirectory)){
if (!(test-path $fileDirectory)) {
mkdir $fileDirectory
}
if((test-path $fileName) -and !$force){
if ((test-path $fileName) -and !$force) {
$currentFunction = get-content -raw $fileName
write-verbose "comparing export and current functions"
if([string]::Compare([regex]::replace($functionScript,"\s",""),[regex]::replace($currentFunction,"\s","")) -eq 0){
if ([string]::Compare([regex]::replace($functionScript, "\s", ""), [regex]::replace($currentFunction, "\s", "")) -eq 0) {
write-host "no change to function $functionScript. skipping" -ForegroundColor Cyan
return
}

Просмотреть файл

@ -8,7 +8,10 @@ param(
[string]$kustoDatabase = '',
[switch]$test,
[switch]$force,
[string]$kustoDir = "$psscriptroot\..\kusto\functions"
[string]$kustoDir = "$psscriptroot\..\kusto\functions",
[string]$clientId,
[string]$clientSecret,
[string]$tenantId
)
$ErrorActionPreference = 'continue'
@ -19,8 +22,17 @@ $scriptSuccess = [collections.arraylist]::new()
function main() {
$error.clear()
$kustoParams = @{
cluster = $kustoCluster
database = $kustoDatabase
}
if ($clientId) { $kustoParams.clientId = $clientId }
if ($clientSecret) { $kustoParams.clientSecret = $clientSecret }
if ($tenantId) { $kustoParams.tenantId = $tenantId }
if (!$kusto -or $force) {
. .\kusto-rest.ps1 -cluster $kustoCluster -database $kustoDatabase
. .\kusto-rest.ps1 @kustoParams
}
foreach ($script in $kustoScripts) {

Просмотреть файл

@ -8,14 +8,15 @@ this script will setup Microsoft.IdentityModel.Clients Msal for use with powersh
KustoObj will be created as $global:kusto to hold properties and run methods from
use the following to save and pass arguments:
[net.servicePointManager]::Expect100Continue = $true;[net.servicePointManager]::SecurityProtocol = [net.SecurityProtocolType]::Tls12;
invoke-webRequest "https://raw.githubusercontent.com/microsoft/CollectServiceFabricData/master/scripts/kusto-rest.ps1" -outFile "$pwd/kusto-rest.ps1";
.\kusto-rest.ps1 -cluster %kusto cluster% -database %kusto database%
.NOTES
Author : jagilber
File Name : kusto-rest.ps1
Version : 231030
History : migrate from netcoreapp2.1 to net6.0
Version : 240521
History : resolve cluster and database on first run
.EXAMPLE
.\kusto-rest.ps1 -cluster kustocluster -database kustodatabase
@ -32,7 +33,7 @@ $kusto.SetCluster($cluster)
$kusto.parameters = @{'T'= $table}
$kusto.ExecScript("..\kusto\sflogs\TraceKnownIssueSummary.csl", $kusto.parameters)
.EXAMPLE
.EXAMPLE
$kusto.SetTable("test_$env:USERNAME").Import()
.EXAMPLE
@ -52,12 +53,15 @@ query string or command to execute against a kusto database
example: kustocluster
example: azurekusto.eastus
.PARAMETER database
.PARAMETER database
[string]kusto database name
.PARAMETER table
[string]optional kusto table for import
.PARAMETER headers
[string]optional kusto table headers for import ['columnname']:columntype,
.PARAMETER resultFile
[string]optional json file name and path to store raw result content
@ -137,14 +141,39 @@ $global:identityPackageLocation
$packageVersion = "4.28.0"
if ($updateScript) {
invoke-webRequest "https://raw.githubusercontent.com/jagilber/powershellScripts/master/kusto-rest.ps1" -outFile "$psscriptroot/kusto-rest.ps1";
invoke-webRequest "https://raw.githubusercontent.com/microsoft/CollectServiceFabricData/master/scripts/kusto-rest.ps1" -outFile "$psscriptroot/kusto-rest.ps1";
write-warning "script updated. restart script"
return
}
function main() {
try {
$error.Clear()
$global:kusto = [KustoObj]::new()
$kusto.SetTables()
$kusto.SetFunctions()
$kusto.Exec()
$kusto.ClearResults()
write-host ($PSBoundParameters | out-string)
if ($error) {
write-warning ($error | out-string)
}
else {
write-host ($kusto | Get-Member | out-string)
write-host "use `$kusto object to set properties and run queries. example: `$kusto.Exec('.show operations')" -ForegroundColor Green
write-host "set `$kusto.viewresults=`$true to see results." -ForegroundColor Green
}
}
catch {
write-host "exception::$($psitem.Exception.Message)`r`n$($psitem.scriptStackTrace)" -ForegroundColor Red
}
}
function AddIdentityPackageType([string]$packageName, [string] $edition) {
# support ps core on linux
if ($IsLinux) {
if ($IsLinux) {
$env:USERPROFILE = $env:HOME
}
[string]$nugetPackageDirectory = "$($env:USERPROFILE)/.nuget/packages"
@ -152,14 +181,14 @@ function AddIdentityPackageType([string]$packageName, [string] $edition) {
[string]$nugetDownloadUrl = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
[io.directory]::createDirectory($nugetPackageDirectory)
[string]$packageDirectory = "$nugetPackageDirectory/$packageName"
$global:identityPackageLocation = get-identityPackageLocation $packageDirectory
if (!$global:identityPackageLocation) {
if ($psedition -ieq 'core') {
$tempProjectFile = './temp.csproj'
#dotnet new console
#dotnet new console
$csproj = "<Project Sdk=`"Microsoft.NET.Sdk`">
<PropertyGroup>
<OutputType>Exe</OutputType>
@ -174,12 +203,12 @@ function AddIdentityPackageType([string]$packageName, [string] $edition) {
out-file -InputObject $csproj -FilePath $tempProjectFile
write-host "dotnet restore --packages $packageDirectory --no-cache --no-dependencies $tempProjectFile"
dotnet restore --packages $packageDirectory --no-cache --no-dependencies $tempProjectFile
remove-item "$pwd/obj" -re -fo
remove-item -path $tempProjectFile
}
else {
$nuget = "nuget.exe"
$nuget = "nuget.exe"
if (!(test-path $nuget)) {
$nuget = "$env:temp/nuget.exe"
if (!(test-path $nuget)) {
@ -197,7 +226,7 @@ function AddIdentityPackageType([string]$packageName, [string] $edition) {
}
}
}
$global:identityPackageLocation = get-identityPackageLocation $packageDirectory
write-host "identityDll: $($global:identityPackageLocation)" -ForegroundColor Green
add-type -literalPath $global:identityPackageLocation
@ -208,7 +237,7 @@ function get-identityPackageLocation($packageDirectory) {
$pv = [version]::new($packageVersion)
$pv = [version]::new($pv.Major, $pv.Minor)
$versions = @{}
$versions = @{}
$files = @(get-childitem -Path $packageDirectory -Recurse | where-object FullName -imatch "lib.$edition.$packageName\.dll")
write-host "existing identity dlls $($files|out-string)"
@ -278,6 +307,7 @@ class KustoObj {
[string]$clientId = $clientId
hidden [string]$clientSecret = $clientSecret
[string]$Cluster = $cluster
[bool]$ClusterResolved = $false
[string]$Database = $database
[bool]$FixDuplicateColumns = $fixDuplicateColumns
[bool]$Force = $force
@ -303,7 +333,7 @@ class KustoObj {
[hashtable]$Tables = @{}
[hashtable]$Functions = @{}
hidden [hashtable]$FunctionObjs = @{}
KustoObj() { }
static KustoObj() { }
@ -315,12 +345,12 @@ class KustoObj {
[KustoObj] CreateResultTable() {
$this.ResultTable = [collections.arraylist]@()
$columns = @{ }
if (!$this.ResultObject.tables) {
write-warning "run query first"
return $this.Pipe()
}
foreach ($column in ($this.ResultObject.tables[0].columns)) {
try {
[void]$columns.Add($column.ColumnName, $null)
@ -329,14 +359,14 @@ class KustoObj {
write-warning "$($column.ColumnName) already added"
}
}
$resultModel = New-Object -TypeName PsObject -Property $columns
$rowCount = 0
foreach ($row in ($this.ResultObject.tables[0].rows)) {
$count = 0
$resultCopy = $resultModel.PsObject.Copy()
foreach ($column in ($this.ResultObject.tables[0].columns)) {
#write-verbose "createResultTable: procesing column $count"
$resultCopy.($column.ColumnName) = $row[$count++]
@ -344,75 +374,75 @@ class KustoObj {
write-verbose "createResultTable: processing row $rowCount columns $count"
$rowCount++
[void]$this.ResultTable.add($resultCopy)
}
$this.ResultTable = $this.RemoveEmptyResults($this.ResultTable)
return $this.Pipe()
}
# Convenience overload: executes a one-off query string.
# Temporarily assigns $this.Query, delegates to the parameterless Exec(),
# then clears Query so the object's persistent query state is untouched.
# Returns the result of Pipe() (presumably $this or $null for chaining —
# NOTE(review): Pipe() is defined outside this view; confirm).
[KustoObj] Exec([string]$query) {
$this.Query = $query
$this.Exec()
$this.Query = $null
return $this.Pipe()
}
[KustoObj] Exec() {
$startTime = get-date
$this
if (!$this.Limit) {
$this.Limit = 10000
}
if (!$this.Script -and !$this.Query) {
Write-Warning "-script and / or -query should be set. exiting"
return $this.Pipe()
}
if (!$this.Cluster -or !$this.Database) {
Write-Warning "-cluster and -database have to be set once. exiting"
return $this.Pipe()
}
if ($this.Query) {
write-host "table:$($this.Table) query:$($this.Query.substring(0, [math]::min($this.Query.length,512)))" -ForegroundColor Cyan
}
if ($this.Script) {
write-host "script:$($this.Script)" -ForegroundColor Cyan
}
if ($this.Table -and $this.Query.startswith("|")) {
$this.Query = $this.Table + $this.Query
}
$this.ResultObject = $this.Post($null)
if ($this.ResultObject.Exceptions) {
write-warning ($this.ResultObject.Exceptions | out-string)
$this.ResultObject.Exceptions = $null
}
if ($this.ViewResults -or $this.CreateResults) {
$this.CreateResultTable()
if ($this.ViewResults) {
write-host ($this.ResultTable | out-string)
}
}
if ($this.ResultFile) {
out-file -FilePath $this.ResultFile -InputObject ($this.ResultObject | convertto-json -Depth 99)
}
$primaryResult = $this.ResultObject | where-object TableKind -eq PrimaryResult
if ($primaryResult) {
write-host ($primaryResult.columns | out-string)
write-host ($primaryResult.Rows | out-string)
}
if ($this.ResultObject.tables) {
write-host "results: $($this.ResultObject.tables[0].rows.count) / $(((get-date) - $startTime).TotalSeconds) seconds to execute" -ForegroundColor DarkCyan
if ($this.ResultObject.tables[0].rows.count -eq $this.limit) {
@ -461,18 +491,18 @@ class KustoObj {
$this.Script = $null
return $this.Pipe()
}
[KustoObj] ExecScript([string]$script) {
$this.Script = $script
$this.ExecScript()
$this.Script = $null
return $this.Pipe()
}
[KustoObj] ExecScript() {
if ($this.Script.startswith('http')) {
$destFile = "$pwd\$([io.path]::GetFileName($this.Script))" -replace '\?.*', ''
if (!(test-path $destFile)) {
Write-host "downloading $($this.Script)" -foregroundcolor green
invoke-webRequest $this.Script -outFile $destFile
@ -480,10 +510,10 @@ class KustoObj {
else {
Write-host "using cached script $($this.Script)"
}
$this.Script = $destFile
}
if ((test-path $this.Script)) {
$this.Query = (Get-Content -raw -Path $this.Script)
}
@ -495,46 +525,46 @@ class KustoObj {
$this.Exec()
return $this.Pipe()
}
[void] ExportCsv([string]$exportFile) {
$this.CreateResultTable()
[io.directory]::createDirectory([io.path]::getDirectoryName($exportFile))
$this.ResultTable | export-csv -notypeinformation $exportFile
}
[void] ExportJson([string]$exportFile) {
$this.CreateResultTable()
[io.directory]::createDirectory([io.path]::getDirectoryName($exportFile))
$this.ResultTable | convertto-json -depth 99 | out-file $exportFile
}
[string] FixColumns([string]$sourceContent) {
if (!($this.fixDuplicateColumns)) {
return $sourceContent
}
[hashtable]$tempTable = @{ }
$matches = [regex]::Matches($sourceContent, '"ColumnName":"(?<columnName>.+?)"', 1)
foreach ($match in $matches) {
$matchInfo = $match.Captures[0].Groups['columnName']
$column = $match.Captures[0].Groups['columnName'].Value
$newColumn = $column
$increment = $true
$count = 0
while ($increment) {
try {
[void]$tempTable.Add($newColumn, $null)
$increment = $false
if ($newColumn -ne $column) {
write-warning "replacing $column with $newColumn"
return $this.FixColumns($sourceContent.Substring(0, $matchInfo.index) `
+ $newColumn `
+ $sourceContent.Substring($matchInfo.index + $matchinfo.Length))
+ $sourceContent.Substring($matchInfo.index + $matchinfo.Length))
}
}
catch {
$count++
@ -545,7 +575,7 @@ class KustoObj {
}
return $sourceContent
}
[void] Import() {
if ($this.Table) {
$this.Import($this.Table)
@ -555,29 +585,29 @@ class KustoObj {
return
}
}
[KustoObj] Import([string]$table) {
if (!$this.ResultObject.Tables) {
write-warning 'no results to import'
return $this.Pipe()
}
[object]$results = $this.ResultObject.Tables[0]
[string]$formattedHeaders = "("
foreach ($column in ($results.Columns)) {
$formattedHeaders += "['$($column.ColumnName)']:$($column.DataType.tolower()), "
}
$formattedHeaders = $formattedHeaders.trimend(', ')
$formattedHeaders += ")"
[text.StringBuilder]$csv = New-Object text.StringBuilder
foreach ($row in ($results.rows)) {
$csv.AppendLine($row -join ',')
}
$this.Exec(".drop table ['$table'] ifexists")
$this.Exec(".create table ['$table'] $formattedHeaders")
$this.Exec(".ingest inline into table ['$table'] <| $($csv.tostring())")
@ -596,26 +626,26 @@ class KustoObj {
$this.ImportCsv($csvFile)
return $this.Pipe()
}
[KustoObj] ImportCsv([string]$csvFile) {
if (!(test-path $csvFile) -or !$this.Table) {
write-warning "verify importfile: $csvFile and import table: $($this.Table)"
return $this.Pipe()
}
# not working
#POST https://help.kusto.windows.net/v1/rest/ingest/Test/Logs?streamFormat=Csv HTTP/1.1
#[string]$csv = Get-Content -Raw $csvFile -encoding utf8
#$this.Post($csv)
$sr = new-object io.streamreader($csvFile)
$sr = new-object io.streamreader($csvFile)
[string]$tempHeaders = $sr.ReadLine()
[text.StringBuilder]$csv = New-Object text.StringBuilder
while ($sr.peek() -ge 0) {
$csv.AppendLine($sr.ReadLine())
}
$sr.close()
$formattedHeaderList = @{ }
[string]$formattedHeaders = "("
@ -626,11 +656,11 @@ class KustoObj {
[string]$normalizedHeader = $header.trim('`"').Replace(" ", "_")
$normalizedHeader = [regex]::Replace($normalizedHeader, "\W", "")
$uniqueHeader = $normalizedHeader
while ($formattedHeaderList.ContainsKey($uniqueHeader)) {
$uniqueHeader = $normalizedHeader + ++$columnCount
}
$formattedHeaderList.Add($uniqueHeader, "")
$formattedHeaders += "['$($uniqueHeader)']:string, "
}
@ -642,13 +672,13 @@ class KustoObj {
$formattedHeaders = $formattedHeaders.trimend(', ')
$formattedHeaders += ")"
#$this.Exec(".drop table ['$($this.Table)'] ifexists")
$this.Exec(".create table ['$($this.Table)'] $formattedHeaders")
$this.Exec(".create table ['$($this.Table)'] $formattedHeaders")
$this.Exec(".ingest inline into table ['$($this.Table)'] <| $($csv.tostring())")
return $this.Pipe()
}
[KustoObj] ImportJson([string]$jsonFile) {
[string]$csvFile = [io.path]::GetTempFileName()
try {
@ -677,6 +707,7 @@ class KustoObj {
[bool] Logon([string]$resourceUrl) {
[int]$expirationRefreshMinutes = 15
[int]$expirationMinutes = 0
write-host "logon($resourceUrl)" -foregroundcolor green
if (!$resourceUrl) {
write-warning "-resourceUrl required. example: https://{{ kusto cluster }}.kusto.windows.net"
@ -700,7 +731,8 @@ class KustoObj {
try {
$error.Clear()
[string[]]$defaultScope = @(".default")
write-host "logonMsal($resourceUrl,$($scopes | out-string))" -foregroundcolor green
if ($this.clientId -and $this.clientSecret) {
[string[]]$defaultScope = @("$resourceUrl/.default")
[Microsoft.Identity.Client.ConfidentialClientApplicationOptions] $cAppOptions = new-Object Microsoft.Identity.Client.ConfidentialClientApplicationOptions
@ -732,7 +764,7 @@ class KustoObj {
# user creds
[Microsoft.Identity.Client.PublicClientApplicationBuilder]$pAppBuilder = [Microsoft.Identity.Client.PublicClientApplicationBuilder]::Create($this.clientId)
$pAppBuilder = $pAppBuilder.WithAuthority([microsoft.identity.client.azureCloudInstance]::AzurePublic, $this.tenantId)
if (!($this.publicClientApplication)) {
if ($global:PSVersionTable.PSEdition -eq "Core") {
$pAppBuilder = $pAppBuilder.WithDefaultRedirectUri()
@ -743,7 +775,7 @@ class KustoObj {
}
$this.publicClientApplication = $pAppBuilder.Build()
}
write-verbose ($this.publicClientApplication | convertto-json)
[Microsoft.Identity.Client.IAccount]$account = $this.publicClientApplication.GetAccountsAsync().Result[0]
@ -815,15 +847,19 @@ class KustoObj {
hidden [object] Post([string]$body = "") {
# authorize aad to get token
[string]$kustoHost = "$($this.cluster).kusto.windows.net"
[string]$kustoResource = "https://$kustoHost"
if(!($this.ClusterResolved)){
$this.ClusterResolved = $this.ResolveCluster()
}
[string]$kustoHost = $this.cluster
[string]$kustoResource = 'https://' + $kustoHost
[string]$csl = "$($this.Query)"
$this.Result = $null
$this.ResultObject = $null
$this.ResultTable = $null
$this.Query = $this.Query.trim()
if ($body -and ($this.Table)) {
$uri = "$kustoResource/v1/rest/ingest/$($this.Database)/$($this.Table)?streamFormat=Csv&mappingName=CsvMapping"
}
@ -841,20 +877,20 @@ class KustoObj {
return $error
}
}
$requestId = [guid]::NewGuid().ToString()
write-verbose "request id: $requestId"
$header = @{
'accept' = 'application/json'
'authorization' = "Bearer $($this.Token)"
'content-type' = 'application/json'
'host' = $kustoHost
'x-ms-app' = 'kusto-rest.ps1'
'x-ms-app' = 'kusto-rest.ps1'
'x-ms-user' = $env:USERNAME
'x-ms-client-request-id' = $requestId
}
}
if ($body) {
$header.Add("content-length", $body.Length)
}
@ -871,31 +907,31 @@ class KustoObj {
}
} | ConvertTo-Json
}
write-verbose ($header | convertto-json)
write-verbose $body
$error.clear()
$this.Result = Invoke-WebRequest -Method Post -Uri $uri -Headers $header -Body $body
write-verbose $this.Result
if ($error) {
return $error
}
try {
return ($this.FixColumns($this.Result.content) | convertfrom-json)
}
catch {
write-warning "error converting json result to object. unparsed results in `$this.Result`r`n$error"
if (!$this.FixDuplicateColumns) {
write-warning "$this.fixDuplicateColumns = $true may resolve."
}
return ($this.Result.content)
}
}
[collections.arrayList] RemoveEmptyResults([collections.arrayList]$sourceContent) {
if (!$this.RemoveEmptyColumns -or !$sourceContent -or $sourceContent.count -eq 0) {
return $sourceContent
@ -903,7 +939,7 @@ class KustoObj {
$columnList = (Get-Member -InputObject $sourceContent[0] -View Extended).Name
write-verbose "checking column list $columnList"
$populatedColumnList = [collections.arraylist]@()
foreach ($column in $columnList) {
if (@($sourceContent | where-object $column -ne "").Count -gt 0) {
$populatedColumnList += $column
@ -911,24 +947,43 @@ class KustoObj {
}
return [collections.arrayList]@($sourceContent | select-object $populatedColumnList)
}
[bool] ResolveCluster() {
if($this.cluster.startswith('https://')) {
$this.cluster = $this.cluster.trimstart('https://')
}
if(!(test-netConnection $this.cluster -p 443).TcpTestSucceeded) {
write-warning "cluster not reachable:$($this.cluster)"
if((test-netConnection "$($this.cluster).kusto.windows.net" -p 443).TcpTestSucceeded) {
$this.cluster += '.kusto.windows.net'
write-host "cluster reachable:$($this.cluster)" -foregroundcolor green
}
else {
write-warning "cluster not reachable:$($this.cluster)"
return $false
}
}
return $true
}
[KustoObj] SetCluster([string]$cluster) {
$this.Cluster = $cluster
return $this.Pipe()
}
[KustoObj] SetDatabase([string]$database) {
$this.Database = $database
$this.SetTables()
$this.SetFunctions()
return $this.Pipe()
}
[KustoObj] SetPipe([bool]$enable) {
$this.PipeLine = $enable
return $this.Pipe()
}
[KustoObj] SetTable([string]$table) {
$this.Table = $table
return $this.Pipe()
@ -960,22 +1015,6 @@ class KustoObj {
}
# comment next line after microsoft.identity.client type has been imported into powershell session to troubleshoot 2 of 2
'@
'@
$error.Clear()
$global:kusto = [KustoObj]::new()
$kusto.SetTables()
$kusto.SetFunctions()
$kusto.Exec()
$kusto.ClearResults()
write-host ($PSBoundParameters | out-string)
if ($error) {
write-warning ($error | out-string)
}
else {
write-host ($kusto | Get-Member | out-string)
write-host "use `$kusto object to set properties and run queries. example: `$kusto.Exec('.show operations')" -ForegroundColor Green
write-host "set `$kusto.viewresults=`$true to see results." -ForegroundColor Green
}
main

Просмотреть файл

@ -20,63 +20,72 @@
<tags>servicefabric</tags>
<dependencies>
<group targetFramework=".NETFramework4.6.2">
<dependency id="Azure.Security.KeyVault.Certificates" version="4.5.1" exclude="Build,Analyzers" />
<dependency id="Azure.Data.Tables" version="12.8.1" exclude="Build,Analyzers" />
<dependency id="Azure.Identity" version="1.11.0" exclude="Build,Analyzers" />
<dependency id="Azure.Security.KeyVault.Keys" version="4.5.0" exclude="Build,Analyzers" />
<dependency id="Azure.Security.KeyVault.Secrets" version="4.5.0" exclude="Build,Analyzers" />
<dependency id="Azure.Storage.Blobs" version="12.18.0" exclude="Build,Analyzers" />
<dependency id="Azure.Storage.Queues" version="12.16.0" exclude="Build,Analyzers" />
<dependency id="Microsoft.Azure.Kusto.Data" version="11.3.4" exclude="Build,Analyzers" />
<dependency id="Microsoft.Extensions.CommandLineUtils" version="1.1.1" exclude="Build,Analyzers" />
<dependency id="Azure.Identity" version="1.10.2" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client.Extensions.Msal" version="4.56.0" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client" version="4.60.1" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client.Extensions.Msal" version="4.60.1" exclude="Build,Analyzers" />
<dependency id="Newtonsoft.Json" version="13.0.3" exclude="Build,Analyzers" />
<dependency id="System.CodeDom" version="4.7.0" exclude="Build,Analyzers" />
<dependency id="System.Diagnostics.DiagnosticSource" version="7.0.2" exclude="Build,Analyzers" />
<dependency id="System.Memory" version="4.5.5" exclude="Build,Analyzers" />
<dependency id="System.Reactive" version="4.0.0" exclude="Build,Analyzers" />
</group>
<group targetFramework=".NETFramework4.8.0">
<dependency id="Azure.Security.KeyVault.Certificates" version="4.5.1" exclude="Build,Analyzers" />
<dependency id="Azure.Data.Tables" version="12.8.1" exclude="Build,Analyzers" />
<dependency id="Azure.Identity" version="1.11.0" exclude="Build,Analyzers" />
<dependency id="Azure.Security.KeyVault.Keys" version="4.5.0" exclude="Build,Analyzers" />
<dependency id="Azure.Security.KeyVault.Secrets" version="4.5.0" exclude="Build,Analyzers" />
<dependency id="Azure.Storage.Blobs" version="12.18.0" exclude="Build,Analyzers" />
<dependency id="Azure.Storage.Queues" version="12.16.0" exclude="Build,Analyzers" />
<dependency id="Microsoft.Azure.Kusto.Data" version="11.3.4" exclude="Build,Analyzers" />
<dependency id="Microsoft.Extensions.CommandLineUtils" version="1.1.1" exclude="Build,Analyzers" />
<dependency id="Azure.Identity" version="1.10.2" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client.Extensions.Msal" version="4.56.0" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client" version="4.60.1" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client.Extensions.Msal" version="4.60.1" exclude="Build,Analyzers" />
<dependency id="Newtonsoft.Json" version="13.0.3" exclude="Build,Analyzers" />
<dependency id="System.CodeDom" version="4.7.0" exclude="Build,Analyzers" />
<dependency id="System.Diagnostics.DiagnosticSource" version="7.0.2" exclude="Build,Analyzers" />
<dependency id="System.Memory" version="4.5.5" exclude="Build,Analyzers" />
<dependency id="System.Reactive" version="4.0.0" exclude="Build,Analyzers" />
<dependency id="System.Security.Cryptography.Cng" version="4.7.0" exclude="Build,Analyzers" />
</group>
<group targetFramework=".net6.0">
<dependency id="Azure.Security.KeyVault.Certificates" version="4.5.1" exclude="Build,Analyzers" />
<dependency id="Azure.Data.Tables" version="12.8.1" exclude="Build,Analyzers" />
<dependency id="Azure.Identity" version="1.11.0" exclude="Build,Analyzers" />
<dependency id="Azure.Security.KeyVault.Keys" version="4.5.0" exclude="Build,Analyzers" />
<dependency id="Azure.Security.KeyVault.Secrets" version="4.5.0" exclude="Build,Analyzers" />
<dependency id="Azure.Storage.Blobs" version="12.18.0" exclude="Build,Analyzers" />
<dependency id="Azure.Storage.Queues" version="12.16.0" exclude="Build,Analyzers" />
<dependency id="Microsoft.Azure.Kusto.Data" version="11.3.4" exclude="Build,Analyzers" />
<dependency id="Microsoft.Extensions.CommandLineUtils" version="1.1.1" exclude="Build,Analyzers" />
<dependency id="Azure.Identity" version="1.10.2" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client.Extensions.Msal" version="4.56.0" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client" version="4.60.1" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client.Extensions.Msal" version="4.60.1" exclude="Build,Analyzers" />
<dependency id="Newtonsoft.Json" version="13.0.3" exclude="Build,Analyzers" />
<dependency id="System.CodeDom" version="4.7.0" exclude="Build,Analyzers" />
<dependency id="System.Diagnostics.DiagnosticSource" version="7.0.2" exclude="Build,Analyzers" />
<dependency id="System.Memory" version="4.5.5" exclude="Build,Analyzers" />
<dependency id="System.Reactive" version="4.0.0" exclude="Build,Analyzers" />
</group>
<group targetFramework=".net8.0">
<dependency id="Azure.Security.KeyVault.Certificates" version="4.5.1" exclude="Build,Analyzers" />
<dependency id="Azure.Data.Tables" version="12.8.1" exclude="Build,Analyzers" />
<dependency id="Azure.Identity" version="1.11.0" exclude="Build,Analyzers" />
<dependency id="Azure.Security.KeyVault.Keys" version="4.5.0" exclude="Build,Analyzers" />
<dependency id="Azure.Security.KeyVault.Secrets" version="4.5.0" exclude="Build,Analyzers" />
<dependency id="Azure.Storage.Blobs" version="12.18.0" exclude="Build,Analyzers" />
<dependency id="Azure.Storage.Queues" version="12.16.0" exclude="Build,Analyzers" />
<dependency id="Microsoft.Azure.Kusto.Data" version="11.3.4" exclude="Build,Analyzers" />
<dependency id="Microsoft.Extensions.CommandLineUtils" version="1.1.1" exclude="Build,Analyzers" />
<dependency id="Azure.Identity" version="1.10.2" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client.Extensions.Msal" version="4.56.0" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client" version="4.60.1" exclude="Build,Analyzers" />
<dependency id="Microsoft.Identity.Client.Extensions.Msal" version="4.60.1" exclude="Build,Analyzers" />
<dependency id="Newtonsoft.Json" version="13.0.3" exclude="Build,Analyzers" />
<dependency id="System.CodeDom" version="4.7.0" exclude="Build,Analyzers" />
<dependency id="System.Diagnostics.DiagnosticSource" version="7.0.2" exclude="Build,Analyzers" />
<dependency id="System.Memory" version="4.5.5" exclude="Build,Analyzers" />
<dependency id="System.Reactive" version="4.0.0" exclude="Build,Analyzers" />
</group>
</dependencies>
@ -115,6 +124,7 @@
<file src="..\bin\$configuration$\net462\*.pdb" target="lib\net462" />
<file src="..\bin\$configuration$\net462\Etlreader.dll" target="lib\net462" />
<file src="..\bin\$configuration$\net462\System.Fabric.Strings.dll" target="lib\net462" />
<file src="..\..\configurationFiles\collectsfdata.options.json" target="lib\net462" />
<!-- net48 tools -->
<!-- <file src="..\bin\$configuration$\net48\*.exe" target="tools\net48" />
<file src="..\bin\$configuration$\net48\*.dll" target="tools\net48" />
@ -129,6 +139,7 @@
<file src="..\bin\$configuration$\net48\*.pdb" target="lib\net48" />
<file src="..\bin\$configuration$\net48\Etlreader.dll" target="lib\net48" />
<file src="..\bin\$configuration$\net48\System.Fabric.Strings.dll" target="lib\net48" />
<file src="..\..\configurationFiles\collectsfdata.options.json" target="lib\net48" />
<!-- net6.0 tools -->
<!-- <file src="..\bin\$configuration$\net6.0\*.exe" target="tools\net6.0" />
<file src="..\bin\$configuration$\net6.0\*.runtimeconfig.json" target="tools\net6.0" />
@ -144,6 +155,7 @@
<file src="..\bin\$configuration$\net6.0\*.pdb" target="lib\net6.0" />
<file src="..\bin\$configuration$\net6.0\Etlreader.dll" target="lib\net6.0" />
<file src="..\bin\$configuration$\net6.0\System.Fabric.Strings.dll" target="lib\net6.0" />
<file src="..\..\configurationFiles\collectsfdata.options.json" target="lib\net6.0" />
<!-- net8.0 tools -->
<file src="..\bin\$configuration$\net8.0\*.exe" target="tools\net8.0" />
<file src="..\bin\$configuration$\net8.0\*.runtimeconfig.json" target="tools\net8.0" />
@ -159,6 +171,7 @@
<file src="..\bin\$configuration$\net8.0\*.pdb" target="lib\net8.0" />
<file src="..\bin\$configuration$\net8.0\Etlreader.dll" target="lib\net8.0" />
<file src="..\bin\$configuration$\net8.0\System.Fabric.Strings.dll" target="lib\net8.0" />
<file src="..\..\configurationFiles\collectsfdata.options.json" target="lib\net8.0" />
<!-- icon -->
<file src="..\FabricSupport.png" target="images\" />
</files>

Просмотреть файл

@ -93,7 +93,13 @@ namespace CollectSFData.Azure
{
Log.Debug($"enter: {blobUri}");
Uri uri = new Uri(blobUri);
return CreateBlobClient(uri);
string query = "";
if (string.IsNullOrEmpty(uri.Query))
{
Log.Debug($"adding sas token to uri: {blobUri}");
query = _config.SasEndpointInfo.SasToken;
}
return CreateBlobClient(uri, query);
}
catch (Exception e)
{

Просмотреть файл

@ -6,7 +6,6 @@
using CollectSFData.Common;
using CollectSFData.DataFile;
using Microsoft.Identity.Client;
using System;
using System.IO;
using System.Security.Cryptography;
@ -17,20 +16,16 @@ namespace CollectSFData.Azure
public static readonly string CacheFilePath;
private static readonly object _fileLock = new object();
private static string _appDataFolder;
private static string _friendlyName;
public static bool HasTokens { get; set; }
// Create byte array for additional entropy when using Protect method.
static byte [] s_additionalEntropy = { 9, 8, 7, 6, 5, 4 };
private static byte[] s_additionalEntropy = { 9, 8, 7, 6, 5, 4 };
public static bool HasTokens { get; set; }
static TokenCacheHelper()
{
_friendlyName = Path.GetFileNameWithoutExtension(Constants.ApplicationName);
_appDataFolder = $"{Environment.GetEnvironmentVariable("LOCALAPPDATA")}\\{_friendlyName}";
CacheFilePath = $"{_appDataFolder}\\{_friendlyName}.msalcache.bin3";
FileManager.CreateDirectory(_appDataFolder);
CacheFilePath = $"{Constants.AppDataFolder}\\{Constants.ApplicationName}.msalcache.bin3";
FileManager.CreateDirectory(Constants.AppDataFolder);
}
public static void AfterAccessNotification(TokenCacheNotificationArgs args)

Просмотреть файл

@ -40,10 +40,6 @@
</PackageReference>
<PackageReference Include="Microsoft.Azure.Kusto.Data" Version="11.3.4" />
<PackageReference Include="Azure.Data.Tables" Version="12.8.1" />
<PackageReference Include="Azure.Security.KeyVault.Certificates">
<Version>4.5.1</Version>
<TreatAsUsed>true</TreatAsUsed>
</PackageReference>
<PackageReference Include="Azure.Security.KeyVault.Keys">
<Version>4.5.0</Version>
<TreatAsUsed>true</TreatAsUsed>
@ -57,17 +53,22 @@
<Version>1.1.1</Version>
</PackageReference>
<PackageReference Include="Azure.Identity">
<Version>1.10.2</Version>
<Version>1.11.0</Version>
<TreatAsUsed>true</TreatAsUsed>
</PackageReference>
<PackageReference Include="Microsoft.Identity.Client">
<Version>4.60.1</Version>
<TreatAsUsed>true</TreatAsUsed>
</PackageReference>
<PackageReference Include="Microsoft.Identity.Client.Extensions.Msal">
<Version>4.56.0</Version>
<Version>4.60.1</Version>
<TreatAsUsed>true</TreatAsUsed>
</PackageReference>
<PackageReference Include="System.CodeDom">
<Version>4.7.0</Version>
</PackageReference>
<PackageReference Include="System.Diagnostics.DiagnosticSource" Version="7.0.2" />
<PackageReference Include="System.Memory" Version="4.5.5" />
<PackageReference Include="System.Reactive">
<Version>4.0.0</Version>
</PackageReference>

Просмотреть файл

@ -19,7 +19,6 @@ namespace CollectSFData
public class Collector
{
private bool _checkedVersion;
private int _noProgressCounter = 0;
private Timer _noProgressTimer;
private ParallelOptions _parallelConfig;
@ -228,6 +227,10 @@ namespace CollectSFData
{
Log.Warning($"there may have been errors during kusto import. {Config.CacheLocation} has *not* been deleted.");
}
else if (Config.IsKustoConfigured() && Config.IsIngestionLocal)
{
Log.Last($"{Instance.Kusto.Endpoint.ClusterName}", ConsoleColor.Cyan);
}
else if (Config.IsKustoConfigured())
{
Log.Last($"{Constants.DataExplorer}/clusters/{Instance.Kusto.Endpoint.ClusterName}/databases/{Instance.Kusto.Endpoint.DatabaseName}", ConsoleColor.Cyan);
@ -309,12 +312,8 @@ namespace CollectSFData
Log.Last("0 files enumerated.", ConsoleColor.Red);
}
// do random (10%) version check
if (Log.IsConsole && !_checkedVersion && new Random().Next(1, 11) == 10)
{
_checkedVersion = true;
Config.CheckReleaseVersion();
}
// do version check
Config.CheckReleaseVersion();
Log.Last($"{Instance.TotalErrors} errors.", Instance.TotalErrors > 0 ? ConsoleColor.Yellow : ConsoleColor.Green);
Log.Last($"{Instance.FileObjects.Pending()} files failed to be processed.", Instance.FileObjects.Pending() > 0 ? ConsoleColor.Red : ConsoleColor.Green);
@ -438,87 +437,165 @@ namespace CollectSFData
}
}
}
else if (Config.IsCacheLocationPreConfigured())
else if (Config.IsCacheLocationPreConfigured() && !Config.IsIngestionLocal)
{
switch (Config.FileType)
{
case FileTypesEnum.counter:
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.PerfCtrExtension);
if (files.Count < 1)
{
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.PerfCsvExtension);
}
break;
case FileTypesEnum.setup:
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.SetupExtension);
break;
case FileTypesEnum.table:
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.TableExtension);
break;
case FileTypesEnum.trace:
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.DtrExtension + Constants.ZipExtension);
if (files.Count < 1)
{
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.DtrExtension);
}
if (files.Count < 1)
{
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.EtlExtension + Constants.ZipExtension);
}
if (files.Count < 1)
{
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.EtlExtension);
}
break;
case FileTypesEnum.sfextlog:
files = Instance.FileMgr.GetFilesByExtension(Config.CacheLocation, Constants.LogExtension);
break;
default:
Log.Warning($"configured filetype:{Config.FileType} not valid for cache upload. returning.");
return;
}
files = GetFilesWithNecessaryExtension(files, Config.CacheLocation);
if (files.Count < 1)
{
Log.Error($"configuration set to upload cache files from 'cachelocation' {Config.CacheLocation} but no files found");
}
}
else if (Config.IsCacheLocationPreConfigured() && Config.IsIngestionLocal)
{
files = GetFilesWithNecessaryExtension(files, Config.LocalPath);
if (files.Count < 1)
{
Log.Error($"configuration set to upload cache files from 'LocalPath' {Config.LocalPath} but no files found");
}
}
Instance.TotalFilesEnumerated += files.Count;
foreach (string file in files)
if (Config.IsIngestionLocal)
{
FileObject fileObject = new FileObject(file, Config.CacheLocation) { Status = FileStatus.enumerated };
// only queue if file not already in FileObjects list
if (Instance.FileObjects.Add(fileObject))
List<FileObject> fileObjects = PrepareFiles(files);
foreach (FileObject fileObject in fileObjects)
{
Log.Info($"adding file: {fileObject.FileUri}", ConsoleColor.Green);
if (!Config.List)
{
QueueForIngest(fileObject);
}
QueueAndAddToFileObjects(fileObject);
}
else
}
else
{
foreach (string file in files)
{
Log.Debug($"file {fileObject.FileUri} already in FileObjects. not queueing for ingest.");
FileObject fileObject = new FileObject(file, Config.CacheLocation) { Status = FileStatus.enumerated };
QueueAndAddToFileObjects(fileObject);
}
}
}
private void QueueAndAddToFileObjects(FileObject fileObject)
{
// only queue if file not already in FileObjects list
if (Instance.FileObjects.Add(fileObject))
{
Log.Info($"adding file: {fileObject.FileUri}", ConsoleColor.Green);
if (!Config.List)
{
QueueForIngest(fileObject);
}
}
else
{
Log.Debug($"file {fileObject.FileUri} already in FileObjects. not queueing for ingest.");
}
}
private List<string> GetFilesWithNecessaryExtension(List<string> files, string location)
{
switch (Config.FileType)
{
case FileTypesEnum.counter:
files = Instance.FileMgr.GetFilesByExtension(location, Constants.PerfCtrExtension);
if (files.Count < 1)
{
files = Instance.FileMgr.GetFilesByExtension(location, Constants.PerfCsvExtension);
}
break;
case FileTypesEnum.setup:
files = Instance.FileMgr.GetFilesByExtension(location, Constants.SetupExtension);
break;
case FileTypesEnum.table:
files = Instance.FileMgr.GetFilesByExtension(location, Constants.TableExtension);
break;
case FileTypesEnum.trace:
files = Instance.FileMgr.GetFilesByExtension(location, Constants.DtrExtension + Constants.ZipExtension);
if (files.Count < 1)
{
files = Instance.FileMgr.GetFilesByExtension(location, Constants.DtrExtension);
}
if (files.Count < 1)
{
files = Instance.FileMgr.GetFilesByExtension(location, Constants.EtlExtension + Constants.ZipExtension);
}
if (files.Count < 1)
{
files = Instance.FileMgr.GetFilesByExtension(location, Constants.EtlExtension);
}
break;
case FileTypesEnum.sfextlog:
files = Instance.FileMgr.GetFilesByExtension(location, Constants.LogExtension);
break;
default:
Log.Warning($"configured filetype:{Config.FileType} not valid for cache upload. returning.");
return null;
}
return files;
}
private List<string> FilterFilesByTimeStamp(List<string> files)
{
List<string> filteredFilesByTime = new List<string>();
foreach (string file in files)
{
DateTimeOffset lastWriteTime = File.GetLastWriteTimeUtc(file);
if (lastWriteTime >= Config.StartTimeUtc && lastWriteTime <= Config.EndTimeUtc)
{
filteredFilesByTime.Add(file);
Instance.TotalFilesMatched++;
}
else
{
Instance.TotalFilesSkipped++;
}
}
return filteredFilesByTime;
}
private FileObject CopyLocalFileToCacheLocation(string fileLocalPath)
{
// remove path from the file name
string fileName = fileLocalPath.Substring(Config.LocalPath.Length + 1);
// create copy of this file in the cache location
string fileCacheLocationPath = Path.Combine(Config.CacheLocation, fileName);
File.Copy(fileLocalPath, fileCacheLocationPath, true);
FileObject fileObject = new FileObject(fileCacheLocationPath, Config.CacheLocation) { Status = FileStatus.enumerated };
return fileObject;
}
private List<FileObject> PrepareFiles(List<string> files)
{
List<string> filteredFilesByTime = FilterFilesByTimeStamp(files);
List<FileObject> fileObjects = new List<FileObject>();
foreach (string file in filteredFilesByTime)
{
FileObject fileObject = CopyLocalFileToCacheLocation(file);
fileObjects.Add(fileObject);
}
return fileObjects;
}
}
}

Просмотреть файл

@ -29,12 +29,18 @@ namespace CollectSFData.Common
public CommandOption CacheLocation { get; set; }
public CommandOption CheckForUpdates { get; set; }
public CommandLineApplication CmdLineApp { get; private set; }
public CommandOption ConfigurationFile { get; set; }
public CommandOption ContainerFilter { get; set; }
public CommandOption DatabasePersistence { get; set; }
public CommandOption DatabasePersistencePath { get; set; }
public CommandOption DeleteCache { get; set; }
public CommandOption EndTimeStamp { get; set; }
@ -63,6 +69,8 @@ namespace CollectSFData.Common
public CommandOption List { get; set; }
public CommandOption LocalPath { get; set; }
public CommandOption LogAnalyticsCreate { get; set; }
public CommandOption LogAnalyticsId { get; set; }
@ -386,6 +394,11 @@ namespace CollectSFData.Common
"[string] Write files to this output location. e.g. \"C:\\Perfcounters\\Output\" ",
CommandOptionType.SingleValue);
CheckForUpdates = CmdLineApp.Option("-cfu|--checkForUpdates",
$"[int] 0-disabled." +
$"{newLine} use checkForUpdates for specifying / disabling utility update checks in days.",
CommandOptionType.SingleValue);
ConfigurationFile = CmdLineApp.Option("-config|--configurationFile",
$"[string] json file containing configuration options." +
$"{newLine} type collectsfdata.exe -save default.json to create a default file." +
@ -396,6 +409,17 @@ namespace CollectSFData.Common
"[string] string / regex to filter container names",
CommandOptionType.SingleValue);
DatabasePersistence = CmdLineApp.Option("-dp|--databasePersistence",
"[bool] default false to create a volatile database. a value of true will" +
$"{newLine} persist the database on a given path in your container.",
CommandOptionType.SingleValue);
DatabasePersistencePath = CmdLineApp.Option("-dpp|--databasePersistencePath",
$"[string] path where you want your database to be persisted in for local ingestion." +
$"{newLine} path must be in the format: '@'c:\\...\\..',@'c:\\...\\..''" +
$"{newLine} example: '@'C:\\kustodata\\MyDatabaseName\\md',@'C:\\kustodata\\MyDatabaseName\\data''",
CommandOptionType.SingleValue);
DeleteCache = CmdLineApp.Option("-dc|--deleteCache",
"[bool] delete downloaded blobs from local disk at end of execution. ",
CommandOptionType.SingleValue);
@ -436,7 +460,8 @@ namespace CollectSFData.Common
KustoCluster = CmdLineApp.Option("-kc|--kustoCluster",
$"[string] ingest url for kusto." +
$"{newLine} ex: https://ingest-{{clusterName}}.{{location}}.kusto.windows.net/{{databaseName}}",
$"{newLine} ex: https://ingest-{{clusterName}}.{{location}}.kusto.windows.net/{{databaseName}}" +
$"{newLine} ex: http://localhost:{{port}}/{{databaseName}}",
CommandOptionType.SingleValue);
KustoPurge = CmdLineApp.Option("-kp|--KustoPurge",
@ -468,6 +493,10 @@ namespace CollectSFData.Common
"[bool] list files instead of downloading",
CommandOptionType.SingleValue);
LocalPath = CmdLineApp.Option("-lp|--localPath",
@"[string] path to original files and indicates local ingestion e.g. 'C:\Perfcounters\Output'",
CommandOptionType.SingleValue);
LogAnalyticsCreate = CmdLineApp.Option("-lac|--logAnalyticsCreate",
$"[bool] create new log analytics workspace." +
$"{newLine} requires LogAnalyticsWorkspaceName, AzureResourceGroup," +

Просмотреть файл

@ -14,7 +14,6 @@ using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Reflection;
using System.Security.Cryptography.X509Certificates;
using System.Text.RegularExpressions;
@ -25,26 +24,14 @@ namespace CollectSFData.Common
public class ConfigurationOptions : ConfigurationProperties
{
private static readonly CommandLineArguments _cmdLineArgs = new CommandLineArguments();
private static bool _cmdLineExecuted;
private static bool? _cacheLocationPreconfigured = null;
private static bool _cmdLineExecuted;
private static string[] _commandlineArguments = new string[0];
private static ConfigurationOptions _defaultConfig;
private static object _singleLock = new Object();
private static ConfigurationOptions _singleton;// = new ConfigurationOptions();
private readonly string _tempName = "csfd";
private string _tempPath;
private static object _singleLock = new Object();
public static ConfigurationOptions Singleton()
{
lock (_singleLock)
{
if(_singleton == null) {
_singleton = new ConfigurationOptions();
}
return _singleton;
}
}
public X509Certificate2 ClientCertificate { get; set; }
public new string EndTimeStamp
@ -81,8 +68,12 @@ namespace CollectSFData.Common
}
}
public bool IsARMValid { get; set; } = false;
public bool IsValid { get; set; }
public bool IsIngestionLocal { get; set; }
public bool NeedsValidation { get; set; } = true;
public new string StartTimeStamp
@ -105,6 +96,7 @@ namespace CollectSFData.Common
}
public string Version { get; } = $"{Process.GetCurrentProcess().MainModule?.FileVersionInfo.FileVersion}";
private bool _defaultConfigLoaded => HasValue(_defaultConfig);
static ConfigurationOptions()
@ -140,6 +132,45 @@ namespace CollectSFData.Common
{
Validate();
}
IsIngestionLocal = IsLocalIngestionConfigured();
}
/// <summary>
/// Returns the process-wide <see cref="ConfigurationOptions"/> instance,
/// lazily creating it on first use. Access is serialized by _singleLock.
/// </summary>
public static ConfigurationOptions Singleton()
{
    lock (_singleLock)
    {
        // create-on-first-use inside the lock, then hand back the shared instance
        return _singleton ?? (_singleton = new ConfigurationOptions());
    }
}
/// <summary>
/// Ensures a usable log file configuration: defaults a log file for verbose
/// logging, expands an optional &lt;datetime-format&gt; token in the name, and
/// creates the target directory.
/// </summary>
/// <returns>true when no log file is configured or its directory was created</returns>
public bool CheckLogFile()
{
    // verbose logging requires a log file; default one into the cache location
    if (LogDebug == LoggingLevel.Verbose && !HasValue(LogFile))
    {
        LogFile = $"{CacheLocation}/{_tempName}.log";
        Log.Warning($"LogDebug 5 (Verbose) requires log file. setting LogFile:{LogFile}");
    }

    // nothing further to check when no log file is configured
    if (!HasValue(LogFile))
    {
        return true;
    }

    LogFile = FileManager.NormalizePath(LogFile);

    // expand an embedded <format> token into the current timestamp, e.g. <yyMMdd>
    if (Regex.IsMatch(LogFile, @"<.+>"))
    {
        string timePattern = Regex.Match(LogFile, @"<(.+?)>").Groups[1].Value;
        LogFile = Regex.Replace(LogFile, @"<.+?>", DateTime.Now.ToString(timePattern));
        Log.Info($"replaced datetime token {timePattern}: new LogFile name:{LogFile}");
    }

    Log.Info($"setting output log file to: {LogFile}");
    return FileManager.CreateDirectory(Path.GetDirectoryName(LogFile));
}
public void CheckPublicIp()
@ -168,6 +199,33 @@ namespace CollectSFData.Common
public void CheckReleaseVersion()
{
Log.Info($"CheckReleaseVersionDays:{CheckForUpdates}");
if (CheckForUpdates < 1)
{
Log.Warning($"CheckReleaseVersionDays disabled. skipping version check.");
return;
}
string lastUpdateCheck = Constants.AppDataFolder + "\\lastupdatecheck.txt";
if (File.Exists(lastUpdateCheck))
{
string lastCheck = File.ReadAllText(lastUpdateCheck);
if (DateTime.Now.Subtract(Convert.ToDateTime(lastCheck)).TotalDays < CheckForUpdates)
{
Log.Info($"last update check was less than {CheckForUpdates} days ago. skipping version check.");
return;
}
}
else if (!FileManager.CreateDirectory(Constants.AppDataFolder))
{
Log.Warning($"unable to create directory {Constants.AppDataFolder}. skipping version check.");
return;
}
Log.Info($"writing last update check date to {lastUpdateCheck}");
File.WriteAllText(lastUpdateCheck, DateTime.Now.ToString(Constants.DefaultDatePattern));
string response = $"\r\n\tlocal running version: {Version}";
Http http = Http.ClientFactory();
@ -381,6 +439,11 @@ namespace CollectSFData.Common
return Guid.TryParse(guid, out testGuid);
}
/// <summary>
/// True when the configuration targets local ingestion: KustoCluster points at
/// a local web server url and a LocalPath source directory is supplied.
/// </summary>
public bool IsLocalIngestionConfigured()
{
    bool targetsLocalServer = HasValue(KustoCluster)
        && Regex.IsMatch(KustoCluster, Constants.LocalWebServerPattern);
    return targetsLocalServer && HasValue(LocalPath);
}
public bool IsKustoConfigured()
{
return (!FileType.Equals(FileTypesEnum.any) & HasValue(KustoCluster) & HasValue(KustoTable));
@ -403,6 +466,17 @@ namespace CollectSFData.Common
return HasValue(LogAnalyticsPurge);
}
/// <summary>
/// Validates AzureTenantId: it must be present and no longer than
/// Constants.MaxStringLength. The value may be a guid, a domain name,
/// or the literal 'common'; an error is logged when invalid.
/// </summary>
public bool IsTenantValid()
{
    bool valid = HasValue(AzureTenantId) && AzureTenantId.Length <= Constants.MaxStringLength;

    if (!valid)
    {
        Log.Error($"invalid tenant id value:'{AzureTenantId}' expected:guid, domain name, or 'common'");
    }

    return valid;
}
public bool IsUploadConfigured()
{
return IsKustoConfigured() | IsLogAnalyticsConfigured();
@ -536,6 +610,8 @@ namespace CollectSFData.Common
options.Remove("Examples");
options.Remove("ExePath");
options.Remove("FileType");
options.Remove("IsARMValid");
options.Remove("IsIngestionLocal");
options.Remove("IsValid");
options.Remove("NeedsValidation");
options.Remove("SasEndpointInfo");
@ -572,6 +648,11 @@ namespace CollectSFData.Common
_defaultConfig = configurationOptions;
}
/// <summary>
/// Determines whether an Azure Resource Manager authentication pass is needed:
/// either a client id is configured or log analytics is in use.
/// </summary>
public bool ShouldAuthenticateToArm()
{
    // use short-circuit '||' instead of bitwise '|': the second method call
    // is skipped once the first check already answers the question
    return IsClientIdConfigured() || IsLogAnalyticsConfigured();
}
public bool Validate()
{
try
@ -593,8 +674,14 @@ namespace CollectSFData.Common
retval &= ValidateFileType();
retval &= ValidateTime();
retval &= ValidateSource();
if (ShouldAuthenticateToArm())
{
retval &= IsARMValid = ValidateAad();
}
retval &= ValidateDestination();
retval &= ValidateAad();
retval &= ValidateDatabasePersistencePaths();
if (retval)
{
@ -623,8 +710,7 @@ namespace CollectSFData.Common
AzureResourceManager arm = new AzureResourceManager(this);
bool retval = true;
bool clientIdConfigured = IsClientIdConfigured();
bool usingAad = clientIdConfigured | IsKustoConfigured() | IsKustoPurgeRequested();
usingAad |= LogAnalyticsCreate | LogAnalyticsRecreate | IsLogAnalyticsPurgeRequested() | IsLogAnalyticsConfigured();
bool usingAad = clientIdConfigured | LogAnalyticsCreate | LogAnalyticsRecreate | IsLogAnalyticsPurgeRequested() | IsLogAnalyticsConfigured();
if (HasValue(AzureClientId) && !IsGuid(AzureClientId))
{
@ -638,9 +724,9 @@ namespace CollectSFData.Common
retval &= false;
}
if (HasValue(AzureTenantId) && !IsGuid(AzureTenantId))
if (!HasValue(AzureTenantId) | AzureTenantId.Length > Constants.MaxStringLength)
{
Log.Error($"invalid tenant id value:{AzureTenantId} expected:guid");
Log.Error($"invalid tenant id value:{AzureTenantId} expected:guid, domain name, or 'common'");
retval &= false;
}
@ -654,6 +740,8 @@ namespace CollectSFData.Common
{
if (clientIdConfigured && HasValue(AzureClientCertificate) && !HasValue(ClientCertificate))
{
retval &= IsTenantValid();
if (HasValue(AzureClientSecret))
{
certificateUtilities.SetSecurePassword(AzureClientSecret);
@ -675,11 +763,6 @@ namespace CollectSFData.Common
retval &= false;
}
}
// else
// {
// Log.Error("Failed certificate to find certificate");
// retval &= false;
// }
retval &= arm.Authenticate();
@ -707,6 +790,19 @@ namespace CollectSFData.Common
return retval;
}
/// <summary>
/// Validates DatabasePersistencePath against the required two-path pattern
/// (one metadata path and one data path). Logs an error when invalid.
/// </summary>
/// <returns>true when no path is configured or the path matches the pattern</returns>
public bool ValidateDatabasePersistencePaths()
{
    // nothing to validate when no persistence path was supplied
    if (!HasValue(DatabasePersistencePath))
    {
        return true;
    }

    if (Regex.IsMatch(DatabasePersistencePath, Constants.CustomDatabasePersistencePathPattern))
    {
        return true;
    }

    Log.Error($"invalid paths. input should match pattern and include one path each for metadata and data. pattern: {Constants.CustomDatabasePersistencePathPattern}\r\nexample: '@'c:\\kustodata\\dbs\\customfolder\\DatabaseName\\md',@'c:\\kustodata\\dbs\\customfolder\\DatabaseName\\data''");
    return false;
}
public bool ValidateDestination()
{
bool retval = true;
@ -721,9 +817,9 @@ namespace CollectSFData.Common
retval = IsKustoConfigured();
}
if (!Regex.IsMatch(KustoCluster, Constants.KustoUrlPattern))
if (!Regex.IsMatch(KustoCluster, Constants.KustoUrlPattern) && !Regex.IsMatch(KustoCluster, Constants.LocalWebServerPattern))
{
string errMessage = $"invalid kusto url. should match pattern {Constants.KustoUrlPattern}\r\nexample: https://ingest-{{kustocluster}}.{{optional location}}.kusto.windows.net/{{kustodatabase}}";
string errMessage = $"invalid url. should match either Kusto or local web server pattern. Kusto pattern: {Constants.KustoUrlPattern}\r\nexample: https://ingest-{{kustocluster}}.{{optional location}}.kusto.windows.net/{{kustodatabase}} \n Local web server pattern: {Constants.LocalWebServerPattern}\r\nexample: http://localhost:{{port}}/{{databaseName}}";
Log.Error(errMessage);
retval = false;
}
@ -774,6 +870,22 @@ namespace CollectSFData.Common
}
}
if ((IsKustoConfigured() | IsLogAnalyticsConfigured()) & !IsTenantValid())
{
Log.Error("tenant id value expected for this configuration.");
retval = false;
}
#if NET462
// if net462, this is not supported and will throw an exception
if (IsKustoConfigured() && !IsARMValid)
{
string errorMessage = "kusto federated security not supported in .net framework 4.6.2. use different framework or configure 'AzureClientId'";
Log.Error(errorMessage);
throw new NotSupportedException(errorMessage);
}
#endif
if (IsKustoConfigured() & IsLogAnalyticsConfigured())
{
Log.Error($"kusto and log analytics *cannot* both be enabled. remove configuration for one");
@ -792,6 +904,30 @@ namespace CollectSFData.Common
UseMemoryStream = false;
}
if (HasValue(KustoCluster) && Regex.IsMatch(KustoCluster, Constants.KustoUrlPattern) && HasValue(LocalPath))
{
Log.Error($"local and remote ingestion *cannot* both be enabled. please either remove input for LocalPath field or provide a local web server url instead.");
retval = false;
}
if (HasValue(KustoCluster) && Regex.IsMatch(KustoCluster, Constants.LocalWebServerPattern) && !HasValue(LocalPath))
{
Log.Error($"if connecting to a local web server, please provide a value for the LocalPath field.");
retval = false;
}
if (HasValue(KustoCluster) && Regex.IsMatch(KustoCluster, Constants.KustoUrlPattern) && (DatabasePersistence || HasValue(DatabasePersistencePath)))
{
Log.Error($"persistent database creation is only available for local ingestion.");
retval = false;
}
if (!DatabasePersistence && HasValue(DatabasePersistencePath))
{
Log.Error($"cannot provide a database persistence path if database persistence is not enabled.");
retval = false;
}
return retval;
}
@ -847,7 +983,15 @@ namespace CollectSFData.Common
Log.Error($"sasKey, fileUris, or cacheLocation should be populated as file source.");
retval = false;
}
if (HasValue(CacheLocation) && HasValue(LocalPath)) {
LocalPath = FileManager.NormalizePath(LocalPath);
if (LocalPath.Equals(CacheLocation))
{
Log.Error("CacheLocation and LocalPath should be different directories in order to perform local ingestion.");
retval = false;
}
}
return retval;
}
@ -957,32 +1101,6 @@ namespace CollectSFData.Common
}
}
public bool CheckLogFile()
{
if (LogDebug == LoggingLevel.Verbose && !HasValue(LogFile))
{
LogFile = $"{CacheLocation}/{_tempName}.log";
Log.Warning($"LogDebug 5 (Verbose) requires log file. setting LogFile:{LogFile}");
}
if (HasValue(LogFile))
{
LogFile = FileManager.NormalizePath(LogFile);
if(Regex.IsMatch(LogFile,@"<.+>"))
{
string timePattern = Regex.Match(LogFile, @"<(.+?)>").Groups[1].Value;
LogFile = Regex.Replace(LogFile, @"<.+?>", DateTime.Now.ToString(timePattern));
Log.Info($"replaced datetime token {timePattern}: new LogFile name:{LogFile}");
}
Log.Info($"setting output log file to: {LogFile}");
return FileManager.CreateDirectory(Path.GetDirectoryName(LogFile));
}
return true;
}
private string CleanTableName(string tableName, bool withGatherType = false)
{
if (withGatherType)

Просмотреть файл

@ -24,14 +24,20 @@ namespace CollectSFData.Common
public string AzureSubscriptionId { get; set; }
public string AzureTenantId { get; set; }
public string AzureTenantId { get; set; } = "common";
public string CacheLocation { get; set; }
public int CheckForUpdates { get; set; } = 30;
public string ConfigurationFile { get; set; }
public string ContainerFilter { get; set; }
public bool DatabasePersistence { get; set; } = false;
public string DatabasePersistencePath { get; set; }
public bool DeleteCache { get; set; }
public string EndTimeStamp { get; set; }
@ -62,6 +68,8 @@ namespace CollectSFData.Common
public bool List { get; set; } = false;
public string LocalPath { get; set; }
public bool LogAnalyticsCreate { get; set; }
public string LogAnalyticsId { get; set; }
@ -108,7 +116,6 @@ namespace CollectSFData.Common
public bool UseMemoryStream { get; set; } = true;
public bool UseTx { get; set; } = true;
public bool VersionOption { get; set; }

Просмотреть файл

@ -15,6 +15,7 @@ namespace CollectSFData.Common
public const string CodeLatestRelease = "https://api.github.com/repos/microsoft/CollectServiceFabricData/releases/latest";
public const string CodeRepository = "https://github.com/microsoft/CollectServiceFabricData";
public const string CsvExtension = ".csv";
public const string CustomDatabasePersistencePathPattern = "^@'[a-zA-Z]:.+',@'[a-zA-Z]:.+'$";
public const string DataExplorer = "https://dataexplorer.azure.com";
public const string DateTimeFormat = "yyyy-MM-ddTHH:mm:ss.ffffffZ";
public const string DefaultDatePattern = "MM/dd/yyyy HH:mm zzz";
@ -31,6 +32,7 @@ namespace CollectSFData.Common
public const string InstanceMetaDataRestUri = "http://169.254.169.254/metadata/instance?api-version=2017-08-01"; //DevSkim: ignore DS137138 as is used on internal az network for instance metadata
public const string JsonExtension = ".json";
public const string KustoUrlPattern = "https://(?<ingest>ingest-){1}(?<clusterName>.+?)\\.(?<location>.+?){0,1}\\.(?<domainName>.+?)/(?<databaseName>.+?){1}(/|$)";
public const string LocalWebServerPattern = @"^http://localhost:(?<port>\d){1,5}/(?<databaseName>.+?)$";
public const string LogExtension = ".log";
public const string ManagementAzureCom = "https://management.azure.com";
public const int MaxCsvTransmitBytes = 1024 * 1024 * 100;
@ -38,6 +40,7 @@ namespace CollectSFData.Common
public const int MaxJsonTransmitBytes = 1024 * 1024 * 25;
public const int MaxResults = 5000;
public const int MaxStreamTransmitBytes = 1024 * 1024 * 1024;
public const int MaxStringLength = 256;
public const int MaxThreadMultiplier = 20;
public const int MinThreadMultiplier = 10;
public const string PerfCsvExtension = ".perf.csv";
@ -48,6 +51,7 @@ namespace CollectSFData.Common
public const int RetryMaxDelay = 30;
public const string SchemaFile = "https://raw.githubusercontent.com/microsoft/CollectServiceFabricData/master/configurationFiles/collectsfdata.schema.json";
public const string SetupExtension = ".trace";
public const string StartOfDefaultDatabasePersistencePath = "c:\\kustodata\\dbs\\";
public const string TableExtension = ".table.csv";
public const int TableMaxResults = 50000;
public const string TableNamePattern = "fabriclog(?<guidString>[A-Fa-f0-9]{32}).+$";
@ -62,6 +66,7 @@ namespace CollectSFData.Common
public const double WarningTimeSpanMinHours = .5F;
public const string ZipExtension = ".zip";
public static readonly string ApplicationName = AppDomain.CurrentDomain.FriendlyName;
public static readonly string AppDataFolder = $"{Environment.GetEnvironmentVariable("LOCALAPPDATA")}\\{ApplicationName}";
public static readonly string EtwManifestsUrlIndex = $"{EtwManifestsUrl}/index.json";
public static readonly DateTime UnixEpochMinValue = new DateTime(1601,1,1);
}

Просмотреть файл

@ -9,7 +9,6 @@ using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Sockets;
using System.Text;
using System.Threading;
@ -58,11 +57,11 @@ namespace CollectSFData.Common
{
Log.Info($"enter: {uri}", ConsoleColor.Magenta);
bool result = false;
if(headers == null)
if (headers == null)
{
headers = new Dictionary<string, string>();
}
if(!headers.ContainsKey("User-Agent"))
if (!headers.ContainsKey("User-Agent"))
{
headers.Add("User-Agent", "CollectSFData");
}
@ -105,7 +104,7 @@ namespace CollectSFData.Common
}
// head not working with httpclient use get with ResponseHeadersRead
if(httpMethod == HttpMethod.Head)
if (httpMethod == HttpMethod.Head)
{
httpMethod = HttpMethod.Get;
_httpClient.DefaultRequestHeaders.Add("Accept", "*/*");

Просмотреть файл

@ -3,7 +3,6 @@
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
using System;
using System.Collections.Generic;
namespace CollectSFData.Common
@ -11,14 +10,14 @@ namespace CollectSFData.Common
public class Total
{
public int Converted;
public int Downloading;
public int Downloaded;
public int Downloading;
public int Enumerated;
public int Formatting;
public int Existing;
public int Errors;
public int Existing;
public int Failed;
public int Formatted;
public int Formatting;
public int Matched;
public int Queued;
public int Records;
@ -50,23 +49,23 @@ namespace CollectSFData.Common
/// <summary>
/// Computes a content-based hash over all counter fields so that instances
/// with equal counter values hash identically.
/// NOTE(fix): the previous implementation returned Dictionary.GetHashCode(),
/// which is reference-based — it yielded a different value on every call even
/// for identical contents, breaking the Equals/GetHashCode contract.
/// </summary>
public override int GetHashCode()
{
    unchecked
    {
        // standard 17/31 accumulation; unchecked allows intended overflow wrap
        int hash = 17;
        hash = (hash * 31) + Converted;
        hash = (hash * 31) + Downloading;
        hash = (hash * 31) + Downloaded;
        hash = (hash * 31) + Enumerated;
        hash = (hash * 31) + Formatting;
        hash = (hash * 31) + Existing;
        hash = (hash * 31) + Errors;
        hash = (hash * 31) + Failed;
        hash = (hash * 31) + Formatted;
        hash = (hash * 31) + Matched;
        hash = (hash * 31) + Queued;
        hash = (hash * 31) + Records;
        hash = (hash * 31) + Skipped;
        hash = (hash * 31) + Succeeded;
        hash = (hash * 31) + Unknown;
        hash = (hash * 31) + Uploading;
        return hash;
    }
}
}

Просмотреть файл

@ -6,7 +6,6 @@
using CollectSFData.Common;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
@ -103,63 +102,6 @@ namespace CollectSFData.DataFile
}
}
/// <summary>
/// Parses a relog.exe csv output stream into CsvCounterRecord entries.
/// Row 0 holds quoted counter-path headers; column 0 of each data row holds
/// the sample timestamp; remaining columns hold counter values.
/// </summary>
/// <param name="fileObject">file whose Stream contains the relog csv text</param>
/// <returns>parsed records; partial results are returned on exception</returns>
public IList<CsvCounterRecord> ExtractPerfRelogCsvData(FileObject fileObject)
{
    List<CsvCounterRecord> csvRecords = new List<CsvCounterRecord>();
    // counter path shape: \\machine\object(instance)\counter — instance is optional
    string counterPattern = @"\\\\.+?\\(?<object>.+?)(?<instance>\(.*?\)){0,1}\\(?<counter>.+)";
    try
    {
        IList<string> allLines = fileObject.Stream.Read();
        // header row: every quoted token is one counter path (index 0 is the timestamp column)
        MatchCollection matchList = Regex.Matches(allLines[0], "\"(.+?)\"");
        string[] headers = matchList.Cast<Match>().Select(match => match.Value).ToArray();
        // data rows start at line 1; NOTE(review): plain Split(',') assumes no
        // embedded commas inside quoted values — confirm relog output guarantees this
        for (int csvRecordIndex = 1; csvRecordIndex < allLines.Count; csvRecordIndex++)
        {
            string[] counterValues = allLines[csvRecordIndex].Split(',');
            // column 0 is the timestamp, so value columns start at index 1
            for (int headerIndex = 1; headerIndex < headers.Length; headerIndex++)
            {
                // short rows are tolerated: columns past the row's length are skipped
                if (counterValues.Length > headerIndex)
                {
                    string stringValue = counterValues[headerIndex].Trim('"').Trim(' ');
                    Match counterInfo = Regex.Match(headers[headerIndex], counterPattern);
                    // empty cells (no sample for this counter at this timestamp) are skipped
                    if (!string.IsNullOrEmpty(stringValue))
                    {
                        try
                        {
                            csvRecords.Add(new CsvCounterRecord()
                            {
                                Timestamp = Convert.ToDateTime(counterValues[0].Trim('"').Trim(' ')),
                                CounterName = headers[headerIndex].Replace("\"", "").Trim(),
                                // relog may emit exponent notation, hence AllowExponent
                                CounterValue = Decimal.Parse(stringValue, NumberStyles.AllowExponent | NumberStyles.AllowDecimalPoint),
                                Object = counterInfo.Groups["object"].Value.Replace("\"", "").Trim(),
                                Counter = counterInfo.Groups["counter"].Value.Replace("\"", "").Trim(),
                                // strip the surrounding parentheses captured by the instance group
                                Instance = Regex.Replace(counterInfo.Groups["instance"].Value, @"^\(|\)$", "").Trim(),
                                NodeName = fileObject.NodeName,
                                FileType = fileObject.FileDataType.ToString(),
                                RelativeUri = fileObject.RelativeUri
                            });
                        }
                        catch (Exception ex)
                        {
                            // one bad cell is logged and skipped; the rest of the file still parses
                            Log.Exception($"stringValue:{stringValue} exception:{ex}");
                        }
                    }
                }
            }
        }
        return csvRecords;
    }
    catch (Exception e)
    {
        // on any structural failure, log and return whatever parsed so far
        Log.Exception($"{e}", csvRecords);
        return csvRecords;
    }
}
public FileObjectCollection FormatCounterFile(FileObject fileObject)
{
Log.Debug($"enter:{fileObject.FileUri}");
@ -169,25 +111,11 @@ namespace CollectSFData.DataFile
fileObject.Stream.SaveToFile();
DeleteFile(outputFile);
Log.Info($"Writing {outputFile}");
if (_config.UseTx)
{
result = TxBlg(fileObject, outputFile);
}
else
{
result = RelogBlg(fileObject, outputFile);
}
result = TxBlg(fileObject, outputFile);
if (result)
{
_instance.TotalFilesConverted++;
if (!_config.UseTx)
{
fileObject.Stream.ReadFromFile(outputFile);
fileObject.Stream.Write<CsvCounterRecord>(ExtractPerfRelogCsvData(fileObject));
}
}
else
{
@ -225,13 +153,6 @@ namespace CollectSFData.DataFile
if (result)
{
_instance.TotalFilesConverted++;
// if (!Config.UseTx)
// {
// fileObject.Stream.ReadFromFile(outputFile);
// fileObject.Stream.Write<CsvCounterRecord>(ExtractPerfRelogCsvData(fileObject));
// }
return PopulateCollection<DtrTraceRecord>(fileObject);
}
@ -291,7 +212,7 @@ namespace CollectSFData.DataFile
List<string> files = new List<string>();
SearchOption subDirectories = includeSubDirectories ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly;
if(Directory.Exists(filePath))
if (Directory.Exists(filePath))
{
files.AddRange(Directory.GetFiles(filePath, $"*{fileExtensionPattern}", subDirectories));
}
@ -299,7 +220,7 @@ namespace CollectSFData.DataFile
{
Log.Warning($"directory does not exist:filePath{filePath}");
}
Log.Info($"exit:filePath{filePath} subdir:{includeSubDirectories} files:", files);
return files;
}
@ -509,39 +430,6 @@ namespace CollectSFData.DataFile
return observer;
}
/// <summary>
/// Converts a .blg performance counter file to csv by invoking relog.exe.
/// </summary>
/// <param name="fileObject">source file whose FileUri points at the .blg file</param>
/// <param name="outputFile">destination csv path passed to relog -o</param>
/// <returns>true when relog produced no error output; false otherwise</returns>
public bool RelogBlg(FileObject fileObject, string outputFile)
{
    string csvParams = fileObject.FileUri + " -f csv -o " + outputFile;
    Log.Info($"relog.exe {csvParams}");

    ProcessStartInfo startInfo = new ProcessStartInfo("relog.exe", csvParams)
    {
        CreateNoWindow = true,
        UseShellExecute = false,
        RedirectStandardError = true,
        RedirectStandardOutput = true
    };

    // fixes from review:
    // - 'using' disposes the Process (previous code leaked the process handle)
    // - both redirected streams are always drained; the old HasExited/Peek guard
    //   raced with fast process exit and could silently miss error output
    // - stderr is drained asynchronously while stdout is read synchronously to
    //   avoid the classic full-pipe deadlock with two redirected streams
    using (Process convertFileProc = Process.Start(startInfo))
    {
        System.Threading.Tasks.Task<string> errorTask = convertFileProc.StandardError.ReadToEndAsync();
        string standardOutput = convertFileProc.StandardOutput.ReadToEnd();
        string standardError = errorTask.Result;
        convertFileProc.WaitForExit();

        if (standardOutput.Length > 0)
        {
            Log.Info($"{standardOutput}");
        }

        if (standardError.Length > 0)
        {
            Log.Error($"{standardError}");
            return false;
        }

        return true;
    }
}
public void SaveToCache(FileObject fileObject, bool force = false)
{
try

Просмотреть файл

@ -6,8 +6,6 @@
using CollectSFData.Common;
using CollectSFData.DataFile;
using Azure;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
using Azure.Storage.Queues;
using Azure.Storage.Queues.Models;
using Newtonsoft.Json;
@ -60,15 +58,23 @@ namespace CollectSFData.Kusto
Log.Warning($"file already ingested. skipping: {fileObject.RelativeUri}");
return;
}
if (_config.KustoUseBlobAsSource && fileObject.IsSourceFileLinkCompliant())
if (!_config.IsIngestionLocal && _config.KustoUseBlobAsSource && fileObject.IsSourceFileLinkCompliant())
{
IngestSingleFile(fileObject);
}
else
else if (!_config.IsIngestionLocal)
{
IngestMultipleFiles(_instance.FileMgr.ProcessFile(fileObject));
}
else
{
// format the trace files
FileObjectCollection fileObjectCollection = _instance.FileMgr.ProcessFile(fileObject);
// ingest the trace files
fileObjectCollection.ForEach(x => IngestLocally(x));
}
}
public bool Complete()
@ -77,9 +83,13 @@ namespace CollectSFData.Kusto
{
Log.Info("finished. cancelling", ConsoleColor.White);
_tokenSource.Cancel();
_monitorTask.Wait();
_monitorTask.Dispose();
if (!_config.IsIngestionLocal)
{
_monitorTask.Wait();
_monitorTask.Dispose();
}
if (_appendingToExistingTableUnique
&& _config.FileType == FileTypesEnum.table
&& _instance.FileObjects.Any(FileStatus.succeeded))
@ -93,7 +103,7 @@ namespace CollectSFData.Kusto
string command = $".set-or-replace {_config.KustoTable} <| {_config.KustoTable} | summarize min(RelativeUri) by {names}";
Log.Info(command);
Endpoint.Command(command);
Endpoint.CommandAsync(command).Wait();
Log.Info("removed duplicate records", ConsoleColor.White);
}
@ -116,11 +126,21 @@ namespace CollectSFData.Kusto
Endpoint.Authenticate();
_failureQueryTime = _instance.StartTime.ToUniversalTime();
if (!PopulateQueueEnumerators())
if (_config.IsIngestionLocal)
{
return false;
if (!Endpoint.CreateDatabase(Endpoint.DatabaseName))
{
return false;
}
}
else
{
if (!PopulateQueueEnumerators())
{
return false;
}
}
if (_config.IsKustoPurgeRequested())
{
Purge();
@ -128,17 +148,20 @@ namespace CollectSFData.Kusto
}
else if (_config.KustoRecreateTable)
{
PurgeMessages(Endpoint.TableName);
if (!_config.IsIngestionLocal)
{
PurgeMessages(Endpoint.TableName);
}
if (!Endpoint.DropTable(Endpoint.TableName))
{
return false;
}
}
else if (_config.Unique && Endpoint.HasTable(Endpoint.TableName))
else if (!_config.IsIngestionLocal && _config.Unique && Endpoint.HasTable(Endpoint.TableName))
{
_appendingToExistingTableUnique = true;
List<string> existingUploads = Endpoint.Query($"['{Endpoint.TableName}']|distinct RelativeUri");
List<string> existingUploads = Endpoint.QueryAsCsvAsync($"['{Endpoint.TableName}']|distinct RelativeUri").Result;
foreach (string existingUpload in existingUploads)
{
_instance.FileObjects.Add(new FileObject(existingUpload) { Status = FileStatus.existing });
@ -147,10 +170,13 @@ namespace CollectSFData.Kusto
IngestResourceIdKustoTableMapping();
// monitor for new files to be uploaded
if (_monitorTask == null)
if (!_config.IsIngestionLocal)
{
_monitorTask = Task.Run((Action)QueueMonitor, _tokenSource.Token);
// monitor for new files to be uploaded
if (_monitorTask == null)
{
_monitorTask = Task.Run((Action)QueueMonitor, _tokenSource.Token);
}
}
return true;
@ -215,6 +241,22 @@ namespace CollectSFData.Kusto
return fileObject.Status != FileStatus.existing;
}
/// <summary>
/// Ingests one formatted trace file into the local kusto endpoint using
/// inline ingestion with the file's ingestion mapping.
/// </summary>
private void IngestLocally(FileObject fileObject)
{
    string ingestionMapping = SetIngestionMapping(fileObject);

    // the formatting pipeline compresses files to zip; decompress before reading the stream
    fileObject.Stream.Decompress();
    string traces = fileObject.Stream.ReadToEnd();

    if (traces.Length == 0)
    {
        Log.Error($"Length of file {fileObject.BaseUri} is 0. Please provide non-empty files for ingestion.");
        return;
    }

    Endpoint.IngestInlineWithMapping(_config.KustoTable, ingestionMapping, traces);
}
private void IngestMultipleFiles(FileObjectCollection fileObjectCollection)
{
fileObjectCollection.ForEach(x => IngestSingleFile(x));
@ -259,19 +301,16 @@ namespace CollectSFData.Kusto
private void IngestStatusFailQuery()
{
Endpoint.Query($".show ingestion failures" +
KustoRestRecords failedRecords = Endpoint.QueryAsListAsync($".show ingestion failures" +
$"| where Table == '{Endpoint.TableName}'" +
$"| where FailedOn >= todatetime('{_failureQueryTime}')" +
$"| order by FailedOn asc");
KustoRestRecords failedRecords = Endpoint.PrimaryResultTable.Records();
$"| order by FailedOn asc").Result;
foreach (KustoRestRecord record in failedRecords)
{
string uriFile = record["IngestionSourcePath"].ToString();
Log.Debug($"checking failed ingested for failed relativeuri: {uriFile}");
FileObject fileObject = _instance.FileObjects.FindByUriFirstOrDefault(uriFile);
fileObject.Status = FileStatus.failed;
if (fileObject.IsPopulated)
@ -290,13 +329,18 @@ namespace CollectSFData.Kusto
private void IngestStatusSuccessQuery()
{
long queryTime = DateTime.Now.Ticks;
List<string> successUris = new List<string>();
successUris.AddRange(Endpoint.Query($"['{Endpoint.TableName}']" +
successUris.AddRange(Endpoint.QueryAsCsvAsync($"['{Endpoint.TableName}']" +
$"| where cursor_after('{_ingestCursor}')" +
$"| where ingestion_time() > todatetime('{_instance.StartTime.ToUniversalTime().ToString("o")}')" +
$"| distinct RelativeUri"));
_ingestCursor = !_instance.FileObjects.Any(FileStatus.succeeded) ? "" : Endpoint.Cursor;
$"| distinct RelativeUri").Result);
if (successUris.Count > 0)
{
// to avoid time gaps between query and ingestion
_ingestCursor = Math.Min(queryTime, Convert.ToInt64(Endpoint.GetCursor())).ToString();
}
Log.Debug($"files ingested:{successUris.Count}");
foreach (string uriFile in successUris)
@ -419,11 +463,11 @@ namespace CollectSFData.Kusto
if (_config.KustoPurge.ToLower().Split(' ').Length > 1)
{
results = Endpoint.Query($".show tables | where TableName contains {_config.KustoPurge.ToLower().Split(' ')[1]} | project TableName");
results = Endpoint.QueryAsCsvAsync($".show tables | where TableName contains {_config.KustoPurge.ToLower().Split(' ')[1]} | project TableName").Result;
}
else
{
results = Endpoint.Query(".show tables | project TableName");
results = Endpoint.QueryAsCsvAsync(".show tables | project TableName").Result;
}
Log.Info($"current table list:", results);

Просмотреть файл

@ -5,58 +5,45 @@
using CollectSFData.Azure;
using CollectSFData.Common;
using Kusto.Cloud.Platform.Utils;
using Kusto.Data;
using Kusto.Data.Common;
using Kusto.Data.Net.Client;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
namespace CollectSFData.Kusto
{
public class KustoEndpoint
{
public string Cursor;
private static ICslAdminProvider _kustoAdminClient;
private static ICslQueryProvider _kustoQueryClient;
private static int maxKustoClientTimeMs = 300 * 1000;
private static ICslAdminProvider _adminClient;
private static ICslAdminProvider _adminIngestClient;
private static int _maxKustoClientTimeMs = 300 * 1000;
private static ICslQueryProvider _queryClient;
private readonly CustomTaskManager _kustoTasks = new CustomTaskManager();
private AzureResourceManager _arm;
private ConfigurationOptions _config;
private Http _httpClient = Http.ClientFactory();
public string ClusterIngestUrl { get; set; }
public string ClusterName { get; private set; }
public KustoConnectionStringBuilder DatabaseConnection { get; set; }
public string DatabaseName { get; set; }
public KustoRestTable ExtendedPropertiesTable { get; private set; } = new KustoRestTable();
public List<string> ExtendedResults { get; private set; }
public string HostName { get; private set; }
public string IdentityToken { get; private set; }
public KustoConnectionStringBuilder IngestConnection { get; set; }
public IngestionResourcesSnapshot IngestionResources { get; set; }
public bool LogLargeResults { get; set; } = true;
public KustoConnectionStringBuilder ManagementConnection { get; set; }
public string ManagementUrl { get; private set; }
public KustoRestTable PrimaryResultTable { get; private set; } = new KustoRestTable();
public List<KustoRestTable> QueryResultTables { get; private set; } = new List<KustoRestTable>();
public KustoRestResponseV1 ResponseDataSet { get; private set; } = new KustoRestResponseV1();
public string RestMgmtUri { get; private set; }
public string RestQueryUri { get; private set; }
public string TableName { get; private set; }
public KustoRestTableOfContentsV1 TableOfContents { get; private set; } = new KustoRestTableOfContentsV1();
private Timer adminTimer { get; set; }
private KustoConnectionStringBuilder ManagementConnection { get; set; }
private Timer queryTimer { get; set; }
private Timer _adminIngestTimer { get; set; }
private Timer _adminTimer { get; set; }
private Timer _queryTimer { get; set; }
private object _lockObj = new object();
public KustoEndpoint(ConfigurationOptions config)
{
@ -85,6 +72,15 @@ namespace CollectSFData.Kusto
RestMgmtUri = $"{ClusterIngestUrl}/v1/rest/mgmt";
RestQueryUri = $"{ManagementUrl}/v1/rest/query";
}
else if (Regex.IsMatch(_config.KustoCluster, Constants.LocalWebServerPattern))
{
Match matches = Regex.Match(_config.KustoCluster, Constants.LocalWebServerPattern);
DatabaseName = matches.Groups["databaseName"].Value;
TableName = _config.KustoTable;
ClusterName = _config.KustoCluster;
ManagementUrl = ClusterName;
ClusterIngestUrl = ClusterName;
}
else
{
string errMessage = $"invalid kusto url.";
@ -102,13 +98,13 @@ namespace CollectSFData.Kusto
_arm.Scopes = new List<string>() { $"{ClusterIngestUrl}/.default" };
}
if (_config.IsKustoConfigured() && _arm.Authenticate(throwOnError, ClusterIngestUrl))
if (_config.IsKustoConfigured() && _config.IsARMValid && _arm.Authenticate(throwOnError, ClusterIngestUrl))
{
if (_arm.ClientIdentity.IsAppRegistration)
{
Log.Info($"connecting to kusto with app registration {_config.AzureClientId}");
DatabaseConnection = new KustoConnectionStringBuilder(ClusterIngestUrl)
IngestConnection = new KustoConnectionStringBuilder(ClusterIngestUrl)
{
FederatedSecurity = true,
InitialCatalog = DatabaseName,
@ -130,10 +126,11 @@ namespace CollectSFData.Kusto
}
else
{
DatabaseConnection = new KustoConnectionStringBuilder(ClusterIngestUrl)
IngestConnection = new KustoConnectionStringBuilder(ClusterIngestUrl)
{
FederatedSecurity = true,
InitialCatalog = DatabaseName,
Authority = _config.AzureTenantId,
UserToken = _arm.BearerToken
};
@ -141,44 +138,111 @@ namespace CollectSFData.Kusto
{
FederatedSecurity = true,
InitialCatalog = DatabaseName,
Authority = _config.AzureTenantId,
UserToken = _arm.BearerToken
};
}
}
else
else if (Regex.IsMatch(_config.KustoCluster, Constants.KustoUrlPattern))
{
DatabaseConnection = new KustoConnectionStringBuilder(ClusterIngestUrl)
// use kusto federated security to connect to kusto directly and use kusto identity token instead of arm token
Log.Info($"connecting to kusto with kusto federated authentication.");
IngestConnection = new KustoConnectionStringBuilder(ClusterIngestUrl)
{
FederatedSecurity = true,
InitialCatalog = DatabaseName
InitialCatalog = DatabaseName,
Authority = _config.AzureTenantId
};
ManagementConnection = new KustoConnectionStringBuilder(ManagementUrl)
{
FederatedSecurity = true,
InitialCatalog = DatabaseName,
Authority = _config.AzureTenantId
};
}
else
{
// set up connection to localhost server
IngestConnection = new KustoConnectionStringBuilder(ClusterIngestUrl)
{
InitialCatalog = DatabaseName
};
ManagementConnection = new KustoConnectionStringBuilder(ManagementUrl)
{
InitialCatalog = DatabaseName
};
}
IdentityToken = RetrieveKustoIdentityToken();
IngestionResources = RetrieveIngestionResources();
if (!_config.IsIngestionLocal)
{
IdentityToken = RetrieveKustoIdentityToken();
IngestionResources = RetrieveIngestionResources();
}
}
public List<string> Command(string command)
public async Task<List<string>> CommandAsync(string command)
{
Log.Info($"command:{command}", ConsoleColor.Blue);
if (_kustoAdminClient == null)
return await _kustoTasks.TaskFunction((responseList) =>
{
_kustoAdminClient = KustoClientFactory.CreateCslAdminProvider(ManagementConnection);
// ingest commands are limited by cluster ingest capacity
lock (_lockObj)
{
ICslAdminProvider adminClient = CreateAdminClient();
return EnumerateResultsCsv(adminClient.ExecuteControlCommand(command));
}
}) as List<string>;
}
public ICslAdminProvider CreateAdminClient()
{
if (_adminClient == null)
{
_adminClient = KustoClientFactory.CreateCslAdminProvider(ManagementConnection);
}
if (adminTimer == null)
if (_adminTimer == null)
{
adminTimer = new Timer(DisposeAdminClient, null, maxKustoClientTimeMs, maxKustoClientTimeMs);
_adminTimer = new Timer(DisposeAdminClient, null, _maxKustoClientTimeMs, _maxKustoClientTimeMs);
}
adminTimer.Change(maxKustoClientTimeMs, maxKustoClientTimeMs);
return EnumerateResults(_kustoAdminClient.ExecuteControlCommand(command));
_adminTimer.Change(_maxKustoClientTimeMs, _maxKustoClientTimeMs);
return _adminClient;
}
public ICslAdminProvider CreateAdminIngestClient()
{
    // Lazily build the admin provider bound to the ingest connection and
    // (re)arm the idle timer that disposes it after _maxKustoClientTimeMs.
    _adminIngestClient = _adminIngestClient ?? KustoClientFactory.CreateCslAdminProvider(IngestConnection);
    _adminIngestTimer = _adminIngestTimer ?? new Timer(DisposeAdminIngestClient, null, _maxKustoClientTimeMs, _maxKustoClientTimeMs);

    // every call pushes the disposal window out again
    _adminIngestTimer.Change(_maxKustoClientTimeMs, _maxKustoClientTimeMs);
    return _adminIngestClient;
}
public ICslQueryProvider CreateQueryClient()
{
    // Lazily build the query provider bound to the management connection and
    // (re)arm the idle timer that disposes it after _maxKustoClientTimeMs.
    _queryClient = _queryClient ?? KustoClientFactory.CreateCslQueryProvider(ManagementConnection);
    _queryTimer = _queryTimer ?? new Timer(DisposeQueryClient, null, _maxKustoClientTimeMs, _maxKustoClientTimeMs);

    // every call pushes the disposal window out again
    _queryTimer.Change(_maxKustoClientTimeMs, _maxKustoClientTimeMs);
    return _queryClient;
}
public bool CreateTable(string tableName, string tableSchema)
@ -186,110 +250,135 @@ namespace CollectSFData.Kusto
if (!HasTable(tableName))
{
Log.Info($"creating table: {tableName}");
return Command($".create table ['{tableName}'] ( {tableSchema} )").Count > 0;
return CommandAsync($".create table ['{tableName}'] ( {tableSchema} )").Result.Count > 0;
}
return true;
}
/// <summary>
/// Creates the kusto database if it does not already exist. Persistence mode
/// is driven by configuration: default persisted path, custom persisted path,
/// or volatile (in-memory) database.
/// </summary>
/// <param name="databaseName">name of the database to create.</param>
/// <returns>true if the database already exists or was created successfully.</returns>
public bool CreateDatabase(string databaseName)
{
    if (HasDatabase(databaseName))
    {
        return true;
    }

    Log.Info($"creating database: {databaseName}");

    if (_config.DatabasePersistence && string.IsNullOrEmpty(_config.DatabasePersistencePath))
    {
        // no custom path configured: persist under the default container path
        string defaultPath = string.Format("@'{0}',@'{1}'", $"{Constants.StartOfDefaultDatabasePersistencePath}{databaseName}\\md", $"{Constants.StartOfDefaultDatabasePersistencePath}{databaseName}\\data");
        return ExecuteCreateDatabaseCommand(
            $".create database {databaseName} persist ( {defaultPath} )",
            $"Directory {Constants.StartOfDefaultDatabasePersistencePath} doesn't exist in docker container");
    }
    else if (_config.DatabasePersistence)
    {
        // custom persistence path supplied by configuration
        return ExecuteCreateDatabaseCommand(
            $".create database {databaseName} persist ( {_config.DatabasePersistencePath} )",
            $"Directory {_config.DatabasePersistencePath} isn't valid. Please provide a valid path in the container's C drive. example: '@'c:\\kustodata\\dbs\\customfolder\\DatabaseName\\md',@'c:\\kustodata\\dbs\\customfolder\\DatabaseName\\data''");
    }

    // no persistence requested: create a volatile (in-memory) database
    return ExecuteCreateDatabaseCommand($".create database {databaseName} volatile", null);
}

/// <summary>
/// Runs a '.create database' control command, translating a
/// DirectoryNotFoundException reported by the server into the supplied
/// friendly error message.
/// </summary>
/// <param name="command">the .create database control command to execute.</param>
/// <param name="directoryNotFoundMessage">message logged when the failure is a missing directory; null when not applicable.</param>
/// <returns>true when the command returned at least one result row.</returns>
private bool ExecuteCreateDatabaseCommand(string command, string directoryNotFoundMessage)
{
    try
    {
        return CommandAsync(command).Result.Count > 0;
    }
    catch (Exception ex)
    {
        // InnerException can be null (original code dereferenced it unconditionally)
        if (directoryNotFoundMessage != null
            && ex.InnerException != null
            && ex.InnerException.Message.Contains("DirectoryNotFoundException"))
        {
            Log.Error(directoryNotFoundMessage);
        }

        Log.Exception($"{ex}");
        return false;
    }
}
public bool DropTable(string tableName)
{
if (HasTable(tableName))
{
Log.Warning($"dropping table: {tableName}");
return Command($".drop table ['{tableName}'] ifexists skip-seal | project TableName | where TableName == '{tableName}'").Count == 0;
return CommandAsync($".drop table ['{tableName}'] ifexists skip-seal | project TableName | where TableName == '{tableName}'").Result.Count == 0;
}
return true;
}
/// <summary>
/// Queries the database for its current cursor value via a 'print' statement.
/// </summary>
/// <returns>the current cursor string, or null when the query returned no rows.</returns>
public string GetCursor()
{
    string currentCursor = QueryAsCsvAsync("print Cursor=current_cursor()").Result.FirstOrDefault();
    Log.Info($"returning cursor: {currentCursor}");
    return currentCursor;
}
/// <summary>
/// Checks whether a database with the given name exists. Kusto treats
/// database names as case insensitive, so the case-insensitive match is
/// authoritative; a warning is logged when the only match differs in
/// capitalization from the requested name.
/// </summary>
/// <param name="databaseName">name of the database to look for.</param>
/// <returns>true when a database matching the name (ignoring case) exists.</returns>
public bool HasDatabase(string databaseName)
{
    // Any() instead of Count() > 0: avoids enumerating the full result set
    bool caseInsensitiveResult = QueryAsCsvAsync($".show databases | project DatabaseName | where DatabaseName =~ '{databaseName}'").Result.Any();
    bool caseSensitiveResult = QueryAsCsvAsync($".show databases | project DatabaseName | where DatabaseName == '{databaseName}'").Result.Any();

    if (caseInsensitiveResult && !caseSensitiveResult)
    {
        Log.Warning($"database names are treated as case insensitive by default. a database with the name of '{databaseName}' with different capitalization already exists. the program will continue by using the pre-existing database.");
    }

    return caseInsensitiveResult;
}
public bool HasTable(string tableName)
{
return Query($".show tables | project TableName | where TableName == '{tableName}'").Count > 0;
return QueryAsCsvAsync($".show tables | project TableName | where TableName == '{tableName}'").Result.Count > 0;
}
/// <summary>
/// Ingests a csv stream directly into the given table using the supplied
/// ingestion mapping. Logs the target table for parity with IngestInline.
/// </summary>
/// <param name="tableName">destination table name.</param>
/// <param name="mapping">csv ingestion mapping to apply.</param>
/// <param name="stream">csv payload to ingest.</param>
/// <returns>true when the ingest command returned at least one result row.</returns>
public bool IngestInlineWithMapping(string tableName, string mapping, string stream)
{
    // log added for consistency with sibling IngestInline
    Log.Info($"inline ingesting data into table: {tableName} with mapping: {mapping}");
    return CommandAsync($".ingest inline into table ['{tableName}'] with (format='csv', ingestionMapping = '{mapping}') <| {stream}").Result.Count > 0;
}
public bool IngestInline(string tableName, string csv)
{
Log.Info($"inline ingesting data: {csv} into table: {tableName}");
return Command($".ingest inline into table ['{tableName}'] <| {csv}").Count > 0;
return CommandAsync($".ingest inline into table ['{tableName}'] <| {csv}").Result.Count > 0;
}
public List<string> Query(string query)
public async Task<List<string>> QueryAsCsvAsync(string query)
{
Log.Info($"query:{query}", ConsoleColor.Blue);
if (_kustoQueryClient == null)
{
_kustoQueryClient = KustoClientFactory.CreateCslQueryProvider(ManagementConnection);
}
if (queryTimer == null)
{
queryTimer = new Timer(DisposeQueryClient, null, maxKustoClientTimeMs, maxKustoClientTimeMs);
}
try
{
PrimaryResultTable = null;
QueryResultTables.Clear();
Cursor = null;
queryTimer.Change(maxKustoClientTimeMs, maxKustoClientTimeMs);
// unable to parse multiple tables v1 or v2 using kusto so using httpclient and rest
string requestBody = "{ \"db\": \"" + DatabaseName + "\", \"csl\": \"" + query + ";print Cursor=current_cursor()\" }";
string requestId = new Guid().ToString();
Dictionary<string, string> headers = new Dictionary<string, string>();
headers.Add("accept", "application/json");
headers.Add("host", HostName);
headers.Add("x-ms-client-request-id", requestId);
Log.Info($"query:", requestBody);
_httpClient.SendRequest(uri: RestQueryUri, authToken: _arm.BearerToken, jsonBody: requestBody, httpMethod: HttpMethod.Post, headers: headers);
ResponseDataSet = JsonConvert.DeserializeObject<KustoRestResponseV1>(_httpClient.ResponseStreamString);
if (!ResponseDataSet.HasData())
List<string> response = await _kustoTasks.TaskFunction((responseList) =>
{
Log.Info($"no tables:", ResponseDataSet);
return new List<string>();
}
ICslQueryProvider queryClient = CreateQueryClient();
IDataReader reader = queryClient.ExecuteQuery(query);
KustoRestTableOfContentsV1 toc = SetTableOfContents(ResponseDataSet);
if (toc.HasData)
{
SetExtendedProperties();
List<long> indexes = toc.Rows.Where(x => x.Kind.Equals("QueryResult")).Select(x => x.Ordinal).ToList();
foreach (long index in indexes)
if (reader == null)
{
KustoRestTable table = new KustoRestTable(ResponseDataSet.Tables[index]);
QueryResultTables.Add(table);
if (PrimaryResultTable == null)
{
PrimaryResultTable = table;
continue;
}
if (table.Columns.FirstOrDefault(x => x.ColumnName.Contains("Cursor")) != null)
{
Cursor = table.Records().FirstOrDefault()["Cursor"].ToString();
}
Log.Info($"no results:", ConsoleColor.DarkBlue);
return new List<string>();
}
Log.Debug($"table cursor: {Cursor}");
return PrimaryResultTable.RecordsCsv();
}
else
{
TableOfContents = new KustoRestTableOfContentsV1();
ExtendedPropertiesTable = new KustoRestTable();
PrimaryResultTable = new KustoRestTable(ResponseDataSet.Tables[0]);
ResponseDataSet.Tables.ForEach(x => QueryResultTables.Add(new KustoRestTable(x)));
List<string> results = EnumerateResultsCsv(reader);
reader.Close();
return results;
}) as List<string>;
return PrimaryResultTable.RecordsCsv();
}
return response;
}
catch (Exception e)
{
@ -298,43 +387,80 @@ namespace CollectSFData.Kusto
}
}
/// <summary>
/// Executes a query through the shared task pool and returns the results as
/// a list of records; returns null on failure or when the query produced no
/// reader.
/// </summary>
/// <param name="query">kusto query text to execute.</param>
/// <returns>records from the query, or null.</returns>
public async Task<KustoRestRecords> QueryAsListAsync(string query)
{
    Log.Info($"query:{query}", ConsoleColor.Blue);

    try
    {
        object taskResult = await _kustoTasks.TaskFunction((responseList) =>
        {
            ICslQueryProvider client = CreateQueryClient();
            IDataReader dataReader = client.ExecuteQuery(query);

            if (dataReader == null)
            {
                Log.Info($"no results:", ConsoleColor.DarkBlue);
                return null;
            }

            KustoRestRecords records = EnumerateResultsList(dataReader);
            dataReader.Close();
            return records;
        });

        return taskResult as KustoRestRecords;
    }
    catch (Exception e)
    {
        Log.Exception($"exception executing query: {query}\r\n{e}");
        return null;
    }
}
private static void DisposeAdminClient(object state)
{
if (_kustoAdminClient != null)
if (_adminClient != null)
{
Log.Info("Disposing kusto admin client");
_kustoAdminClient.Dispose();
_kustoAdminClient = null;
_adminClient.Dispose();
_adminClient = null;
}
}
// Timer callback: releases the cached admin ingest client when one exists.
// 'state' is the unused System.Threading.Timer callback argument.
private static void DisposeAdminIngestClient(object state)
{
    if (_adminIngestClient == null)
    {
        return;
    }

    Log.Info("Disposing kusto admin ingest client");
    _adminIngestClient.Dispose();
    _adminIngestClient = null;
}
private static void DisposeQueryClient(object state)
{
if (_kustoQueryClient != null)
if (_queryClient != null)
{
Log.Info("Disposing kusto query client");
_kustoQueryClient.Dispose();
_kustoQueryClient = null;
_queryClient.Dispose();
_queryClient = null;
}
}
private List<string> EnumerateResults(IDataReader reader)
private KustoRestTable CreateResponseTable(IDataReader reader)
{
KustoRestTable test = new KustoRestTable(reader);
return test;
}
private List<string> EnumerateResultsCsv(IDataReader reader)
{
int maxRecords = 1000;
int index = 0;
List<string> csvRecords = new List<string>();
while (reader.Read())
{
StringBuilder csvRecord = new StringBuilder();
for (int i = 0; i < reader.FieldCount; i++)
{
csvRecord.Append(reader.GetValue(i) + ",");
}
csvRecords.Add(csvRecord.ToString().TrimEnd(','));
}
KustoRestTable dataTable = CreateResponseTable(reader);
List<string> csvRecords = dataTable.RecordsCsv();
if (csvRecords.Count < maxRecords | LogLargeResults)
{
@ -352,35 +478,60 @@ namespace CollectSFData.Kusto
return csvRecords;
}
/// <summary>
/// Materializes the reader into a response table and returns its rows as a
/// list of records. Logs the full table for small result sets (or always,
/// when LogLargeResults is set); otherwise logs only the row count.
/// </summary>
/// <param name="reader">open data reader positioned before the first row.</param>
/// <returns>records materialized from the reader.</returns>
private KustoRestRecords EnumerateResultsList(IDataReader reader)
{
    int maxRecords = 1000;
    KustoRestTable dataTable = CreateResponseTable(reader);
    KustoRestRecords records = dataTable.RecordsList();

    // original used non-short-circuit '|' and interpolated a dead, always-empty
    // 'jsonRecords' local; the table object itself is what the logger serializes
    if (dataTable.Rows.Count < maxRecords || LogLargeResults)
    {
        Log.Info($"results:\r\n", ConsoleColor.DarkBlue, null, dataTable);
    }
    else
    {
        Log.Info($"results: {dataTable.Rows.Count}");
    }

    return records;
}
private IngestionResourcesSnapshot RetrieveIngestionResources()
{
// retrieve ingestion resources (queues and blob containers) with SAS from specified Kusto Ingestion service using supplied Access token
string requestBody = "{ \"csl\": \".get ingestion resources\" }";
string command = CslCommandGenerator.GenerateIngestionResourcesGetCommand();
ICslAdminProvider ingestClient = CreateAdminIngestClient();
IngestionResourcesSnapshot ingestionResources = new IngestionResourcesSnapshot();
_httpClient.SendRequest(RestMgmtUri, _arm.BearerToken, requestBody, HttpMethod.Post);
JObject responseJson = _httpClient.ResponseStreamJson;
IEnumerable<string> results = EnumerateResultsCsv(ingestClient.ExecuteControlCommand(command));
IEnumerable<JToken> tokens = responseJson.SelectTokens("Tables[0].Rows[?(@.[0] == 'SecuredReadyForAggregationQueue')]");
foreach (JToken token in tokens)
foreach (string result in results)
{
ingestionResources.IngestionQueues.Add((string)token.Last);
if (string.IsNullOrEmpty(result) || !result.Contains(','))
{
Log.Warning($"invalid ingestion resource: {result}");
continue;
}
string propertyName = result.Split(',')[0];
string propertyValue = result.Split(',')[1];
if (propertyName.Equals("SecuredReadyForAggregationQueue"))
{
ingestionResources.IngestionQueues.Add(propertyValue);
}
else if (propertyName.Equals("TempStorage"))
{
ingestionResources.TempStorageContainers.Add(propertyValue);
}
else if (propertyName.Equals("FailedIngestionsQueue"))
{
ingestionResources.FailureNotificationsQueue = propertyValue;
}
else if (propertyName.Equals("SuccessfulIngestionsQueue"))
{
ingestionResources.SuccessNotificationsQueue = propertyValue;
}
}
tokens = responseJson.SelectTokens("Tables[0].Rows[?(@.[0] == 'TempStorage')]");
foreach (JToken token in tokens)
{
ingestionResources.TempStorageContainers.Add((string)token.Last);
}
JToken singleToken = responseJson.SelectTokens("Tables[0].Rows[?(@.[0] == 'FailedIngestionsQueue')].[1]").FirstOrDefault();
ingestionResources.FailureNotificationsQueue = (string)singleToken;
singleToken = responseJson.SelectTokens("Tables[0].Rows[?(@.[0] == 'SuccessfulIngestionsQueue')].[1]").FirstOrDefault();
ingestionResources.SuccessNotificationsQueue = (string)singleToken;
Log.Info("ingestion resources:", ingestionResources);
return ingestionResources;
}
@ -388,74 +539,12 @@ namespace CollectSFData.Kusto
private string RetrieveKustoIdentityToken()
{
// retrieve kusto identity token that will be added to every ingest message
string requestBody = "{ \"csl\": \".get kusto identity token\" }";
string jsonPath = "Tables[0].Rows[*].[0]";
string command = CslCommandGenerator.GenerateKustoIdentityTokenGetCommand();
ICslAdminProvider ingestClient = CreateAdminIngestClient();
string identityToken = EnumerateResultsCsv(ingestClient.ExecuteControlCommand(command)).FirstOrDefault();
_httpClient.SendRequest(RestMgmtUri, _arm.BearerToken, requestBody, HttpMethod.Post);
JToken identityToken = _httpClient.ResponseStreamJson.SelectTokens(jsonPath).FirstOrDefault();
Log.Info("identityToken:", identityToken);
return ((string)identityToken);
}
private void SetExtendedProperties()
{
// extended properties stored in single 'Value' column as key value pair in json string
string tableName = "@ExtendedProperties";
if (TableOfContents.Rows.Any(x => x.Name.Equals(tableName)))
{
long index = TableOfContents.Rows.FirstOrDefault(x => x.Name.Equals(tableName)).Ordinal;
ExtendedPropertiesTable = new KustoRestTable(ResponseDataSet.Tables[index]);
}
}
private KustoRestTableOfContentsV1 SetTableOfContents(KustoRestResponseV1 responseDataSet)
{
KustoRestTableOfContentsV1 content = new KustoRestTableOfContentsV1();
if (responseDataSet == null || responseDataSet.Tables?.Length < 2)
{
Log.Debug($"no table of content table");
return content;
}
KustoRestResponseTableV1 tableOfContents = responseDataSet.Tables.Last();
for (int c = 0; c < tableOfContents.Columns.Length; c++)
{
content.Columns.Add(
new KustoRestTableOfContentsColumnV1
{
_index = c,
ColumnName = tableOfContents.Columns[c].ColumnName,
ColumnType = tableOfContents.Columns[c].ColumnType,
DataType = tableOfContents.Columns[c].DataType
});
}
for (int r = 0; r < tableOfContents.Rows.Length; r++)
{
Hashtable record = new Hashtable();
KustoRestTableOfContentsRowV1 row = new KustoRestTableOfContentsRowV1();
object[] rowFields = tableOfContents.Rows[r];
if (rowFields.Length != tableOfContents.Columns.Length)
{
Log.Error($"mismatch in column count and row count {rowFields.Count()} {tableOfContents.Columns.Length}");
return content;
}
row._index = r;
row.Id = rowFields[content.Columns.First(x => x.ColumnName.Equals("Id"))._index].ToString();
row.Kind = rowFields[content.Columns.First(x => x.ColumnName.Equals("Kind"))._index].ToString();
row.Name = rowFields[content.Columns.First(x => x.ColumnName.Equals("Name"))._index].ToString();
row.Ordinal = Convert.ToInt64(rowFields[content.Columns.First(x => x.ColumnName.Equals("Ordinal"))._index]);
row.PrettyName = rowFields[content.Columns.First(x => x.ColumnName.Equals("PrettyName"))._index].ToString();
content.Rows.Add(row);
}
return content;
}
Log.Info($"identityToken:{identityToken}");
return identityToken;
}
}
}

Просмотреть файл

@ -1,30 +0,0 @@
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
namespace CollectSFData.Kusto
{
public class KustoRestResponseColumnV1
{
public string ColumnName;
public string ColumnType;
public string DataType;
}
public class KustoRestResponseTableV1
{
public KustoRestResponseColumnV1[] Columns;
public object[][] Rows;
public string TableName;
public bool HasData() => Rows?.Length > 0;
}
public class KustoRestResponseV1
{
public KustoRestResponseTableV1[] Tables;
public bool HasData() => Tables?.Length > 0;
}
}

Просмотреть файл

@ -7,50 +7,84 @@ using CollectSFData.Common;
using Kusto.Cloud.Platform.Utils;
using System;
using System.Collections.Generic;
using System.Data;
using System.Globalization;
using System.Linq;
namespace CollectSFData.Kusto
{
public class KustoRestRecord : Dictionary<string, object> { }
public class KustoRestRecord : Dictionary<string, object>
{ }
public class KustoRestRecords : List<KustoRestRecord> { }
public class KustoRestRecords : List<KustoRestRecord>
{ }
public class KustoRestTable : KustoRestResponseTableV1
public class KustoRestTable : DataTable
{
public KustoRestTable(KustoRestResponseTableV1 table = null)
private IDataReader _reader;
public KustoRestTable(IDataReader reader) : base()
{
if (table != null)
_reader = reader;
DataTable schemaTable = reader.GetSchemaTable();
if (schemaTable == null)
{
TableName = table.TableName;
Columns = table.Columns;
Rows = table.Rows;
Log.Error("no schema table");
return;
}
TableName = schemaTable.TableName;
if (schemaTable.Columns.Count < 1)
{
Log.Error("no columns in table");
return;
}
for (int i = 0; i < reader.FieldCount; i++)
{
DataColumn newColumn = new DataColumn();
newColumn.ColumnName = reader.GetName(i);
newColumn.DataType = reader.GetFieldType(i);
Columns.Add(newColumn);
}
while (reader.Read())
{
object[] rowValues = new object[reader.FieldCount];
reader.GetValues(rowValues);
foreach (object value in rowValues)
{
Log.Info(value.ToString());
}
Rows.Add(rowValues);
}
}
public KustoRestRecords Records()
{
KustoRestRecords records = new KustoRestRecords();
if (Rows?.Length < 1)
if (Rows?.Count < 1)
{
Log.Info("no rows in table", ConsoleColor.White);
return records;
}
for (int r = 0; r < Rows.Length; r++)
for (int r = 0; r < Rows.Count; r++)
{
KustoRestRecord record = new KustoRestRecord();
object[] rowFields = Rows[r];
object[] rowFields = Rows[r].ItemArray;
if (rowFields.Length != Columns.Length)
if (rowFields.Length != Columns.Count)
{
Log.Error($"mismatch in column count and row count {rowFields.Count()} {Columns.Length}");
Log.Error($"mismatch in column count and row count {rowFields.Count()} {Columns.Count}");
return records;
}
for (int f = 0; f < rowFields.Length; f++)
{
string columnType = Columns[f].DataType.ToLower();
string columnType = Columns[f].DataType.ToString().ToLower();
string columnName = Columns[f].ColumnName;
if (columnType.Contains("string"))
@ -105,15 +139,45 @@ namespace CollectSFData.Kusto
{
List<string> results = new List<string>();
if (Rows?.Length < 1)
if (Rows?.Count < 1)
{
Log.Info("no rows in table", ConsoleColor.White);
return results;
}
Rows.ForEach(r => results.Add(string.Join(",", (Array.ConvertAll(r, ra => ra.ToString())))));
foreach (DataRow row in Rows)
{
results.Add(string.Join(",", (Array.ConvertAll<object, string>(row.ItemArray, ra => ra.ToString()))));
}
Log.Info($"returning {results.Count} record csv's", ConsoleColor.Cyan);
return results;
}
/// <summary>
/// Converts the table's rows into a list of records keyed by column name.
/// </summary>
/// <returns>one record per row; empty list when the table has no rows.</returns>
public KustoRestRecords RecordsList()
{
    KustoRestRecords records = new KustoRestRecords();

    if (Rows?.Count < 1)
    {
        Log.Info("no rows in table", ConsoleColor.White);
        return records;
    }

    foreach (DataRow dataRow in Rows)
    {
        KustoRestRecord record = new KustoRestRecord();
        for (int c = 0; c < Columns.Count; c++)
        {
            record.Add(Columns[c].ColumnName, dataRow[c]);
        }
        records.Add(record);
    }

    Log.Info($"returning {records.Count} records", ConsoleColor.Cyan);
    return records;
}
}
}

Просмотреть файл

@ -1,36 +0,0 @@
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
using System.Collections.Generic;
namespace CollectSFData.Kusto
{
public class KustoRestTableOfContentsColumnV1 : KustoRestResponseColumnV1
{
public int _index;
}
public class KustoRestTableOfContentsRowV1
{
public int _index;
public string Id;
public string Kind;
public string Name;
public long Ordinal;
public string PrettyName;
public override string ToString()
{
return $"{Ordinal},{Kind},{Name},{Id},{PrettyName}";
}
}
public class KustoRestTableOfContentsV1
{
public List<KustoRestTableOfContentsColumnV1> Columns { get; set; } = new List<KustoRestTableOfContentsColumnV1>();
public bool HasData => Rows.Count > 0;
public List<KustoRestTableOfContentsRowV1> Rows { get; set; } = new List<KustoRestTableOfContentsRowV1>();
}
}