commit 5c0a94a64f4bd07b387e6d4120df4e488458c53a
Author: minwal <43315226+minwal@users.noreply.github.com>
Date: Sun Feb 21 12:35:40 2021 +1100
Initial check-in of sample logs data generator tool
diff --git a/.github/workflows/dotnet.yml b/.github/workflows/dotnet.yml
new file mode 100644
index 0000000..52894c5
--- /dev/null
+++ b/.github/workflows/dotnet.yml
@@ -0,0 +1,25 @@
+name: .NET
+
+on:
+ push:
+ branches: [ main ]
+ pull_request:
+ branches: [ main ]
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup .NET core
+ uses: actions/setup-dotnet@v1.7.2
+ with:
+ dotnet-version: 3.1
+ - name: Restore dependencies
+ run: dotnet restore
+ - name: Build
+ run: dotnet build --no-restore
+ - name: Test
+ run: dotnet test --no-build --verbosity normal
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..5ac1510
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,351 @@
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
+
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Mono auto generated files
+mono_crash.*
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+[Aa][Rr][Mm]/
+[Aa][Rr][Mm]64/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+[Ll]ogs/
+
+# Visual Studio 2015/2017 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+Generated\ Files/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUnit
+*.VisualState.xml
+TestResult.xml
+nunit-*.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# Benchmark Results
+BenchmarkDotNet.Artifacts/
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+# StyleCop
+StyleCopReport.xml
+
+# Files built by Visual Studio
+*_i.c
+*_p.c
+*_h.h
+*.ilk
+*.meta
+*.obj
+*.iobj
+*.pch
+*.pdb
+*.ipdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*_wpftmp.csproj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# Visual Studio Trace Files
+*.e2e
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# AxoCover is a Code Coverage Tool
+.axoCover/*
+!.axoCover/settings.json
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# NuGet Symbol Packages
+*.snupkg
+# The packages folder can be ignored because of Package Restore
+**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/[Pp]ackages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+*.appx
+*.appxbundle
+*.appxupload
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!?*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+ServiceFabricBackup/
+*.rptproj.bak
+
+# SQL Server files
+*.mdf
+*.ldf
+*.ndf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+*.rptproj.rsuser
+*- [Bb]ackup.rdl
+*- [Bb]ackup ([0-9]).rdl
+*- [Bb]ackup ([0-9][0-9]).rdl
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# CodeRush personal settings
+.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+*.tss
+
+# Telerik's JustMock configuration file
+*.jmconfig
+
+# BizTalk build output
+*.btp.cs
+*.btm.cs
+*.odx.cs
+*.xsd.cs
+
+# OpenCover UI analysis results
+OpenCover/
+
+# Azure Stream Analytics local run output
+ASALocalRun/
+
+# MSBuild Binary and Structured Log
+*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+.mfractor/
+
+# Local History for Visual Studio
+.localhistory/
+
+# BeatPulse healthcheck temp database
+healthchecksdb
+
+# Backup folder for Package Reference Convert tool in Visual Studio 2017
+MigrationBackup/
+
+# Ionide (cross platform F# VS Code tools) working folder
+.ionide/
+Properties/launchSettings.json
diff --git a/AzureBatchTemplates/generator-job.json b/AzureBatchTemplates/generator-job.json
new file mode 100644
index 0000000..946ff96
--- /dev/null
+++ b/AzureBatchTemplates/generator-job.json
@@ -0,0 +1,71 @@
+{
+ "templateMetadata": {
+ "description": "Job template for Generator job"
+ },
+ "parameters": {
+ "jobName": {
+ "type": "string",
+ "metadata": {
+ "description": "Job name"
+ }
+ },
+ "poolId": {
+ "type": "string",
+ "metadata": {
+ "description": "Pool id"
+ }
+ },
+ "partitionStart": {
+ "type": "int",
+ "defaultValue": 0,
+ "metadata": {
+ "description": "Partition start index"
+ }
+ },
+ "partitionEnd": {
+ "type": "int",
+ "defaultValue": 6,
+ "metadata": {
+ "description": "Partition end index"
+ }
+ },
+ "size": {
+ "type": "string",
+ "defaultValue": "HundredTB",
+ "metadata": {
+ "description": "Size"
+ }
+ },
+ "outputContainerUrl": {
+ "type": "string",
+ "defaultValue": "YOUR BLOB CONNECTION STRING",
+ "metadata": {
+ "description": "Output container URL"
+ }
+ }
+ },
+ "job": {
+ "type": "Microsoft.Batch/batchAccounts/jobs",
+ "properties": {
+ "id": "[parameters('jobName')]",
+ "onAllTasksComplete": "terminateJob",
+ "poolInfo": {
+ "poolId": "[parameters('poolId')]"
+ },
+ "taskFactory": {
+ "type": "parametricSweep",
+ "parameterSets": [
+ {
+ "start": "[parameters('partitionStart')]",
+ "end": "[parameters('partitionEnd')]",
+ "step": 1
+ }
+ ],
+ "repeatTask": {
+ "displayName": "Task for partition {0}",
+ "commandLine": "%AZ_BATCH_APP_PACKAGE_GENERATOR#1.0%/publish/BenchmarkLogGenerator.exe -output:AzureStorage -cc:\"[parameters('outputContainerUrl')]\" -size:[parameters('size')] -partition:{0}"
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
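With the default parameter values above, the parametric sweep expands into one task per partition index from 0 through 6. The task for partition 0, for example, runs the command line

    %AZ_BATCH_APP_PACKAGE_GENERATOR#1.0%/publish/BenchmarkLogGenerator.exe -output:AzureStorage -cc:"YOUR BLOB CONNECTION STRING" -size:HundredTB -partition:0

with the placeholder replaced by a real storage connection string.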
diff --git a/AzureBatchTemplates/generator-pool.json b/AzureBatchTemplates/generator-pool.json
new file mode 100644
index 0000000..ab4cd2b
--- /dev/null
+++ b/AzureBatchTemplates/generator-pool.json
@@ -0,0 +1,43 @@
+{
+ "templateMetadata": {
+ "description": "Pool template for Generator app"
+ },
+ "parameters": {
+ "poolId": {
+ "type": "string",
+ "metadata": {
+ "description": "Id of Azure Batch pool"
+ }
+ },
+ "vmDedicatedCount": {
+ "type": "int",
+ "defaultValue": 7,
+ "metadata": {
+ "description": "The number of dedicated virtual machines"
+ }
+ }
+ },
+ "pool": {
+ "type": "Microsoft.Batch/batchAccounts/pools",
+ "apiVersion": "2019-03-01",
+ "properties": {
+ "id": "[parameters('poolId')]",
+ "virtualMachineConfiguration": {
+ "imageReference": {
+ "publisher": "MicrosoftWindowsServer",
+ "offer": "WindowsServer",
+ "sku": "2019-Datacenter"
+ },
+ "nodeAgentSKUId": "batch.node.windows amd64"
+ },
+ "vmSize": "Standard_D64_v3",
+ "targetDedicatedNodes": "[parameters('vmDedicatedCount')]",
+ "applicationPackageReferences": [
+ {
+ "applicationId": "GENERATOR",
+ "version": "1.0"
+ }
+ ]
+ }
+ }
+}
\ No newline at end of file
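Both templates follow the Azure Batch CLI extensions template format. Assuming that extension is installed and a Batch account is set as the default, a plausible way to stand up the pool and submit the job would be

    az batch pool create --template AzureBatchTemplates/generator-pool.json
    az batch job create --template AzureBatchTemplates/generator-job.json

supplying the mandatory poolId and jobName values through the extension's --parameters option; the GENERATOR application package (version 1.0) referenced by the pool must already be uploaded to the Batch account.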
diff --git a/BenchmarkLogGenerator.csproj b/BenchmarkLogGenerator.csproj
new file mode 100644
index 0000000..fd34f34
--- /dev/null
+++ b/BenchmarkLogGenerator.csproj
@@ -0,0 +1,13 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>netcoreapp3.1</TargetFramework>
+  </PropertyGroup>
+
+
+
+
+
+
+</Project>
diff --git a/BenchmarkLogGenerator.sln b/BenchmarkLogGenerator.sln
new file mode 100644
index 0000000..2b0ccc1
--- /dev/null
+++ b/BenchmarkLogGenerator.sln
@@ -0,0 +1,25 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.29911.84
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BenchmarkLogGenerator", "BenchmarkLogGenerator.csproj", "{A2AC39C5-ABB4-4F3A-A404-1F62E0349AC6}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {A2AC39C5-ABB4-4F3A-A404-1F62E0349AC6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A2AC39C5-ABB4-4F3A-A404-1F62E0349AC6}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A2AC39C5-ABB4-4F3A-A404-1F62E0349AC6}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A2AC39C5-ABB4-4F3A-A404-1F62E0349AC6}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {98FD40EF-A52C-42D9-973F-182B6B3CB9E6}
+ EndGlobalSection
+EndGlobal
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..f9ba8cf
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,9 @@
+# Microsoft Open Source Code of Conduct
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+
+Resources:
+
+- [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
+- [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
+- Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
diff --git a/CommandLineArgs.cs b/CommandLineArgs.cs
new file mode 100644
index 0000000..1296b5f
--- /dev/null
+++ b/CommandLineArgs.cs
@@ -0,0 +1,46 @@
+using BenchmarkLogGenerator.Utilities;
+using System;
+using System.Collections.Generic;
+
+namespace BenchmarkLogGenerator
+{
+ internal class CommandLineArgs
+ {
+ [CommandLineArg(
+ "output",
+ "Where the output should be written to. Valid options are: LocalDisk, AzureStorage or EventHub",
+ Mandatory = true)]
+ public WriterType outputType= WriterType.LocalDisk;
+
+ [CommandLineArg(
+ "localPath",
+ "The root folder for the output",
+ Mandatory = false)]
+ public string localPath = null;
+
+ [CommandLineArg(
+ "azureStorageAccountConnections",
+ "A comma separated list of Azure storage account connections",
+ ShortName = "cc", Mandatory = false)]
+ public string blobConnectionString = null;
+
+ [CommandLineArg(
+ "eventHubConnection",
+ "The connection string for Azure EventHub",
+ ShortName = "ehc", Mandatory = false)]
+ public string eventHubConnectionString = null;
+
+ [CommandLineArg(
+ "size",
+ "The output size, possible values are OneGB, OneTB, HundredTB",
+ Mandatory = false, DefaultValue = BenchmarkDataSize.OneGB)]
+ public BenchmarkDataSize size = BenchmarkDataSize.OneGB;
+
+ [CommandLineArg(
+ "partition",
+ "The partition id, possible values are -1 to 9, where -1 means single partition",
+ Mandatory = false, ShortName = "p", DefaultValue = -1)]
+ public int partition = -1;
+
+ }
+}
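Given the -name:value flag syntax used in the Azure Batch job template above, a local run of the generator would presumably look something like

    BenchmarkLogGenerator.exe -output:LocalDisk -localPath:C:\BenchmarkLogs -size:OneGB

where C:\BenchmarkLogs is an illustrative output folder, not a path taken from this commit.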
diff --git a/Data/Logs.cs b/Data/Logs.cs
new file mode 100644
index 0000000..25fdd8d
--- /dev/null
+++ b/Data/Logs.cs
@@ -0,0 +1,163 @@
+namespace BenchmarkLogGenerator.Data
+{
+ class Logs
+ {
+ public static readonly string[] IngestionLogs = new string[]
+ {
+ "AadValidator.ValidateAudienceImpl.{0}: {1} Audiences=https://{2}",
+ @"Audience 'https://{2}' matches the valid {0} audience regex '^https://[\w\.\-]+\.{1}\.windows\.net/?$'",
+ "AadValidator.ValidateIssuerImpl.{0}: Start {1} Issuer=https://{2}/",
+ "Gateway {0} resolved primary {1} for service 'fabric://management.admin.svc/ - ': 'net.tcp://{2}/mgmt'",
+ "ResponseStreamEncoder {0}: {1}: State='enabled' Reason='Accept-Encoding set to gzip' of https://{2}",
+ "OwinResponseStreamCompressor: State='enabled' Reason='Accept-Encoding set to deflate'",
+ "GetDataPullJobsOperation.OperationAsync.{0}: Downloaded '{1}' messages from https://{2} on different queues"
+ };
+
+ public const string IngestCommand = "$$IngestionCommand table={0} format={1}";
+ public const string DownloadEvent = "Downloading file path: {0}";
+ public const string IngestionCompletion = "IngestionCompletionEvent: finished ingestion file path: {0}";
+ public const string CompletedMessage = "Completion Report (HttpPost.ExecuteAsync): Completed with HttpResponseMessage StatusCode={0}";
+
+ public static readonly string[] FileFormats = new string[] { "csv", "json", "parquet", "avro" };
+
+ public static readonly string[] StatusCodes = new string[]
+ {
+ "'OK (200)'",
+ "'Request Timeout (408)'",
+ "'Internal Server Error (500)'"
+ };
+
+ //10
+ public static readonly string[] StackTraces = new string[]
+ {
+ @" at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in C:\Src\Tools\BenchmarkLogGenerator\Flows\BootFlow.cs:line 85",
+ @" at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in C:\Src\Tools\BenchmarkLogGenerator\Flows\BootFlow.cs:line 47",
+ @" at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in C:\Src\Tools\BenchmarkLogGenerator\Scheduler.cs:line 74",
+ @" at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in C:\Src\Tools\BenchmarkLogGenerator\Scheduler.cs:line 112",
+ @" at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in C:\Src\Tools\BenchmarkLogGenerator\Scheduler.cs:line 137",
+ @" at BenchmarkLogGenerator.Scheduler.Run() in C:\Src\Tools\BenchmarkLogGenerator\Scheduler.cs:line 28",
+ @" at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in C:\Src\Tools\BenchmarkLogGenerator\Generator.cs:line 84",
+ @" at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in C:\Src\Tools\BenchmarkLogGenerator\Generator.cs:line 74",
+ @" at System.Threading.ThreadHelper.ThreadStart_Context(Object state)",
+ @" at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)"
+ };
+
+ //104
+ public static readonly string[] ExceptionTypes = new string[]
+ {
+ "System.AccessViolation",
+ "System.AppDomainUnloaded",
+ "System.Argument",
+ "System.Arithmetic",
+ "System.ArrayTypeMismatch",
+ "System.BadImageFormat",
+ "System.CannotUnloadAppDomain",
+ "System.ContextMarshal",
+ "System.DataMisaligned",
+ "System.ExecutionEngine",
+ "System.Format",
+ "System.IndexOutOfRange",
+ "System.InsufficientExecutionStack",
+ "System.InvalidCast",
+ "System.InvalidOperation",
+ "System.InvalidProgram",
+ "System.MemberAccess",
+ "System.MulticastNotSupported",
+ "System.NotImplemented",
+ "System.NotSupported",
+ "System.NullReference",
+ "System.OperationCanceled",
+ "System.OutOfMemory",
+ "System.Rank",
+ "System.StackOverflow",
+ "System.Timeout",
+ "System.TypeInitialization",
+ "System.TypeLoad",
+ "System.TypeUnloaded",
+ "System.UnauthorizedAccess",
+ "System.UriTemplateMatch",
+ "System.Activities.Validation",
+ "System.Collections.Generic.KeyNotFound",
+ "System.ComponentModel.License",
+ "System.ComponentModel.Warning",
+ "System.ComponentModel.Design.Serialization.CodeDomSerializer",
+ "System.Configuration.Configuration",
+ "System.Configuration.Install.Install",
+ "System.Data.Data",
+ "System.Data.DBConcurrency",
+ "System.Data.OperationAborted",
+ "System.Data.OracleClient.Oracle",
+ "System.Data.SqlTypes.SqlType",
+ "System.Deployment.Application.Deployment",
+ "System.DirectoryServices.AccountManagement.Principal",
+ "System.Drawing.Printing.InvalidPrinter",
+ "System.EnterpriseServices.Registration",
+ "System.EnterpriseServices.ServicedComponent",
+ "System.IdentityModel.LimitExceeded",
+ "System.IdentityModel.SecurityMessageSerialization",
+ "System.IdentityModel.Tokens.SecurityToken",
+ "System.IO.InternalBufferOverflow",
+ "System.IO.InvalidData",
+ "System.IO.IO",
+ "System.Management.Management",
+ "System.Printing.Print",
+ "System.Reflection.AmbiguousMatch",
+ "System.Reflection.ReflectionTypeLoad",
+ "System.Resources.MissingManifestResource",
+ "System.Resources.MissingSatelliteAssembly",
+ "System.Runtime.InteropServices.External",
+ "System.Runtime.InteropServices.InvalidComObject",
+ "System.Runtime.InteropServices.InvalidOleVariantType",
+ "System.Runtime.InteropServices.MarshalDirective",
+ "System.Runtime.InteropServices.SafeArrayRankMismatch",
+ "System.Runtime.InteropServices.SafeArrayTypeMismatch",
+ "System.Runtime.Remoting.Remoting",
+ "System.Runtime.Remoting.Server",
+ "System.Runtime.Serialization.Serialization",
+ "System.Security.HostProtection",
+ "System.Security.Security",
+ "System.Security.Verification",
+ "System.Security.XmlSyntax",
+ "System.Security.Authentication.Authentication",
+ "System.Security.Cryptography.Cryptographic",
+ "System.Security.Policy.Policy",
+ "System.Security.Principal.IdentityNotMapped",
+ "System.ServiceModel.Dispatcher.InvalidBodyAccess",
+ "System.ServiceModel.Dispatcher.MultipleFilterMatches",
+ "System.ServiceProcess.Timeout",
+ "System.Threading.AbandonedMutex",
+ "System.Threading.SemaphoreFull",
+ "System.Threading.SynchronizationLock",
+ "System.Threading.ThreadAbort",
+ "System.Threading.ThreadInterrupted",
+ "System.Threading.ThreadStart",
+ "System.Threading.ThreadState",
+ "System.Transactions.Transaction",
+ "System.Web.Caching.DatabaseNotEnabledForNotification",
+ "System.Web.Caching.TableNotEnabledForNotification",
+ "System.Web.Management.SqlExecution",
+ "System.Web.Services.Protocols.Soap",
+ "System.Windows.Automation.ElementNotAvailable",
+ "System.Windows.Data.ValueUnavailable",
+ "System.Windows.Markup.XamlParse",
+ "System.Windows.Media.InvalidWmpVersion",
+ "System.Windows.Media.Animation.Animation",
+ "System.Workflow.Activities.EventDeliveryFailed",
+ "System.Workflow.Activities.WorkflowAuthorization",
+ "System.Workflow.Runtime.Hosting.Persistence",
+ "System.Workflow.Runtime.Tracking.TrackingProfileDeserialization",
+ "System.Xml.Xml",
+ "System.Xml.Schema.XmlSchema",
+ "System.Xml.XPath.XPath",
+ "System.Xml.Xsl.Xslt",
+ };
+
+ public readonly static string ExceptionHeader = @"Exception={0};
+ HResult=0x{1};
+ Message=exception happened;
+ Source=BenchmarkLogGenerator;
+ StackTrace:";
+
+ public const string CriticalMessage = "\"$$ALERT[NativeCrash]: Unexpected string size: 'single string size={0}, offsets array size={1}, string idx={2}, offsets32[idx+1]={3}, offsets32[idx]={4}'\"";
+ }
+}
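The constants above are .NET composite format strings. A minimal sketch of how one of them could be expanded (the table name, format index, and use of string.Format are illustrative assumptions, not code from this commit):

    using BenchmarkLogGenerator.Data;

    // Illustrative only: fill the IngestCommand template with a table name from
    // Names.Tables and a file format from Logs.FileFormats.
    string msg = string.Format(Logs.IngestCommand, "dogEvents", Logs.FileFormats[1]);
    // msg == "$$IngestionCommand table=dogEvents format=json"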
diff --git a/Data/Names.cs b/Data/Names.cs
new file mode 100644
index 0000000..198825a
--- /dev/null
+++ b/Data/Names.cs
@@ -0,0 +1,2367 @@
+namespace BenchmarkLogGenerator.Data
+{
+ class Names
+ {
+ public static string[] ingestionComponents = new string[]{
+ "INGESTOR_GATEWAY",
+ "DOWNLOADER",
+ "INGESTOR_EXECUTER",
+ "FINALIZER"
+ };
+
+ //607
+ public static string[] Tables = new string[]
+ {
+ "babiesEvents",
+ "babyEvents",
+ "backEvents",
+ "badgeEvents",
+ "bagEvents",
+ "baitEvents",
+ "balanceEvents",
+ "ballEvents",
+ "balloonEvents",
+ "ballsEvents",
+ "bananaEvents",
+ "bandEvents",
+ "baseEvents",
+ "baseballEvents",
+ "basinEvents",
+ "basketEvents",
+ "basketballEvents",
+ "batEvents",
+ "bathEvents",
+ "battleEvents",
+ "beadEvents",
+ "beamEvents",
+ "beanEvents",
+ "bearEvents",
+ "bearsEvents",
+ "beastEvents",
+ "bedEvents",
+ "bedroomEvents",
+ "bedsEvents",
+ "beeEvents",
+ "beefEvents",
+ "beetleEvents",
+ "beggarEvents",
+ "beginnerEvents",
+ "behaviorEvents",
+ "beliefEvents",
+ "believeEvents",
+ "bellEvents",
+ "bellsEvents",
+ "berryEvents",
+ "bikeEvents",
+ "bikesEvents",
+ "birdEvents",
+ "birdsEvents",
+ "birthEvents",
+ "birthdayEvents",
+ "bitEvents",
+ "biteEvents",
+ "bladeEvents",
+ "bloodEvents",
+ "blowEvents",
+ "boardEvents",
+ "boatEvents",
+ "boatsEvents",
+ "bodyEvents",
+ "bombEvents",
+ "boneEvents",
+ "bookEvents",
+ "booksEvents",
+ "bootEvents",
+ "borderEvents",
+ "bottleEvents",
+ "boundaryEvents",
+ "boxEvents",
+ "boyEvents",
+ "boysEvents",
+ "brainEvents",
+ "brakeEvents",
+ "branchEvents",
+ "brassEvents",
+ "breadEvents",
+ "breakfastEvents",
+ "breathEvents",
+ "brickEvents",
+ "bridgeEvents",
+ "brotherEvents",
+ "brothersEvents",
+ "brushEvents",
+ "bubbleEvents",
+ "bucketEvents",
+ "buildingEvents",
+ "bulbEvents",
+ "bunEvents",
+ "burnEvents",
+ "burstEvents",
+ "bushesEvents",
+ "businessEvents",
+ "butterEvents",
+ "buttonEvents",
+ "dadEvents",
+ "daughterEvents",
+ "dayEvents",
+ "deathEvents",
+ "debtEvents",
+ "decisionEvents",
+ "deerEvents",
+ "degreeEvents",
+ "designEvents",
+ "desireEvents",
+ "deskEvents",
+ "destructionEvents",
+ "detailEvents",
+ "developmentEvents",
+ "digestionEvents",
+ "dimeEvents",
+ "dinnerEvents",
+ "dinosaursEvents",
+ "directionEvents",
+ "dirtEvents",
+ "discoveryEvents",
+ "discussionEvents",
+ "diseaseEvents",
+ "disgustEvents",
+ "distanceEvents",
+ "distributionEvents",
+ "divisionEvents",
+ "dockEvents",
+ "doctorEvents",
+ "dogEvents",
+ "dogsEvents",
+ "dollEvents",
+ "dollsEvents",
+ "donkeyEvents",
+ "doorEvents",
+ "downtownEvents",
+ "drainEvents",
+ "drawerEvents",
+ "dressEvents",
+ "drinkEvents",
+ "drivingEvents",
+ "dropEvents",
+ "drugEvents",
+ "drumEvents",
+ "duckEvents",
+ "ducksEvents",
+ "dustEvents",
+ "faceEvents",
+ "factEvents",
+ "fairiesEvents",
+ "fallEvents",
+ "familyEvents",
+ "fanEvents",
+ "fangEvents",
+ "farmEvents",
+ "farmerEvents",
+ "fatherEvents",
+ "fatherEvents",
+ "faucetEvents",
+ "fearEvents",
+ "feastEvents",
+ "featherEvents",
+ "feelingEvents",
+ "feetEvents",
+ "fictionEvents",
+ "fieldEvents",
+ "fifthEvents",
+ "fightEvents",
+ "fingerEvents",
+ "fingerEvents",
+ "fireEvents",
+ "firemanEvents",
+ "fishEvents",
+ "flagEvents",
+ "flameEvents",
+ "flavorEvents",
+ "fleshEvents",
+ "flightEvents",
+ "flockEvents",
+ "floorEvents",
+ "flowerEvents",
+ "flowersEvents",
+ "flyEvents",
+ "fogEvents",
+ "foldEvents",
+ "foodEvents",
+ "footEvents",
+ "forceEvents",
+ "forkEvents",
+ "formEvents",
+ "fowlEvents",
+ "frameEvents",
+ "frictionEvents",
+ "friendEvents",
+ "friendsEvents",
+ "frogEvents",
+ "frogsEvents",
+ "frontEvents",
+ "fruitEvents",
+ "fuelEvents",
+ "furnitureEvents",
+ "galleyEvents",
+ "gameEvents",
+ "gardenEvents",
+ "gateEvents",
+ "geeseEvents",
+ "ghostEvents",
+ "giantsEvents",
+ "giraffeEvents",
+ "girlEvents",
+ "girlsEvents",
+ "glassEvents",
+ "gloveEvents",
+ "glueEvents",
+ "goatEvents",
+ "goldEvents",
+ "goldfishEvents",
+ "good-byeEvents",
+ "gooseEvents",
+ "governmentEvents",
+ "governorEvents",
+ "gradeEvents",
+ "grainEvents",
+ "grandfatherEvents",
+ "grandmotherEvents",
+ "grapeEvents",
+ "grassEvents",
+ "gripEvents",
+ "groundEvents",
+ "groupEvents",
+ "growthEvents",
+ "guideEvents",
+ "guitarEvents",
+ "gunEvents",
+ "hairEvents",
+ "haircutEvents",
+ "hallEvents",
+ "hammerEvents",
+ "handEvents",
+ "handsEvents",
+ "harborEvents",
+ "harmonyEvents",
+ "hatEvents",
+ "hateEvents",
+ "headEvents",
+ "healthEvents",
+ "hearingEvents",
+ "heartEvents",
+ "heatEvents",
+ "helpEvents",
+ "henEvents",
+ "hillEvents",
+ "historyEvents",
+ "hobbiesEvents",
+ "holeEvents",
+ "holidayEvents",
+ "homeEvents",
+ "honeyEvents",
+ "hookEvents",
+ "hopeEvents",
+ "hornEvents",
+ "horseEvents",
+ "horsesEvents",
+ "hoseEvents",
+ "hospitalEvents",
+ "hotEvents",
+ "hourEvents",
+ "houseEvents",
+ "housesEvents",
+ "humorEvents",
+ "hydrantEvents",
+ "iceEvents",
+ "icicleEvents",
+ "ideaEvents",
+ "impulseEvents",
+ "incomeEvents",
+ "increaseEvents",
+ "industryEvents",
+ "inkEvents",
+ "insectEvents",
+ "instrumentEvents",
+ "insuranceEvents",
+ "interestEvents",
+ "inventionEvents",
+ "ironEvents",
+ "islandEvents",
+ "kettleEvents",
+ "keyEvents",
+ "kickEvents",
+ "kissEvents",
+ "kiteEvents",
+ "kittenEvents",
+ "kittensEvents",
+ "kittyEvents",
+ "kneeEvents",
+ "knifeEvents",
+ "knotEvents",
+ "knowledgeEvents",
+ "laborerEvents",
+ "laceEvents",
+ "ladybugEvents",
+ "lakeEvents",
+ "lampEvents",
+ "landEvents",
+ "languageEvents",
+ "laughEvents",
+ "lawyerEvents",
+ "leadEvents",
+ "leafEvents",
+ "learningEvents",
+ "leatherEvents",
+ "legEvents",
+ "legsEvents",
+ "letterEvents",
+ "lettersEvents",
+ "lettuceEvents",
+ "levelEvents",
+ "libraryEvents",
+ "liftEvents",
+ "lightEvents",
+ "limitEvents",
+ "lineEvents",
+ "linenEvents",
+ "lipEvents",
+ "liquidEvents",
+ "listEvents",
+ "lizardsEvents",
+ "loafEvents",
+ "lockEvents",
+ "locketEvents",
+ "lookEvents",
+ "lossEvents",
+ "loveEvents",
+ "lowEvents",
+ "lumberEvents",
+ "lunchEvents",
+ "lunchroomEvents",
+ "machineEvents",
+ "magicEvents",
+ "maidEvents",
+ "mailboxEvents",
+ "manEvents",
+ "managerEvents",
+ "mapEvents",
+ "marbleEvents",
+ "markEvents",
+ "marketEvents",
+ "maskEvents",
+ "massEvents",
+ "matchEvents",
+ "mealEvents",
+ "measureEvents",
+ "meatEvents",
+ "meetingEvents",
+ "memoryEvents",
+ "menEvents",
+ "metalEvents",
+ "miceEvents",
+ "middleEvents",
+ "milkEvents",
+ "mindEvents",
+ "mineEvents",
+ "ministerEvents",
+ "mintEvents",
+ "minuteEvents",
+ "mistEvents",
+ "mittenEvents",
+ "momEvents",
+ "moneyEvents",
+ "monkeyEvents",
+ "monthEvents",
+ "moonEvents",
+ "morningEvents",
+ "motherEvents",
+ "motionEvents",
+ "mountainEvents",
+ "mouthEvents",
+ "moveEvents",
+ "muscleEvents",
+ "musicEvents",
+ "oatmealEvents",
+ "observationEvents",
+ "oceanEvents",
+ "offerEvents",
+ "officeEvents",
+ "oilEvents",
+ "operationEvents",
+ "opinionEvents",
+ "orangeEvents",
+ "orangesEvents",
+ "orderEvents",
+ "organizationEvents",
+ "ornamentEvents",
+ "ovenEvents",
+ "owlEvents",
+ "ownerEvents",
+ "rabbitEvents",
+ "rabbitsEvents",
+ "railEvents",
+ "railwayEvents",
+ "rainEvents",
+ "rainstormEvents",
+ "rakeEvents",
+ "rangeEvents",
+ "ratEvents",
+ "rateEvents",
+ "rayEvents",
+ "reactionEvents",
+ "readingEvents",
+ "reasonEvents",
+ "receiptEvents",
+ "recessEvents",
+ "recordEvents",
+ "regretEvents",
+ "relationEvents",
+ "religionEvents",
+ "representativeEvents",
+ "requestEvents",
+ "respectEvents",
+ "restEvents",
+ "rewardEvents",
+ "rhythmEvents",
+ "riceEvents",
+ "riddleEvents",
+ "rifleEvents",
+ "ringEvents",
+ "ringsEvents",
+ "riverEvents",
+ "roadEvents",
+ "robinEvents",
+ "rockEvents",
+ "rodEvents",
+ "rollEvents",
+ "roofEvents",
+ "roomEvents",
+ "rootEvents",
+ "roseEvents",
+ "routeEvents",
+ "rubEvents",
+ "ruleEvents",
+ "runEvents",
+ "sackEvents",
+ "sailEvents",
+ "saltEvents",
+ "sandEvents",
+ "scaleEvents",
+ "scarecrowEvents",
+ "scarfEvents",
+ "sceneEvents",
+ "scentEvents",
+ "schoolEvents",
+ "scienceEvents",
+ "scissorsEvents",
+ "screwEvents",
+ "seaEvents",
+ "seashoreEvents",
+ "seatEvents",
+ "secretaryEvents",
+ "seedEvents",
+ "selectionEvents",
+ "selfEvents",
+ "senseEvents",
+ "servantEvents",
+ "shadeEvents",
+ "shakeEvents",
+ "shameEvents",
+ "shapeEvents",
+ "sheepEvents",
+ "sheetEvents",
+ "shelfEvents",
+ "shipEvents",
+ "shirtEvents",
+ "shockEvents",
+ "shoeEvents",
+ "shoesEvents",
+ "shopEvents",
+ "showEvents",
+ "sideEvents",
+ "sidewalkEvents",
+ "signEvents",
+ "silkEvents",
+ "silverEvents",
+ "sinkEvents",
+ "sisterEvents",
+ "sistersEvents",
+ "sizeEvents",
+ "skateEvents",
+ "skinEvents",
+ "skirtEvents",
+ "skyEvents",
+ "slaveEvents",
+ "sleepEvents",
+ "sleetEvents",
+ "slipEvents",
+ "slopeEvents",
+ "smashEvents",
+ "smellEvents",
+ "smileEvents",
+ "smokeEvents",
+ "snailEvents",
+ "snailsEvents",
+ "snakeEvents",
+ "snakesEvents",
+ "sneezeEvents",
+ "snowEvents",
+ "soapEvents",
+ "societyEvents",
+ "sockEvents",
+ "sodaEvents",
+ "sofaEvents",
+ "sonEvents",
+ "songEvents",
+ "songsEvents",
+ "sortEvents",
+ "soundEvents",
+ "soupEvents",
+ "spaceEvents",
+ "spadeEvents",
+ "sparkEvents",
+ "spidersEvents",
+ "spongeEvents",
+ "spoonEvents",
+ "spotEvents",
+ "springEvents",
+ "spyEvents",
+ "squareEvents",
+ "squirrelEvents",
+ "stageEvents",
+ "stampEvents",
+ "starEvents",
+ "startEvents",
+ "statementEvents",
+ "stationEvents",
+ "steamEvents",
+ "steelEvents",
+ "stemEvents",
+ "stepEvents",
+ "stewEvents",
+ "stickEvents",
+ "sticksEvents",
+ "stitchEvents",
+ "stockingEvents",
+ "stomachEvents",
+ "stoneEvents",
+ "stopEvents",
+ "storeEvents",
+ "storyEvents",
+ "stoveEvents",
+ "strangerEvents",
+ "strawEvents",
+ "streamEvents",
+ "streetEvents",
+ "stretchEvents",
+ "stringEvents",
+ "structureEvents",
+ "substanceEvents",
+ "sugarEvents",
+ "suggestionEvents",
+ "suitEvents",
+ "summerEvents",
+ "sunEvents",
+ "supportEvents",
+ "surpriseEvents",
+ "sweaterEvents",
+ "swimEvents",
+ "swingEvents",
+ "systemEvents",
+ "tableEvents",
+ "tailEvents",
+ "talkEvents",
+ "tankEvents",
+ "tasteEvents",
+ "taxEvents",
+ "teachingEvents",
+ "teamEvents",
+ "teethEvents",
+ "temperEvents",
+ "tendencyEvents",
+ "tentEvents",
+ "territoryEvents",
+ "testEvents",
+ "textureEvents",
+ "theoryEvents",
+ "thingEvents",
+ "thingsEvents",
+ "thoughtEvents",
+ "threadEvents",
+ "thrillEvents",
+ "throatEvents",
+ "throneEvents",
+ "thumbEvents",
+ "thunderEvents",
+ "ticketEvents",
+ "tigerEvents",
+ "timeEvents",
+ "tinEvents",
+ "titleEvents",
+ "toadEvents",
+ "toeEvents",
+ "toesEvents",
+ "tomatoesEvents",
+ "tongueEvents",
+ "toothEvents",
+ "toothbrushEvents",
+ "toothpasteEvents",
+ "topEvents",
+ "touchEvents",
+ "townEvents",
+ "toyEvents",
+ "toysEvents",
+ "tradeEvents",
+ "trailEvents",
+ "trainEvents",
+ "trainsEvents",
+ "trampEvents",
+ "transportEvents",
+ "trayEvents",
+ "treatmentEvents"
+ };
+
+
+ public static string[] Components = new string[] {
+ "ACONFIGURATION",
+ "ACTIVEDIRECTORYCLIENT",
+ "ADMINSERVICE1",
+ "ADMINSERVICE2",
+ "ANALYTICSGATEWAYTRACE",
+ "BACKENDWATCHDOG1",
+ "BACKENDWATCHDOG2",
+ "BCONFIGURATION",
+ "BROKERTRACE",
+ "BUFFEREDRESPONSE",
+ "CACHE",
+ "CLOUDCACHE",
+ "CLOUDFILEPROVIDERTRACE",
+ "CLOUDREPORTSERVER",
+ "CLOUDSTORAGE",
+ "CLUSTERHEALTHWATCHDOG",
+ "COMMON1",
+ "COMMON2",
+ "COMMON3",
+ "COMMON4",
+ "COMMUNICATION",
+ "COMMUNICATIONRUNTIME",
+ "COMPANIONPROVIDER",
+ "CONFIGMANAGER",
+ "CONNECTION",
+ "CONTENTPROVIDER",
+ "CONTENTPROVIDERSCLOUD",
+ "COORDINATOR",
+ "DATAACCESS",
+ "DATABASE",
+ "DATABASEBACKUPREPLICATOR",
+ "DATABASEMANAGEMENT",
+ "DATAEXTENSION",
+ "DATASHAPEQUERYTRANSLATION",
+ "DIRECTORYSERVICE1",
+ "DIRECTORYSERVICE2",
+ "DISPATCHERTRACE",
+ "DOSP",
+ "ECSCLIENTTRACE",
+ "ENGINE",
+ "EVENTING",
+ "EXECUTIONLOG",
+ "EXECUTORTRACE",
+ "EXPLORESERVICE",
+ "EXPLORESERVICEWATCHDOG",
+ "EXTERNALDATASOURCETRACE",
+ "EXTERNALSERVICESWATCHDOG1",
+ "EXTERNALSERVICESWATCHDOG2",
+ "FABRICCLIENT",
+ "FABRICHOST",
+ "FABRICINTEGRATOR",
+ "GATEWAY",
+ "GATEWAYWATCHDOG",
+ "GLOBALSERVICE",
+ "HEALTHSERVICE",
+ "HTTPLISTENER",
+ "HTTPWEBREQUESTFLOW",
+ "INFO",
+ "INFOSERVICETRACE",
+ "INFOSYNONYMSERVICETRACE",
+ "INTEGRATIONDATABASE",
+ "INTEGRATIONSERVICEFLOWS",
+ "INTEGRATIONSERVICETRACE",
+ "INTERPRETATIONSERVICE",
+ "JOBSCHEDULER",
+ "JOBSCHEDULINGTRACER",
+ "LIBRARY",
+ "METRICCOLLECTORSERVICE",
+ "MODELOPERATOR",
+ "MODULARIZATIONFRAMEWORK",
+ "MONITOREDUTILS",
+ "MONITORING2",
+ "MULTIASYNCOPERATION",
+ "NODEAGENT",
+ "NOTIFICATIONS",
+ "ONPREMISES",
+ "PARSER",
+ "PROCESSING",
+ "PROVIDER",
+ "PROVIDERSCOMMON",
+ "PROVIDERSCOMMONTRACE",
+ "PUSHDATASERVICETRACE",
+ "REPLICATOR",
+ "REPORTAUTHORING",
+ "REPORTCOMMAND",
+ "REPORTCOMMANDHANDLER",
+ "REPORTCOMMONSERVICES",
+ "REPORTDATASHAPEPROCESSING",
+ "REPORTDUMPER",
+ "REPORTNETWORKING",
+ "REPORTRENDERING",
+ "REPORTSERVERSERVICETRACE",
+ "REQUESTBLOCKER",
+ "REQUESTPROTECTION",
+ "RESTAPROCESSORTRACE",
+ "RUNNER1",
+ "RUNNER2",
+ "RUNNER3",
+ "RUNNINGJOBS",
+ "RUNTIMEFABRIC",
+ "CONTENTPROVIDER1",
+ "CONTENTPROVIDER2",
+ "SEARCHMETRICSPROVIDER",
+ "SECRETMANAGER",
+ "SECURITYTRACE",
+ "SEEKABLESTREAMCREATORFACTORYTRACE",
+ "SEQUENCER",
+ "SERVERMANAGER",
+ "SERVICECOMMON",
+ "SERVICECONTRACTS",
+ "SERVICEWATCHDOG",
+ "SESSION",
+ "SPCLIENTTRACE",
+ "STATEMANAGERSERVICE",
+ "STATEMANAGERWATCHDOG",
+ "STORAGE",
+ "TENANTMANAGER",
+ "THROTTLER",
+ "THUMBNAILSSERVICETRACE",
+ "TRACING",
+ "UTILS",
+ "WATCHDOG",
+ "WEBAPICOMMON",
+ "WORKERSERVICE",
+ "WORKERSERVICECONTENT",
+ "XLTOOLKIT",
+ "XML",
+ "ZIPPACKAGETRACE",
+ };
+
+ public static string[] Sources = new string[]
+ {
+ "IMAGINEFIRST",
+ "CHALLENGEDIMAGINE",
+ "WITHCHALLENGED",
+ "FOLLOWINGWITH",
+ "TASKFOLLOWING",
+ "DESIGNTASK",
+ "CLOUDDESIGN",
+ "SERVICECLOUD",
+ "CAPABLESERVICE",
+ "ACCEPTINGCAPABLE",
+ "HUNDREDSACCEPTING",
+ "BILLIONSHUNDREDS",
+ "RECORDSBILLIONS",
+ "DAILYRECORDS",
+ "BASISDAILY",
+ "STORINGBASIS",
+ "THISSTORING",
+ "DATATHIS",
+ "RELIABLYDATA",
+ "WEEKSRELIABLY",
+ "MONTHSWEEKS",
+ "ANSWERINGMONTHS",
+ "COMPLEXANSWERING",
+ "ANALYTICSCOMPLEX",
+ "QUERIESANALYTICS",
+ "MAINTAININGQUERIES",
+ "LATENCYMAINTAINING",
+ "SECONDSLATENCY",
+ "DELAYSECONDS",
+ "FROMDELAY",
+ "INGESTIONFROM",
+ "QUERYINGESTION",
+ "FINALLYQUERY",
+ "COMPLETINGFINALLY",
+ "THOSECOMPLETING",
+ "EVENTHOSE",
+ "WHENEVEN",
+ "COMBINATIONWHEN",
+ "STRUCTUREDCOMBINATION",
+ "SEMISTRUCTURED",
+ "FREESEMI",
+ "TEXTFREE",
+ "UNDERTOOKTEXT",
+ "STARTEDUNDERTOOK",
+ "DEVELOPINGSTARTED",
+ "AZUREDEVELOPING",
+ "EXPLORERAZURE",
+ "UNDEREXPLORER",
+ "CODENAMEUNDER",
+ "COREINITIAL",
+ "TEAMCORE",
+ "CONSISTEDTEAM",
+ "FOURCONSISTED",
+ "DEVELOPERSFOUR",
+ "WORKINGDEVELOPERS",
+ "MICROSOFTWORKING",
+ "POWERMICROSOFT",
+ "TROUBLESHOOTINGPOWER",
+ "NEEDSTROUBLESHOOTING",
+ "WANTEDNEEDS",
+ "MASSIVEWANTED",
+ "TELEMETRYMASSIVE",
+ "STREAMTELEMETRY",
+ "PRODUCEDSTREAM",
+ "FINDINGPRODUCED",
+ "SUITABLEFINDING",
+ "SOLUTIONSUITABLE",
+ "DECIDEDSOLUTION",
+ "CREATEDECIDED",
+ "TURNEDCREATE",
+ "WERENTTURNED",
+ "ONLYWERENT",
+ "PEOPLEONLY",
+ "NEEDEDPEOPLE",
+ "KINDNEEDED",
+ "TECHNOLOGYKIND",
+ "WITHINTECHNOLOGY",
+ "WORKWITHIN",
+ "FIRSTWORK",
+ "INTERNALFIRST",
+ "CUSTOMERSINTERNAL",
+ "ADOPTIONCUSTOMERS",
+ "STEADYADOPTION",
+ "CLIMBSTEADY",
+ "NEARLYCLIMB",
+ "FIVENEARLY",
+ "YEARSFIVE",
+ "LATERYEARS",
+ "BRAINCHILDLATER",
+ "PUBLICBRAINCHILD",
+ "PREVIEWPUBLIC",
+ "WATCHPREVIEW",
+ "SCOTTWATCH",
+ "GUTHRIESSCOTT",
+ "KEYNOTEGUTHRIES",
+ "READKEYNOTE",
+ "MOREREAD",
+ "ABOUTMORE",
+ "WHATABOUT",
+ "WEREWHAT",
+ "UNVEILINGWERE",
+ "ANNOUNCEMENTUNVEILING",
+ "BLOGANNOUNCEMENT",
+ "POSTBLOG",
+ "DESCRIBEPOST",
+ "VERYDESCRIBE",
+ "BASICSVERY",
+ "BEHINDBASICS",
+ "DETAILSBEHIND",
+ "WILLDETAILS",
+ "AVAILABLEWILL",
+ "UPCOMINGAVAILABLE",
+ "WHITEUPCOMING",
+ "PAPERWHITE",
+ "THATPAPER",
+ "INGESTSTHAT",
+ "UNSTRUCTUREDINGESTS",
+ "THENUNSTRUCTURED",
+ "STORESTHEN",
+ "ANSWERSSTORES",
+ "ANALYTICANSWERS",
+ "COMMONANALYTIC",
+ "INGESTINGCOMMON",
+ "QUERYINGINGESTING",
+ "STREAMSQUERYING",
+ "EXAMPLESTREAMS",
+ "DATABASEEXAMPLE",
+ "USESDATABASE",
+ "TROUBLESHOOTUSES",
+ "MONITORINGTROUBLESHOOT",
+ "FINDMONITORING",
+ "ANOMALIESFIND",
+ "SERVESANOMALIES",
+ "TAKINGSERVES",
+ "AUTOTAKING",
+ "REMEDIATIONAUTO",
+ "ACTIONSREMEDIATION",
+ "ALSOACTIONS",
+ "USEDALSO",
+ "OFFICEUSED",
+ "CLIENTOFFICE",
+ "GIVINGCLIENT",
+ "ENGINEERSGIVING",
+ "ABILITYENGINEERS",
+ "ANALYZEABILITY",
+ "USERSANALYZE",
+ "INTERACTUSERS",
+ "INDIVIDUALINTERACT",
+ "SUITEINDIVIDUAL",
+ "APPLICATIONSSUITE",
+ "ANOTHERAPPLICATIONS",
+ "DEPICTSANOTHER",
+ "MONITORDEPICTS",
+ "STOREMONITOR",
+ "THEREFORESTORE",
+ "HAVETHEREFORE",
+ "EVERHAVE",
+ "WRITTENEVER",
+ "BROWSEDWRITTEN",
+ "THROUGHBROWSED",
+ "YOURTHROUGH",
+ "ACTIVITYYOUR",
+ "LOGSACTIVITY",
+ "ALREADYLOGS",
+ "USERALREADY",
+ "THEIRUSER",
+ "ORGANIZEDTHEIR",
+ "TRADITIONALORGANIZED",
+ "RELATIONALTRADITIONAL",
+ "MODELRELATIONAL",
+ "TABLESMODEL",
+ "TABLETABLES",
+ "STRONGLYTABLE",
+ "TYPEDSTRONGLY",
+ "SCHEMATYPED",
+ "ORDEREDSCHEMA",
+ "LISTORDERED",
+ "COLUMNSLIST",
+ "EACHCOLUMNS",
+ "COLUMNEACH",
+ "HAVINGCOLUMN",
+ "NAMEHAVING",
+ "SCALARNAME",
+ "TYPESCALAR",
+ "TYPESTYPE",
+ "REALTYPES",
+ "DATETIMEREAL",
+ "TIMESPANDATETIME",
+ "DYNAMICTIMESPAN",
+ "STRINGDYNAMIC",
+ "SIMILARSTRING",
+ "JSONSIMILAR",
+ "HOLDJSON",
+ "SINGLEHOLD",
+ "VALUESINGLE",
+ "OTHERVALUE",
+ "ARRAYOTHER",
+ "DICTIONARYARRAY",
+ "SUCHDICTIONARY",
+ "VALUESSUCH",
+ "CONTAINEDVALUES",
+ "DATABASESCONTAINED",
+ "DEPLOYMENTDATABASES",
+ "CLUSTERDEPLOYMENT",
+ "NODESCLUSTER",
+ "HOSTNODES",
+ "MULTIPLEHOST",
+ "ILLUSTRATEMULTIPLE",
+ "BELOWILLUSTRATE",
+ "SOMEBELOW",
+ "NUMBERSSOME",
+ "UTILIZEDNUMBERS",
+ "ITSELFUTILIZED",
+ "LARGESTITSELF",
+ "ACCEPTSLARGEST",
+ "APPROXIMATELYACCEPTS",
+ "BILLIONAPPROXIMATELY",
+ "TOTALBILLION",
+ "RETAINEDTOTAL",
+ "PURPOSESRETAINED",
+ "DAYSPURPOSES",
+ "COUNTDAYS",
+ "THESECOUNT",
+ "TOOKTHESE",
+ "COMPLETETOOK",
+ "TIMESTAMPWHERE",
+ "WHILETIMESTAMP",
+ "EXECUTINGWHILE",
+ "SENTEXECUTING",
+ "SAMESENT",
+ "SHOWNSAME",
+ "RETRIEVESHOWN",
+ "ACCORDINGRETRIEVE",
+ "CORRELATIONACCORDING",
+ "HERECORRELATION",
+ "FORCEDHERE",
+ "TERMFORCED",
+ "INDEXTERM",
+ "CLIENTACTIVITYIDINDEX",
+ "OPERATORCLIENTACTIVITYID",
+ "SIMULATINGOPERATOR",
+ "TYPICALSIMULATING",
+ "POINTTYPICAL",
+ "FASTERPOINT",
+ "THANFASTER",
+ "PREVIOUSTHAN",
+ "THOUGHPREVIOUS",
+ "MUCHTHOUGH",
+ "RETURNEDMUCH",
+ "FACTRETURNED",
+ "INDEXESFACT",
+ "CONJUNCTIONINDEXES",
+ "STORAGECONJUNCTION",
+ "HEARTSTORAGE",
+ "ENGINEHEART",
+ "UNIQUEENGINE",
+ "THREEUNIQUE",
+ "HIGHLYTHREE",
+ "SUCCESSFULHIGHLY",
+ "TECHNOLOGIESSUCCESSFUL",
+ "INDEXINGTECHNOLOGIES",
+ "SHARDINGINDEXING",
+ "SHARDEDSHARDING",
+ "MAKESSHARDED",
+ "POSSIBLEMAKES",
+ "HUGEPOSSIBLE",
+ "SETSHUGE",
+ "ARRANGEDSETS",
+ "ORDERARRANGED",
+ "COMPRESSESORDER",
+ "BETTERCOMPRESSES",
+ "STOREDBETTER",
+ "PERFORMANCESTORED",
+ "IMPROVEDPERFORMANCE",
+ "ALLOWSIMPROVED",
+ "UTILIZEALLOWS",
+ "COMPUTEUTILIZE",
+ "RESOURCESCOMPUTE",
+ "ARRANGINGRESOURCES",
+ "SYSTEMARRANGING",
+ "AVOIDSYSTEM",
+ "LOADINGAVOID",
+ "REQUIREDLOADING",
+ "PARTICULARREQUIRED",
+ "MAKEPARTICULAR",
+ "EFFICIENTLYMAKE",
+ "SKIPEFFICIENTLY",
+ "ENTIRESKIP",
+ "BATCHESENTIRE",
+ "PREDICATEDBATCHES",
+ "FUNDAMENTALLYPREDICATED",
+ "BLOBFUNDAMENTALLY",
+ "SHARDBLOB",
+ "COMPOSEDSHARD",
+ "BLOBSCOMPOSED",
+ "ONCEBLOBS",
+ "CREATEDONCE",
+ "PROCESSCREATED",
+ "IMMUTABLEPROCESS",
+ "ARTIFACTSIMMUTABLE",
+ "KEPTARTIFACTS",
+ "WITHOUTKEPT",
+ "CHANGEWITHOUT",
+ "UNTILCHANGE",
+ "DELETEDUNTIL",
+ "NUMBERDELETED",
+ "IMPORTANTNUMBER",
+ "IMPLICATIONSIMPORTANT",
+ "CACHEIMPLICATIONS",
+ "MANAGEMENTCACHE",
+ "COORDINATIONMANAGEMENT",
+ "BETWEENCOORDINATION",
+ "THEMBETWEEN",
+ "CLUSTERSTHEM",
+ "REFERCLUSTERS",
+ "ADDSREFER",
+ "ROBUSTNESSADDS",
+ "THERESROBUSTNESS",
+ "CODETHERES",
+ "SURGICALLYCODE",
+ "MODIFYSURGICALLY",
+ "PARTSMODIFY",
+ "EXISTINGPARTS",
+ "TRAVELEXISTING",
+ "BACKTRAVEL",
+ "TIMEBACK",
+ "SNAPSHOTTIME",
+ "LONGSNAPSHOT",
+ "HARDLONG",
+ "PROPRIETARYHARD",
+ "FORMATPROPRIETARY",
+ "SHARDSFORMAT",
+ "CUSTOMSHARDS",
+ "BUILTCUSTOM",
+ "MEMORYBUILT",
+ "MAPPEDMEMORY",
+ "OPERATIONSMAPPED",
+ "INCLUDINGOPERATIONS",
+ "MERGEINCLUDING",
+ "THEREMERGE",
+ "NEEDTHERE",
+ "TRANSFORMNEED",
+ "PRIORTRANSFORM",
+ "LINEPRIOR",
+ "SPEEDLINE",
+ "LIKESPEED",
+ "THINGSLIKE",
+ "APARTTHINGS",
+ "MANYAPART",
+ "PRINCIPLESMANY",
+ "INDEEDPRINCIPLES",
+ "BUILDINGINDEED",
+ "INVERTEDBUILDING",
+ "BLOOMINVERTED",
+ "FILTERSBLOOM",
+ "CARDINALITYFILTERS",
+ "RARELYCARDINALITY",
+ "USEFULRARELY",
+ "FIELDSUSEFUL",
+ "HASHFIELDS",
+ "OFTENHASH",
+ "EXCEEDSOFTEN",
+ "SIZEEXCEEDS",
+ "CONSIDERABLESIZE",
+ "DEFAULTCONSIDERABLE",
+ "CASEDEFAULT",
+ "INDEXEDCASE",
+ "HIGHINDEXED",
+ "MEANINGHIGH",
+ "APPROACHESMEANING",
+ "DEFAULTSAPPROACHES",
+ "CREATINGDEFAULTS",
+ "TWISTSCREATING",
+ "LEVELTWISTS",
+ "INGESTEDLEVEL",
+ "PARALLELINGESTED",
+ "GRANULARITYPARALLEL",
+ "INSTEADGRANULARITY",
+ "HOLDINGINSTEAD",
+ "RECORDHOLDING",
+ "MISSRECORD",
+ "INFORMATIONMISS",
+ "KEEPINFORMATION",
+ "BLOCKKEEP",
+ "STILLBLOCK",
+ "EFFICIENTSTILL",
+ "SKIPPINGEFFICIENT",
+ "OCCURRINGSKIPPING",
+ "TERMSOCCURRING",
+ "SMALLTERMS",
+ "ENOUGHSMALL",
+ "GENERATEENOUGH",
+ "LOADGENERATE",
+ "COURSELOAD",
+ "INDICATESCOURSE",
+ "MUSTINDICATES",
+ "SCANNEDMUST",
+ "DETERMINESCANNED",
+ "WHICHDETERMINE",
+ "MATCHESWHICH",
+ "PREDICATEMATCHES",
+ "MOSTPREDICATE",
+ "CASESMOST",
+ "RESULTSCASES",
+ "POTENTIALLYRESULTS",
+ "CONTINUOUSLYPOTENTIALLY",
+ "OPTIMIZECONTINUOUSLY",
+ "BACKGROUNDOPTIMIZE",
+ "MERGEDBACKGROUND",
+ "TOGETHERMERGED",
+ "IMPROVINGTOGETHER",
+ "COMPRESSIONIMPROVING",
+ "BECAUSECOMPRESSION",
+ "THEYBECAUSE",
+ "CONTAINTHEY",
+ "COMESCONTAIN",
+ "WANTCOMES",
+ "BEYONDWANT",
+ "CERTAINBEYOND",
+ "STOPCERTAIN",
+ "GETTINGSTOP",
+ "JUSTGETTING",
+ "MERGESJUST",
+ "USUALLYMERGES",
+ "MERGINGUSUALLY",
+ "COMPRESSEDMERGING",
+ "STANDARDCOMPRESSED",
+ "ALGORITHMSSTANDARD",
+ "COMPRESSALGORITHMS",
+ "ALGORITHMCOMPRESS",
+ "EXCELLENTALGORITHM",
+ "REASONABLEEXCELLENT",
+ "RATIOREASONABLE",
+ "ESTIMATERATIO",
+ "VIRTUALLYESTIMATE",
+ "ALWAYSVIRTUALLY",
+ "PREFERREDALWAYS",
+ "OVERPREFERRED",
+ "KEEPINGOVER",
+ "UNCOMPRESSEDKEEPING",
+ "SIMPLYUNCOMPRESSED",
+ "SAVINGSIMPLY",
+ "MOVINGSAVING",
+ "INTOMOVING",
+ "WORTHINTO",
+ "DECOMPRESSWORTH",
+ "ADDITIONALDECOMPRESS",
+ "SUPPORTEDADDITIONAL",
+ "LZMASUPPORTED",
+ "BROTLILZMA",
+ "HOLDSBROTLI",
+ "LOADEDHOLDS",
+ "INTERESTINGLOADED",
+ "TRADEINTERESTING",
+ "PERFORMINGTRADE",
+ "VERTICALPERFORMING",
+ "SERVERVERTICAL",
+ "ANALYSISSERVER",
+ "TABULARANALYSIS",
+ "MODELSTABULAR",
+ "OPTIMIZATIONMODELS",
+ "LOOKSOPTIMIZATION",
+ "WAYSLOOKS",
+ "SORTWAYS",
+ "BEFORESORT",
+ "COMPRESSINGBEFORE",
+ "RESULTINGCOMPRESSING",
+ "RATIOSRESULTING",
+ "TIMESRATIOS",
+ "AVOIDEDTIMES",
+ "COSTAVOIDED",
+ "QUICKLYCOST",
+ "DOESQUICKLY",
+ "ENABLEDOES",
+ "INDICATEENABLE",
+ "DOMINANTINDICATE",
+ "PATTERNDOMINANT",
+ "MIGHTPATTERN",
+ "FUTUREMIGHT",
+ "METADATAFUTURE",
+ "ALONGSIDEMETADATA",
+ "MAINTAINSALONGSIDE",
+ "DESCRIBESMAINTAINS",
+ "VARIOUSDESCRIBES",
+ "POLICYVARIOUS",
+ "OBJECTSPOLICY",
+ "DURINGOBJECTS",
+ "GROOMINGDURING",
+ "ACTIVITIESGROOMING",
+ "SECURITYACTIVITIES",
+ "POLICIESSECURITY",
+ "HEADPOLICIES",
+ "POINTERHEAD",
+ "RELEVANTPOINTER",
+ "LATESTRELEVANT",
+ "ADVANTAGESLATEST",
+ "NOTEDADVANTAGES",
+ "ABOVENOTED",
+ "IMMUTABILITYABOVE",
+ "ISOLATIONIMMUTABILITY",
+ "EARLYISOLATION",
+ "DECISIONSEARLY",
+ "TAKENDECISIONS",
+ "DESIGNERSTAKEN",
+ "ENSUREDESIGNERS",
+ "FUNDAMENTALENSURE",
+ "SERVICESFUNDAMENTAL",
+ "NETWORKINGSERVICES",
+ "STRICTLYNETWORKING",
+ "ADHERESSTRICTLY",
+ "PRINCIPLEADHERES",
+ "PERSISTENTPRINCIPLE",
+ "THOUGHTPERSISTENT",
+ "MERELYTHOUGHT",
+ "SEVERALMERELY",
+ "INDEPENDENTSEVERAL",
+ "SCALEINDEPENDENT",
+ "INDEPENDENTLYSCALE",
+ "GROWSINDEPENDENTLY",
+ "RUNNINGGROWS",
+ "CONCURRENTLYRUNNING",
+ "TRANSACTIONSCONCURRENTLY",
+ "SECONDTRANSACTIONS",
+ "RESILIENCYSECOND",
+ "FAILURESRESILIENCY",
+ "SWITCHFAILURES",
+ "TRAFFICSWITCH",
+ "MIGRATIONTRAFFIC",
+ "APPLYINGMIGRATION",
+ "PROCEDUREAPPLYING",
+ "BEINGPROCEDURE",
+ "HIGHERBEING",
+ "OLDERHIGHER",
+ "USINGOLDER",
+ "DIFFERENTUSING",
+ "WORKLOADSDIFFERENT",
+ "ACTSWORKLOADS",
+ "LEADERACTS",
+ "GIVENLEADER",
+ "PERMISSIONGIVEN",
+ "WRITEPERMISSION",
+ "OTHERSWRITE",
+ "FOLLOWERSOTHERS",
+ "MODEFOLLOWERS",
+ "FITNESSMODE",
+ "CLOSELYFITNESS",
+ "RELATEDCLOSELY",
+ "TAILOREDRELATED",
+ "WORKLOADTAILORED",
+ "REQUIREMENTSWORKLOAD",
+ "PRECISELYREQUIREMENTS",
+ "HANDLEPRECISELY",
+ "DURABLEHANDLE",
+ "SKUSDURABLE",
+ "APPROPRIATESKUS",
+ "LASTAPPROPRIATE",
+ "LEASTLAST",
+ "RELYINGLEAST",
+ "DOINGRELYING",
+ "BESTDOING",
+ "REPLICATIONBEST",
+ "MEANSREPLICATION",
+ "LITTLEMEANS",
+ "HAPPENLITTLE",
+ "SIMPLIFYINGHAPPEN",
+ "CONSIDERABLYSIMPLIFYING",
+ "ESSENTIALLYCONSIDERABLY",
+ "WRITESESSENTIALLY",
+ "COORDINATEDWRITES",
+ "CACHINGCOORDINATED",
+ "CAREFULCACHING",
+ "ISOLATECAREFUL",
+ "FULLISOLATE",
+ "LOCALFULL",
+ "VOLATILELOCAL",
+ "SOPHISTICATEDVOLATILE",
+ "MULTISOPHISTICATED",
+ "HIERARCHYMULTI",
+ "SUREHIERARCHY",
+ "CACHEDSURE",
+ "CRITICALLYCACHED",
+ "DEPENDSCRITICALLY",
+ "CONSISTSDEPENDS",
+ "TIERSCONSISTS",
+ "RELIABLETIERS",
+ "MANAGEDRELIABLE",
+ "DISKSMANAGED",
+ "ASPECTDISKS",
+ "WORKSASPECT",
+ "COMPLETELYWORKS",
+ "HELDCOMPLETELY",
+ "DECOMPRESSEDHELD",
+ "ACTUALDECOMPRESSED",
+ "OPTIMALACTUAL",
+ "LIMITEDOPTIMAL",
+ "COSTLYLIMITED",
+ "DISTRIBUTEDCOSTLY",
+ "IMPACTEDDISTRIBUTED",
+ "SCENARIOIMPACTED",
+ "EXCELSCENARIO",
+ "AMOUNTSEXCEL",
+ "TREATSAMOUNTS",
+ "TEMPORARYTREATS",
+ "AGGREGATEDTEMPORARY",
+ "DISKAGGREGATED",
+ "INCLUDESDISK",
+ "TRANSITINCLUDES",
+ "RATHERTRANSIT",
+ "SHORTRATHER",
+ "TIMEOUTSSHORT",
+ "MINUTESTIMEOUTS",
+ "INCREASEMINUTES",
+ "TIMEOUTINCREASE",
+ "ASSUMPTIONTIMEOUT",
+ "SHOULDASSUMPTION",
+ "FASTSHOULD",
+ "PROVIDEFAST",
+ "STAMPEDPROVIDE",
+ "PLANSTAMPED",
+ "SINCEPLAN",
+ "TAKESSINCE",
+ "REFERENCETAKES",
+ "ADDITIONALLYREFERENCE",
+ "SUBJECTADDITIONALLY",
+ "INCREASEDSUBJECT",
+ "HOURINCREASED",
+ "SUFFICIENTHOUR",
+ "GUARANTEESUFFICIENT",
+ "LINGERGUARANTEE",
+ "DELETELINGER",
+ "LONGERDELETE",
+ "PERHAPSLONGER",
+ "NOTABLEPERHAPS",
+ "IMPLEMENTSNOTABLE",
+ "LANGUAGEIMPLEMENTS",
+ "OPTIMIZEDLANGUAGE",
+ "BOTHOPTIMIZED",
+ "EASEBOTH",
+ "EXPRESSIVENESSEASE",
+ "TELLEXPRESSIVENESS",
+ "PLEASURETELL",
+ "AUTHORPLEASURE",
+ "EXPRESSEDAUTHOR",
+ "SYNTAXEXPRESSED",
+ "LANGUAGESSYNTAX",
+ "COMPUTATIONLANGUAGES",
+ "PRIMARILYCOMPUTATION",
+ "MODELEDPRIMARILY",
+ "AFTERMODELED",
+ "FLOWAFTER",
+ "UNIXFLOW",
+ "PIPELINEUNIX",
+ "COMMANDSPIPELINE",
+ "REGARDCOMMANDS",
+ "MAJORREGARD",
+ "STEPMAJOR",
+ "FORWARDSTEP",
+ "TOOLSETFORWARD",
+ "AROUNDTOOLSET",
+ "ASPECTSAROUND",
+ "PROPELLEDASPECTS",
+ "TAKEPROPELLED",
+ "ONLINETAKE",
+ "PLURALSIGHTONLINE",
+ "FEATUREPLURALSIGHT",
+ "ENGINESFEATURE",
+ "LAYERENGINES",
+ "NATIVELYLAYER",
+ "SUPPORTSNATIVELY",
+ "CROSSSUPPORTS",
+ "OPTIMIZERCROSS",
+ "SUPPORTOPTIMIZER",
+ "ARRANGESUPPORT",
+ "REMOTEDARRANGE",
+ "REDUCEREMOTED",
+ "AMOUNTREDUCE",
+ "EXCHANGEDAMOUNT",
+ "PLATFORMEXCHANGED",
+ "INTERACTIVEPLATFORM",
+ "INTRODUCTIONINTERACTIVE",
+ "WORLDINTRODUCTION",
+ "GROWINGWORLD",
+ "STEADILYGROWING",
+ "LARGESTEADILY",
+ "ALONGLARGE",
+ "MODERNALONG",
+ "SOLUTIONSMODERN",
+ "TYPICALLYSOLUTIONS",
+ "ADDRESSINGTYPICALLY",
+ "USUALADDRESSING",
+ "INCLUDEUSUAL",
+ "COLLECTIONINCLUDE",
+ "ENRICHMENTCOLLECTION",
+ "BATCHENRICHMENT",
+ "PROCESSINGBATCH",
+ "DASHBOARDSPROCESSING",
+ "EXPLORATIONDASHBOARDS",
+ "REPORTINGEXPLORATION",
+ "AGGREGATIONREPORTING",
+ "SUMMARIZATIONAGGREGATION",
+ "TRAININGSUMMARIZATION",
+ "SERVICINGTRAINING",
+ "ARCHIVALSERVICING",
+ "WHITEPAPERARCHIVAL",
+ "PRESENTWHITEPAPER",
+ "CODENAMEDPRESENT",
+ "UNDERLIESCODENAMED",
+ "OFFERINGUNDERLIES",
+ "FITSOFFERING",
+ "SPECIFICFITS",
+ "LANDSCAPESPECIFIC",
+ "ILLUSTRATEDLANDSCAPE",
+ "LETSILLUSTRATED",
+ "DEPLOYEDLETS",
+ "THOUSANDSDEPLOYED",
+ "HANDLINGTHOUSANDS",
+ "STRICTHANDLING",
+ "SLASSTRICT",
+ "INTERACTINGSLAS",
+ "EXTERNALINTERACTING",
+ "PROPERLYEXTERNAL",
+ "INSTRUMENTEDPROPERLY",
+ "EMITSINSTRUMENTED",
+ "ENORMOUSEMITS",
+ "SIGNALSENORMOUS",
+ "GENERICSIGNALS",
+ "EVENTSGENERIC",
+ "USAGEEVENTS",
+ "APPLICATIONUSAGE",
+ "AUTHENTICATIONAPPLICATION",
+ "STARTAUTHENTICATION",
+ "PROGRESSSTART",
+ "TRANSACTIONPROGRESS",
+ "HEALTHTRANSACTION",
+ "ALERTSHEALTH",
+ "TRACEALERTS",
+ "DEBUGTRACE",
+ "STATEMENTSDEBUG",
+ "ADDEDSTATEMENTS",
+ "SUBSETSADDED",
+ "ROUTEDSUBSETS",
+ "SYSTEMSROUTED",
+ "CHOSENSYSTEMS",
+ "ALERTINGCHOSEN",
+ "COUNTERSALERTING",
+ "SERIESCOUNTERS",
+ "OPERATIONALSERIES",
+ "WAREHOUSEOPERATIONAL",
+ "FURTHERWAREHOUSE",
+ "BUSINESSFURTHER",
+ "DESIGNEDBUSINESS",
+ "UNPROCESSEDDESIGNED",
+ "FORMUNPROCESSED",
+ "INTERVALFORM",
+ "SLIDINGINTERVAL",
+ "WINDOWSLIDING",
+ "SUBSETWINDOW",
+ "RANGINGSUBSET",
+ "ALLOWRANGING",
+ "DIVERSEALLOW",
+ "FORMALLYDIVERSE",
+ "MAINFORMALLY",
+ "FEATURESMAIN",
+ "VELOCITYFEATURES",
+ "MILLIONSVELOCITY",
+ "LINEARMILLIONS",
+ "INDUSTRYLINEAR",
+ "LEADINGINDUSTRY",
+ "SCALABLELEADING",
+ "RICHSCALABLE",
+ "POWERFULRICH",
+ "CAPABILITIESPOWERFUL",
+ "SUPPORTINGCAPABILITIES",
+ "WHOLESUPPORTING",
+ "COMPLEXITYWHOLE",
+ "SPECTRUMCOMPLEXITY",
+ "SIMPLESPECTRUM",
+ "KEYWORDSIMPLE",
+ "SEARCHKEYWORD",
+ "BEHAVIORALSEARCH",
+ "PRODUCTIVEBEHAVIORAL",
+ "TARGETPRODUCTIVE",
+ "CHARACTERIZEDTARGET",
+ "FOLLOWSCHARACTERIZED",
+ "CHRONOLOGICALFOLLOWS",
+ "INSERTCHRONOLOGICAL",
+ "APPENDINSERT",
+ "BULKAPPEND",
+ "UPDATEBULK",
+ "ALMOSTUPDATE",
+ "NEVERALMOST",
+ "OVERVIEWNEVER",
+ "CUSTOMEROVERVIEW",
+ "TENANTCUSTOMER",
+ "DISTINCTTENANT",
+ "TANDEMDISTINCT",
+ "VIRTUALTANDEM",
+ "MACHINESVIRTUAL",
+ "RESPONSIBLEMACHINES",
+ "INCOMINGRESPONSIBLE",
+ "SERVINGINCOMING",
+ "EXPOSESSERVING",
+ "ENDPOINTEXPOSES",
+ "SENDINGENDPOINT",
+ "CONTROLSENDING",
+ "CONNECTINGCONTROL",
+ "PIPELINESCONNECTING",
+ "ORCHESTRATINGPIPELINES",
+ "CONTINUOUSORCHESTRATING",
+ "ROBUSTCONTINUOUS",
+ "FAILUREROBUST",
+ "BACKPRESSUREFAILURE",
+ "CONDITIONSBACKPRESSURE",
+ "INVOCATIONCONDITIONS",
+ "PERIODICINVOCATION",
+ "TASKSPERIODIC",
+ "OPTIONALTASKS",
+ "PRACTICEOPTIONAL",
+ "VASTPRACTICE",
+ "MAJORITYVAST",
+ "DEPLOYMENTSMAJORITY",
+ "FOCUSDEPLOYMENTS",
+ "DOCUMENTFOCUS",
+ "ALTHOUGHDOCUMENT",
+ "WELLALTHOUGH",
+ "OCCASIONALLYWELL",
+ "DISCUSSINGOCCASIONALLY",
+ "FUNCTIONALITYDISCUSSING",
+ "LOGICALFUNCTIONALITY",
+ "FAMILIARLOGICAL",
+ "CONTAINSFAMILIAR",
+ "FUNCTIONSCONTAINS",
+ "DEFINESFUNCTIONS",
+ "AUTHORIZATIONDEFINES",
+ "RETENTIONAUTHORIZATION",
+ "ENCODINGRETENTION",
+ "ATTACHEDENCODING",
+ "SOMETIMESATTACHED",
+ "FIELDSOMETIMES",
+ "UNLIKEFIELD",
+ "RDBMSUNLIKE",
+ "PRIMARYRDBMS",
+ "FOREIGNPRIMARY",
+ "CONSTRAINTSFOREIGN",
+ "UNIQUENESSCONSTRAINTS",
+ "NECESSARYUNIQUENESS",
+ "RELATIONSHIPSNECESSARY",
+ "ESTABLISHEDRELATIONSHIPS",
+ "REASONSESTABLISHED",
+ "LACKREASONS",
+ "FORMALLACK",
+ "WOULDFORMAL",
+ "CONSTANTLYWOULD",
+ "VIOLATEDCONSTANTLY",
+ "NOISYVIOLATED",
+ "INTENDEDNOISY",
+ "ENFORCEMENTINTENDED",
+ "RESULTENFORCEMENT",
+ "SUBSTANTIALRESULT",
+ "NEGATIVESUBSTANTIAL",
+ "IMPACTNEGATIVE",
+ "RATEIMPACT",
+ "INTERACTIONRATE",
+ "FALLSINTERACTION",
+ "BROADFALLS",
+ "CATEGORIESBROAD",
+ "APPLYCATEGORIES",
+ "VARIETYAPPLY",
+ "COMPOSABLEVARIETY",
+ "OPERATORSCOMPOSABLE",
+ "EXPRESSOPERATORS",
+ "DESIREDEXPRESS",
+ "MUTATEDESIRED",
+ "INSPECTMUTATE",
+ "FREQUENTINSPECT",
+ "COMMANDFREQUENT",
+ "INGESTCOMMAND",
+ "APPENDINGINGEST",
+ "HORIZONTALAPPENDING",
+ "POSSIBLYHORIZONTAL",
+ "DISCUSSEDPOSSIBLY",
+ "PARTDISCUSSED",
+ "CONCERNEDPART",
+ "ENTITYCONCERNED",
+ "HOWEVERENTITY",
+ "EXPOSINGHOWEVER",
+ "NOTIONEXPOSING",
+ "THEYRENOTION",
+ "ENTIRELYTHEYRE",
+ "HIDDENENTIRELY",
+ "RBACHIDDEN",
+ "STYLERBAC",
+ "ASSOCIATEDSTYLE",
+ "REFERENCESASSOCIATED",
+ "TREEREFERENCES",
+ "CONSISTENTTREE",
+ "DESCRIPTIONCONSISTENT",
+ "INSTANCEDESCRIPTION",
+ "ARCHITECTUREINSTANCE",
+ "CONNECTEDARCHITECTURE",
+ "VNETCONNECTED",
+ "GATEWAYVNET",
+ "ACCESSIBLEGATEWAY",
+ "EXTERNALLYACCESSIBLE",
+ "BALANCEREXTERNALLY",
+ "PROVISIONEDBALANCER",
+ "SCALEDPROVISIONED",
+ "DOWNSCALED",
+ "EITHERDOWN",
+ "AUTOMATICALLYEITHER",
+ "MANUALLYAUTOMATICALLY",
+ "RESPONSEMANUALLY",
+ "CHANGINGRESPONSE",
+ "VOLUMECHANGING",
+ "MINIMUMVOLUME",
+ "MAXIMUMMINIMUM",
+ "TESTEDMAXIMUM",
+ "PRODUCTIONTESTED",
+ "NODEPRODUCTION",
+ "FULFILLSNODE",
+ "ROLESFULFILLS",
+ "ADMINROLES",
+ "VIEWADMIN",
+ "INITIATINGVIEW",
+ "EXECUTIONINITIATING",
+ "ROLEEXECUTION",
+ "CONTRIBUTESROLE",
+ "SPACECONTRIBUTES",
+ "EXECUTESSPACE",
+ "FRAGMENTSEXECUTES",
+ "CREATESFRAGMENTS",
+ "CACHESCREATES",
+ "EXPOSECACHES",
+ "COMMUNICATIONEXPOSE",
+ "CALLSCOMMUNICATION",
+ "PERFORMSCALLS",
+ "DISPATCHESPERFORMS",
+ "REQUESTDISPATCHES",
+ "HANDLERREQUEST",
+ "STATEHANDLER",
+ "RELIESSTATE",
+ "HEAVILYRELIES",
+ "REPRESENTEDHEAVILY",
+ "SECTIONREPRESENTED",
+ "CONTAINERSSECTION",
+ "ACCOUNTCONTAINERS",
+ "CONTAINERACCOUNT",
+ "CONFIGUREDCONTAINER",
+ "ADMINISTRATORCONFIGURED",
+ "RESIDESADMINISTRATOR",
+ "DEFINITIONRESIDES",
+ "CORRESPONDINGDEFINITION",
+ "SPREADCORRESPONDING",
+ "ACROSSSPREAD",
+ "ACCOUNTSACROSS",
+ "BALANCINGACCOUNTS",
+ "ALLOWINGBALANCING",
+ "MANIPULATIONSALLOWING",
+ "GROUPMANIPULATIONS",
+ "VERSIONSGROUP",
+ "LOADSVERSIONS",
+ "STRUCTURELOADS",
+ "CLONINGSTRUCTURE",
+ "COPYCLONING",
+ "CLONESCOPY",
+ "MODIFIESCLONES",
+ "OBJECTMODIFIES",
+ "COMMITSOBJECT",
+ "VISIBLECOMMITS",
+ "RESTVISIBLE",
+ "DEPENDINGREST",
+ "COULDDEPENDING",
+ "DELTACOULD",
+ "SAVEDELTA",
+ "BANDWIDTHSAVE",
+ "COMMITBANDWIDTH",
+ "CURRENTCOMMIT",
+ "BECOMESCURRENT",
+ "UNAVAILABLEBECOMES",
+ "IMMEDIATELYUNAVAILABLE",
+ "ELECTEDIMMEDIATELY",
+ "SURVEYSELECTED",
+ "COLLECTSURVEYS",
+ "RUNTIMECOLLECT",
+ "CONTINUESRUNTIME",
+ "NORMALCONTINUES",
+ "OPERATIONNORMAL",
+ "EXCEPTIONOPERATION",
+ "COMPLEMENTARYEXCEPTION",
+ "APPENDSCOMPLEMENTARY",
+ "DELETESAPPENDS",
+ "DETAILEDDELETES",
+ "STEPSDETAILED",
+ "ARRIVESSTEPS",
+ "SPECIFIESARRIVES",
+ "SOURCESSPECIFIES",
+ "FILESOURCES",
+ "URLSFILE",
+ "FINDSURLS",
+ "PERFORMFINDS",
+ "FORWARDSPERFORM",
+ "FETCHESFORWARDS",
+ "PROCESSESFETCHES",
+ "RETURNSPROCESSES",
+ "UNCOMMITTEDRETURNS",
+ "STARTSUNCOMMITTED",
+ "ATTACHESSTARTS",
+ "COMPLETEDATTACHES",
+ "MANUALCOMPLETED",
+ "DROPMANUAL",
+ "REASONDROP",
+ "GARBAGEREASON",
+ "COLLECTEDGARBAGE",
+ "REACHESCOLLECTED",
+ "THRESHOLDREACHES",
+ "SCHEMETHRESHOLD",
+ "ENABLESSCHEME",
+ "SUCCESSFULLYENABLES",
+ "CONTINUESUCCESSFULLY",
+ "EXECUTECONTINUE",
+ "STALEEXECUTE",
+ "REVERTSTALE",
+ "VERSIONREVERT",
+ "MISTAKEVERSION",
+ "PURGEMISTAKE",
+ "EXECUTEDPURGE",
+ "PRIVACYEXECUTED",
+ "DISTRIBUTIONPRIVACY",
+ "ASSIGNEDDISTRIBUTION",
+ "FUNCTIONASSIGNED",
+ "SOFTFUNCTION",
+ "ASSIGNMENTSOFT",
+ "MANAGEASSIGNMENT",
+ "LIFETIMEMANAGE",
+ "LOCALLYLIFETIME",
+ "CONSEQUENTLYLOCALLY",
+ "WHENEVERCONSEQUENTLY",
+ "PARTICIPATINGWHENEVER",
+ "GOESPARTICIPATING",
+ "REDISTRIBUTESGOES",
+ "PROPORTIONALREDISTRIBUTES",
+ "REMAININGPROPORTIONAL",
+ "ACCEPTREMAINING",
+ "PHILOSOPHYACCEPT",
+ "MINDPHILOSOPHY",
+ "GOALSMIND",
+ "BARRIERGOALS",
+ "ENTRYBARRIER",
+ "EASYENTRY",
+ "SMOOTHEASY",
+ "TRANSITIONSMOOTH",
+ "LINERSTRANSITION",
+ "SCRIPTSLINERS",
+ "DRAWSSCRIPTS",
+ "INSPIRATIONDRAWS",
+ "SHELLSINSPIRATION",
+ "LINQSHELLS",
+ "FRAMEWORKLINQ",
+ "SEMANTICSFRAMEWORK",
+ "SEQUENCESEMANTICS",
+ "PRODUCINGSEQUENCE",
+ "EXPRESSIONPRODUCING",
+ "STATEMENTEXPRESSION",
+ "STARTINGSTATEMENT",
+ "CONCATENATEDSTARTING",
+ "PIPECONCATENATED",
+ "CHARACTERPIPE",
+ "CONVENIENTCHARACTER",
+ "MECHANISMCONVENIENT",
+ "COMPOSITIONMECHANISM",
+ "CONCEPTUALLYCOMPOSITION",
+ "OUTPUTCONCEPTUALLY",
+ "LEFTOUTPUT",
+ "INPUTLEFT",
+ "NEXTINPUT",
+ "RIGHTNEXT",
+ "PARAMETERIZEDRIGHT",
+ "EXPRESSIONSPARAMETERIZED",
+ "OPERATEEXPRESSIONS",
+ "CONTEXTOPERATE",
+ "CALLEDCONTEXT",
+ "STORMEVENTSCALLED",
+ "IMPLICITSTORMEVENTS",
+ "PICKSIMPLICIT",
+ "STARTTIMEPICKS",
+ "MONTHSTARTTIME",
+ "NOVEMBERMONTH",
+ "KEEPSNOVEMBER",
+ "FLORIDAKEEPS",
+ "SURVIVINGFLORIDA",
+ "SOURCESURVIVING",
+ "SPECIFIEDSOURCE",
+ "EXTENDSPECIFIED",
+ "COMPUTESEXTEND",
+ "SUMMARIZECOMPUTES",
+ "AGGREGATESUMMARIZE",
+ "BELONGAGGREGATE",
+ "JOINBELONG",
+ "ROWSJOIN",
+ "MATCHINGROWS",
+ "FLAVORSMATCHING",
+ "INNERFLAVORS",
+ "OUTERINNER",
+ "UNIONOUTER",
+ "INVOLVEDUNION",
+ "INVOLVESINVOLVED",
+ "EXPLANATIONINVOLVES",
+ "SUFFICEEXPLANATION",
+ "DISCUSSIONSUFFICE",
+ "RECOMMENDDISCUSSION",
+ "MATERIALSRECOMMEND",
+ "STUDYMATERIALS",
+ "BROADERSTUDY",
+ "SCRATCHBROADER",
+ "HTTPSSCRATCH",
+ "COURSESHTTPS",
+ "DOCUMENTATIONCOURSES",
+ "UPONDOCUMENTATION",
+ "RECEIVINGUPON",
+ "PARSESRECEIVING",
+ "ABSTRACTPARSES",
+ "CONSTRUCTSABSTRACT",
+ "SERVECONSTRUCTS",
+ "SYNTACTICSERVE",
+ "CONVENIENCESYNTACTIC",
+ "SUGARCONVENIENCE",
+ "LOWEREDSUGAR",
+ "PRIMITIVELOWERED",
+ "STAGEPRIMITIVE",
+ "SEMANTICSTAGE",
+ "PASSSEMANTIC",
+ "OBJECTIVESPASS",
+ "RESOLUTIONOBJECTIVES",
+ "ENTITIESRESOLUTION",
+ "REFERENCEDENTITIES",
+ "NAMESREFERENCED",
+ "RESOLVEDNAMES",
+ "VERIFYRESOLVED",
+ "PERMISSIONSVERIFY",
+ "ACCESSPERMISSIONS",
+ "CHECKACCESS",
+ "INFERENCECHECK",
+ "EXPECTEDINFERENCE",
+ "ARGUMENTSEXPECTED",
+ "ANALYZERARGUMENTS",
+ "BUILDSANALYZER",
+ "RELOPBUILDS",
+ "SIMPLIFIEDRELOP",
+ "VERIFIEDSIMPLIFIED",
+ "REPRESENTVERIFIED",
+ "SELECTIONREPRESENT",
+ "PROJECTIONSELECTION",
+ "CONSUMESPROJECTION",
+ "INPUTSCONSUMES",
+ "OUTPUTSINPUTS",
+ "CHILDOUTPUTS",
+ "PRODUCESCHILD",
+ "REPRESENTATIONPRODUCES",
+ "OPTIMIZATIONSREPRESENTATION",
+ "PERFORMEDOPTIMIZATIONS",
+ "UNDERGOESPERFORMED",
+ "GOALUNDERGOES",
+ "NOTEGOAL",
+ "LEAVESNOTE",
+ "ACCESSESLEAVES",
+ "REWRITEACCESSES",
+ "RULESREWRITE",
+ "RULERULES",
+ "MATCHRULE",
+ "TRANSFORMATIONMATCH",
+ "ACHIEVEDTRANSFORMATION",
+ "ITERATIVELYACHIEVED",
+ "APPLIEDITERATIVELY",
+ "UNCONDITIONALLYAPPLIED",
+ "SATISFYUNCONDITIONALLY",
+ "BASEDSATISFY",
+ "ESTIMATIONBASED",
+ "REORDERINGESTIMATION",
+ "GROUPEDREORDERING",
+ "PASSESGROUPED",
+ "REWRITESPASSES",
+ "PUSHREWRITES",
+ "RECOGNIZESPUSH",
+ "FOLLOWEDRECOGNIZES",
+ "FILTERFOLLOWED",
+ "DEPENDFILTER",
+ "COMPUTEDDEPEND",
+ "PRECEDINGCOMPUTED",
+ "REORDERSPRECEDING",
+ "THUSREORDERS",
+ "PUSHINGTHUS",
+ "TOWARDSPUSHING",
+ "LEAFTOWARDS",
+ "PROPAGATIONLEAF",
+ "PROJECTIONSPROPAGATION",
+ "CONSTANTPROJECTIONS",
+ "FOLDINGCONSTANT",
+ "ELIMINATIONFOLDING",
+ "UNUSEDELIMINATION",
+ "PHASEUNUSED",
+ "CONSIDERINGPHASE",
+ "GENERALCONSIDERING",
+ "APPROACHGENERAL",
+ "COMPRISEAPPROACH",
+ "PRACTICALCOMPRISE",
+ "EMPLOYPRACTICAL",
+ "UNIONSEMPLOY",
+ "CONSIDERUNIONS",
+ "QUESTIONCONSIDER",
+ "REPLACESQUESTION",
+ "DIAGRAMREPLACES",
+ "RUNSDIAGRAM",
+ "DEDICATEDRUNS",
+ "ATTEMPTDEDICATED",
+ "PROPAGATEATTEMPT",
+ "MOSTLYPROPAGATE",
+ "TRANSFORMATIONSMOSTLY",
+ "ONESTRANSFORMATIONS",
+ "APPLICABLEONES",
+ "REGULARAPPLICABLE",
+ "PUSHEDREGULAR",
+ "SPLITPUSHED",
+ "MERGERSPLIT",
+ "ADJUSTINGMERGER",
+ "ACCORDINGLYADJUSTING",
+ "REPLICATEDACCORDINGLY",
+ "DEALREPLICATED",
+ "UNIFORMLYDEAL",
+ "SYMMETRYUNIFORMLY",
+ "TREESSYMMETRY",
+ "COMPACTTREES",
+ "COLLAPSEDCOMPACT",
+ "SEPARATECOLLAPSED",
+ "PARAMETERIZESEPARATE",
+ "TURNPARAMETERIZE",
+ "REMAINSTURN",
+ "DECIDEREMAINS",
+ "DATANODEDECIDE",
+ "GUIDEDDATANODE",
+ "HEURISTICSGUIDED",
+ "DECISIONHEURISTICS",
+ "MOVEMENTDECISION",
+ "STRATEGIESMOVEMENT",
+ "WEVESTRATEGIES",
+ "FOCUSEDWEVE",
+ "SCATTERFOCUSED",
+ "GATHERSCATTER",
+ "LOGICGATHER",
+ "CLOSELOGIC",
+ "STRATEGYCLOSE",
+ "REDUCTIONSTRATEGY",
+ "MESSAGEREDUCTION",
+ "ERRORMESSAGE",
+ "DAYOFMONTHERROR",
+ "HAPPENSDAYOFMONTH",
+ "HEAVYHAPPENS",
+ "FILTERINGHEAVY",
+ "GROUPSFILTERING",
+ "LIGHTWEIGHTGROUPS",
+ "LEVELSLIGHTWEIGHT",
+ "KEYSLEVELS",
+ "RESPECTIVELYKEYS",
+ "EASILYRESPECTIVELY",
+ "EXCEEDEASILY",
+ "BUDGETEXCEED",
+ "AGGREGATIONSBUDGET",
+ "BECOMEAGGREGATIONS",
+ "SIGNIFICANTBECOME",
+ "SEQUENTIALSIGNIFICANT",
+ "BOTTLENECKSSEQUENTIAL",
+ "OVERCOMEBOTTLENECKS",
+ "LIMITATIONSOVERCOME",
+ "PLANNERLIMITATIONS",
+ "IMPLEMENTPLANNER",
+ "SHUFFLINGIMPLEMENT",
+ "COMBINEDSHUFFLING",
+ "RESILIENTCOMBINED",
+ "ESSENCERESILIENT",
+ "PARTITIONESSENCE",
+ "DISJOINTPARTITION",
+ "ENDSDISJOINT",
+ "PROCESSEDENDS",
+ "IMPLEMENTATIONPROCESSED",
+ "SHUFFLEIMPLEMENTATION",
+ "FLEXIBLESHUFFLE",
+ "SELECTPARTITIONFLEXIBLE",
+ "LOGICALLYSELECTPARTITION",
+ "SCANSLOGICALLY",
+ "RETAINSSCANS",
+ "BELONGSRETAINS",
+ "REQUESTEDBELONGS",
+ "PARTITIONSREQUESTED",
+ "CRUCIALPARTITIONS",
+ "DIFFERENCECRUCIAL",
+ "LATTERDIFFERENCE",
+ "BUFFERINGLATTER",
+ "PARTITIONEDBUFFERING",
+ "FIFOPARTITIONED",
+ "REQUESTSFIFO",
+ "LOCATESREQUESTS",
+ "PUMPSLOCATES",
+ "BUFFEREDPUMPS",
+ "PLACEBUFFERED",
+ "DISCUSSPLACE",
+ "APPEARANCEDISCUSS",
+ "ESTIMATESAPPEARANCE",
+ "SIDESESTIMATES",
+ "DECIDESSIDES",
+ "LESSDECIDES",
+ "NAÏVELESS",
+ "BRINGNAÏVE",
+ "SIGNIFICANTLYBRING",
+ "SMALLERSIGNIFICANTLY",
+ "SIDESMALLER",
+ "BROADCASTSIDE",
+ "EVALUATEBROADCAST",
+ "EMBEDEVALUATE",
+ "DISTRIBUTEEMBED",
+ "OTHERWISEDISTRIBUTE",
+ "SHUFFLEDOTHERWISE",
+ "PARTITIONINGSHUFFLED",
+ "CONSIDERATIONSPARTITIONING",
+ "IRRELEVANTCONSIDERATIONS",
+ "BUILDIRRELEVANT",
+ "NEWLYBUILD",
+ "FORMEDNEWLY",
+ "UNNECESSARYFORMED",
+ "BENEFITUNNECESSARY",
+ "SUBSEQUENTBENEFIT",
+ "GRANULARSUBSEQUENT",
+ "FINALGRANULAR",
+ "INITIATEDFINAL",
+ "WALKINGINITIATED",
+ "INSTANTIATINGWALKING",
+ "ITERATORINSTANTIATING",
+ "DEMANDITERATOR",
+ "PULLDEMAND",
+ "DELIVERSPULL",
+ "COLUMNARDELIVERS",
+ "INSTANTIATECOLUMNAR",
+ "ITERATORSINSTANTIATE",
+ "INVOKEITERATORS",
+ "METHODINVOKE",
+ "SPECIFICATIONMETHOD",
+ "DELIVERSPECIFICATION",
+ "CONSUMERDELIVER",
+ "RETURNCONSUMER",
+ "REDUCEDRETURN",
+ "CONSUMEREDUCED",
+ "UPDATINGCONSUME",
+ "GRIDUPDATING",
+ "HASHJOINGRID",
+ "UNDERLYINGHASHJOIN",
+ "PREPAREUNDERLYING",
+ "INVOCATIONSPREPARE",
+ "HASHJOINSINVOCATIONS",
+ "PROBEHASHJOINS",
+ "LOCATEPROBE",
+ "ENTRIESLOCATE",
+ "JOINEDENTRIES",
+ "SPECIALJOINED",
+ "TACKLESPECIAL",
+ "AFFECTTACKLE",
+ "PARALLELSUBQUERYAFFECT",
+ "REMOTESUBQUERYPARALLELSUBQUERY",
+ "IDENTITYREMOTESUBQUERY",
+ "REMOTEIDENTITY",
+ "INJECTEDREMOTE",
+ "JUNCTIONSINJECTED",
+ "REPRESENTSJUNCTIONS",
+ "IMPLEMENTEDREPRESENTS",
+ "NEITHERIMPLEMENTED",
+ "PHYSICALNEITHER",
+ "ENCODEDPHYSICAL",
+ "NEVERTHELESSENCODED",
+ "PLANNINGNEVERTHELESS",
+ "OVERHEADPLANNING",
+ "INHIBITSOVERHEAD",
+ "PARALLELISMINHIBITS",
+ "ENCODESPARALLELISM",
+ "SUPPLIEDENCODES",
+ "BUFFERSUPPLIED",
+ "PARSEDBUFFER",
+ "BUFFERSPARSED",
+ "ACCUMULATEDBUFFERS",
+ "FIXEDACCUMULATED",
+ "BYTESFIXED",
+ "VARIABLEBYTES",
+ "SIZEDVARIABLE",
+ "ATTEMPTSSIZED",
+ "BLOCKSATTEMPTS",
+ "RANGESBLOCKS",
+ "ADDITIONRANGES",
+ "BUILDERADDITION",
+ "COMPONENTBUILDER",
+ "CONSUMEDCOMPONENT",
+ "SEALCONSUMED",
+ "SPILLSSEAL",
+ "STRUCTURESSPILLS",
+ "SEALEDSTRUCTURES",
+ "DIRECTORYSEALED",
+ "COMPRISINGDIRECTORY",
+ "OFFSETSCOMPRISING",
+ "TRIVIALOFFSETS",
+ "PARSINGTRIVIAL",
+ "MINIMIZEPARSING",
+ "ACCESSINGMINIMIZE",
+ "PERFECTLYACCESSING",
+ "VALIDPERFECTLY",
+ "PLAINVALID",
+ "PARSEPLAIN",
+ "EXTRACTPARSE",
+ "PROPERTIESEXTRACT",
+ "PROVIDESPROPERTIES",
+ "FINEPROVIDES",
+ "TUNEDFINE",
+ "SPEEDINGTUNED",
+ "PROPERTYSPEEDING",
+ "MAGNITUDEPROPERTY",
+ "BINARYMAGNITUDE",
+ "NESTEDBINARY",
+ "PRIMITIVESNESTED",
+ "ARRAYSPRIMITIVES",
+ "BAGSARRAYS",
+ "ELEMENTSBAGS",
+ "ENCODINGSELEMENTS",
+ "STRINGSENCODINGS",
+ "LENGTHSTRINGS",
+ "PREFIXEDLENGTH",
+ "REPLACEDPREFIXED",
+ "TOKENREPLACED",
+ "SCHEMESTOKEN",
+ "HOMOGENEOUSSCHEMES",
+ "NUMERICHOMOGENEOUS",
+ "OFFSETNUMERIC",
+ "PAIRSOFFSET",
+ "DEEPLYPAIRS",
+ "ACCESSEDDEEPLY",
+ "PATHACCESSED",
+ "OPPOSEDPATH",
+ "EVERYOPPOSED",
+ "TUNEEVERY",
+ "DISABLETUNE",
+ "SCOPEDISABLE",
+ "ANALYZEDSCOPE",
+ "NORMALIZEDANALYZED",
+ "POSITIONSNORMALIZED",
+ "CONTAININGPOSITIONS",
+ "ORDINALSCONTAINING",
+ "RECORDEDORDINALS",
+ "SORTEDRECORDED",
+ "RANGESORTED",
+ "ENUMERATESRANGE",
+ "ATOMICENUMERATES",
+ "REBUILDATOMIC",
+ "MENTIONEDREBUILD",
+ "EARLIERMENTIONED",
+ "CORRESPONDEARLIER",
+ "DIRECTLYCORRESPOND",
+ "IMPACTSDIRECTLY",
+ "NEGATIVELYIMPACTS",
+ "MAINTENANCENEGATIVELY",
+ "MONITORSMAINTENANCE",
+ "ORCHESTRATESMONITORS",
+ "FULLYORCHESTRATES",
+ "DECODESFULLY",
+ "LARGERDECODES",
+ "MOTIVATIONLARGER",
+ "ELIMINATEMOTIVATION",
+ "REFERENCINGELIMINATE",
+ "REBUILDINGREFERENCING",
+ "INDICESREBUILDING",
+ "CHAININDICES",
+ "ADJACENTCHAIN",
+ "BOTTOMADJACENT",
+ "ABLEBOTTOM",
+ "ADVANTAGEABLE",
+ "SPECIFICSADVANTAGE",
+ "LOOKSPECIFICS",
+ "STAGESLOOK",
+ "TRAVERSESTAGES",
+ "COMPAREDTRAVERSE",
+ "CANDIDATECOMPARED",
+ "COMBINECANDIDATE",
+ "INTERSECTIONCOMBINE",
+ "NEGATIONINTERSECTION",
+ "ORIGINALNEGATION",
+ "ITERATEORIGINAL",
+ "POSITIONITERATE",
+ "OBTAINEDPOSITION",
+ "AGAINSTOBTAINED",
+ "LAZILYAGAINST",
+ "FETCHLAZILY",
+ "SLICESFETCH",
+ "PROPAGATEDSLICES",
+ "CORRECTPROPAGATED",
+ "GLOBALCORRECT",
+ "SHARDACCESSGLOBAL",
+ "RECOGNIZESHARDACCESS",
+ "PROCEEDRECOGNIZE",
+ "FETCHINGPROCEED",
+ "FEEDFETCHING",
+ "TIMESTAMPSFEED",
+ "PASSEDTIMESTAMPS",
+ "EVALUATORPASSED",
+ "VARIATIONEVALUATOR",
+ "HEAPVARIATION",
+ "ACCUMULATEHEAP",
+ "DETERMINEDACCUMULATE",
+ "STREAMINGDETERMINED",
+ "EXPLAINEDSTREAMING",
+ "REQUIRESEXPLAINED",
+ "RELATIVELYREQUIRES",
+ "CHUNKSRELATIVELY",
+ "INTERMEDIATECHUNKS",
+ "PORTIONSINTERMEDIATE",
+ "MEGABYTESPORTIONS",
+ "MAKINGMEGABYTES",
+ "EFFECTMAKING",
+ "ENABLEDEFFECT",
+ "IDENTIFIEDENABLED",
+ "INSERTEDIDENTIFIED",
+ "ORDINALINSERTED",
+ "MONOTONOUSLYORDINAL",
+ "INCREASINGMONOTONOUSLY",
+ "COMMITTEDINCREASING",
+ "AHEADCOMMITTED",
+ "QUERIEDAHEAD",
+ "ROWSTOREQUERIED",
+ "FORCINGROWSTORE",
+ "SCANFORCING",
+ "MAINTAINEDSCAN",
+ "TRANSFERREDMAINTAINED",
+ "EFFICIENCYTRANSFERRED",
+ "ADVANCEDEFFICIENCY",
+ "PROVIDEDADVANCED",
+ "BASICPROVIDED",
+ "RELYBASIC",
+ "MECHANISMSRELY",
+ "DESCRIBEDMECHANISMS",
+ "OPERATESDESCRIBED",
+ "IDENTIFYOPERATES",
+ "EXPANDEDIDENTIFY",
+ "KNOWLEDGEEXPANDED",
+ "TOPOLOGYKNOWLEDGE",
+ "FUNCTIONALLYTOPOLOGY",
+ "COMPUTATIONSFUNCTIONALLY",
+ "OPPORTUNITYCOMPUTATIONS",
+ "OFFLOADOPPORTUNITY",
+ "TRANSFEROFFLOAD",
+ "COSTSTRANSFER",
+ "NETWORKCOSTS",
+ "REGIONSNETWORK",
+ "IDENTIFIESREGIONS",
+ "DELEGATEDIDENTIFIES",
+ "FEASIBILITYDELEGATED",
+ "RENDEREDFEASIBILITY",
+ "STANDALONERENDERED",
+ "ENCAPSULATESSTANDALONE",
+ "PARTIALENCAPSULATES",
+ "INSTRUCTSPARTIAL",
+ "POLICYSINSTRUCTS",
+ "CREATIONPOLICYS",
+ "FILTEREDCREATION",
+ "WORDSFILTERED",
+ "ESTABLISHWORDS",
+ "FLOWSESTABLISH",
+ "DEPENDENCIESFLOWS",
+ "EXTENDSDEPENDENCIES",
+ "COMMITTINGEXTENDS",
+ "ITERATESCOMMITTING",
+ "PLANSITERATES",
+ "INITIATESPLANS",
+ "DERIVEDINITIATES",
+ "READYDERIVED",
+ "CASCADINGREADY",
+ "UPDATESCASCADING",
+ "FOLLOWERUPDATES",
+ "FOLLOWFOLLOWER",
+ "UTILIZINGFOLLOW",
+ "PERIODICALLYUTILIZING",
+ "SYNCHRONIZESPERIODICALLY",
+ "CHANGESSYNCHRONIZES",
+ "RESPECTCHANGES",
+ "VARYRESPECT",
+ "OVERALLVARY",
+ "LEADOVERALL",
+ "NEARLEAD",
+ "POWERINGNEAR",
+ "SCENARIOSPOWERING",
+ "SCIENTISTSSCENARIOS",
+ "WISHESSCIENTISTS",
+ "OCCASIONALWISHES",
+ "EXPERIMENTSOCCASIONAL",
+ "STANDEXPERIMENTS",
+ "HEAVIERSTAND",
+ "RESOURCEHEAVIER",
+ "INTENSIVERESOURCE",
+ "ANALYTICALINTENSIVE",
+ "INTERFERINGANALYTICAL",
+ "SEPTEMBERINTERFERING",
+ "EXTENSIVELYSEPTEMBER",
+ "TEAMSEXTENSIVELY",
+ "PROVIDINGTEAMS",
+ "COMPREHENSIVEPROVIDING",
+ "PRODUCTSCOMPREHENSIVE",
+ "PETABYTESPRODUCTS"
+ };
+ }
+}
diff --git a/Enums.cs b/Enums.cs
new file mode 100644
index 0000000..474c96c
--- /dev/null
+++ b/Enums.cs
@@ -0,0 +1,28 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace BenchmarkLogGenerator
+{
+ public enum WriterType
+ {
+ EventHub,
+ LocalDisk,
+ AzureStorage
+ }
+
+ public enum BenchmarkDataSize
+ {
+ OneGB,
+ OneTB,
+ HundredTB
+ }
+
+ public enum Level
+ {
+ Information,
+ Warning,
+ Error,
+ Critical
+ }
+}
diff --git a/Flows/IngestionFlow.cs b/Flows/IngestionFlow.cs
new file mode 100644
index 0000000..993eb15
--- /dev/null
+++ b/Flows/IngestionFlow.cs
@@ -0,0 +1,199 @@
+namespace BenchmarkLogGenerator.Flows
+{
+ using BenchmarkLogGenerator.Data;
+ using Microsoft.Azure.Amqp;
+ using System;
+ using System.Collections.Generic;
+ using System.IO;
+ using System.IO.Enumeration;
+ using System.Reflection.Metadata.Ecma335;
+ using System.Security.Claims;
+ using Step = Scheduler.Step;
+
+ public static class IngestionFlow
+ {
+ public static IEnumerable<Step> Generate(int index, int logsPerSessionFactor, int numSessionsFactor, string source)
+ {
+ var rng = Generator.Current.Rng;
+
+ //increase number of sessions by sizeFactor
+ int numSessions = numSessionsFactor*(4 * (80 + (rng.Next() % 1000)));
+ for (int i = 0; i < numSessions; ++i)
+ {
+ var cid = ToGuid(index, i);
+
+ yield return Gen.Sleep(TimeSpan.FromSeconds(rng.Next() % 64));
+ //traces
+
+ int rnd = rng.Next();
+ Gen.Spawn(IngestionSession(i, cid, rnd, rng, source, logsPerSessionFactor));
+ }
+ }
+
+ public static IEnumerable<Step> IngestionSession(int index, string cid, long rnd, Random rng, string source, int logsPerSessionFactor)
+ {
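+ // 6,400,000 ticks (100 ns each) ≈ 0.64 s: upper bound for the random delay between consecutive steps of a session.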
+ long stepLength = 6400000;
+ var format = Logs.FileFormats[rnd % 4];
+ //increase the number of logs per session by factor
+ int numFiles = logsPerSessionFactor * (5 + (int)rnd % 100);
+ List<string> files = new List<string>();
+
+ for (int i = 0; i < numFiles; i++)
+ {
+ var fileName = $"\"\"https://benchmarklogs3.blob.core.windows.net/benchmark/2014/{source}_{index}_{i}.{format}.gz\"\"";
+ files.Add(fileName);
+ }
+
+ yield return Gen.Sleep(TimeSpan.FromTicks(rnd % stepLength));
+ var rand1000 = rnd % 1000;
+ var node = $"Engine{(rand1000).ToString().PadLeft(12, '0')}";
+ var message = string.Format(Logs.IngestCommand, Names.Tables[rnd % 607], Logs.FileFormats[rnd%4]);
+ var filesArray = $"\"[{string.Join(",", files)}]\"";
+ bool isCritical = rng.Next() % 10000 <= 25;
+ bool isError = rng.Next() % 10000 <= 250;
+ int numTracesForSession = 50 + rng.Next() % 5000;
+
+ // first step send ingestion command
+ Gen.TraceInfo(node, Level.Information.ToString(), Names.ingestionComponents[0], cid, message, filesArray);
+
+ foreach (var file in files)
+ {
+ yield return Gen.Sleep(TimeSpan.FromTicks(rnd % stepLength));
+ Gen.TraceInfo(node, Level.Information.ToString(), Names.ingestionComponents[1], cid, string.Format(Logs.DownloadEvent, file), GetDownloadProperties(rnd));
+ //noise loop
+ int numIterations = (int)(numTracesForSession/files.Count);
+ for(int i=0; i <= numIterations; i++)
+ {
+ yield return Gen.Sleep(TimeSpan.FromTicks(rnd % stepLength));
+ rand1000 = rng.Next() % 1000;
+ node = $"Engine{(rand1000).ToString().PadLeft(12, '0')}";
+ var level = GetLevel(rand1000);
+ var component = Names.Components[rng.Next() % 128];
+ int messageIndex = (int)rng.Next() % 7;
+ message = GetMessage(messageIndex, new object[] { node, rand1000, node + "." + component + ".com" });
+ Gen.TraceInfo(node, level, component, cid, message, "");
+ }
+ yield return Gen.Sleep(TimeSpan.FromTicks(rnd % stepLength));
+ if (isError || isCritical)
+ {
+ //no ingestion - emit error trace
+ Gen.TraceInfo(node, Level.Error.ToString(), Names.Components[rng.Next() % 128], cid, GetException(rng.Next()), "");
+ }
+ else
+ {
+ //ingest
+ Gen.TraceInfo(node, Level.Information.ToString(), Names.ingestionComponents[2], cid, string.Format(Logs.IngestionCompletion, file), GetIngestionProperties(rnd, format));
+ }
+ }
+ yield return Gen.Sleep(TimeSpan.FromTicks(rnd % stepLength));
+ if (isCritical)
+ {
+ //emit critical trace
+ Gen.TraceInfo(node, Level.Critical.ToString(), Names.Components[rng.Next() % 128], cid, string.Format(Logs.CriticalMessage, rnd, rnd % 100000, rnd % 60000, rnd%40000, rnd%250000),"");
+ Gen.TraceInfo(node, Level.Information.ToString(), Names.ingestionComponents[3], cid, string.Format(Logs.CompletedMessage, Logs.StatusCodes[2]), "");
+ }
+ else if (isError)
+ {
+ Gen.TraceInfo(node, Level.Information.ToString(), Names.ingestionComponents[3], cid, string.Format(Logs.CompletedMessage, Logs.StatusCodes[1]), "");
+ }
+ else
+ {
+ Gen.TraceInfo(node, Level.Information.ToString(), Names.ingestionComponents[3], cid, string.Format(Logs.CompletedMessage, Logs.StatusCodes[0]), "");
+ }
+ }
+
+ private static string GetDownloadProperties(long rnd)
+ {
+ long size = (long)rnd % 10_000_000_000L;
+ long rowCount = (long)size / ((rnd % 1000) + 999);
+ double durationInSeconds = (size * 1.0) / (15 * 1024 * 1024) + (rnd % 17);
+ var duration = TimeSpan.FromSeconds(durationInSeconds).ToString();
+ return $"\"{{\"\"compressedSize\"\": {size},\"\"OriginalSize\"\": {size*8},\"\"downloadDuration\"\": \"\"{duration}\"\" }}\"";
+ }
+
+ private static string GetIngestionProperties(long rnd, string format)
+ {
+ long size = (long)rnd % 10_000_000_000L;
+ long rowCount = (long)size / ((rnd % 1000) + 999);
+ double durationInSeconds = (size * 1.0) / (15 * 1024 * 1024) + (rnd % 17);
+ double cpuTimeInSeconds = (size * 1.0) / (10 * 1024 * 1024) + (rnd % 110737);
+ var duration = TimeSpan.FromSeconds(durationInSeconds).ToString();
+ var cpuTime = TimeSpan.FromSeconds(cpuTimeInSeconds).ToString();
+ return $"\"{{\"\"size\"\": {size}, \"\"format\"\":\"\"{format}\"\", \"\"rowCount\"\":{rowCount}, \"\"cpuTime\"\":\"\"{cpuTime}\"\",\"\"duration\"\": \"\"{duration}\"\" }}\"";
+ }
+
+ private static string GetLevel(long v)
+ {
+ if (v <= 100)
+ return Level.Warning.ToString();
+ return Level.Information.ToString();
+ }
+
+ private static string GetMessage(int v, object[] replacements)
+ {
+ var m = Logs.IngestionLogs[v];
+ return string.Format(m, replacements);
+ }
+
+ private static string GetException(long v)
+ {
+ var startPosition = v % 10;
+ var numberOfStackSteps = 10 * startPosition;
+ List<string> steps = new List<string>() { string.Format(Logs.ExceptionHeader, Logs.ExceptionTypes[GetExceptionIndex(v)], (v % 10000).ToString().PadLeft(8, '0'))};
+ for (long i = startPosition; i<= numberOfStackSteps; i++ )
+ {
+ steps.Add(Logs.StackTraces[i % 10]);
+ }
+ return $"\"{string.Join(Environment.NewLine, steps.ToArray())}\"";
+ }
+
+ private static int GetExceptionIndex(long v)
+ {
+ if (v % 10 <= 4)
+ return (int)v % 5;
+ if (v % 10 >= 5 && v % 10 <= 6)
+ return (int)v % 40;
+ if (v % 10 == 7)
+ return (int)v % 80;
+ return (int)v % 104;
+ }
+
+ public static IEnumerable<Step> PeriodicTrace()
+ {
+ for (int i = 0; i < 100; i++)
+ {
+ yield return Gen.Sleep(TimeSpan.FromMinutes(1));
+ //Gen.TraceInfo("Generator: periodic trace");
+ }
+ }
+
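+ // Derives a stable, deterministic GUID from (index, iterator), so the same inputs always yield the same correlation id.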
+ public static string ToGuid(int index, int iterator)
+ {
+ ulong a;
+ ulong b;
+ unchecked
+ {
+ var x = unchecked((ulong)index);
+ var y = unchecked((ulong)iterator);
+ a = Scramble(Scramble(0x165667B19E3779F9UL, x), y);
+ b = Scramble(Scramble(a, y), x);
+ }
+
+ Span<byte> bytes = stackalloc byte[16];
+ BitConverter.TryWriteBytes(bytes.Slice(0, 8), a);
+ BitConverter.TryWriteBytes(bytes.Slice(8, 8), b);
+ return (new Guid(bytes)).ToString();
+ }
+
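+ // Bit-mixing step using xxHash64-style prime multiplications and a rotation; only used to spread the (index, iterator) bits.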
+ private static ulong Scramble(ulong h, ulong n)
+ {
+ unchecked
+ {
+ h += n * 0xC2B2AE3D27D4EB4FUL;
+ h = (h << 31) | (h >> 33);
+ h *= 0x9E3779B185EBCA87UL;
+ return h;
+ }
+ }
+ }
+}
diff --git a/Generator.cs b/Generator.cs
new file mode 100644
index 0000000..9555915
--- /dev/null
+++ b/Generator.cs
@@ -0,0 +1,96 @@
+using BenchmarkLogGenerator.Data;
+using BenchmarkLogGenerator.Flows;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace BenchmarkLogGenerator
+{
+ using Step = Scheduler.Step;
+ using Event = Scheduler.Event;
+
+ public static class Gen
+ {
+ public static void TraceInfo(string node, string level, string component, string cid, string message, string properties)
+ {
+ /* - TODO check if we need to generate the strings as invariant format */
+ var gen = Generator.Current;
+ var now = gen.Now;
+ Generator.Current.LogWriter.Write(
+ now,
+ Generator.Current.Source,
+ node,
+ level,
+ component,
+ cid,
+ message,
+ properties);
+ }
+
+ public static void CloseTracer()
+ {
+ Generator.Current.LogWriter.Close();
+ }
+
+ public static Step Sleep(TimeSpan duration)
+ {
+ return Generator.Current.Scheduler.DelayFlow(duration);
+ }
+
+ public static Event Spawn(IEnumerable<Step> steps)
+ {
+ return Generator.Current.Scheduler.ScheduleNewFlow(steps);
+ }
+ }
+
+ public sealed class Generator
+ {
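+ // Generation context for a single source: owns the virtual-time scheduler, deterministic RNG, and log writer; exposed per thread via Generator.Current.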
+ public Scheduler Scheduler { get; private set; }
+
+ public int Seed { get; set; }
+
+ public int SessionCount { get; set; }
+
+ public Random Rng { get; private set; }
+
+ public LogWriter LogWriter { get; private set; }
+
+ public string Source;
+
+ public DateTime Now
+ {
+ get { return Scheduler.Now; }
+ }
+
+ [ThreadStatic] static Generator s_current;
+
+ public static Generator Current => s_current;
+
+ public static void Run(int index, LogWriter writer, int logsPerSessionFactor, int numSessionsFactor)
+ {
+ var gen = new Generator(index, writer);
+ gen.SetOnThread();
+ Gen.Spawn(IngestionFlow.Generate(index, logsPerSessionFactor, numSessionsFactor, gen.Source));
+ gen.Scheduler.Run();
+ Gen.CloseTracer();
+ }
+
+ public Generator(int seed, LogWriter writer)
+ {
+ Scheduler = new Scheduler(new DateTime(2014, 3, 8, 0, 0, 0, DateTimeKind.Utc));
+ Rng = new Random(5546548 + seed % 100);
+ Seed = seed;
+ Source = Names.Sources[seed % 1610] + seed;
+ LogWriter = writer;
+ LogWriter.Source = Source;
+ }
+
+ private void SetOnThread()
+ {
+ s_current = this;
+ }
+ }
+}
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..9e841e7
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) Microsoft Corporation.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE
diff --git a/LogWriter.cs b/LogWriter.cs
new file mode 100644
index 0000000..63433aa
--- /dev/null
+++ b/LogWriter.cs
@@ -0,0 +1,253 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.IO.Compression;
+using System.Text;
+using System.Threading;
+using Azure.Storage.Blobs;
+using BenchmarkLogGenerator.Utilities;
+using Microsoft.Azure.EventHubs;
+
+namespace BenchmarkLogGenerator
+{
+
+ public abstract class LogWriter : IDisposable
+ {
+ public abstract void Write(DateTime timestamp, string source, string node, string level, string component, string cid, string message, string properties);
+ public abstract void Close();
+ public string Source { get; set; }
+ #region IDisposable Support
+
+ public abstract void Dispose(bool disposing);
+ public void Dispose()
+ {
+ Dispose(true);
+ }
+ #endregion
+ }
+
+ public class FileLogWriter : LogWriter
+ {
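+ // Buffers rows in memory and flushes them as gzip-compressed CSV files into year/month/day/hour folders; when configured for Azure storage, each file is also uploaded to the blob container and then deleted locally.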
+ private const int c_maxFileSize = 100000;
+ private const string c_compressedSuffix = "csv.gz";
+ private static readonly byte[] s_newLine = Encoding.UTF8.GetBytes(Environment.NewLine);
+
+ private bool writeToStorage;
+ private int year, month, day, hour;
+ string localPath = string.Empty;
+ string fileName = string.Empty;
+ private MemoryStream activeFile;
+ private DirectoryInfo currentDirectory;
+ private string rootDirectory;
+ private int rowCount;
+ private BlobContainerClient blobContainerClient;
+ private string blobConnectionString;
+ private int fileCounter;
+ private string id;
+ private Stopwatch sw = new Stopwatch();
+ private bool disposedValue = false; // To detect redundant calls
+
+ public FileLogWriter(string targetPath, bool isAzureStorage, string writerId, BlobContainerClient client)
+ {
+ writeToStorage = isAzureStorage;
+ id = writerId;
+ if (writeToStorage)
+ {
+ rootDirectory = Directory.CreateDirectory(Directory.GetCurrentDirectory() + "\\temp").FullName;
+ blobConnectionString = targetPath;
+ blobContainerClient = client;
+ }
+ else
+ {
+ rootDirectory = targetPath;
+ }
+ sw.Start();
+ }
+
+ public override void Write(DateTime timestamp, string source, string node, string level, string component, string cid, string message, string properties)
+ {
+ try
+ {
+ if (year != timestamp.Year || month != timestamp.Month || day != timestamp.Day || hour != timestamp.Hour)
+ {
+ if (activeFile != null)
+ {
+ WriteFile();
+ activeFile.Close();
+ fileCounter++;
+ }
+ //update existing values
+ year = timestamp.Year;
+ month = timestamp.Month;
+ day = timestamp.Day;
+ hour = timestamp.Hour;
+ localPath = string.Join('\\', year, month.ToString("D2"), day.ToString("D2"), hour.ToString("D2"));
+ //Need new file
+ currentDirectory = Directory.CreateDirectory(string.Join('\\', rootDirectory, localPath));
+ activeFile = new MemoryStream();
+ }
+ else if (rowCount >= c_maxFileSize)
+ {
+ //compress file
+ WriteFile();
+ activeFile.Close();
+
+ fileCounter++;
+ activeFile = new MemoryStream();
+ rowCount = 0;
+ }
+ var log = string.Join(",", timestamp.FastToString(), source, node, level, component, cid, message, properties);
+ activeFile.Write(Encoding.UTF8.GetBytes(log));
+ activeFile.Write(s_newLine);
+
+ rowCount++;
+ }
+ catch (Exception ex)
+ {
+ var color = Console.ForegroundColor;
+ Console.ForegroundColor = ConsoleColor.Red;
+ Console.WriteLine("Unexpected Error");
+ Console.WriteLine(ex.Message);
+ Console.ForegroundColor = color;
+ }
+ }
+
+ private void WriteFile()
+ {
+ var compressedFileName = GetFileName(currentDirectory.FullName, c_compressedSuffix);
+ if (File.Exists(compressedFileName))
+ {
+ File.Delete(compressedFileName);
+ }
+
+ using (FileStream compressedFileStream = File.Create(compressedFileName))
+ {
+ using (GZipStream compressionStream = new GZipStream(compressedFileStream, CompressionMode.Compress))
+ {
+ activeFile.Position = 0;
+ activeFile.CopyTo(compressionStream);
+ }
+ }
+
+ if (writeToStorage)
+ {
+ using (FileStream uploadFileStream = File.OpenRead(compressedFileName))
+ {
+ var fileName = GetFileName(localPath, c_compressedSuffix);
+ var blobClient = blobContainerClient.GetBlobClient(fileName);
+ //retry blob ingestion
+ bool ingestionDone = false;
+ while (!ingestionDone)
+ {
+ try
+ {
+ blobClient.Upload(uploadFileStream, true, new CancellationToken());
+ ingestionDone = true;
+ }
+ catch (Exception ex)
+ {
+ var color = Console.ForegroundColor;
+ Console.ForegroundColor = ConsoleColor.Red;
+ Console.WriteLine("Blob upload error");
+ Console.WriteLine(ex.Message);
+ Console.ForegroundColor = color;
+ Thread.Sleep(100);
+ }
+ }
+ }
+ File.Delete(compressedFileName);
+ }
+ }
+
+ private string GetFileName(string root, string suffix)
+ {
+ return $"{root}\\{this.Source}_{fileCounter}.{suffix}";
+ }
+
+ public override void Dispose(bool disposing)
+ {
+ if (!disposedValue)
+ {
+ if (disposing)
+ {
+ WriteFile();
+ activeFile.Dispose();
+ this.sw.Stop();
+ Console.WriteLine($"Last file closed: {this.id} time elapsed: {Math.Round(sw.Elapsed.TotalMinutes, 2)} minutes");
+ }
+ disposedValue = true;
+ }
+ }
+
+ public override void Close()
+ {
+ this.Dispose();
+ }
+ }
+
+ public class EventHubWriter : LogWriter
+ {
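+ // Accumulates formatted log lines and sends them to Event Hub in batches of 2000 rows per EventData message.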
+ List<string> logs = new List<string>();
+ private bool disposedValue = false; // To detect redundant calls
+
+ int ehSendCounter;
+ // local root directory to write to
+ public string RootDirectory { get; set; }
+ //EventHub connection string
+ public string EventHubConnectionString { get; set; }
+ //Azure storage container connection string
+ public string AzureStorageContainerConnectionString { get; set; }
+
+ EventHubClient eventHubClient;
+ public EventHubWriter(string connectionString)
+ {
+ var builder = new EventHubsConnectionStringBuilder(connectionString)
+ {
+ TransportType = TransportType.Amqp,
+ OperationTimeout = TimeSpan.FromSeconds(120)
+ };
+
+ eventHubClient = EventHubClient.Create(builder);
+
+ }
+
+ public override void Write(DateTime timestamp, string source, string node, string level, string component, string cid, string message, string properties)
+ {
+ var log = string.Join(",", timestamp.FastToString(), source, node, level, component, cid, message, properties);
+ logs.Add(log);
+ if (logs.Count >= 2000)
+ {
+ ehSendCounter++;
+ SendToEventHub();
+ Console.WriteLine($"Message {ehSendCounter} sent");
+ logs = new List();
+ }
+ }
+
+ private void SendToEventHub()
+ {
+ string recordString = string.Join(Environment.NewLine, logs);
+ EventData eventData = new EventData(Encoding.UTF8.GetBytes(recordString));
+ eventHubClient.SendAsync(eventData).Wait();
+ }
+
+ public override void Dispose(bool disposing)
+ {
+ if (!disposedValue)
+ {
+ if (disposing)
+ {
+ SendToEventHub();
+ Console.WriteLine($"Final message to EH");
+ }
+ disposedValue = true;
+ }
+ }
+
+ public override void Close()
+ {
+ this.Dispose();
+ }
+ }
+}
diff --git a/PriorityQueue.cs b/PriorityQueue.cs
new file mode 100644
index 0000000..cf8d2ac
--- /dev/null
+++ b/PriorityQueue.cs
@@ -0,0 +1,169 @@
+using System;
+using System.Collections.Generic;
+
+namespace BenchmarkLogGenerator
+{
+ public sealed class PriorityQueue<T> where T : IComparable<T>
+ {
+ private long m_count = long.MinValue;
+ private IndexedItem[] m_items;
+ private int m_size;
+
+ public PriorityQueue()
+ : this(16)
+ {
+ }
+
+ public PriorityQueue(int capacity)
+ {
+ m_items = new IndexedItem[capacity];
+ m_size = 0;
+ }
+
+ private bool IsHigherPriority(int left, int right)
+ {
+ return m_items[left].CompareTo(m_items[right]) < 0;
+ }
+
+ private int Percolate(int index)
+ {
+ if (index >= m_size || index < 0)
+ {
+ return index;
+ }
+
+ var parent = (index - 1) / 2;
+ while (parent >= 0 && parent != index && IsHigherPriority(index, parent))
+ {
+ // swap index and parent
+ var temp = m_items[index];
+ m_items[index] = m_items[parent];
+ m_items[parent] = temp;
+
+ index = parent;
+ parent = (index - 1) / 2;
+ }
+
+ return index;
+ }
+
+ private void Heapify(int index)
+ {
+ if (index >= m_size || index < 0)
+ {
+ return;
+ }
+
+ while (true)
+ {
+ var left = 2 * index + 1;
+ var right = 2 * index + 2;
+ var first = index;
+
+ if (left < m_size && IsHigherPriority(left, first))
+ {
+ first = left;
+ }
+
+ if (right < m_size && IsHigherPriority(right, first))
+ {
+ first = right;
+ }
+
+ if (first == index)
+ {
+ break;
+ }
+
+ // swap index and first
+ var temp = m_items[index];
+ m_items[index] = m_items[first];
+ m_items[first] = temp;
+
+ index = first;
+ }
+ }
+
+ public int Count => m_size;
+
+ public T Peek()
+ {
+ if (m_size == 0)
+ {
+ throw new InvalidOperationException("Heap empty");
+ }
+
+ return m_items[0].Value;
+ }
+
+ private void RemoveAt(int index)
+ {
+ m_items[index] = m_items[--m_size];
+ m_items[m_size] = default;
+
+ if (Percolate(index) == index)
+ {
+ Heapify(index);
+ }
+
+ if (m_size < m_items.Length / 4)
+ {
+ var temp = m_items;
+ m_items = new IndexedItem[m_items.Length / 2];
+ Array.Copy(temp, 0, m_items, 0, m_size);
+ }
+ }
+
+ public T Dequeue()
+ {
+ var result = Peek();
+ RemoveAt(0);
+ return result;
+ }
+
+ public void Enqueue(T item)
+ {
+ if (m_size >= m_items.Length)
+ {
+ var temp = m_items;
+ m_items = new IndexedItem[m_items.Length * 2];
+ Array.Copy(temp, m_items, temp.Length);
+ }
+
+ var index = m_size++;
+ m_items[index] = new IndexedItem { Value = item, Id = ++m_count };
+ Percolate(index);
+ }
+
+ public bool Remove(T item)
+ {
+ for (var i = 0; i < m_size; ++i)
+ {
+ if (EqualityComparer<T>.Default.Equals(m_items[i].Value, item))
+ {
+ RemoveAt(i);
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ private struct IndexedItem : IComparable<IndexedItem>
+ {
+ public T Value;
+ public long Id;
+
+ public int CompareTo(IndexedItem other)
+ {
+ var c = Value.CompareTo(other.Value);
+ if (c == 0)
+ {
+ c = Id.CompareTo(other.Id);
+ }
+
+ return c;
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Program.cs b/Program.cs
new file mode 100644
index 0000000..4ce1ee1
--- /dev/null
+++ b/Program.cs
@@ -0,0 +1,221 @@
+using Azure.Core;
+using Azure.Storage.Blobs;
+using BenchmarkLogGenerator.Utilities;
+using System;
+using System.Diagnostics;
+using System.Threading.Tasks;
+
+namespace BenchmarkLogGenerator
+{
+ class Program
+ {
+ static CommandLineArgs m_args = new CommandLineArgs();
+ private static readonly string[] m_basicHelpHints = { "/?", "-?", "?", "/help", "-help", "help" };
+ static void Main(string[] args)
+ {
+ if (args.Length > 0)
+ {
+ if (m_basicHelpHints.SafeFastAny(h => string.Equals(h, args[0], StringComparison.OrdinalIgnoreCase)))
+ {
+ PrintUsage();
+ }
+ else
+ {
+ CommandLineArgsParser.Parse(args, m_args, null, true);
+ }
+ }
+ else
+ {
+ PrintUsage();
+ }
+ if (m_args.outputType == WriterType.LocalDisk && m_args.localPath == null
+ || m_args.outputType == WriterType.EventHub && m_args.eventHubConnectionString == null
+ || m_args.outputType == WriterType.AzureStorage && m_args.blobConnectionString == null)
+ {
+ CommandLineArgsParser.WriteHelpStringToConsoleAndQuit(new string[] { }, new CommandLineArgs(), $"The output type of {m_args.outputType} was specified without the corresponding connection/path");
+ }
+
+ Console.WriteLine("Starting...");
+ string containerName = "";
+ BlobContainerClient container = null;
+ if(m_args.outputType == WriterType.AzureStorage)
+ {
+ try
+ {
+ string[] connectionStrings = m_args.blobConnectionString.Split(",");
+ string blobConnectionString = "";
+ string partitionName = "";
+ if (m_args.partition == -1)
+ {
+ blobConnectionString = connectionStrings[0];
+ }
+ else
+ {
+ blobConnectionString = connectionStrings[m_args.partition];
+ partitionName = $"-p{m_args.partition}";
+ }
+
+ var blobOptions = new BlobClientOptions();
+ blobOptions.Retry.MaxRetries = 3;
+ blobOptions.Retry.Mode = RetryMode.Exponential;
+
+ BlobServiceClient blobClient = new BlobServiceClient(blobConnectionString, blobOptions);
+ //BlobServiceClient blobClient = new BlobServiceClient(blobConnectionString);
+ containerName = $"logsbenchmark-{m_args.size}{partitionName}".ToLower();
+ var response = blobClient.CreateBlobContainer(containerName);
+ container = response.Value;
+ }
+ catch (Exception ex)
+ {
+ var color = Console.ForegroundColor;
+ Console.ForegroundColor = ConsoleColor.Red;
+ Console.WriteLine($"Error creating container {containerName}. Please verify that the container does not exist");
+ Console.WriteLine($"Exception Message: {ex.Message}");
+ Console.ForegroundColor = color;
+ Environment.Exit(1);
+ }
+ }
+
+ if (m_args.outputType == WriterType.LocalDisk && m_args.size == BenchmarkDataSize.HundredTB)
+ {
+ Console.WriteLine("For 100TB data size, please use Azure storage outputType.");
+ Environment.Exit(0);
+ }
+
+ Stopwatch sw = new Stopwatch();
+ sw.Start();
+ int logsPerSessionFactor = GetLogsPerSessionFactor(m_args.size);
+ int numSessionsFactor = GetNumSessionsFactor(m_args.size);
+ int start = GetStart(m_args.partition, m_args.size);
+ int end = GetEnd(m_args.partition, m_args.size);
+
+ var res = Parallel.For(start, end, index =>
+ {
+ Generator.Run(index, GetWriter($"iteration count: {index} ", container), logsPerSessionFactor, numSessionsFactor);
+ });
+
+ sw.Stop();
+ Console.WriteLine($"Total time {sw.ElapsedMilliseconds} ms");
+ }
+
+ //Factors:
+ //OneTB 6X more sessions, 100X more sources
+ //HundredTB 10X more sessions, 10X more logs per session, 10X more sources
+ //Expected timespan period for logs:
+ //~1 day for 1GB
+ //~9 days for 1TB
+ //~90 days for 100TB
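+ //Example: for OneTB the loop below runs NumThreads(100) * NumIterations(1) = 100 generator indices (one source each), each with numSessionsFactor = 6; OneGB runs a single index with factor 1.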
+
+ private static int GetNumSessionsFactor(BenchmarkDataSize size)
+ {
+ switch (size)
+ {
+ case BenchmarkDataSize.OneGB:
+ return 1;
+ case BenchmarkDataSize.OneTB:
+ return 6;
+ case BenchmarkDataSize.HundredTB:
+ return 60;
+ default:
+ return 1;
+ }
+ }
+
+
+ private static int GetLogsPerSessionFactor(BenchmarkDataSize size)
+ {
+ switch (size)
+ {
+ case BenchmarkDataSize.OneGB:
+ return 1;
+ case BenchmarkDataSize.OneTB:
+ return 1;
+ case BenchmarkDataSize.HundredTB:
+ return 1;
+ default:
+ return 1;
+ }
+ }
+
+ private static int GetStart(int partition, BenchmarkDataSize size)
+ {
+ if (size == BenchmarkDataSize.HundredTB && partition > -1)
+ {
+ return partition * 100;
+ }
+ return 0;
+ }
+
+ private static int GetEnd(int partition, BenchmarkDataSize size)
+ {
+ if (size == BenchmarkDataSize.HundredTB && partition > -1)
+ {
+ return (partition + 1) * 100 - 1;
+ }
+ return NumThreads(size) * NumIterations(size);
+ }
+
+ private static int NumThreads(BenchmarkDataSize size)
+ {
+ switch (size)
+ {
+ case BenchmarkDataSize.OneGB:
+ return 1;
+ case BenchmarkDataSize.OneTB:
+ return 100;
+ case BenchmarkDataSize.HundredTB:
+ return 100;
+ default:
+ return 100;
+ }
+ }
+
+ private static int NumIterations(BenchmarkDataSize size)
+ {
+ switch (size)
+ {
+ case BenchmarkDataSize.OneGB:
+ return 1;
+ case BenchmarkDataSize.OneTB:
+ return 1;
+ case BenchmarkDataSize.HundredTB:
+ return 10;
+ default:
+ return 1;
+ }
+ }
+
+ private static void PrintUsage()
+ {
+ var esb = new ExtendedStringBuilder();
+ esb.AppendLine();
+ esb.AppendLine("The BenchmarkLogGenerator is a tool to generate logs for benchmark testing");
+ esb.AppendLine();
+ esb.AppendLine("It is invoked with the following parameters:");
+ esb.AppendLine("-output:Where the output should be written to. Possible values are: LocalDisk, AzureBlobStorage or EventHub");
+ esb.AppendLine("-localPath: The root folder");
+ esb.AppendLine("-azureStorageAccountConnections: A comma separated list of Azure storage account connections (can be single connection), containers will be created automaticly using the following template: logsBenchmark-{size}-p{partition}");
+ esb.AppendLine("-eventHubConnection: The connection string for Azure EventHub");
+ esb.AppendLine("-size: The output size, possible values are OneGB, OneTB, HundredTB. Default is OneGB");
+ esb.AppendLine("-partition: The applicable partition, between -1 to 9, where -1 means single partition. Only relevant for HundredTB size. Default is -1");
+ esb.AppendLine();
+ CommandLineArgsParser.PrintUsage(esb);
+
+ }
+
+ private static LogWriter GetWriter(string writerId, BlobContainerClient container)
+ {
+ switch (m_args.outputType)
+ {
+ case WriterType.LocalDisk:
+ return new FileLogWriter(m_args.localPath, false, writerId, null);
+ case WriterType.AzureStorage:
+ return new FileLogWriter(m_args.blobConnectionString, true, writerId, container);
+ case WriterType.EventHub:
+ return new EventHubWriter(m_args.eventHubConnectionString);
+ default:
+ return null;
+ }
+ }
+ }
+}
diff --git a/Properties/launchSettings.json b/Properties/launchSettings.json
new file mode 100644
index 0000000..f8c608a
--- /dev/null
+++ b/Properties/launchSettings.json
@@ -0,0 +1,8 @@
+{
+ "profiles": {
+ "BenchmarkLogGenerator": {
+ "commandName": "Project",
+ "commandLineArgs": "-output:AzureStorage -cc:\"DefaultEndpointsProtocol=https;AccountName=STORAGE_ACCOUNTNAME;AccountKey=KEY;EndpointSuffix=core.windows.net\" "
+ }
+ }
+}
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..8eeee9c
--- /dev/null
+++ b/README.md
@@ -0,0 +1,14 @@
+
+# Contributing
+
+This project welcomes contributions and suggestions. Most contributions require you to agree to a
+Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
+the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
+
+When you submit a pull request, a CLA bot will automatically determine whether you need to provide
+a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
+provided by the bot. You will only need to do this once across all repos using our CLA.
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..f7b8998
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,41 @@
+
+
+## Security
+
+Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
+
+If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below.
+
+## Reporting Security Issues
+
+**Please do not report security vulnerabilities through public GitHub issues.**
+
+Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).
+
+If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).
+
+You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
+
+Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
+
+ * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
+ * Full paths of source file(s) related to the manifestation of the issue
+ * The location of the affected source code (tag/branch/commit or direct URL)
+ * Any special configuration required to reproduce the issue
+ * Step-by-step instructions to reproduce the issue
+ * Proof-of-concept or exploit code (if possible)
+ * Impact of the issue, including how an attacker might exploit the issue
+
+This information will help us triage your report more quickly.
+
+If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.
+
+## Preferred Languages
+
+We prefer all communications to be in English.
+
+## Policy
+
+Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).
+
+
\ No newline at end of file
diff --git a/Scheduler.cs b/Scheduler.cs
new file mode 100644
index 0000000..2bd603e
--- /dev/null
+++ b/Scheduler.cs
@@ -0,0 +1,318 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace BenchmarkLogGenerator
+{
+ public class Scheduler
+ {
+ private PriorityQueue<Step> m_queue;
+
+ public DateTime Now { get; private set; }
+
+ public Scheduler(DateTime now)
+ {
+ m_queue = new PriorityQueue<Step>(1024);
+ Now = now;
+ }
+
+ public void Run()
+ {
+ while (m_queue.Count != 0)
+ {
+ var step = m_queue.Dequeue();
+ if (step.DueTime > Now)
+ {
+ Now = step.DueTime;
+ }
+ step.Execute(this);
+ }
+ }
+
+ public Event ScheduleNewFlow(IEnumerable<Step> steps)
+ {
+ var completionEvent = NewEvent();
+ var flow = new Flow(steps.GetEnumerator(), completionEvent);
+ var step = new FlowDelayStep(DateTimeUtil.Zero, flow);
+ Enqueue(step);
+ return completionEvent;
+ }
+
+ public Step DelayFlow(TimeSpan duration)
+ {
+ return new FlowDelayStep(DateTimeUtil.Add(Now, duration));
+ }
+
+ public Event NewEvent()
+ {
+ return new Event(this);
+ }
+
+ private void Enqueue(Step step)
+ {
+ if (step == null)
+ {
+ return;
+ }
+
+ m_queue.Enqueue(step);
+ }
+
+ internal sealed class Flow
+ {
+ private IEnumerator<Step> m_steps;
+ private Event m_completionEvent;
+
+ internal Flow(IEnumerator<Step> steps, Event completionEvent)
+ {
+ m_steps = steps;
+ m_completionEvent = completionEvent;
+ }
+
+ public Step NextStep()
+ {
+ if (m_steps.MoveNext())
+ {
+ var step = m_steps.Current;
+ step.Flow = this;
+ return step;
+ }
+ else
+ {
+ m_completionEvent.Signal();
+ return null;
+ }
+ }
+ }
+
+ public abstract class Step : IComparable<Step>
+ {
+ public DateTime DueTime { get; private set; }
+ internal Flow Flow { get; set; }
+
+ public Step(DateTime dueTime)
+ {
+ DueTime = dueTime;
+ }
+
+ public abstract void Execute(Scheduler scheduler);
+
+ public int CompareTo(Step other)
+ {
+ return DueTime.CompareTo(other.DueTime);
+ }
+
+ protected void EnqueueNextStep(Scheduler scheduler)
+ {
+ if (Flow == null)
+ {
+ return;
+ }
+
+ var step = Flow.NextStep();
+ if (step == null)
+ {
+ return;
+ }
+
+ // TODO: fast path (avoid enqueue)
+
+ scheduler.Enqueue(step);
+ }
+ }
+
+ internal sealed class FlowDelayStep : Step
+ {
+ public FlowDelayStep(DateTime dueTime) : base(dueTime)
+ {
+ }
+
+ public FlowDelayStep(DateTime dueTime, Flow flow) : base(dueTime)
+ {
+ Flow = flow;
+ }
+
+ public override void Execute(Scheduler scheduler)
+ {
+ EnqueueNextStep(scheduler);
+ }
+ }
+
+ internal sealed class BlockOnEventStep : Step
+ {
+ private Event m_event;
+
+ public BlockOnEventStep(Event evt) : base(DateTimeUtil.Zero)
+ {
+ m_event = evt;
+ }
+
+ public override void Execute(Scheduler scheduler)
+ {
+ if (m_event.IsSet)
+ {
+ EnqueueNextStep(scheduler);
+ }
+ else
+ {
+ m_event.AddFlow(Flow);
+ }
+ }
+ }
+
+ public class Event
+ {
+ public bool IsSet { get; private set; }
+
+ private Scheduler m_scheduler;
+ private List<Flow> m_blockedFlows;
+ private List<Event> m_chainedEvents;
+
+ public Event(Scheduler scheduler)
+ {
+ m_scheduler = scheduler;
+ m_blockedFlows = null;
+ m_chainedEvents = null;
+ }
+
+ public Step Wait()
+ {
+ return new BlockOnEventStep(this);
+ }
+
+ public static Event WhenAny(params Event[] events)
+ {
+ var scheduler = events[0].m_scheduler;
+ var whenAny = new Event(scheduler);
+ foreach (var evt in events)
+ {
+ evt.AddChained(whenAny);
+ }
+ return whenAny;
+ }
+
+ public static Event WhenAll(params Event[] events)
+ {
+ var scheduler = events[0].m_scheduler;
+ var whenAll = new WhenAllEvent(scheduler, events.Length);
+ foreach (var evt in events)
+ {
+ evt.AddChained(whenAll);
+ }
+ return whenAll;
+ }
+
+ internal void AddFlow(Flow flow)
+ {
+ if (IsSet)
+ {
+ m_scheduler.Enqueue(flow.NextStep());
+ }
+ else
+ {
+ AddBlockedFlow(flow);
+ }
+ }
+
+ public void AddChained(Event evt)
+ {
+ if (IsSet)
+ {
+ evt.Signal();
+ }
+ else
+ {
+ AddChainedEvent(evt);
+ }
+ }
+
+ public virtual void Signal()
+ {
+ if (!IsSet)
+ {
+ TransitionToSet();
+ }
+ }
+
+ protected void TransitionToSet()
+ {
+ IsSet = true;
+
+ if (m_chainedEvents != null)
+ {
+ foreach (var evt in m_chainedEvents)
+ {
+ evt.Signal();
+ }
+ m_chainedEvents = null;
+ }
+
+ if (m_blockedFlows != null)
+ {
+ foreach (var flow in m_blockedFlows)
+ {
+ m_scheduler.Enqueue(flow.NextStep());
+ }
+ m_blockedFlows = null;
+ }
+ }
+
+ private void AddBlockedFlow(Flow flow)
+ {
+ if (m_blockedFlows == null)
+ {
+ m_blockedFlows = new List<Flow>();
+ }
+ m_blockedFlows.Add(flow);
+ }
+
+ private void AddChainedEvent(Event evt)
+ {
+ if (m_chainedEvents == null)
+ {
+ m_chainedEvents = new List<Event>();
+ }
+ m_chainedEvents.Add(evt);
+ }
+ }
+
+ public sealed class WhenAllEvent : Event
+ {
+ private int m_count;
+
+ public WhenAllEvent(Scheduler scheduler, int count) : base(scheduler)
+ {
+ m_count = count;
+ }
+
+ public override void Signal()
+ {
+ if (!IsSet)
+ {
+ Debug.Assert(m_count != 0);
+ m_count -= 1;
+ if (m_count == 0)
+ {
+ TransitionToSet();
+ }
+ }
+ }
+ }
+ }
+
+ public static class DateTimeUtil
+ {
+ public static DateTime Zero
+ {
+ get
+ {
+ return new DateTime(0, DateTimeKind.Utc);
+ }
+ }
+
+ public static DateTime Add(DateTime dt, TimeSpan ts)
+ {
+ return DateTime.SpecifyKind(dt.Add(ts), DateTimeKind.Utc);
+ }
+ }
+}
diff --git a/Utilities/CommandLineArgsParser.cs b/Utilities/CommandLineArgsParser.cs
new file mode 100644
index 0000000..305e12a
--- /dev/null
+++ b/Utilities/CommandLineArgsParser.cs
@@ -0,0 +1,647 @@
+using Microsoft.IdentityModel.Clients.ActiveDirectory;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+namespace BenchmarkLogGenerator.Utilities
+{
+
+ #region class CommandLineArgAttribute
+ [AttributeUsage(AttributeTargets.Field, AllowMultiple = false)]
+ public class CommandLineArgAttribute : Attribute
+ {
+ ///
+ /// The short name of the command-line switch.
+ ///
+ public string ShortName;
+
+ ///
+ /// Additional aliases of the command-line switch.
+ ///
+ public string[] Aliases;
+
+ ///
+ /// The long name of the command-line switch.
+ ///
+ public string FullName;
+
+ ///
+ /// A human-readable description of the command-line switch.
+ ///
+ public string Description;
+
+ ///
+ /// Default value to be used if the user doesn't specify the command-line switch.
+ ///
+ public object DefaultValue;
+
+ ///
+ /// Is this a mandatory switch?
+ ///
+ public bool Mandatory;
+
+ ///
+ /// Is this argument a secret? (If so, we won't print it.)
+ ///
+ public bool IsSecret;
+
+ ///
+ /// Does this switch contain "known" secrets? (If so, we don't print them.)
+ /// Known secrets are secrets whose pattern is recognized by .
+ ///
+ public bool ContainsKnownSecrets;
+
+ ///
+ /// Does this switch support encryption? (If so, decryption can be attempted after parsing using the 'Decrypt()' method.)
+ ///
+ public bool SupportsEncryption;
+
+ ///
+ /// Trigger -- the name of a parameterless instance method that will get invoked
+ /// following processing of the field.
+ ///
+ public string WhenSet;
+
+ ///
+ /// Can the user include the switch without providing a value?
+ /// Applies only to string switches.
+ ///
+ public bool AllowNull;
+
+ public CommandLineArgAttribute(string fullName, string description)
+ {
+ ShortName = fullName;
+ FullName = fullName;
+ Description = description;
+ }
+
+ public CommandLineArgAttribute(string fullName, string description, params string[] aliases)
+ {
+ ShortName = fullName;
+ FullName = fullName;
+ Description = description;
+ Aliases = aliases;
+ }
+
+ public IEnumerable<string> GetShortNameAndAliases()
+ {
+ // TODO: We might want to cache this...
+ HashSet<string> ret = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+ ret.Add(FullName);
+
+ if (!string.IsNullOrWhiteSpace(ShortName))
+ {
+ ret.Add(ShortName);
+ }
+
+ if (Aliases.SafeFastAny())
+ {
+ foreach (var alias in Aliases)
+ {
+ if (!string.IsNullOrWhiteSpace(alias))
+ {
+ ret.Add(alias);
+ }
+ }
+
+ }
+
+ ret.Remove(FullName);
+ return ret;
+ }
+
+ }
+ #endregion
+
+ #region class CommandLineArgsParser
+ public class CommandLineArgsParser
+ {
+ #region Private constants
+ private static char[] c_quote = new char[] { '"' };
+ private static char c_multiValueSeparator = '\x03';
+ private static string c_multiValueSeparatorStr = "\x03";
+ private static char[] c_multiValueSeparatorArray = new[] { c_multiValueSeparator };
+ private const BindingFlags WhenXxxLookup = BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic;
+ #endregion
+
+ public static T Parse<T>(IEnumerable<string> args, T target, Action<string[], T, string> faultAction = null, bool autoHelp = false, string envVar = null)
+ {
+ if (autoHelp && faultAction == null)
+ {
+ faultAction = WriteHelpStringToConsoleAndQuit;
+ }
+
+ LinkedList<string> freeArgs = null; // All arguments that appear before the first arg with '/' or '-'
+ Dictionary<string, string> candidateArgs = null;
+ GetFreeAndCandidateArgs(args, envVar, ref freeArgs, ref candidateArgs);
+
+ if (autoHelp)
+ {
+ if (candidateArgs.ContainsKey("h") || candidateArgs.ContainsKey("?") || candidateArgs.ContainsKey("help"))
+ {
+ WriteHelpStringToConsoleAndQuit(args, target, null, markdown: false);
+ }
+ }
+
+ AssignArgsToTargetGetValue(args, target, faultAction, freeArgs, candidateArgs);
+
+ return target;
+ }
+ public static void WriteHelpStringToConsoleAndQuit<T>(string[] args, T target, string fault)
+ {
+ WriteHelpStringToConsoleAndQuit(args, target, fault, markdown: false);
+ }
+
+ public static void WriteHelpStringToConsoleAndQuit<T>(string[] args, T target, string fault, bool markdown)
+ {
+ WriteHelpStringToConsoleAndQuit(args as IEnumerable<string>, target, fault, markdown);
+ }
+ private static void WriteHelpStringToConsoleAndQuit<T>(IEnumerable<string> args, T target, string fault, bool markdown)
+ {
+ var esb = new ExtendedStringBuilder();
+ if (!string.IsNullOrWhiteSpace(fault))
+ {
+ esb.Indent();
+ esb.AppendLine("Bad input:");
+ esb.Indent();
+ esb.AppendLine(fault);
+ esb.AppendLine();
+ esb.Unindent();
+ esb.Unindent();
+ }
+ CommandLineArgsParser.WriteHelpString(esb, target);
+ Console.WriteLine(esb.ToString());
+ Environment.Exit(0);
+ }
+
+ ///
+ /// Given one or more objects of a type whose public fields are attributed
+ /// by (and the first of said objects
+ /// potentially attributed by ),
+ /// writes a corresponding help string to the string builder.
+ ///
+ public static void WriteHelpString(ExtendedStringBuilder esb, params object[] targets)
+ {
+ // Gather all the args
+ var names = new HashSet<string>();
+ var args = new List<Tuple<CommandLineArgAttribute, FieldInfo>>();
+ var descriptions = new List<string>();
+ GatherArgsForHelpString(targets, names, args, descriptions);
+
+ // Synopsis line
+ esb.Indent();
+ esb.AppendLine("Synopsis:");
+ esb.Indent();
+ var attributes = string.Join(" ", args.Select(arg => FormatArg(arg, false)));
+ esb.AppendLine(attributes);
+ esb.Unindent();
+ esb.AppendLine();
+
+ // Description
+ if (descriptions.Count > 0)
+ {
+ esb.AppendLine("Description:");
+ esb.Indent();
+ // TODO: Smart algorithm to break lines might be added here...
+ foreach (var description in descriptions)
+ {
+ foreach (var line in description.SplitLines())
+ {
+ esb.AppendLine(line);
+ }
+ }
+ esb.Unindent();
+ esb.AppendLine();
+ }
+
+ // Arguments
+ esb.AppendLine("Arguments:");
+ bool firstArgument = true;
+ esb.Indent();
+ foreach (var arg in args)
+ {
+ if (firstArgument)
+ {
+ firstArgument = false;
+ }
+ else
+ {
+ esb.AppendLine();
+ }
+
+ if (arg.Item1.Mandatory)
+ {
+ esb.AppendLine("[Mandatory: True]");
+ }
+ var fullName = arg.Item1.FullName;
+ var aliases = arg.Item1.GetShortNameAndAliases();
+ esb.AppendLine(FormatArg(arg, true));
+ foreach (var alias in aliases)
+ {
+ esb.AppendLine("[-" + alias + ":...]");
+ }
+
+ esb.Indent();
+ foreach (var line in arg.Item1.Description.SplitLines())
+ {
+ esb.AppendLine(line); // TODO: break long lines
+ }
+ esb.Unindent();
+ }
+
+ if (!firstArgument)
+ {
+ esb.AppendLine();
+ }
+
+ esb.Unindent();
+ esb.Unindent();
+
+ PrintUsage(esb);
+ }
+
+ public static void PrintUsage(ExtendedStringBuilder esb)
+ {
+ esb.AppendLine("Usage examples:");
+ esb.AppendLine();
+ esb.Indent();
+ esb.AppendLine(@"[Write to local disk]");
+ esb.AppendLine(@"BenchmarkLogGenerator -output:LocalDisk -localPath:""c:\users\foo\documents""");
+ esb.AppendLine();
+ esb.AppendLine(@"[Write to Azure Storage container]");
+ esb.AppendLine(@"BenchmarkLogGenerator -output:AzureStorage -azureStorageAccountConnection:""DefaultEndpointsProtocol=https;AccountName=NAME;AccountKey=KEY;EndpointSuffix=core.windows.net""");
+ esb.AppendLine();
+ esb.AppendLine(@"[Write to EventHub]");
+ esb.AppendLine(@"BenchmarkLogGenerator -output:EventHub -eventHubConnection:""Endpoint=sb://EVENTHUB_NAMESPACE.windows.net/;SharedAccessKeyName=readwrite;SharedAccessKey=KEY""");
+ esb.AppendLine();
+ Console.WriteLine(esb.ToString());
+ Environment.Exit(1);
+ }
+
+ private static void GatherArgsForHelpString(object[] targets, HashSet<string> names, List<Tuple<CommandLineArgAttribute, FieldInfo>> args, List<string> descriptions)
+ {
+ foreach (var target in targets)
+ {
+ GatherArgsForHelpString(target, names, args, descriptions);
+ }
+ }
+
+ private static void GatherArgsForHelpString(object target, HashSet<string> names, List<Tuple<CommandLineArgAttribute, FieldInfo>> args, List<string> descriptions)
+ {
+ // Description at the target level
+ var targetAttribute = target.GetType().GetCustomAttribute();
+ if (targetAttribute != null && !string.IsNullOrWhiteSpace(targetAttribute.Description))
+ {
+ descriptions.Add(targetAttribute.Description);
+ }
+
+ int insertIndex = 0;
+ foreach (var field in target.GetType().GetFieldsOrdered())
+ {
+ var attribute = (CommandLineArgAttribute)field.GetCustomAttribute(typeof(CommandLineArgAttribute));
+ if (attribute == null)
+ {
+ continue;
+ }
+
+ if (field.FieldType.GetCustomAttribute(typeof(CommandLineArgAttribute)) != null)
+ {
+ // Nested
+ GatherArgsForHelpString(field.GetValue(target), names, args, descriptions);
+ continue;
+ }
+
+ var item = new Tuple<CommandLineArgAttribute, FieldInfo>(attribute, field);
+ if (string.IsNullOrWhiteSpace(attribute.FullName))
+ {
+ args.Insert(insertIndex, item);
+ insertIndex++;
+ }
+ else
+ {
+ if (!names.Contains(attribute.FullName))
+ {
+ names.Add(attribute.FullName);
+ args.Add(item);
+ }
+ else
+ {
+ // TODO: We should add validation logic here, or add a disambiguation, or...
+ }
+ }
+ }
+ }
+
+ private static void GetFreeAndCandidateArgs(IEnumerable<string> args, string envVar, ref LinkedList<string> freeArgs, ref Dictionary<string, string> candidateArgs)
+ {
+ freeArgs = freeArgs ?? new LinkedList<string>(); // All arguments that appear before the first arg with '/' or '-'
+ candidateArgs = candidateArgs ?? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
+ bool acceptingFreeArgs = true;
+
+
+
+ // Make a list of all arg candidates
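+ // Example (illustrative): given { "mydata", "-output:LocalDisk", "-localPath:c:\temp" },
+ // freeArgs ends up as { "mydata" } and candidateArgs as
+ // { "output" -> "LocalDisk", "localPath" -> "c:\temp" } (keys compared case-insensitively).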
+ foreach (var arg in args)
+ {
+ string a = arg.Trim();
+ if (!StartsWithSwitchCharacter(a))
+ {
+ if (acceptingFreeArgs)
+ {
+ freeArgs.AddLast(a);
+ }
+ continue;
+ }
+
+ acceptingFreeArgs = false;
+ a = a.Substring(1);
+ string value;
+ a = a.SplitFirst(':', out value);
+ if (string.IsNullOrEmpty(a))
+ {
+ continue;
+ }
+
+ if (candidateArgs.ContainsKey(a))
+ {
+ // This is a hack to support multi-valued args:
+ // When we encounter more than one value for a switch, we concat the old value with
+ // the new value using a control character nobody would provide normally.
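+ // For example (illustrative): "-tag:a -tag:b" would be stored as
+ // "a" + c_multiValueSeparator + "b", which GetLastValueIfMultivalue and
+ // VerifyNoMultiValue interpret downstream.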
+ candidateArgs[a] = candidateArgs[a] + c_multiValueSeparator + value;
+ continue;
+ }
+
+ candidateArgs.Add(a, value);
+ }
+ }
+
+ private static bool StartsWithSwitchCharacter(string what)
+ {
+ if (string.IsNullOrEmpty(what))
+ {
+ return false;
+ }
+
+ char c = what[0];
+ // The last one is a Unicode dash, which is often generated by auto-correct tools
+ // such as Microsoft Word
+ return c == '-' || c == '/' || c == '\u2013';
+ }
+
+ private static void AssignArgsToTargetGetValue<T>(IEnumerable<string> args, T target, Action<string[], T, string> faultAction, LinkedList<string> freeArgs, Dictionary<string, string> candidateArgs)
+ {
+ var targetType = target.GetType();
+ foreach (var field in targetType.GetFieldsOrdered())
+ {
+ var attribute = (CommandLineArgAttribute)field.GetCustomAttribute(typeof(CommandLineArgAttribute));
+ if (attribute == null)
+ {
+ continue;
+ }
+ var aliases = attribute.GetShortNameAndAliases();
+
+ bool needsSpecifying = attribute.Mandatory;
+ string value;
+
+ if (field.FieldType.GetCustomAttribute(typeof(CommandLineArgAttribute)) != null)
+ {
+ var fieldValue = field.GetValue(target);
+ if (fieldValue != null)
+ {
+ // Nested parse
+ // TODO: This is typeless (T==object), ignoring faultAction and/or autoHelp
+ // TODO: This is done without any prefix on nested arg names
+ var trampolineT = typeof(CommandLineArgsParser).GetMethod("ParseTrampoline", BindingFlags.NonPublic | BindingFlags.Static);
+ if (trampolineT != null)
+ {
+ // private static void ParseTrampoline<T>(IEnumerable<string> args, T target)
+ // TODO: Add support for autoHelp, envVar to ParseTrampoline and add them here
+ var trampoline = trampolineT.MakeGenericMethod(new Type[] { fieldValue.GetType() });
+ trampoline.Invoke(null, new object[] { args, fieldValue });
+ }
+ else
+ {
+ // Fallback to typeless (T is System.Object) with no faultAction and default autoHelp.
+ // TODO: This is done without any prefix on nested arg names
+ Parse(args, fieldValue);
+ }
+
+ }
+ }
+ else if (string.IsNullOrWhiteSpace(attribute.FullName) && freeArgs.Count > 0)
+ {
+ // A switch with no name is assumed to refer to the "free args",
+ // and we have some unconsumed free args, so the next one is used
+ // to assign a value to the switch
+ value = freeArgs.First();
+ freeArgs.RemoveFirst();
+
+ needsSpecifying = false;
+ SetField(target, field, "[free arg]", value, allowNull: false);
+ }
+ else if (!string.IsNullOrWhiteSpace(attribute.FullName) && TryGetValue(candidateArgs, attribute.FullName, aliases, out value))
+ {
+ needsSpecifying = false;
+ SetField(target, field, attribute.FullName, value, allowNull: attribute.AllowNull);
+ }
+ else if (attribute.DefaultValue != null)
+ {
+ needsSpecifying = false;
+ field.SetValue(target, attribute.DefaultValue);
+ }
+
+ if (needsSpecifying)
+ {
+ var fault = "Argument '" + attribute.FullName + "' must be specified as it is marked as mandatory";
+ if (faultAction != null)
+ {
+ var argsArray = args.ToArray();
+ faultAction(argsArray, target, fault);
+ }
+ }
+
+ if (attribute.WhenSet != null)
+ {
+ var method = targetType.GetMethod(attribute.WhenSet, WhenXxxLookup);
+ //Ensure.ArgIsNotNull(method, "method(" + attribute.WhenSet + ")"); // TODO: Could use a better exception here...
+ method.Invoke(target, null);
+ }
+ }
+ }
+
+ private static void SetField(object target, FieldInfo field, string switchName, string value, bool allowNull)
+ {
+ // Support for nullable types - get the real field type
+ var fieldType = field.FieldType.GetTypeWithNullableSupport();
+
+ // TODO: Each call to Parse below may throw an exception.
+ // Since this is user-input, we should catch the exception and
+ // provide some means to indicate a user input error instead
+ // of crashing the caller.
+ try
+ {
+ if (fieldType == typeof(string))
+ {
+ value = GetLastValueIfMultivalue(value);
+ if (allowNull && value == null)
+ {
+ field.SetValue(target, null);
+ }
+ else
+ {
+ VerifyNoNullValue(switchName, value);
+ VerifyNoMultiValue(switchName, value);
+ field.SetValue(target, value.Trim(c_quote));
+ }
+ }
+ else if(fieldType == typeof(WriterType))
+ {
+ value = GetLastValueIfMultivalue(value);
+ VerifyNoNullValue(switchName, value);
+ VerifyNoMultiValue(switchName, value);
+ field.SetValue(target, Enum.Parse(typeof(WriterType), value, true));
+ }
+ else if (fieldType == typeof(BenchmarkDataSize))
+ {
+ value = GetLastValueIfMultivalue(value);
+ VerifyNoNullValue(switchName, value);
+ VerifyNoMultiValue(switchName, value);
+ field.SetValue(target, Enum.Parse(typeof(BenchmarkDataSize), value, true));
+ }
+ else if (fieldType == typeof(int))
+ {
+ value = GetLastValueIfMultivalue(value);
+ VerifyNoNullValue(switchName, value);
+ field.SetValue(target, int.Parse(value));
+ }
+ }
+ catch (Exception)
+ {
+ WriteHelpStringToConsoleAndQuit(new string[] { }, target, string.Format("CommandLineArgsParser failed to parse argument '{0}' of type '{1}' with value '{2}'",
+ switchName, field.FieldType.ToString(), value));
+ }
+ }
+
+ private static void VerifyNoNullValue(string switchName, string value)
+ {
+ if (value == null)
+ {
+ throw new ArgumentNullException(string.Format("No value provided for switch -{0}. Please use the format: -{0}:VALUE", switchName));
+ }
+ }
+
+ private static void VerifyNoMultiValue(string switchName, string value)
+ {
+ if (string.IsNullOrWhiteSpace(value))
+ {
+ return;
+ }
+ if (value.Contains(c_multiValueSeparator))
+ {
+ int count = value.Split(c_multiValueSeparator).Length;
+ throw new ArgumentOutOfRangeException(switchName,
+ count.ToString() + " appearances of the switch -" + switchName + ": expecting at most one.");
+ }
+ }
+
+ private static string GetLastValueIfMultivalue(string value)
+ {
+ if (string.IsNullOrEmpty(value) || value.IndexOf(c_multiValueSeparator) < 0)
+ {
+ return value;
+ }
+
+ var values = value.Split(c_multiValueSeparatorArray);
+ return values[values.Length - 1];
+ }
+
+ private static string FormatArg(Tuple<CommandLineArgAttribute, FieldInfo> pair, bool includeDefaultValue)
+ {
+ var attribute = pair.Item1;
+ var fieldInfo = pair.Item2;
+
+ var ret = new System.Text.StringBuilder();
+ if (!attribute.Mandatory)
+ {
+ ret.Append('[');
+ }
+
+ bool hasName = false;
+ if (!string.IsNullOrWhiteSpace(attribute.FullName))
+ {
+ ret.Append('-');
+ ret.Append(attribute.FullName);
+ hasName = true;
+ }
+
+ // Support for nullable types - get the real field type
+ var fieldType = fieldInfo.FieldType.GetTypeWithNullableSupport();
+
+ if (typeof(IEnumerable).IsAssignableFrom(fieldType))
+ {
+ ret.Append(hasName ? ":string*" : "string*");
+ }
+ else if (fieldType == typeof(bool))
+ {
+ ret.Append(hasName ? "[:true-or-false]" : "[true-or-false]");
+ }
+ else
+ {
+ ret.Append((hasName ? ":" : "") + fieldType.Name);
+ }
+
+ if (includeDefaultValue && attribute.DefaultValue != null)
+ {
+ var defaultValueAsString = attribute.DefaultValue.ToString();
+ if (!string.IsNullOrWhiteSpace(defaultValueAsString))
+ {
+ ret.Append(" (default is: " + attribute.DefaultValue.ToString() + ")");
+ }
+ }
+
+ if (!attribute.Mandatory)
+ {
+ ret.Append(']');
+ }
+
+ return ret.ToString();
+ }
+
+ private static bool TryGetValue(Dictionary<string, string> candidateArgs, string fullName, IEnumerable<string> aliases, out string value)
+ {
+ string v;
+ value = null;
+ var ret = false;
+
+ if (candidateArgs.TryGetValue(fullName, out v))
+ {
+ value = v; // No need for string.Join here
+ ret = true;
+ }
+
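+ // Values supplied under both the full name and an alias are joined with the
+ // multi-value separator, so they are later treated like a repeated switch.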
+ foreach (var alias in aliases)
+ {
+ if (candidateArgs.TryGetValue(alias, out v))
+ {
+ if (ret == false)
+ {
+ value = v;
+ }
+ else
+ {
+ value = string.Join(c_multiValueSeparatorStr, v, value);
+ }
+ ret = true;
+ }
+ }
+
+ return ret;
+ }
+ }
+ #endregion
+}
\ No newline at end of file
diff --git a/Utilities/ExtendedArray.cs b/Utilities/ExtendedArray.cs
new file mode 100644
index 0000000..c20ddba
--- /dev/null
+++ b/Utilities/ExtendedArray.cs
@@ -0,0 +1,38 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace BenchmarkLogGenerator.Utilities
+{
+ public static class ExtendedArray
+ {
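+ // Example (illustrative): new[] { 1, 2, 3 }.SlowRemoveByIndex(1) yields { 1, 3 };
+ // a null/empty array or an out-of-range index returns the input unchanged.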
+ public static T[] SlowRemoveByIndex<T>(this T[] array, int index)
+ {
+ if (array == null || array.Length == 0 || index < 0 || index >= array.Length)
+ {
+ // Nothing really to do
+ return array;
+ }
+
+ if (array.Length == 1 && index == 0)
+ {
+ // Removing the only element leaves an empty array
+ return new T[0];
+ }
+
+ var ret = new T[array.Length - 1];
+
+ // We could use two Array.Copy calls, but this is simpler
+ // (and easier to prove is right):
+ for (int i = 0, j = 0; i < array.Length; i++)
+ {
+ if (i != index)
+ {
+ ret[j] = array[i];
+ j++;
+ }
+ }
+ return ret;
+ }
+ }
+}
diff --git a/Utilities/ExtendedDateTime.cs b/Utilities/ExtendedDateTime.cs
new file mode 100644
index 0000000..81a339a
--- /dev/null
+++ b/Utilities/ExtendedDateTime.cs
@@ -0,0 +1,285 @@
+// ----------------------------------------------------------------------------
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// ----------------------------------------------------------------------------
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Globalization;
+using System.Linq;
+using System.Runtime.Serialization;
+
+namespace BenchmarkLogGenerator.Utilities
+{
+
+ public static class ExtendedDateTime
+ {
+ #region Public constants
+ ///
+ /// The min value of a DateTime, in UTC.
+ ///
+ public static readonly DateTime MinValueUtc = new DateTime(DateTime.MinValue.Ticks, DateTimeKind.Utc);
+
+ ///
+ /// The max value of a DateTime, in UTC.
+ ///
+ public static readonly DateTime MaxValueUtc = new DateTime(DateTime.MaxValue.Ticks, DateTimeKind.Utc);
+
+ ///
+ /// A list of datetime formats which we support but aren't supported by the default IFormatProvider
+ /// which we use for DateTime.Parse.
+ ///
+ public static readonly Dictionary<int, string[]> SupportedNonStandardFormats = new Dictionary<int, string[]>()
+ {
+ { 4, new [] { "yyyy" } },
+ { 6, new [] { "yyyyMM" } },
+ { 8, new [] { "yyyyMMdd" } },
+ { 10, new [] { "yyyyMMddHH" } },
+ { 12, new [] { "yyyyMMddHHmm" } },
+ { 14, new [] { "yyyyMMddHHmmss" } },
+ { 17, new [] { "yyyyMMdd HH:mm:ss" } },
+ { 19, new [] { "yyyyMMdd HH:mm:ss.f" } },
+ { 20, new [] { "yyyyMMdd HH:mm:ss.ff" } },
+ { 21, new [] { "yyyyMMdd HH:mm:ss.fff" } },
+ { 22, new [] { "yyyyMMdd HH:mm:ss.ffff" } },
+ { 23, new [] { "yyyyMMdd HH:mm:ss.fffff" } },
+ { 24, new [] { "yyyyMMdd HH:mm:ss.ffffff" } },
+ { 25, new [] { "yyyyMMdd HH:mm:ss.fffffff" } },
+ };
+
+ ///
+ /// Jan 1 1970 ("epoch")
+ ///
+ public static readonly DateTime EpochStart = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);
+ #endregion
+
+ #region Constants
+ private static readonly int s_numCharactersInIso8601 = MinValueUtc.ToString("O").Length;
+ #endregion
+
+ #region fast tostring()
+ ///
+ /// This function provides an optimized implementation of DateTime.ToString(string)
+ /// for the case of the format string being "O" (the round-trip format, a.k.a. ISO8601).
+ ///
+ public static string FastToString(this DateTime value)
+ {
+ var sb = UtilsStringBuilderCache.Acquire();
+ FastAppendToStringBuilder(value, sb);
+ return UtilsStringBuilderCache.GetStringAndRelease(sb);
+ }
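+
+ // Example (illustrative): for new DateTime(2021, 2, 21, 12, 35, 40, DateTimeKind.Utc),
+ // FastToString() produces "2021-02-21T12:35:40.0000000Z", matching DateTime.ToString("O").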
+
+ public static void FastAppendToStringBuilder(this DateTime value, System.Text.StringBuilder sb)
+ {
+ sb.EnsureCapacity(s_numCharactersInIso8601); // TODO: Make sure that this ensures the _remaining_ capacity! Also note that the capacity is different for UTC and LOCAL
+
+ int year, month, day, hour, minute, second;
+ long fraction;
+ FastGetParts(value, out year, out month, out day, out hour, out minute, out second, out fraction);
+
+ FastAppendFormattedInt4(sb, year);
+ sb.Append('-');
+ FastAppendFormattedInt2(sb, month);
+ sb.Append('-');
+ FastAppendFormattedInt2(sb, day);
+ sb.Append('T');
+ FastAppendFormattedInt2(sb, hour);
+ sb.Append(':');
+ FastAppendFormattedInt2(sb, minute);
+ sb.Append(':');
+ FastAppendFormattedInt2(sb, second);
+ sb.Append('.');
+ FastAppendFormattedInt7(sb, fraction);
+
+ switch (value.Kind)
+ {
+ case DateTimeKind.Local:
+ TimeSpan offset = TimeZoneInfo.Local.GetUtcOffset(value);
+ if (offset >= TimeSpan.Zero)
+ {
+ sb.Append('+');
+ }
+ else
+ {
+ sb.Append('-');
+ offset = offset.Negate();
+ }
+
+ FastAppendFormattedInt2(sb, offset.Hours);
+ sb.Append(':');
+ FastAppendFormattedInt2(sb, offset.Minutes);
+ break;
+
+ case DateTimeKind.Unspecified:
+ break;
+
+ case DateTimeKind.Utc:
+ sb.Append('Z');
+ break;
+ }
+ }
+
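+ // The FastAppendFormattedInt* helpers below emit fixed-width, zero-padded digits;
+ // e.g. (illustrative) FastAppendFormattedInt4(sb, 42) appends "0042" and
+ // FastAppendFormattedInt2(sb, 7) appends "07".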
+ private static void FastAppendFormattedInt7(System.Text.StringBuilder sb, long value)
+ {
+ char g = (char)('0' + value % 10);
+ value = value / 10;
+ char f = (char)('0' + value % 10);
+ value = value / 10;
+ char e = (char)('0' + value % 10);
+ value = value / 10;
+ char d = (char)('0' + value % 10);
+ value = value / 10;
+ char c = (char)('0' + value % 10);
+ value = value / 10;
+ char b = (char)('0' + value % 10);
+ value = value / 10;
+ char a = (char)('0' + value % 10);
+ sb.Append(a);
+ sb.Append(b);
+ sb.Append(c);
+ sb.Append(d);
+ sb.Append(e);
+ sb.Append(f);
+ sb.Append(g);
+ }
+
+ private static void FastAppendFormattedInt4(System.Text.StringBuilder sb, int value)
+ {
+ char d = (char)('0' + value % 10);
+ value = value / 10;
+ char c = (char)('0' + value % 10);
+ value = value / 10;
+ char b = (char)('0' + value % 10);
+ value = value / 10;
+ char a = (char)('0' + value % 10);
+ sb.Append(a);
+ sb.Append(b);
+ sb.Append(c);
+ sb.Append(d);
+ }
+
+ private static void FastAppendFormattedInt2(System.Text.StringBuilder sb, int value)
+ {
+ char b = (char)('0' + value % 10);
+ value = value / 10;
+ char a = (char)('0' + value % 10);
+ sb.Append(a);
+ sb.Append(b);
+ }
+
+ public static void FastGetParts(this DateTime value, out int year, out int month, out int day, out int hour, out int minute, out int second, out long fraction)
+ {
+ Int64 ticks = value.Ticks;
+ // n = number of days since 1/1/0001
+ int n = (int)(ticks / TicksPerDay);
+ // y400 = number of whole 400-year periods since 1/1/0001
+ int y400 = n / DaysPer400Years;
+ // n = day number within 400-year period
+ n -= y400 * DaysPer400Years;
+ // y100 = number of whole 100-year periods within 400-year period
+ int y100 = n / DaysPer100Years;
+ // Last 100-year period has an extra day, so decrement result if 4
+ if (y100 == 4) y100 = 3;
+ // n = day number within 100-year period
+ n -= y100 * DaysPer100Years;
+ // y4 = number of whole 4-year periods within 100-year period
+ int y4 = n / DaysPer4Years;
+ // n = day number within 4-year period
+ n -= y4 * DaysPer4Years;
+ // y1 = number of whole years within 4-year period
+ int y1 = n / DaysPerYear;
+ // Last year has an extra day, so decrement result if 4
+ if (y1 == 4) y1 = 3;
+
+ year = y400 * 400 + y100 * 100 + y4 * 4 + y1 + 1;
+
+ // n = day number within year
+ n -= y1 * DaysPerYear;
+
+ // If day-of-year was requested, return it
+ //if (part == DatePartDayOfYear) return n + 1;
+
+ // Leap year calculation looks different from IsLeapYear since y1, y4,
+ // and y100 are relative to year 1, not year 0
+ bool leapYear = y1 == 3 && (y4 != 24 || y100 == 3);
+ int[] days = leapYear ? DaysToMonth366 : DaysToMonth365;
+ // All months have less than 32 days, so n >> 5 is a good conservative
+ // estimate for the month
+ int m = (n >> 5) + 1;
+ // m = 1-based month number
+ while (n >= days[m]) m++;
+
+ month = m;
+
+ // 1-based day-of-month
+ day = n - days[m - 1] + 1;
+
+ hour = value.Hour;
+
+ minute = value.Minute;
+
+ second = value.Second;
+
+ fraction = ticks % TicksPerSecond;
+ }
+ #endregion
+
+ #region Constants
+ // Number of 100ns ticks per time unit
+ private const long TicksPerMillisecond = 10000;
+ private const long TicksPerSecond = TicksPerMillisecond * 1000;
+ private const long TicksPerMinute = TicksPerSecond * 60;
+ private const long TicksPerHour = TicksPerMinute * 60;
+ private const long TicksPerDay = TicksPerHour * 24;
+
+ // Number of milliseconds per time unit
+ private const int MillisPerSecond = 1000;
+ private const int MillisPerMinute = MillisPerSecond * 60;
+ private const int MillisPerHour = MillisPerMinute * 60;
+ private const int MillisPerDay = MillisPerHour * 24;
+
+ // Number of days in a non-leap year
+ private const int DaysPerYear = 365;
+ // Number of days in 4 years
+ private const int DaysPer4Years = DaysPerYear * 4 + 1; // 1461
+ // Number of days in 100 years
+ private const int DaysPer100Years = DaysPer4Years * 25 - 1; // 36524
+ // Number of days in 400 years
+ private const int DaysPer400Years = DaysPer100Years * 4 + 1; // 146097
+
+ // Number of days from 1/1/0001 to 12/31/1600
+ private const int DaysTo1601 = DaysPer400Years * 4; // 584388
+ // Number of days from 1/1/0001 to 12/30/1899
+ private const int DaysTo1899 = DaysPer400Years * 4 + DaysPer100Years * 3 - 367;
+ // Number of days from 1/1/0001 to 12/31/1969
+ internal const int DaysTo1970 = DaysPer400Years * 4 + DaysPer100Years * 3 + DaysPer4Years * 17 + DaysPerYear; // 719,162
+ // Number of days from 1/1/0001 to 12/31/9999
+ private const int DaysTo10000 = DaysPer400Years * 25 - 366; // 3652059
+
+ internal const long MinTicks = 0;
+ internal const long MaxTicks = DaysTo10000 * TicksPerDay - 1;
+ private const long MaxMillis = (long)DaysTo10000 * MillisPerDay;
+
+ private const long FileTimeOffset = DaysTo1601 * TicksPerDay;
+ private const long DoubleDateOffset = DaysTo1899 * TicksPerDay;
+ // The minimum OA date is 0100/01/01 (Note it's year 100).
+ // The maximum OA date is 9999/12/31
+ private const long OADateMinAsTicks = (DaysPer100Years - DaysPerYear) * TicksPerDay;
+ // All OA dates must be greater than (not >=) OADateMinAsDouble
+ private const double OADateMinAsDouble = -657435.0;
+ // All OA dates must be less than (not <=) OADateMaxAsDouble
+ private const double OADateMaxAsDouble = 2958466.0;
+
+ private const int DatePartYear = 0;
+ private const int DatePartDayOfYear = 1;
+ private const int DatePartMonth = 2;
+ private const int DatePartDay = 3;
+
+ private static readonly int[] DaysToMonth365 = {
+ 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365};
+ private static readonly int[] DaysToMonth366 = {
+ 0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366};
+ #endregion
+
+ }
+
+}
\ No newline at end of file
diff --git a/Utilities/ExtendedString.cs b/Utilities/ExtendedString.cs
new file mode 100644
index 0000000..461b3d8
--- /dev/null
+++ b/Utilities/ExtendedString.cs
@@ -0,0 +1,67 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace BenchmarkLogGenerator.Utilities
+{
+ public static class ExtendedString
+ {
+ private static readonly string[] c_newlineAsStringArray = new string[] { Environment.NewLine };
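+
+ // Example (illustrative): "output:LocalDisk".SplitFirst(':', out var rest) returns "output"
+ // and sets rest to "LocalDisk"; when the delimiter is absent, the whole string is returned
+ // and rest is set to null.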
+ public static string SplitFirst(this string what, char delimiter, out string remaining)
+ {
+ if (what == null)
+ {
+ remaining = null;
+ return null;
+ }
+
+ int delimiterIndex = what.IndexOf(delimiter);
+ if (delimiterIndex < 0)
+ {
+ remaining = null;
+ return what;
+ }
+
+ var first = what.Substring(0, delimiterIndex);
+ remaining = what.Substring(delimiterIndex + 1);
+ return first;
+ }
+
+ public static string[] SplitLines(this string what, StringSplitOptions options = StringSplitOptions.None, StringComparison comparison = StringComparison.Ordinal)
+ {
+ // TODO: A quick-and-dirty implementation. Might consider writing
+ // our own in the future
+ if (options == StringSplitOptions.RemoveEmptyEntries)
+ {
+ return what.Split(c_newlineAsStringArray, options);
+ }
+
+ // TODO: Need to make sure this follows the string.Split() conventions:
+ if (what == null)
+ {
+ return new string[0];
+ }
+
+ if (what == string.Empty)
+ {
+ return new string[] { string.Empty };
+ }
+
+ if (!what.EndsWith(Environment.NewLine, comparison))
+ {
+ return what.Split(c_newlineAsStringArray, StringSplitOptions.None);
+ }
+
+ // The string ends in a newline. String.Split will return an "extra"
+ // entry for the empty space between the newline and the end-of-string.
+ // Remove that baggage:
+ var ret = what.Split(c_newlineAsStringArray, StringSplitOptions.None);
+ var length = ret.Length;
+ if (length > 1)
+ {
+ ret = ret.SlowRemoveByIndex(length - 1);
+ }
+ return ret;
+ }
+ }
+}
diff --git a/Utilities/ExtendedStringBuilder.cs b/Utilities/ExtendedStringBuilder.cs
new file mode 100644
index 0000000..52575b7
--- /dev/null
+++ b/Utilities/ExtendedStringBuilder.cs
@@ -0,0 +1,250 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace BenchmarkLogGenerator.Utilities
+{
+ public static class ExtendedEnumerable
+ {
+ public static bool SafeFastAny<T>(this IEnumerable<T> collection, Func<T, bool> predicate)
+ {
+ if (!collection.SafeFastAny())
+ {
+ return false;
+ }
+
+ return collection.Any(predicate);
+ }
+ public static bool SafeFastAny(this IEnumerable collection)
+ {
+ if (collection == null)
+ {
+ return false;
+ }
+
+ { // Scope to ensure no cross-talk with next block
+
+ if (collection is System.Collections.ICollection asICollection)
+ {
+ return asICollection.Count != 0;
+ }
+ }
+
+ var e = collection.GetEnumerator();
+ using (e as IDisposable)
+ {
+ if (e.MoveNext()) return true;
+ }
+ return false;
+ }
+ }
+ public sealed class ExtendedStringBuilder
+ {
+ #region fields and properties
+ private const int c_tabSize = 4;
+
+ private int m_tabSize;
+ private int m_indentation;
+ private char[] m_indentors;
+ private bool m_firstColumn;
+
+ public StringBuilder StringBuilder { get; private set; }
+ #endregion
+
+ #region constructor
+ ///
+ /// Creates the StringBuilder with no indentation
+ ///
+ public ExtendedStringBuilder()
+ : this(c_tabSize, 0, null)
+ {
+ }
+
+ ///
+ /// Constructor.
+ ///
+ /// How many spaces to indent by
+ /// Initial indentation to start with
+ /// Two characters -- one to indent, another to unindent (null means don't add indent/unindent chars)
+ public ExtendedStringBuilder(int tabSize, int initialIndentation, char[] indentors)
+ {
+ m_tabSize = tabSize;
+ m_indentation = initialIndentation;
+ m_indentors = indentors;
+ m_firstColumn = true;
+
+ StringBuilder = new StringBuilder();
+ }
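+
+ // Usage sketch (illustrative): with the default 4-space tab,
+ //   var esb = new ExtendedStringBuilder();
+ //   esb.AppendLine("Arguments:");
+ //   esb.Indent();
+ //   esb.AppendLine("-output:...");
+ // produces "Arguments:" followed by "    -output:..." on the next line --
+ // the layout CommandLineArgsParser.WriteHelpString relies on for its help text.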
+ #endregion
+
+ #region public methods
+ ///
+ /// Appends multiple lines to a string builder
+ ///
+ public void AppendLines(string str)
+ {
+ if (string.IsNullOrEmpty(str))
+ {
+ StringBuilder.AppendLine();
+ m_firstColumn = true;
+ return;
+ }
+
+ int current = 0;
+ while (true)
+ {
+ AppendIndent();
+ var nl = str.IndexOf('\r', current);
+ if (nl < 0)
+ {
+ // Everything remaining is a single line
+ if (current == 0)
+ {
+ StringBuilder.AppendLine(str);
+ }
+ else
+ {
+ for (; current < str.Length; current++)
+ {
+ StringBuilder.Append(str[current]);
+ }
+ StringBuilder.AppendLine();
+ }
+ m_firstColumn = true;
+ return;
+ }
+
+ for (; current < nl; current++)
+ {
+ StringBuilder.Append(str[current]);
+ }
+ StringBuilder.AppendLine();
+ m_firstColumn = true;
+ current++; // Move beyond the \r
+
+ if (current < str.Length)
+ {
+ if (str[current] == '\n')
+ {
+ current++;
+ }
+ }
+ if (current == str.Length)
+ {
+ return;
+ }
+ }
+ }
+
+ private void AppendIndent()
+ {
+ if (m_firstColumn)
+ {
+ for (int i = 0; i < m_indentation * m_tabSize; i++)
+ {
+ StringBuilder.Append(' ');
+ }
+ m_firstColumn = false;
+ }
+ }
+
+ private void UpdateFirstColumn(string writtenLast)
+ {
+ if (!string.IsNullOrEmpty(writtenLast))
+ {
+ m_firstColumn = writtenLast.EndsWith(System.Environment.NewLine);
+ }
+ }
+
+ ///
+ /// Appends a string to the string builder
+ ///
+ /// The string to append
+ public void Append(string str)
+ {
+ AppendIndent();
+ StringBuilder.Append(str);
+ UpdateFirstColumn(str);
+ }
+
+ ///
+ /// Appends a string to the string builder (no indent).
+ ///
+ /// The string to append
+ public void AppendNoIndent(string str)
+ {
+ StringBuilder.Append(str);
+ UpdateFirstColumn(str);
+ }
+
+ ///
+ /// Appends a line to the string builder
+ ///
+ /// The line to append
+ public void AppendLine(string str)
+ {
+ AppendIndent();
+ StringBuilder.AppendLine(str);
+ m_firstColumn = true;
+ }
+
+ ///
+ /// Appends a character followed by a newline to the string builder
+ ///
+ /// The character to append
+ public void AppendLine(char c)
+ {
+ for (int i = 0; i < m_indentation * m_tabSize; i++)
+ {
+ StringBuilder.Append(' ');
+ }
+ StringBuilder.Append(c);
+ StringBuilder.AppendLine();
+ m_firstColumn = true;
+ }
+
+ ///
+ /// Appends an empty line to the string builder
+ ///
+ public void AppendLine()
+ {
+ StringBuilder.AppendLine();
+ m_firstColumn = true;
+ }
+
+ ///
+ /// returns the built string
+ ///
+ override public string ToString()
+ {
+ return StringBuilder.ToString();
+ }
+
+ ///
+ /// Increase the indent level, potentially adding an indentor line.
+ ///
+ public void Indent()
+ {
+ if (m_indentors != null)
+ {
+ AppendLine(m_indentors[0]);
+ }
+ m_indentation++;
+ }
+
+ ///
+ /// Decrease the indent level, potentially adding an unindentor line
+ ///
+ public void Unindent()
+ {
+ m_indentation--;
+ if (m_indentors != null)
+ {
+ AppendLine(m_indentors[1]);
+ }
+ }
+ #endregion
+ }
+}
diff --git a/Utilities/ExtendedType.cs b/Utilities/ExtendedType.cs
new file mode 100644
index 0000000..496cfc0
--- /dev/null
+++ b/Utilities/ExtendedType.cs
@@ -0,0 +1,27 @@
+using System;
+using System.Linq;
+using System.Reflection;
+
+namespace BenchmarkLogGenerator.Utilities
+{
+ public static class ExtendedType
+ {
+ public static FieldInfo[] GetFieldsOrdered(this Type type)
+ {
+ return type.GetFields().OrderBy(fi => fi.MetadataToken).ToArray();
+ }
+
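+ // Example (illustrative): typeof(int?).GetTypeWithNullableSupport() returns typeof(int)
+ // by reading the underlying "value" field of Nullable<T>; other types are returned unchanged.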
+ public static Type GetTypeWithNullableSupport(this Type type)
+ {
+ if (type.Name.StartsWith("Nullable"))
+ {
+ var declaredFields = ((TypeInfo)type).DeclaredFields;
+ var valueFieldInfo = declaredFields.Where(fi => string.Equals(fi.Name, "value", StringComparison.OrdinalIgnoreCase)).FirstOrDefault();
+
+ return (valueFieldInfo != default(FieldInfo)) ? valueFieldInfo.FieldType : type;
+ }
+
+ return type;
+ }
+ }
+}
diff --git a/Utilities/StringBuilderCache.cs b/Utilities/StringBuilderCache.cs
new file mode 100644
index 0000000..422ad1f
--- /dev/null
+++ b/Utilities/StringBuilderCache.cs
@@ -0,0 +1,140 @@
+using System;
+using System.Text;
+
+namespace BenchmarkLogGenerator.Utilities
+{
+ #region class StringBuilderCache
+ ///
+ /// A helper for creating a per-thread ([ThreadStatic]) StringBuilder cache.
+ ///
+ /// This code is stolen from the .NET Framework source code. The code requires
+ /// the caller to declare a [ThreadStatic] field member of type StringBuilder,
+ /// and provide a reference to that field with each operation. See
+ /// UtilsStringBuilderCache below for an example.
+ ///
+ /// Note that it's not advisable to share such objects if their lifetime
+ /// overlaps (which is why UtilsStringBuilderCache is made
+ /// internal -- to prevent people from making mistakes).
+ ///
+ public static class StringBuilderCache
+ {
+ private const int MAX_BUILDER_SIZE = 24 * 1024;
+ private const int DEFAULT_CAPACITY = 16;
+
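+ // Intended usage pattern (illustrative; see UtilsStringBuilderCache and ExtendedDateTime.FastToString for real call sites):
+ //   [ThreadStatic] private static StringBuilder t_sb;
+ //   ...
+ //   var sb = StringBuilderCache.Acquire(ref t_sb);
+ //   sb.Append("...");
+ //   return StringBuilderCache.GetStringAndRelease(ref t_sb, sb);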
+ ///
+ /// Given a [ThreadStatic] field, returns a "clean" instance of StringBuilder.
+ ///
+ /// [ThreadStatic] static System.Text.StringBuilder s_myStringBuilderCache
+ /// Capacity to ensure the returned object holds.
+ /// The maximum size to allow the string builder to grow to.
+ ///
+ public static StringBuilder Acquire(ref StringBuilder threadStaticStringBuilder, int capacity = DEFAULT_CAPACITY, int maxBuilderSize = MAX_BUILDER_SIZE)
+ {
+ if (capacity <= maxBuilderSize)
+ {
+ StringBuilder sb = threadStaticStringBuilder;
+ if (sb != null)
+ {
+ // Avoid stringbuilder block fragmentation by getting a new StringBuilder
+ // when the requested size is larger than the current capacity
+ if (capacity <= sb.Capacity)
+ {
+ threadStaticStringBuilder = null;
+ sb.Clear();
+ return sb;
+ }
+ }
+ }
+ return new StringBuilder(capacity);
+ }
+
+ ///
+ /// Given a [ThreadStatic] field, returns an instance of StringBuilder with the given initial value.
+ ///
+ /// [ThreadStatic] static System.Text.StringBuilder s_myStringBuilderCache
+ /// Initial value to assign the being returned.
+ ///
+ public static StringBuilder Acquire(ref StringBuilder threadStaticStringBuilder, string value)
+ {
+ StringBuilder sb = Acquire(ref threadStaticStringBuilder, System.Math.Max(value.Length, DEFAULT_CAPACITY));
+ sb.Append(value);
+ return sb;
+ }
+
+ ///
+ /// Given a [ThreadStatic] field and an existing StringBuilder that was acquired from it,
+ /// release the acquired instance to make it available in the future to other functions.
+ ///
+ /// [ThreadStatic] static System.Text.StringBuilder s_myStringBuilderCache
+ ///
+ public static void Release(ref StringBuilder threadStaticStringBuilder, StringBuilder sb, int maxBuilderSize = MAX_BUILDER_SIZE)
+ {
+ if (sb.Capacity <= maxBuilderSize)
+ {
+ threadStaticStringBuilder = sb;
+ }
+ }
+
+ ///
+ /// Given a [ThreadStatic] field and an existing StringBuilder that was acquired from it,
+ /// release the acquired instance to make it available in the future to other functions.
+ /// Returns the string held in the released StringBuilder.
+ ///
+ /// [ThreadStatic] static System.Text.StringBuilder s_myStringBuilderCache
+ ///
+ public static string GetStringAndRelease(ref StringBuilder threadStaticStringBuilder, StringBuilder sb, int maxBuilderSize = MAX_BUILDER_SIZE)
+ {
+ string result = sb.ToString();
+ Release(ref threadStaticStringBuilder, sb, maxBuilderSize);
+ return result;
+ }
+
+ public static string GetStringAndClear(ref StringBuilder threadStaticStringBuilder, StringBuilder sb)
+ {
+ string result = sb.ToString();
+ sb.Clear();
+ return result;
+ }
+ }
+ #endregion
+
+ #region class UtilsStringBuilderCache
+ ///
+ /// A per-thread cache of up to one StringBuilder object.
+ /// This code is stolen from the .NET Framework source code. It is explicitly
+ /// kept internal to this assembly.
+ ///
+ internal static class UtilsStringBuilderCache
+ {
+ private const int MAX_BUILDER_SIZE = 24*1024; // Originally 260
+ private const int DEFAULT_CAPACITY = 16;
+
+ [ThreadStatic]
+ private static StringBuilder t_cachedInstance;
+
+ public static StringBuilder Acquire(int capacity = DEFAULT_CAPACITY)
+ {
+ return StringBuilderCache.Acquire(ref t_cachedInstance, capacity);
+ }
+
+ public static StringBuilder Acquire(string value)
+ {
+ return StringBuilderCache.Acquire(ref t_cachedInstance, value);
+ }
+
+ public static void Release(StringBuilder sb)
+ {
+ StringBuilderCache.Release(ref t_cachedInstance, sb);
+ }
+
+ public static string GetStringAndRelease(StringBuilder sb)
+ {
+ return StringBuilderCache.GetStringAndRelease(ref t_cachedInstance, sb);
+ }
+
+ public static string GetStringAndClear(StringBuilder sb)
+ {
+ return StringBuilderCache.GetStringAndClear(ref t_cachedInstance, sb);
+ }
+ }
+ #endregion
+}
\ No newline at end of file
diff --git a/runtimeconfig.template.json b/runtimeconfig.template.json
new file mode 100644
index 0000000..2fd3a67
--- /dev/null
+++ b/runtimeconfig.template.json
@@ -0,0 +1,5 @@
+{
+ "configProperties": {
+ "System.GC.Server": true
+ }
+}
\ No newline at end of file