This commit is contained in:
Jayesh Prajapati 2023-07-21 17:02:55 +05:30
Родитель 6697d5867c da0a6c8a22
Коммит 9d79cccca5
208 изменённых файлов: 24026 добавлений и 10466 удалений

Просмотреть файл

@ -5,6 +5,7 @@ jobs:
variables:
buildConfiguration: 'Release'
dotnetSdkVersion: '3.1.401'
PRNUM: $(System.PullRequest.PullRequestNumber)
steps:
- task: UseDotNet@2
displayName: 'Use .NET Core SDK $(dotnetSdkVersion)'

Просмотреть файл

@ -5,6 +5,7 @@ jobs:
variables:
buildConfiguration: 'Release'
dotnetSdkVersion: '6.0.x'
PRNUM: $(System.PullRequest.PullRequestNumber)
steps:
- task: UseDotNet@2
displayName: 'Use .NET Core SDK $(dotnetSdkVersion)'

6
.github/workflows/AddLabel.yaml поставляемый
Просмотреть файл

@ -10,13 +10,13 @@ on:
jobs:
solutionNameDetails:
if: ${{ !github.event.pull_request.head.repo.fork && !contains(github.event.pull_request.labels.*.name, 'P0')}}
if: ${{ github.actor != 'dependabot[bot]' && !github.event.pull_request.head.repo.fork && !contains(github.event.pull_request.labels.*.name, 'P0')}}
uses: ./.github/workflows/getSolutionName.yaml
secrets: inherit
solutionPublisherDetail:
needs: solutionNameDetails
if: ${{ needs.solutionNameDetails.outputs.solutionName != '' && !github.event.pull_request.head.repo.fork }}
if: ${{ github.actor != 'dependabot[bot]' && needs.solutionNameDetails.outputs.solutionName != '' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/workflows/neworexistingsolution.yaml
with:
solutionName: "${{ needs.solutionNameDetails.outputs.solutionName }}"
@ -25,7 +25,7 @@ jobs:
Labeler:
runs-on: ubuntu-latest
needs: solutionPublisherDetail
if: ${{ needs.solutionPublisherDetail.outputs.solutionPublisherId != '' && !contains(fromJson(vars.INTERNAL_PUBLISHERS),needs.solutionPublisherDetail.outputs.solutionPublisherId) }}
if: ${{ github.actor != 'dependabot[bot]' && needs.solutionPublisherDetail.outputs.solutionPublisherId != '' && !contains(fromJson(vars.INTERNAL_PUBLISHERS),needs.solutionPublisherDetail.outputs.solutionPublisherId) }}
steps:
- name: Add Label Notification
uses: actions/github-script@v6

2
.github/workflows/getSolutionName.yaml поставляемый
Просмотреть файл

@ -28,7 +28,7 @@ jobs:
$instrumentationKey = "${{ vars.APPINSIGHTS }}"
$runId = "${{ github.run_id }}"
$pullRequestNumber = "${{ github.event.client_payload.pull_request.number && github.event.client_payload.pull_request.number || github.event.client_payload.pullRequestNumber }}"
Write-Host "pullRequestNumber $pullRequestNumber, pullRequestNumber $pullRequestNumber1"
Write-Host "pullRequestNumber $pullRequestNumber"
Set-PSRepository PSGallery -InstallationPolicy Trusted
Install-Module powershell-yaml
./.script/package-automation/getSolutionName.ps1 $runId $pullRequestNumber $instrumentationKey

Просмотреть файл

@ -2,7 +2,7 @@ name: Slash Command Dispatch
env:
REPO_OWNER: "${{ vars.REPO_OWNER }}"
REPO_NAME: "${{ vars.REPO_NAME }}"
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.PAT }}"
on:
issue_comment:
types: [created]

Просмотреть файл

@ -96,9 +96,12 @@ function getConnectorCategory(dataTypes : any, instructionSteps:[])
{
return ConnectorCategory.Dynamics365Activity;
}
else if (dataTypes[0].name.includes("BloodHoundEnterprise"))
{
return ConnectorCategory.BloodHoundEnterprise;
}
return "";
}
}
let fileTypeSuffixes = ["json"];
let filePathFolderPrefixes = ["DataConnectors","Solutions"];
let fileKinds = ["Added", "Modified"];

Просмотреть файл

@ -40,7 +40,12 @@ try
}
}
if ($solutionName -eq '')
if ($solutionName -eq 'SAP')
{
Write-Host "Skipping Github workflow for SAP Solution as solution dont have data file and SolutionMetadata file!"
Write-Output "solutionName=" >> $env:GITHUB_OUTPUT
}
elseif ($solutionName -eq '')
{
Write-Host "Skipping Github workflow as Solution name cannot be blank."
Write-Output "solutionName=" >> $env:GITHUB_OUTPUT

Просмотреть файл

@ -415,6 +415,7 @@ try {
$dataConnectorFilesResultArray = GetValidDataConnectorFileNames($newDataConnectorFiles) | ConvertTo-Json -AsArray
$dataConnectoryWithoutSpaceArrayAttributeExist = [bool]($dataFileContentObject.PSobject.Properties.name -match ([regex]::Escape("DataConnectors")))
if (!$dataConnectoryWithoutSpaceArrayAttributeExist) {
$dataFileContentObject.PSObject.Properties.Remove('Data Connectors')
$dataFileContentObject | ForEach-Object {
$_ | Add-Member -MemberType NoteProperty -Name 'Data Connectors' -Value $dataConnectorFilesResultArray -PassThru
}

Просмотреть файл

@ -1,16 +0,0 @@
{
"FunctionName": "_ASIM_GetSourceBySourceType",
"FunctionParameters": [
{
"Name": "SourceType",
"Type": "string",
"IsRequired": true
}
],
"FunctionResultColumns": [
{
"Name": "print_0",
"Type": "dynamic"
}
]
}

Просмотреть файл

@ -0,0 +1,121 @@
{
"Name": "BloodHoundEnterprise",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "EventVendor",
"Type": "String"
},
{
"Name": "EventProduct",
"Type": "String"
},
{
"Name": "domain_sid",
"Type": "String"
},
{
"Name": "exposure_index",
"Type": "Real"
},
{
"Name": "tier_zero_count",
"Type": "Real"
},
{
"Name": "critical_risk_count",
"Type": "Real"
},
{
"Name": "domain_id",
"Type": "String"
},
{
"Name": "non_tier_zero_principal",
"Type": "String"
},
{
"Name": "tier_zero_principal",
"Type": "String"
},
{
"Name": "group",
"Type": "String"
},
{
"Name": "principal",
"Type": "String"
},
{
"Name": "path_id",
"Type": "String"
},
{
"Name": "user",
"Type": "String"
},
{
"Name": "finding_id",
"Type": "String"
},
{
"Name": "path_title",
"Type": "String"
},
{
"Name": "path_type",
"Type": "String"
},
{
"Name": "exposure",
"Type": "Real"
},
{
"Name": "finding_count",
"Type": "Real"
},
{
"Name": "principal_count",
"Type": "Real"
},
{
"Name": "id",
"Type": "Real"
},
{
"Name": "created_at",
"Type": "Datetime"
},
{
"Name": "updated_at",
"Type": "Datetime"
},
{
"Name": "deleted_at",
"Type": "Datetime"
},
{
"Name": "deleted_at_v",
"Type": "Bool"
},
{
"Name": "domain_impact_value",
"Type": "Real"
},
{
"Name": "domain_name",
"Type": "String"
},
{
"Name": "domain_type",
"Type": "String"
},
{
"Name": "data_type",
"Type": "String"
}
]
}

Просмотреть файл

@ -0,0 +1,249 @@
{
"name": "Sevco_Devices_CL",
"Properties":[
{
"Name": "TenantId",
"Type": "string"
},
{
"Name": "SourceSystem",
"Type": "string"
},
{
"Name": "MG",
"Type": "string"
},
{
"Name": "ManagementGroupName",
"Type": "string"
},
{
"Name": "TimeGenerated",
"Type": "datetime"
},
{
"Name": "Computer",
"Type": "string"
},
{
"Name": "RawData",
"Type": "string"
},
{
"Name": "asset_version_t",
"Type": "datetime"
},
{
"Name": "event_asset_id_g",
"Type": "string"
},
{
"Name": "asset_attributes_geo_ip_locality_s",
"Type": "string"
},
{
"Name": "asset_config_ids_s",
"Type": "string"
},
{
"Name": "asset_attributes_network_location_s",
"Type": "string"
},
{
"Name": "asset_first_observed_timestamp_s",
"Type": "string"
},
{
"Name": "asset_last_observed_timestamp_s",
"Type": "string"
},
{
"Name": "asset_attributes_imei_s",
"Type": "string"
},
{
"Name": "asset_id_g",
"Type": "string"
},
{
"Name": "asset_org_id_g",
"Type": "string"
},
{
"Name": "asset_version_s",
"Type": "string"
},
{
"Name": "asset_first_observed_timestamp_t",
"Type": "datetime"
},
{
"Name": "asset_last_observed_timestamp_t",
"Type": "datetime"
},
{
"Name": "asset_last_activity_timestamp_s",
"Type": "string"
},
{
"Name": "asset_asset_type_s",
"Type": "string"
},
{
"Name": "asset_attributes_hostnames_s",
"Type": "string"
},
{
"Name": "asset_attributes_fqdn_s",
"Type": "string"
},
{
"Name": "asset_attributes_os_s",
"Type": "string"
},
{
"Name": "asset_attributes_ips_s",
"Type": "string"
},
{
"Name": "asset_attributes_mac_addresses_s",
"Type": "string"
},
{
"Name": "asset_attributes_distinguished_name_s",
"Type": "string"
},
{
"Name": "asset_attributes_additional_attributes_manufacturer_s",
"Type": "string"
},
{
"Name": "asset_attributes_additional_attributes_model_s",
"Type": "string"
},
{
"Name": "asset_attributes_active_directory_domain_s",
"Type": "string"
},
{
"Name": "asset_attributes_serial_number_s",
"Type": "string"
},
{
"Name": "asset_attributes_controls_s",
"Type": "string"
},
{
"Name": "asset_attributes_asset_classification_category_s",
"Type": "string"
},
{
"Name": "asset_attributes_hostname_s",
"Type": "string"
},
{
"Name": "asset_attributes_os_platform_s",
"Type": "string"
},
{
"Name": "asset_attributes_os_release_s",
"Type": "string"
},
{
"Name": "asset_attributes_internal_ips_s",
"Type": "string"
},
{
"Name": "asset_attributes_external_ips_s",
"Type": "string"
},
{
"Name": "asset_attributes_mac_manufacturers_s",
"Type": "string"
},
{
"Name": "asset_attributes_associated_usernames_s",
"Type": "string"
},
{
"Name": "asset_attributes_geo_ip_associated_ip_s",
"Type": "string"
},
{
"Name": "asset_attributes_geo_ip_city_s",
"Type": "string"
},
{
"Name": "asset_attributes_geo_ip_country_s",
"Type": "string"
},
{
"Name": "asset_attributes_geo_ip_region_s",
"Type": "string"
},
{
"Name": "asset_attributes_geo_ip_latitude_d",
"Type": "real"
},
{
"Name": "asset_attributes_geo_ip_longitude_d",
"Type": "real"
},
{
"Name": "asset_sources_s",
"Type": "string"
},
{
"Name": "asset_source_ids_s",
"Type": "string"
},
{
"Name": "asset_tags_s",
"Type": "string"
},
{
"Name": "event_event_type_s",
"Type": "string"
},
{
"Name": "event_correlation_timestamp_s",
"Type": "string"
},
{
"Name": "event_asset_version_s",
"Type": "string"
},
{
"Name": "event_asset_type_s",
"Type": "string"
},
{
"Name": "event_asset_id_s",
"Type": "string"
},
{
"Name": "event_source_id_s",
"Type": "string"
},
{
"Name": "event_config_id_g",
"Type": "string"
},
{
"Name": "event_deleted_b",
"Type": "bool"
},
{
"Name": "event_updates_s",
"Type": "string"
},
{
"Name": "Type",
"Type": "string"
},
{
"Name": "_ResourceId",
"Type": "string"
}
]
}

Просмотреть файл

@ -0,0 +1,17 @@
{
"Name": "_ASIM_GetSourceBySourceType",
"Properties": [
{
"Name": "SourceType",
"Type": "string"
},
{
"Name": "Source",
"Type": "String"
},
{
"Name": "print_0",
"Type": "dynamic"
}
]
}

Просмотреть файл

@ -11,9 +11,16 @@ namespace Kqlvalidations.Tests.FunctionSchemasLoaders
public IEnumerable<FunctionSchema> Load()
{
List<string> commonFunctionsYamlFiles = (new CommonFunctionsYamlFilesLoader()).GetFilesNames();
return commonFunctionsYamlFiles.Select(GetFunction).ToList();
if (commonFunctionsYamlFiles.Count == 1 && commonFunctionsYamlFiles[0] == "NoFile.yaml")
{
return Enumerable.Empty<FunctionSchema>(); // Return an empty collection
}
return commonFunctionsYamlFiles.Select(GetFunction).ToList();
}
/// <summary>
/// Extracts the fuction's name, parameters and result columns from the yaml file and creates the FunctionSchema.
/// </summary>

Просмотреть файл

@ -47,6 +47,12 @@ namespace Kqlvalidations.Tests.FunctionSchemasLoaders
{
var parsersYamlFilesLoader = new ParsersYamlFilesLoader();
var parsersYamlFiles = parsersYamlFilesLoader.GetFilesNames();
if (parsersYamlFiles.Count == 1 && parsersYamlFiles[0] == "NoFile.yaml")
{
return Enumerable.Empty<FunctionSchema>(); // Return an empty collection
}
return parsersYamlFiles.Select(fileName =>
{
var schema = fileName.Split(Path.DirectorySeparatorChar)[^3];

Просмотреть файл

@ -1,12 +1,11 @@
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Schema.Generation;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Schema;
using Octokit;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Newtonsoft.Json;
namespace Kqlvalidations.Tests
{
@ -25,53 +24,84 @@ namespace Kqlvalidations.Tests
public override List<string> GetFilesNames()
{
List<string> validFiles = new List<string>();
try
{
var directoryPaths = GetDirectoryPaths();
int prNumber = 0;
int.TryParse(System.Environment.GetEnvironmentVariable("PRNUM"), out prNumber);
//assign pr number to debug with a pr
//prNumber=8414;
var basePath = Utils.GetTestDirectory(TestFolderDepth);
var prFilesListModified = new List<string>();
return directoryPaths.Aggregate(new List<string>(), (accumulator, directoryPath) =>
if (prNumber != 0)
{
var files = Directory.GetFiles(directoryPath, "*.json", SearchOption.AllDirectories)?.ToList();
if (files != null)
try
{
files.ForEach(filePath =>
var client = new GitHubClient(new ProductHeaderValue("MicrosoftSentinelValidationApp"));
var prFiles = client.PullRequest.Files("Azure", "Azure-Sentinel", prNumber).Result;
foreach (var file in prFiles)
{
try
{
JSchema dataConnectorJsonSchema = JSchema.Parse(File.ReadAllText("DataConnectorSchema.json"));
var jsonString = File.ReadAllText(filePath);
JObject dataConnectorJsonObject = JObject.Parse(jsonString);
if (dataConnectorJsonObject.IsValid(dataConnectorJsonSchema))
{
validFiles.Add(filePath);
}
else
{
throw new Exception("Invalid JSON schema for file: " + filePath);
}
}
catch (JsonReaderException ex)
{
Console.WriteLine("Invalid JSON file: " + filePath);
Console.WriteLine("Error message: " + ex.Message);
}
catch (Exception ex)
{
Console.WriteLine("An error occurred while processing file: " + filePath);
Console.WriteLine("Error message: " + ex.Message);
}
});
var modifiedFile = Path.Combine(basePath, file.FileName.Replace('/', Path.DirectorySeparatorChar));
prFilesListModified.Add(modifiedFile);
}
}
else
catch (Exception ex)
{
Console.WriteLine("No JSON files found in directory: " + directoryPath);
// Exception occurred during PR file retrieval, set prFilesListModified to null
Console.WriteLine("Error occured while getting the files from PR. Error message: " + ex.Message + " Stack trace: " + ex.StackTrace);
prFilesListModified = null;
}
}
directoryPaths.ForEach(directoryPath =>
{
var files = Directory.GetFiles(directoryPath, "*.json", SearchOption.AllDirectories);
if (prNumber != 0 && prFilesListModified != null)
{
files = files.Where(file => prFilesListModified.Contains(file)).ToArray();
}
return accumulator.Concat(validFiles).ToList();
foreach (var filePath in files)
{
try
{
JSchema dataConnectorJsonSchema = JSchema.Parse(File.ReadAllText("DataConnectorSchema.json"));
var jsonString = File.ReadAllText(filePath);
JObject dataConnectorJsonObject = JObject.Parse(jsonString);
if (dataConnectorJsonObject.IsValid(dataConnectorJsonSchema))
{
validFiles.Add(filePath);
}
else
{
throw new Exception("Invalid JSON schema for file: " + filePath);
}
}
catch (JsonReaderException ex)
{
Console.WriteLine("Invalid JSON file: " + filePath);
Console.WriteLine("Error message: " + ex.Message);
}
catch (Exception ex)
{
Console.WriteLine("An error occurred while processing file: " + filePath);
Console.WriteLine("Error message: " + ex.Message);
}
}
});
if (validFiles.Count == 0)
{
validFiles.Add("NoFile.json");
}
return validFiles;
}
catch (Exception ex)
{

Просмотреть файл

@ -1,4 +1,5 @@
using System.Collections.Generic;
using Octokit;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@ -12,6 +13,11 @@ namespace Kqlvalidations.Tests
public virtual List<string> GetFilesNames()
{
int prNumber = int.Parse(System.Environment.GetEnvironmentVariable("PRNUM"));
var client = new GitHubClient(new ProductHeaderValue("MicrosoftSentinelValidationApp"));
var prFiles = client.PullRequest.Files("Azure", "Azure-Sentinel", prNumber).Result;
var prFileNames = prFiles.Select(file => file.FileName.Replace("/", "\\")).ToList();
var directoryPaths = GetDirectoryPaths();
if (directoryPaths == null)
@ -19,12 +25,10 @@ namespace Kqlvalidations.Tests
return new List<string>();
}
return directoryPaths.Aggregate(new List<string>(), (accumulator, directoryPath) =>
{
var files = Directory.GetFiles(directoryPath, "*.json", SearchOption.AllDirectories).ToList();
return accumulator.Concat(files).ToList();
});
return directoryPaths
.SelectMany(directoryPath => Directory.GetFiles(directoryPath, "*.json", SearchOption.AllDirectories))
.Where(file => prFileNames.Any(prFile => file.Contains(prFile)))
.ToList();
}
}
}

Просмотреть файл

@ -34,6 +34,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(DataConnectorFilesTestData))]
public void Validate_DataConnectors_HaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.json")
{
Assert.True(true);
return;
}
var dataConnector = ReadAndDeserializeDataConnectorJson(encodedFilePath);
var id = (string)dataConnector.Id;
//we ignore known issues
@ -74,6 +79,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(HuntingQueriesYamlFilesTestData))]
public void Validate_HuntingQueries_HaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var res = ReadAndDeserializeYaml(encodedFilePath);
var id = (string)res["id"];
@ -93,6 +103,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(DetectionsYamlFilesTestData))]
public void Validate_DetectionQueries_HaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var res = ReadAndDeserializeYaml(encodedFilePath);
var id = (string)res["id"];
@ -113,6 +128,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(HuntingQueriesYamlFilesTestData))]
public void Validate_HuntingQueries_SkippedTemplatesDoNotHaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var res = ReadAndDeserializeYaml(encodedFilePath);
var id = (string)res["id"];
@ -131,6 +151,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(DetectionsYamlFilesTestData))]
public void Validate_DetectionQueries_SkippedTemplatesDoNotHaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var res = ReadAndDeserializeYaml(encodedFilePath);
var id = (string)res["id"];
@ -159,6 +184,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(ExplorationQueriesYamlFilesTestData))]
public void Validate_ExplorationQueries_HaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var res = ReadAndDeserializeYaml(encodedFilePath);
var id = (string)res["Id"];
@ -176,6 +206,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(ExplorationQueriesYamlFilesTestData))]
public void Validate_ExplorationQueries_SkippedTemplatesDoNotHaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var res = ReadAndDeserializeYaml(encodedFilePath);
var id = (string)res["Id"];
@ -194,6 +229,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(ParsersYamlFilesTestData))]
public void Validate_ParsersFunctions_HaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
Dictionary<object, object> yaml = ReadAndDeserializeYaml(encodedFilePath);
var queryParamsAsLetStatements = GenerateFunctionParametersAsLetStatements(yaml);
@ -214,6 +254,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(CommonFunctionsYamlFilesTestData))]
public void Validate_CommonFunctions_HaveValidKql(string fileName, string encodedFilePath)
{
if (fileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
Dictionary<object, object> yaml = ReadAndDeserializeYaml(encodedFilePath);
var queryParamsAsLetStatements = GenerateFunctionParametersAsLetStatements(yaml, "FunctionParams");

Просмотреть файл

@ -7,6 +7,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.11.0" />
<PackageReference Include="Newtonsoft.Json.Schema" Version="3.0.14" />
<PackageReference Include="Octokit" Version="6.2.1" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.3">
<PrivateAssets>all</PrivateAssets>

Просмотреть файл

@ -1,6 +1,8 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Octokit;
namespace Kqlvalidations.Tests
{
@ -9,15 +11,61 @@ namespace Kqlvalidations.Tests
protected const int TestFolderDepth = 6;
protected abstract List<string> GetDirectoryPaths();
public List<string> GetFilesNames()
{
var directoryPaths = GetDirectoryPaths();
return directoryPaths.Aggregate(new List<string>(), (accumulator, directoryPath) =>
int prNumber = 0;
int.TryParse(System.Environment.GetEnvironmentVariable("PRNUM"), out prNumber);
//assign pr number to debug with a pr
//prNumber=8414;
if (prNumber == 0)
{
var files = Directory.GetFiles(directoryPath, "*.yaml", SearchOption.AllDirectories).ToList();
return accumulator.Concat(files).ToList();
});
Console.WriteLine("PR Number is not set. Running all tests");
return GetDirectoryPaths()
.SelectMany(directoryPath => Directory.GetFiles(directoryPath, "*.yaml", SearchOption.AllDirectories))
.ToList();
}
else
{
try
{
var client = new GitHubClient(new ProductHeaderValue("MicrosoftSentinelValidationApp"));
var prFiles = client.PullRequest.Files("Azure", "Azure-Sentinel", prNumber).Result;
var prFilesListModified = new List<string>();
var basePath = Utils.GetTestDirectory(TestFolderDepth);
foreach (var file in prFiles)
{
var modifiedFile = Path.Combine(basePath, file.FileName.Replace('/', Path.DirectorySeparatorChar));
prFilesListModified.Add(modifiedFile);
}
var validFiles = GetDirectoryPaths()
.SelectMany(directoryPath => Directory.GetFiles(directoryPath, "*.yaml", SearchOption.AllDirectories))
.Where(file => prFilesListModified.Any(prFile => file.Contains(prFile)))
.ToList();
if (validFiles.Count == 0)
{
validFiles.Add("NoFile.yaml");
}
return validFiles;
}
catch (Exception ex)
{
// Exception occurred, return all files without filtering if there is any error in fetching PR Files
Console.WriteLine("Error occured while getting the files from PR. Error message: " + ex.Message + " Stack trace: " + ex.StackTrace);
return GetDirectoryPaths()
.SelectMany(directoryPath => Directory.GetFiles(directoryPath, "*.yaml", SearchOption.AllDirectories))
.ToList();
}
}
}
}
}

Просмотреть файл

@ -7,6 +7,7 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="5.10.3" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.6.1" />
<PackageReference Include="Octokit" Version="7.0.1" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.3">
<PrivateAssets>all</PrivateAssets>

Просмотреть файл

@ -3,11 +3,13 @@ using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using FluentAssertions;
using Microsoft.Azure.Sentinel.Analytics.Management.AnalyticsTemplatesService.Interface.Model;
using Microsoft.Azure.Sentinel.ApiContracts.ModelValidation;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Octokit;
using Xunit;
using YamlDotNet.Serialization;
@ -22,6 +24,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(DetectionsYamlFilesTestData))]
public void Validate_DetectionTemplates_HasValidTemplateStructure(string detectionsYamlFileName)
{
if (detectionsYamlFileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var yaml = GetYamlFileAsString(detectionsYamlFileName);
//we ignore known issues (in progress)
@ -51,6 +58,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(DetectionsYamlFilesTestData))]
public void Validate_DetectionTemplates_HasValidConnectorIds(string detectionsYamlFileName)
{
if (detectionsYamlFileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var yaml = GetYamlFileAsString(detectionsYamlFileName);
var deserializer = new DeserializerBuilder().Build();
Dictionary<object, object> res = deserializer.Deserialize<dynamic>(yaml);
@ -76,6 +88,11 @@ namespace Kqlvalidations.Tests
[ClassData(typeof(DetectionsYamlFilesTestData))]
public void Validate_DetectionTemplates_TemplatesThatAreInTheWhiteListShouldNotPassTheValidation(string detectionsYamlFileName)
{
if (detectionsYamlFileName == "NoFile.yaml")
{
Assert.True(true);
return;
}
var yaml = GetYamlFileAsString(detectionsYamlFileName);
//we ignore known issues (in progress)
@ -141,33 +158,38 @@ namespace Kqlvalidations.Tests
Assert.True(duplicationsById.Count() == 0, $"There should not be 2 templates with the same ID, but the id {duplicatedId} is duplicated.");
}
[Fact]
public void Validate_DetectionTemplates_RuleKindsAreValid()
[Theory]
[ClassData(typeof(DetectionsYamlFilesTestData))]
public void Validate_DetectionTemplates_RuleKindsAreValid(string detectionsYamlFileName)
{
List<string> detectionPath = DetectionsYamlFilesTestData.GetDetectionPaths();
var yamlFiles = Directory.GetFiles(detectionPath[0], "*.yaml", SearchOption.AllDirectories).ToList();
yamlFiles.AddRange(Directory.GetFiles(detectionPath[1], "*.yaml", SearchOption.AllDirectories).ToList().Where(s => s.Contains("Analytic Rules"))); // Extending it to solution folder for detection validation
var templatesAsStrings = yamlFiles.Select(yaml => GetYamlFileAsString(Path.GetFileName(yaml)));
var templatesAsObjects = templatesAsStrings.Select(yaml => JObject.Parse(ConvertYamlToJson(yaml)));
var templatesAfterRemovingSkipFiles = templatesAsObjects
.Where(template => !TemplatesSchemaValidationsReader.WhiteListStructureTestsTemplateIds.Contains(template["id"].ToString()));
var invalidTemplateRuleKindsAndIds = templatesAfterRemovingSkipFiles
.Where(template => !Enum.TryParse(typeof(AlertRuleKind), template["kind"].ToString(), ignoreCase: false, out _))
.Select(template => (templdateId: template["id"].ToString(), templateKind: template["kind"].Value<string>()))
.ToList();
string exceptionMessage = "";
var validEnumValues = string.Join(", ", Enum.GetNames(typeof(AlertRuleKind)));
if (invalidTemplateRuleKindsAndIds.Any())
if (detectionsYamlFileName == "NoFile.yaml")
{
exceptionMessage += string.Join(", ", invalidTemplateRuleKindsAndIds.Select(invalidTemplate => $"(id: {invalidTemplate.templdateId}, invalid kind: {invalidTemplate.templateKind})"));
Assert.True(true);
return;
}
Assert.False(invalidTemplateRuleKindsAndIds.Any(), $"Invalid rule kind(s) encountered for the following template(s): {exceptionMessage}. Valid kind values (case sensitively) are: {validEnumValues}");
var yaml = GetYamlFileAsString(detectionsYamlFileName);
// We ignore known issues (in progress)
foreach (var templateToSkip in TemplatesSchemaValidationsReader.WhiteListStructureTestsTemplateIds)
{
if (yaml.Contains(templateToSkip))
{
return;
}
}
var templateObject = JObject.Parse(ConvertYamlToJson(yaml));
var ruleKind = templateObject["kind"].ToString();
var validRuleKinds = Enum.GetNames(typeof(AlertRuleKind));
bool isRuleKindValid = validRuleKinds.Contains(ruleKind, StringComparer.OrdinalIgnoreCase);
Assert.True(isRuleKindValid, $"Invalid rule kind '{ruleKind}' encountered in template '{detectionsYamlFileName}'. Valid rule kinds are: {string.Join(", ", validRuleKinds)}");
}
private string GetYamlFileAsString(string detectionsYamlFileName)
{
var detectionsYamlFile = "";

Просмотреть файл

@ -1,9 +1,9 @@
using System;
using Octokit;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
namespace Kqlvalidations.Tests
{
@ -67,10 +67,45 @@ namespace Kqlvalidations.Tests
private static List<string> GetDetectionFiles(List<string> detectionPaths)
{
var files = Directory.GetFiles(detectionPaths[0], "*.yaml", SearchOption.AllDirectories).ToList();
files.AddRange(Directory.GetFiles(detectionPaths[1], "*.yaml", SearchOption.AllDirectories).ToList().Where(s => s.Contains("Analytic Rules")));
int prNumber = 0;
int.TryParse(Environment.GetEnvironmentVariable("PRNUM"), out prNumber);
//assign pr number to debug with a pr
//prNumber=8414;
var files = Directory.GetFiles(detectionPaths[0], "*.yaml", SearchOption.AllDirectories)
.Concat(Directory.GetFiles(detectionPaths[1], "*.yaml", SearchOption.AllDirectories)
.Where(s => s.Contains("Analytic Rules")));
return files;
if (prNumber != 0)
{
try
{
var client = new GitHubClient(new ProductHeaderValue("MicrosoftSentinelValidationApp"));
var prFiles = client.PullRequest.Files("Azure", "Azure-Sentinel", prNumber).Result;
var prFilesListModified = new List<string>();
var basePath = GetRootPath();
foreach (var file in prFiles)
{
var modifiedFile = Path.Combine(basePath, file.FileName.Replace('/', Path.DirectorySeparatorChar));
prFilesListModified.Add(modifiedFile);
}
files = files.Where(file => prFilesListModified.Any(prFile => file.Contains(prFile)));
}
catch (Exception ex)
{
Console.WriteLine("Error occured while getting the files from PR. Error message: " + ex.Message + " Stack trace: " + ex.StackTrace);
}
}
var fileList = files.ToList();
if (fileList.Count == 0)
{
fileList.Add("NoFile.yaml");
}
return fileList;
}
}
}

Просмотреть файл

@ -39,6 +39,7 @@
"Bitglass",
"BitSight",
"BlackberryCylancePROTECT",
"BloodHoundEnterprise",
"BoschAIShield",
"BoxDataConnector",
"BroadcomSymantecDLP",
@ -66,6 +67,7 @@
"CyberArk",
"CyberArkEPM",
"CyberpionSecurityLogs",
"CynerioSecurityEvents",
"DDOS",
"DNS",
"Darktrace",

Просмотреть файл

@ -198,5 +198,6 @@ export enum ConnectorCategory {
AzureDevOpsAuditing="AzureDevOpsAuditing",
ThreatIntelligenceIndicator="ThreatIntelligenceIndicator",
MicrosoftPurviewInformationProtection="MicrosoftPurviewInformationProtection",
Dynamics365Activity="Dynamics365Activity"
Dynamics365Activity="Dynamics365Activity",
BloodHoundEnterprise="BloodHoundEnterprise"
}

Просмотреть файл

@ -0,0 +1,458 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"properties": {
"id": {
"type": "string"
},
"title": {
"type": "string"
},
"publisher": {
"type": "string"
},
"descriptionMarkdown": {
"type": "string"
},
"graphQueries": {
"type": "array",
"items": [
{
"type": "object",
"properties": {
"metricName": {
"type": "string"
},
"legend": {
"type": "string"
},
"baseQuery": {
"type": "string"
}
},
"required": [
"metricName",
"legend",
"baseQuery"
]
}
]
},
"sampleQueries": {
"type": "array",
"items": [
{
"type": "object",
"properties": {
"description": {
"type": "string"
},
"query": {
"type": "string"
}
},
"required": [
"description",
"query"
]
}
]
},
"dataTypes": {
"type": "array",
"items": [
{
"type": "object",
"properties": {
"name": {
"type": "string"
},
"lastDataReceivedQuery": {
"type": "string"
}
},
"required": [
"name",
"lastDataReceivedQuery"
]
}
]
},
"connectivityCriterias": {
"type": "array",
"items": [
{
"type": "object",
"properties": {
"type": {
"type": "string"
},
"value": {
"type": "array",
"items": [
{
"type": "string"
}
]
}
},
"required": [
"type",
"value"
]
}
]
},
"availability": {
"type": "object",
"properties": {
"status": {
"type": "integer"
},
"isPreview": {
"type": "boolean"
}
},
"required": [
"status",
"isPreview"
]
},
"permissions": {
"type": "object",
"properties": {
"resourceProvider": {
"type": "array",
"items": [
{
"type": "object",
"properties": {
"provider": {
"type": "string"
},
"permissionsDisplayText": {
"type": "string"
},
"providerDisplayName": {
"type": "string"
},
"scope": {
"type": "string"
},
"requiredPermissions": {
"type": "object",
"properties": {
"write": {
"type": "boolean"
},
"read": {
"type": "boolean"
},
"delete": {
"type": "boolean"
}
},
"required": [
"write",
"read",
"delete"
]
}
},
"required": [
"provider",
"permissionsDisplayText",
"providerDisplayName",
"scope",
"requiredPermissions"
]
},
{
"type": "object",
"properties": {
"provider": {
"type": "string"
},
"permissionsDisplayText": {
"type": "string"
},
"providerDisplayName": {
"type": "string"
},
"scope": {
"type": "string"
},
"requiredPermissions": {
"type": "object",
"properties": {
"action": {
"type": "boolean"
}
},
"required": [
"action"
]
}
},
"required": [
"provider",
"permissionsDisplayText",
"providerDisplayName",
"scope",
"requiredPermissions"
]
}
]
},
"customs": {
"type": "array",
"items": [
{
"type": "object",
"properties": {
"name": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"name",
"description"
]
},
{
"type": "object",
"properties": {
"name": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"name",
"description"
]
}
]
}
},
"required": [
"resourceProvider",
"customs"
]
},
"instructionSteps": {
"type": "array",
"items": [
{
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"title",
"description"
]
},
{
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"title",
"description"
]
},
{
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
},
"instructions": {
"type": "array",
"items": [
{
"type": "object",
"properties": {
"parameters": {
"type": "object",
"properties": {
"fillWith": {
"type": "array",
"items": [
{
"type": "string"
}
]
},
"label": {
"type": "string"
}
},
"required": [
"fillWith",
"label"
]
},
"type": {
"type": "string"
}
},
"required": [
"parameters",
"type"
]
},
{
"type": "object",
"properties": {
"parameters": {
"type": "object",
"properties": {
"fillWith": {
"type": "array",
"items": [
{
"type": "string"
}
]
},
"label": {
"type": "string"
}
},
"required": [
"fillWith",
"label"
]
},
"type": {
"type": "string"
}
},
"required": [
"parameters",
"type"
]
}
]
}
},
"required": [
"title",
"description",
"instructions"
]
},
{
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"title",
"description"
]
},
{
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"title",
"description"
]
},
{
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"title",
"description"
]
},
{
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"title",
"description"
]
}
]
},
"metadata": {
"type": "object",
"properties": {
"version": {
"type": "string"
},
"kind": {
"type": "string"
}
},
"required": [
"version",
"kind"
]
}
},
"required": [
"id",
"title",
"publisher",
"descriptionMarkdown",
"graphQueries",
"sampleQueries",
"dataTypes",
"connectivityCriterias",
"availability",
"permissions",
"instructionSteps",
"metadata"
]
}

Просмотреть файл

@ -18,23 +18,22 @@ export async function isVersionIncrementedOnModification(items: Array<WorkbookMe
const pr = await GetPRDetails();
if(pr){ // pr may return undefined
const changedFiles = await GetDiffFiles(fileKinds, fileTypeSuffixes, filePathFolderPrefixes);
const changedFiles = await GetDiffFiles(fileKinds, fileTypeSuffixes, filePathFolderPrefixes);
if(changedFiles && changedFiles.length > 0){
const options = [pr.targetBranch, pr.sourceBranch, gitDiffFileFullContentOption, `${workbooksDirectoryPath}/WorkbooksMetadata.json`];
const diffSummary = await git.diff(options);
const diffLinesArray = diffSummary.split('\n').map(l => l.trim());
const versionChanges = extractVersionChangesByWorkbook(diffLinesArray);
const diffSummary = await git.diff(options);
const diffLinesArray = diffSummary.split('\n').map(l => l.trim());
const versionChanges = extractVersionChangesByWorkbook(diffLinesArray);
items
.filter((workbookMetadata: WorkbookMetadata) => changedFiles.includes(`${workbooksDirectoryPath}/${workbookMetadata.templateRelativePath}`))
.forEach((workbookMetadata: WorkbookMetadata) => {
const templateRelativePath = workbookMetadata.templateRelativePath;
if(versionChanges[templateRelativePath] == null){
const templateRelativePath = workbookMetadata.templateRelativePath;
if (versionChanges[templateRelativePath] == null) {
// If the workbook has changed but the version was not updated (a matching key was not found in the versionChanges dictionary) - throw error
throw new WorkbookValidationError(`The workbook ${workbookMetadata.templateRelativePath} has been modified but the version has not been incremented in the ${workbooksDirectoryPath}/WorkbooksMetadata.json file.`);
}
else{
else {
const isNewVersionGreaterThanOldVersion = versionChanges[templateRelativePath]["newVersion"] > versionChanges[templateRelativePath]["oldVersion"];
if(!isNewVersionGreaterThanOldVersion){ // If the version was updated but the new version is not greater than old version - throw error
@ -47,42 +46,53 @@ export async function isVersionIncrementedOnModification(items: Array<WorkbookMe
}
function extractVersionChangesByWorkbook(diffLines: string[]){
let currentLine = 0;
let workbookVersionChanges: any = {};
while(diffLines[currentLine++] != '['){} // Skip to beginning of Workbooks array
function extractVersionChangesByWorkbook(diffLines: string[]) {
let currentLine = 0;
const workbookVersionChanges: any = {};
const replaceQuotesRegex = /\"/gi;
while(diffLines[currentLine] != "]"){
if(diffLines[currentLine] == "{"){ // Beginning of a workbook metadata object
currentLine++;
let templateRelativePath, newVersion, oldVersion;
const replaceQuotesRegex = /\"/gi; // If the replace method receives a string as the first parameter, then only the first occurrence is replaced. To replace all, a regex is required.
while (currentLine < diffLines.length && diffLines[currentLine] !== '[') {
currentLine++; // Skip to beginning of Workbooks array
}
while(!(diffLines[currentLine] == "}" || diffLines[currentLine] == "},")){ // While current line is not end of object
if(diffLines[currentLine].startsWith('"templateRelativePath":')){
templateRelativePath = diffLines[currentLine].split(':')[1].trim().replace(replaceQuotesRegex, "").replace(',', "");
}
while (currentLine < diffLines.length && diffLines[currentLine] !== ']') {
if (diffLines[currentLine] === '{') {
let templateRelativePath: string | null = null;
let newVersion: string | null = null;
let oldVersion: string | null = null;
// The '+' may be added to a line as part of the 'git diff' output
if(diffLines[currentLine].startsWith('+') && diffLines[currentLine].includes('"version":')){ // We are only interested in changes of the version value of an existing workbook
newVersion = diffLines[currentLine].split(':')[1].trim().replace(replaceQuotesRegex, "").replace(',', "");
}
currentLine++; // Beginning of a workbook metadata object
// The '-' may be added to a line as part of the 'git diff' output
if(diffLines[currentLine].startsWith('-') && diffLines[currentLine].includes('"version":')){ // We are only interested in changes of the version value of an existing workbook
oldVersion = diffLines[currentLine].split(':')[1].trim().replace(replaceQuotesRegex, "").replace(',', "");
while (currentLine < diffLines.length && diffLines[currentLine] !== '}') {
const line = diffLines[currentLine];
if (line.trim().startsWith('"templateRelativePath":')) {
templateRelativePath = line.split(':')[1].trim().replace(replaceQuotesRegex, "").replace(',', "");
}
if ((line.trim().startsWith('+') || line.trim().startsWith('-')) && line.includes('"version":')) {
const version = line.split(':')[1].trim().replace(replaceQuotesRegex, "").replace(',', "");
if (line.trim().startsWith('+')) {
newVersion = version;
} else {
oldVersion = version;
}
}
currentLine++;
}
if (templateRelativePath && newVersion && oldVersion) {
workbookVersionChanges[templateRelativePath] = { "newVersion": newVersion, "oldVersion": oldVersion };
}
}
currentLine++;
}
// Here we finish iterating over the current workbook metadata object. We will add the parsed workbook changes only if all fields are populated.
if(templateRelativePath != null && newVersion != null && oldVersion != null){
workbookVersionChanges[templateRelativePath] = {"newVersion": newVersion, "oldVersion": oldVersion};
}
}
currentLine++;
}
return workbookVersionChanges;
}
return workbookVersionChanges;
}

Просмотреть файл

@ -14,27 +14,27 @@
b. **Select Subscription**: Choose the subscription to use under resources.
(https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/subscription.png)
![Select Subscription](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/subscription.png)
c. Right click on the functions and select **Create new Function App in Azure** (Don't choose the Advanced option)
(https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/CreatenewFunctionApp.png)
![Create new Function App in Azure](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/CreatenewFunctionApp.png)
d. **Enter a globally unique name for the function app**: Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. UmbrellaXYZ).
(https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/globallyuniquename.png)
![Enter a globally unique name](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/globallyuniquename.png)
e. **Select a runtime**: Choose Python 3.8.
(https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/Selectaruntime.png)
![Select a runtime](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/Selectaruntime.png)
f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.
(https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/Selectalocation.png)
![Select a location for new resources](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/Selectalocation.png)
- Deployment will begin. A notification is displayed after your function app is created.
- Deploy the function in Function app: Once the function app is created click on deploy button under workspace section. Select the Subcription and the function app in which function needs to be deployed.
(https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/Deploythefunction.png)
![Deploy the function in Function app](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Images/Deploythefunction.png)
- Go to Azure Portal for the Function App configuration.

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,28 @@
<#
.SYNOPSIS
Post-deployment script: publishes the Function App package, optionally applies network
restrictions, then removes the temporary Owner role assignments used during deployment.
Intended to run as an Azure deploymentScript resource under a user-assigned managed identity.
#>
# $PackageUri      - URL of the zipped Function App package to publish.
# $SubscriptionId  - subscription containing the Function App.
# $FAScope/$VnetScope - resource scopes whose temporary Owner role assignments are cleaned up.
# $UAMIPrincipalId - principal id of the user-assigned managed identity holding those assignments.
# $RestrictedIPs   - 'None' to disable all public access, '' for no restriction, or a
#                    space-separated IP list to allow (converted to comma-separated below).
param([string] $PackageUri, [string] $SubscriptionId, [string] $ResourceGroupName, [string] $FunctionAppName, [string] $FAScope, [string] $VnetScope, [string] $UAMIPrincipalId, [string] $RestrictedIPs)
Set-AzContext -Subscription $SubscriptionId
#Give Function App some time to fully finish provisioning.
Start-Sleep -Seconds 60
#Download Function App package and publish.
Invoke-WebRequest -Uri $PackageUri -OutFile functionPackage.zip
Publish-AzWebapp -ResourceGroupName $ResourceGroupName -Name $FunctionAppName -ArchivePath functionPackage.zip -Force
#Add IP restrictions on Function App if specified.
if ($RestrictedIPs -eq 'None') {
    # 'None' means no public access at all: flip publicNetworkAccess off on the site resource.
    $resource = Get-AzResource -ResourceType Microsoft.Web/sites -ResourceGroupName $ResourceGroupName -ResourceName $FunctionAppName
    $resource.Properties.publicNetworkAccess = 'Disabled'
    $resource | Set-AzResource -Force
}
elseif ($RestrictedIPs -ne '') {
    # Allow only the listed IPs, on both the main site and the SCM (Kudu) site.
    Add-AzWebAppAccessRestrictionRule -ResourceGroupName $ResourceGroupName -WebAppName $FunctionAppName `
        -Name "Allowed" -IpAddress $RestrictedIPs.Replace(' ', ',') -Priority 100 -Action Allow
    Add-AzWebAppAccessRestrictionRule -ResourceGroupName $ResourceGroupName -WebAppName $FunctionAppName `
        -Name "Allowed" -IpAddress $RestrictedIPs.Replace(' ', ',') -Priority 100 -Action Allow -TargetScmSite
}
#Cleanup the Service Principal Owner role assignments now that access is no longer needed.
Remove-AzRoleAssignment -ObjectId $UAMIPrincipalId -RoleDefinitionName Owner -Scope $FAScope
if ($VnetScope -ne '') { Remove-AzRoleAssignment -ObjectId $UAMIPrincipalId -RoleDefinitionName Owner -Scope $VnetScope }

Двоичный файл не отображается.

Просмотреть файл

@ -0,0 +1,10 @@
{
"bindings": [
{
"name": "Timer",
"type": "timerTrigger",
"direction": "in",
"schedule": "0 0 0 * * *"
}
]
}

Двоичный файл не отображается.

Двоичный файл не отображается.

Двоичный файл не отображается.

Двоичный файл не отображается.

Просмотреть файл

@ -0,0 +1,11 @@
# TimerTrigger - PowerShell
The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule. This function is deployed with the schedule `0 0 0 * * *` (see `function.json`), which runs it once a day at midnight UTC.
## How it works
For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression) (see the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. For example, the pattern `0 */5 * * * *` means, in plain text: "When seconds is equal to 0, minutes is divisible by 5, for any hour, day of the month, month, and day of the week" — i.e. every 5 minutes.
## Learn more
For more information, see the [Azure Functions timer trigger documentation](https://learn.microsoft.com/azure/azure-functions/functions-bindings-timer).

Просмотреть файл

@ -0,0 +1,317 @@
# Input bindings are passed in via param block.
param($Timer)
#Define global variables/parameters.
# Fail fast on any cmdlet error so a partial ingestion run does not go unnoticed.
$ErrorActionPreference = 'Stop'
# All configuration comes from Function App application settings (environment variables):
$lawResourceId = $env:LawResourceId    # full resource id of the target Log Analytics workspace
$dcrImmutableId = $env:DcrImmutableId  # immutable id of the Data Collection Rule used for ingestion
$dceUri = $env:DceUri                  # Data Collection Endpoint URI for the LogsIngestionClient
$uamiClientId = $env:UamiClientId      # client id of the user-assigned managed identity used to authenticate
# Correlation id stamped onto every record of this run; used later to count what was written.
$transactionId = (New-Guid).Guid
# When set to 1, forces a full (non-incremental) import of the CVE knowledge bases.
$fullImport = $env:FullImport
#Function to create HTTP headers for REST API calls.
function Get-RequestHeaders {
    # Build the standard header set for an authenticated JSON REST call:
    # a bearer Authorization header plus a JSON Content-Type.
    param ($Token)
    $headers = @{}
    $headers['Authorization'] = "Bearer $Token"
    $headers['Content-Type'] = 'application/json'
    return $headers
}
#Function to get data via REST API and send to Azure Monitor.
#Function to get data via REST API and send to Azure Monitor.
# Pages through the given source API (Defender OData paging or NIST startIndex paging),
# enriches each record with TimeGenerated/transactionId, applies per-source transforms,
# and streams the results to Azure Monitor via Send-DataToAzureMonitor.
# Relies on script-scoped state: $transactionId, $azMonJobs, $tableStats, $fullImport.
#   $SourceUri      - first page URI of the source REST API.
#   $SourceToken    - Az access token object for Defender sources (unused for NIST).
#   $Table          - destination custom table name (without the 'Custom-' stream prefix).
#   $DataSourceName - selects the per-source handling branch below.
#   $JsonDepth      - ConvertTo-Json depth used when serializing batches.
#   $AzureResources - optional hashtable for device-name -> Azure resource id lookup.
#   $BatchSize      - max records per Azure Monitor upload.
#   $DelayTime      - minimum milliseconds between source API requests (throttling).
function Import-Data {
    param ($SourceUri, $SourceToken, $Table, $DataSourceName, $JsonDepth, $AzureResources, $BatchSize, $DelayTime)
    $count = 1
    $startIndex = 0
    $queryString = ''
    $totalObjectsReceived = 0
    $totalConfigurationObjectsReceived = 0
    #Start loop to get data from source REST API and ingest into Azure Monitor.
    do {
        Write-Host ("Getting $DataSourceName, request #$count...")
        $startTime = Get-Date
        #Get data from source REST API.
        if ($DataSourceName -eq 'NIST CVE KB') {
            # NIST NVD is unauthenticated and heavily throttled; longer retry interval.
            $response = Invoke-RestMethod -Method Get -Uri ($SourceUri + $queryString) -MaximumRetryCount 2 -RetryIntervalSec 31
            $content = $response.vulnerabilities | Select-Object -ExpandProperty cve
        }
        else {
            $response = Invoke-RestMethod -Method Get -Uri $SourceUri -Headers (Get-RequestHeaders -Token $SourceToken.Token) -MaximumRetryCount 2 -RetryIntervalSec 5
            $content = $response.value
        }
        if ($content.Count -eq 0) {
            Write-Host "No objects were received."
            return
        }
        #Add TimeGenerated and transactionId properties to all records/objects received and rename id property if it exists because Azure Monitor reserves this column value.
        $content | Add-Member -NotePropertyName 'TimeGenerated' -NotePropertyValue (Get-Date -Format 'yyyy-MM-ddTHH:mm:ssZ' -AsUTC)
        $content | Add-Member -NotePropertyName 'transactionId' -NotePropertyValue $transactionId
        # Per-source transforms before ingestion.
        switch ($DataSourceName) {
            'MDVM Recommendations' {
                $content | Add-Member -MemberType AliasProperty -Name recId -Value id
            }
            'MDVM Secure Configurations by Device' {
                # Keep only applicable, non-compliant configurations.
                $content = $content | Where-Object { ($_.isApplicable -eq $true) -and ($_.IsCompliant -eq $false) }
                if ($null -ne $AzureResources) {
                    Write-Host "Looking up Azure Resource IDs..."
                    $content = Add-AzureResourceId -Data $content -AzureResources $AzureResources
                }
            }
            'MDVM Vulnerabilities by Device' {
                $content | Add-Member -MemberType AliasProperty -Name vulnId -Value id
                if ($null -ne $AzureResources) {
                    Write-Host "Looking up Azure Resource IDs..."
                    $content = Add-AzureResourceId -Data $content -AzureResources $AzureResources
                }
            }
            'MDVM CVE KB' {
                $content | Add-Member -MemberType AliasProperty -Name cveId -Value id
            }
            'NIST CVE KB' {
                $content | Add-Member -MemberType AliasProperty -Name cveId -Value id
                #Split off the configurations object and create a separate array for these items so they can be ingested into their own Azure Monitor table.
                $configurations = New-Object System.Collections.ArrayList
                foreach ($item in $content | Where-Object configurations -ne $null) {
                    $configurationNumber = 0
                    foreach ($configuration in $item.configurations) {
                        $configurationNumber += 1
                        $nodes = $configuration | Select-Object -ExpandProperty nodes
                        foreach ($node in $nodes) {
                            # Flatten each CPE match, carrying its configuration/node context and the parent CVE id.
                            $cpes = $node | Select-Object -ExpandProperty cpeMatch
                            $cpes | Add-Member -NotePropertyMembers @{
                                configurationNumber = $configurationNumber
                                configurationOperator = $configuration.operator
                                configurationNegate = $configuration.negate
                                nodeOperator = $node.operator
                                nodeNegate = $node.negate
                                cveId = $item.cveId
                                TimeGenerated = $item.TimeGenerated
                                transactionId = $transactionId
                            } -PassThru | Out-Null
                            foreach ($cpe in $cpes) {
                                $configurations.Add($cpe) | Out-Null
                            }
                        }
                    }
                    $item.PSObject.Properties.Remove('configurations')
                }
            }
        }
        #Update objects received variables.
        $objectsReceived = $content.Count
        $totalObjectsReceived += $objectsReceived
        #Send received data to Azure Monitor.
        Send-DataToAzureMonitor -Data $content -BatchSize $BatchSize -TableName "Custom-$Table" -JsonDepth $JsonDepth
        #If there was configuration data included in NIST data, send that to separate table in Azure Monitor.
        if ($configurations.Count -ne 0) {
            $configurationObjectsReceived = $configurations.Count
            $totalConfigurationObjectsReceived += $configurationObjectsReceived
            Write-Host "Sending NIST Configurations KB..."
            Send-DataToAzureMonitor -Data $configurations -BatchSize 30000 -TableName "Custom-MDVMNISTConfigurations_CL" -JsonDepth $JsonDepth
        }
        else {
            $configurationObjectsReceived = 0
        }
        #Check if there is more data to be requested from source REST API and if so, update the next request URI.
        Write-Host ("Objects received and sent to Azure Monitor: $objectsReceived $($DataSourceName -eq 'NIST CVE KB' ? "(Configuration Objects: $configurationObjectsReceived)": '')")
        if ($DataSourceName -eq 'NIST CVE KB') {
            # NIST paging via startIndex; separator depends on whether the URI already has a query string.
            $startIndex += $response.resultsPerPage
            if ($SourceUri -like '*lastModStartDate*') { $queryString = "&startIndex=$startIndex" } else { $queryString = "?startIndex=$startIndex" }
            if ($startIndex -ge $response.totalResults) { $loopDone = $true }
        }
        else {
            # Defender APIs use OData continuation links.
            $SourceUri = $response.'@odata.nextLink'
            if ($null -eq $SourceUri) { $loopDone = $true }
        }
        #Check the status of the async Azure Monitor ingestion jobs and write error if there are any unsuccessful jobs.
        Get-FailedJobs -Jobs $azMonJobs
        #Check how much time has elapsed since the last source API request and sleep if we are exceeding the API throttling/delay limits.
        # NOTE(review): $timeDiff.Milliseconds is only the milliseconds COMPONENT (0-999) of the
        # timespan, not the total elapsed milliseconds ($timeDiff.TotalMilliseconds) — for DelayTime
        # values >= 1000 this sleeps almost the full delay even after a slow request. Confirm intent.
        $timeDiff = New-TimeSpan -Start $startTime -End (Get-Date)
        if ( $timeDiff.Milliseconds -lt $DelayTime -and $loopDone -ne $true) { Start-Sleep -Milliseconds ($DelayTime - $timeDiff.Milliseconds) }
        $count += 1
    } until ($loopDone -eq $true)
    #After all source API data has been processed, wait for any pending/running Azure Monitor ingestion jobs to complete.
    $pendingAzMonJobs = $azMonJobs | Where-Object { ($_.IsCompleted -eq $false) -or ($_.IsCompletedSuccessfully -eq $false) }
    while ($pendingAzMonJobs.Count -ne 0) {
        Start-Sleep -Seconds 1
        $secondsSpent += 1
        Get-FailedJobs -Jobs $pendingAzMonJobs
        $pendingAzMonJobs = $azMonJobs | Where-Object { ($_.IsCompleted -eq $false) -or ($_.IsCompletedSuccessfully -eq $false) }
        if ($secondsSpent -eq 120) {
            # Give up after 2 minutes of waiting and report whatever is still outstanding.
            Write-Error ("Azure Monitor async jobs have not completed after 2 minutes:" + ($pendingAzMonJobs | Where-Object { ($_.IsCompleted -eq $false) -or ($_.IsCompletedSuccessfully -eq $false) } | Select-Object Id, IsCompleted, Status, Exception | Format-Table | Out-String)) -ErrorAction Continue
            break
        }
    }
    $azMonJobs.Clear()
    #Update total objects received metrics so we can compare later to total number of objects written to Azure Monitor and check for any mismatches.
    # NOTE(review): the MDVMNISTConfigurations_CL stats entry is only created when $Table is seen
    # for the first time; if $Table already exists in $tableStats, configuration counts from this
    # call are not recorded — confirm this is acceptable.
    if ($tableStats | Where-Object TableName -eq $Table) {
        ($tableStats | Where-Object TableName -eq $Table).TotalObjectsReceived += $totalObjectsReceived
    }
    else {
        $tableStatsObject = New-Object psobject
        $tableStatsObject | Add-Member -NotePropertyName 'TableName' -NotePropertyValue $Table
        $tableStatsObject | Add-Member -NotePropertyName 'TotalObjectsReceived' -NotePropertyValue $totalObjectsReceived
        $tableStats.Add($tableStatsObject) | Out-Null
        if ($totalConfigurationObjectsReceived -gt 0) {
            $tableStatsObject = New-Object psobject
            $tableStatsObject | Add-Member -NotePropertyName 'TableName' -NotePropertyValue 'MDVMNISTConfigurations_CL'
            $tableStatsObject | Add-Member -NotePropertyName 'TotalObjectsReceived' -NotePropertyValue $totalConfigurationObjectsReceived
            $tableStats.Add($tableStatsObject) | Out-Null
        }
    }
}
#Function to split data into specified batch sizes (so we do not exceed the maximum body size) and send to Azure Monitor.
function Send-DataToAzureMonitor {
    # Slice $Data into batches of at most $BatchSize records (so the request body stays under
    # the ingestion size limit) and queue one async upload job per batch. Jobs are tracked in
    # the script-scoped $azMonJobs list; $logIngestionClient and $dcrImmutableId are also
    # script-scoped. Runs at least once, matching the original do/until behavior.
    param ($Data, $BatchSize, $TableName, $JsonDepth)
    $offset = 0
    do {
        # A single Select-Object with both -Skip and -First yields the current batch.
        $batch = $Data | Select-Object -Skip $offset -First $BatchSize
        $payload = $batch | ConvertTo-Json -Depth $JsonDepth -AsArray
        $uploadJob = $logIngestionClient.UploadAsync($dcrImmutableId, $TableName, $payload)
        $azMonJobs.Add($uploadJob) | Out-Null
        $offset += $BatchSize
    } while ($offset -lt $Data.Count)
}
#Function to lookup and add Azure Resource ID based on MDVM device name.
#Function to lookup and add Azure Resource ID based on MDVM device name.
# $Data           - objects carrying a deviceName property (short hostname or FQDN).
# $AzureResources - hashtable keyed by lower-cased short hostname (built with
#                   Group-Object -AsHashTable over the Resource Graph inventory).
# Adds an azResourceId property to every item: the matched resource id, or '' when
# no match is found, so the downstream column is always present.
function Add-AzureResourceId {
    param($Data, $AzureResources)
    foreach ($item in $Data) {
        # Normalize to the short, lower-cased hostname: strip any DNS suffix, then lower-case.
        # BUGFIX: the original used '$item.deviceName.ToLower' without parentheses for the
        # non-FQDN case, which yields the PSMethod object instead of invoking it — so the
        # hashtable lookup always missed for short hostnames.
        $dotIndex = $item.deviceName.IndexOf('.')
        if ($dotIndex -eq -1) {
            $lookupKey = $item.deviceName.ToLower()
        }
        else {
            $lookupKey = $item.deviceName.Substring(0, $dotIndex).ToLower()
        }
        $azSearch = $AzureResources.($lookupKey)
        if ($null -ne $azSearch) {
            $item | Add-Member -NotePropertyName azResourceId -NotePropertyValue $azSearch.id
        }
        else {
            # No matching Azure resource: keep the column present with an empty value.
            $item | Add-Member -NotePropertyName azResourceId -NotePropertyValue ''
        }
    }
    return $Data
}
#Function to check the status of the async Azure Monitor ingestion jobs and write error if there are any unsuccessful jobs.
#Function to check the status of the async Azure Monitor ingestion jobs and write error if there are any unsuccessful jobs.
# $Jobs - collection of .NET upload tasks (as returned by LogsIngestionClient.UploadAsync).
# A job counts as failed when it has completed but not successfully. Each failure is
# reported as a non-terminating error, then removed from the script-scoped $azMonJobs
# list (note: removal targets $azMonJobs, not the $Jobs parameter) so it is not
# re-reported on subsequent checks.
function Get-FailedJobs {
    param ($Jobs)
    $failedJobs = $Jobs | Where-Object { ($_.IsCompleted -eq $true) -and ($_.IsCompletedSuccessfully -eq $false) }
    if ($failedJobs) {
        foreach ($job in ($failedJobs)) {
            # -ErrorAction Continue: report the failure but keep ingesting remaining batches.
            Write-Error ("Error on Azure Monitor async job ID: " + $job.Id + ". Error Details: " + $job.Exception.message) -ErrorAction Continue
            $azMonJobs.Remove($job)
        }
    }
}
#Add required .Net assemblies to handle the Azure Monitor ingestion.
# These DLLs ship inside the function package (GetMDVMData/libs); the working directory
# is the Function App root when the runtime invokes this script.
Add-Type -Path .\GetMDVMData\libs\Azure.Monitor.Ingestion.dll
Add-Type -Path .\GetMDVMData\libs\Azure.Identity.dll
#Connect Azure Powershell via User Assigned Managed Identity.
# Subscription id is segment [2] of the workspace resource id ('/subscriptions/<id>/...').
Connect-AzAccount -Identity -AccountId $uamiClientID -Subscription $lawResourceId.Split('/')[2] | Out-Null
#Create Azure.Identity credential via User Assigned Managed Identity.
$credential = New-Object Azure.Identity.ManagedIdentityCredential($uamiClientId)
#Create LogsIngestionClient to handle sending data to Azure Monitor.
$logIngestionClient = New-Object Azure.Monitor.Ingestion.LogsIngestionClient($dceURI, $credential)
#Create array to hold object counts for each data source so we can compare to total records written later.
$tableStats = New-Object System.Collections.ArrayList
#Create array to hold Azure Monitor jobs status.
$azMonJobs = New-Object System.Collections.ArrayList
#Get Log Analytics workspace Id to be used later when querying data in the workspace.
# Segments [4] and [8] of the resource id are the resource group and workspace names.
$lawId = (Get-AzOperationalInsightsWorkspace -ResourceGroupName $lawResourceId.Split('/')[4] -Name $lawResourceId.Split('/')[8]).CustomerId
#Get OAuth token for Defender API.
$defenderToken = Get-AzAccessToken -ResourceUrl 'https://api.securitycenter.microsoft.com'
#Get Azure VM inventory and create hash table so we can quickly lookup and add Azure Resource ID to MDVM data later.
$azureVMsQuery = "resources
| where type in ('microsoft.compute/virtualmachines', 'microsoft.hybridcompute/machines')
| extend deviceName = properties.extended.instanceView.computerName
| extend deviceName = iif(deviceName == '', name, deviceName)
| project id = tolower(id), deviceName = tolower(deviceName)"
# Page through Resource Graph results 1000 at a time using the continuation SkipToken.
do {
    $response = Search-AzGraph -Query $azureVMsQuery -SkipToken $response.SkipToken -First 1000
    $azResources += $response
} until ($null -eq $response.SkipToken)
# Hashtable keyed by lower-cased device name, consumed by Add-AzureResourceId.
$azResources = $azResources | Group-Object -AsHashTable -Property deviceName
#Ingest MDVM Vulnerabilities by Device (Full).
$defenderUri = "https://api.securitycenter.microsoft.com/api/machines/SoftwareVulnerabilitiesByMachine"
# BUGFIX: removed '-DceUri $dceURI' from this call — Import-Data declares no DceUri
# parameter, so named-parameter binding would fail with "A parameter cannot be found
# that matches parameter name 'DceUri'". The ingestion endpoint is already baked into
# $logIngestionClient at construction time.
Import-Data -SourceUri $defenderUri -SourceToken $defenderToken -DataSourceName 'MDVM Vulnerabilities by Device' `
    -JsonDepth 2 -Table 'MDVMVulnerabilitiesByDevice_CL' -AzureResources $azResources -BatchSize 15000 -DelayTime 500
#Ingest MDVM Recommendations (Full).
$defenderUri = 'https://api.securitycenter.microsoft.com/api/recommendations'
Import-Data -SourceUri $defenderUri -SourceToken $defenderToken -DataSourceName 'MDVM Recommendations' `
    -JsonDepth 2 -Table 'MDVMRecommendations_CL' -BatchSize 10000 -DelayTime 500
#Ingest MDVM Secure Configurations by Device (Full).
$defenderUri = 'https://api.securitycenter.microsoft.com/api/machines/SecureConfigurationsAssessmentByMachine'
# NOTE(review): no -DelayTime here, so $DelayTime is $null inside Import-Data for this
# source — confirm the missing inter-request delay is intentional.
Import-Data -SourceUri $defenderUri -SourceToken $defenderToken -DataSourceName 'MDVM Secure Configurations by Device' `
    -JsonDepth 2 -Table 'MDVMSecureConfigurationsByDevice_CL' -AzureResources $azResources -BatchSize 20000
#Ingest MDVM CVE KB (Incremental).
# Strategy: find the newest 'updatedOn' already in the workspace and only request records
# updated after it. Fall back to a full import when the table is empty, when the oldest
# record is close to aging out of retention (within 5 days), or when FullImport=1.
$lawQuery = 'MDVMCVEKB_CL | order by todatetime(updatedOn) desc | take 1 | project updatedOn'
$mdvmKbLastUpdate = Invoke-AzOperationalInsightsQuery -WorkspaceId $lawId -Query $lawQuery -Timespan 730D
$lawQuery = 'MDVMCVEKB_CL | summarize min(TimeGenerated) | project OldestRecord = format_timespan(now() - min_TimeGenerated, "d")'
$mdvmKBOldestRecord = Invoke-AzOperationalInsightsQuery -WorkspaceId $lawId -Query $lawQuery -Timespan 730D
$mdvmKbRetention = (Get-AzOperationalInsightsTable -ResourceGroupName ($lawResourceId.Split('/'))[4] -WorkspaceName ($lawResourceId.Split('/'))[8] -TableName 'MDVMCVEKB_CL' | Select-Object RetentionInDays)[0].RetentionInDays
if ($null -eq $mdvmKbLastUpdate.Results.updatedOn -Or $mdvmKBOldestRecord.Results.OldestRecord -ge ($mdvmKbRetention - 5) -Or $fullImport -eq 1) {
    $defenderUri = 'https://api.securitycenter.windows.com/api/Vulnerabilities'
}
else {
    # OData $filter must stay single-quoted so PowerShell does not expand '$filter'.
    $defenderUri = 'https://api.securitycenter.windows.com/api/Vulnerabilities?$filter=updatedOn+gt+' + $mdvmKbLastUpdate.Results.updatedOn
    Write-Host ("Checking for MDVM CVE KB data updated since " + $mdvmKbLastUpdate.Results.updatedOn + "...")
}
Import-Data -SourceUri $defenderUri -SourceToken $defenderToken -DataSourceName 'MDVM CVE KB' `
    -JsonDepth 2 -Table 'MDVMCVEKB_CL' -BatchSize 10000 -DelayTime 500
#Ingest NIST CVE KB data (Incremental).
# Same incremental strategy as above, driven by the NVD lastModStartDate/lastModEndDate window.
$lawQuery = 'MDVMNISTCVEKB_CL | summarize LastTimeModified = max(lastModified) | project LastTimeModified'
$nistKbLastUpdate = Invoke-AzOperationalInsightsQuery -WorkspaceId $lawId -Query $lawQuery -Timespan 730D
$lawQuery = 'MDVMNISTCVEKB_CL | summarize min(TimeGenerated) | project OldestRecord = format_timespan(now() - min_TimeGenerated, "d")'
$nistKBOldestRecord = Invoke-AzOperationalInsightsQuery -WorkspaceId $lawId -Query $lawQuery -Timespan 730D
$nistKbRetention = (Get-AzOperationalInsightsTable -ResourceGroupName ($lawResourceId.Split('/'))[4] -WorkspaceName ($lawResourceId.Split('/'))[8] -TableName 'MDVMNISTCVEKB_CL' | Select-Object RetentionInDays)[0].RetentionInDays
if ($null -eq $nistKbLastUpdate.Results.LastTimeModified -Or $nistKBOldestRecord.Results.OldestRecord -ge ($nistKbRetention - 5) -Or $fullImport -eq 1) {
    $nistUri = 'https://services.nvd.nist.gov/rest/json/cves/2.0'
}
else {
    # Add 1ms to the last-seen timestamp so the previous newest record is not re-imported.
    $lastModifiedTime = (([datetime]$nistKbLastUpdate.Results.LastTimeModified).AddMilliseconds(1)).ToUniversalTime().ToString('yyyy-MM-ddTHH:mm:ss.fffZ')
    $nistUri = 'https://services.nvd.nist.gov/rest/json/cves/2.0/?lastModStartDate=' + $lastModifiedTime + '&lastModEndDate=' + (Get-Date -Format 'yyyy-MM-ddTHH:mm:ss.fffZ' -AsUTC)
    Write-Host ("Checking for NIST CVE KB data updated since " + $lastModifiedTime + "...")
}
Import-Data -SourceUri $nistUri -DataSourceName 'NIST CVE KB' -JsonDepth 8 -Table 'MDVMNISTCVEKB_CL' -BatchSize 2000 -DelayTime 7000
#Get count of total objects written to Azure Monitor and check for any mismatches against count of total objects received.
Write-Host "Waiting 3 minutes to allow for all data to get written to Azure Monitor before checking for any mismatches..."
Start-Sleep -Seconds 180
# Count rows stamped with this run's transactionId across every MDVM* table.
$lawQuery = "union withsource=MDVMTableName MDVM*
| where transactionId == '$transactionId'
| summarize Count = count() by MDVMTableName, transactionId"
$lawCounts = (Invoke-AzOperationalInsightsQuery -WorkspaceId $lawId -Query $lawQuery -Timespan 1D ).Results
foreach ($table in $tableStats) {
    $table | Add-Member -NotePropertyName TotalRecordsWrittenToAzureMonitor -NotePropertyValue (($lawCounts | Where-Object MDVMTableName -eq $table.TableName).Count)
    if (($table.TotalObjectsReceived -ne ($lawCounts | Where-Object MDVMTableName -eq $table.TableName).Count)) { $mismatch = $true } else { $mismatch = $false }
    $table | Add-Member -NotePropertyName Mismatch -NotePropertyValue $mismatch
}
#If there are any mismatches, write error, otherwise write success message.
if ($tableStats | Where-Object Mismatch -eq $true) {
    Write-Error -Message ("There is a mismatch between data received and written to Azure Monitor. Per table details are below:`n" + ($tableStats | Format-Table | Out-String))
}
else {
    Write-Host ("All data has been successfully written to Azure Monitor. Per table details are below: `n" + ($tableStats | Format-Table | Out-String))
}

Просмотреть файл

@ -0,0 +1,11 @@
{
"version": "2.0",
"managedDependency": {
"Enabled": true
},
"extensionBundle": {
"id": "Microsoft.Azure.Functions.ExtensionBundle",
"version": "[3.*, 4.0.0)"
},
"functionTimeout": "04:00:00"
}

Просмотреть файл

@ -0,0 +1,22 @@
# Azure Functions profile.ps1
#
# This profile.ps1 will get executed every "cold start" of your Function App.
# "cold start" occurs when:
#
# * A Function App starts up for the very first time
# * A Function App starts up after being de-allocated due to inactivity
#
# You can define helper functions, run commands, or specify environment variables
# NOTE: any variables defined that are not environment variables will get reset after the first execution
# Authenticate with Azure PowerShell using MSI.
# Remove this if you are not planning on using MSI or Azure PowerShell.
if ($env:MSI_SECRET) {
    # Prevent cached Azure contexts from leaking between executions on this worker.
    Disable-AzContextAutosave -Scope Process | Out-Null
    # NOTE(review): Connect-AzAccount is left commented out here — run.ps1 connects explicitly
    # with a user-assigned managed identity instead; confirm before re-enabling this line.
    #Connect-AzAccount -Identity
}
# Uncomment the next line to enable legacy AzureRm alias in Azure PowerShell.
# Enable-AzureRmAlias
# You can also define functions or aliases that can be referenced in any of your PowerShell functions.

Просмотреть файл

@ -0,0 +1,11 @@
# This file enables modules to be automatically managed by the Functions service.
# See https://aka.ms/functionsmanageddependency for additional information.
#
@{
# For latest supported version, go to 'https://www.powershellgallery.com/packages/Az'.
# To use the Az module in your function app, please uncomment the line below.
# 'Az' = '9.*'
'Az.Accounts' = '2.*'
'Az.OperationalInsights' = '3.*'
'Az.ResourceGraph' = '0.*'
}

Просмотреть файл

@ -0,0 +1,338 @@
// Deployment parameters for the MDVM data connector (public-network variant).
// All default names derive from uniqueString(resourceGroup().id) so repeated
// deployments into the same resource group are idempotent.
@description('Globally unique name for Key Vault used to store Function App secrets.')
param KeyVaultName string = 'kv-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for Function App resource that will be deployed.')
param FunctionAppName string = 'fa-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for User Assigned Managed Identity that the Function App will use to authenticate to the Defender API and other Azure resources.')
param UserAssignedManagedIdentityName string = 'uami-mdvm-${uniqueString(resourceGroup().id)}'
@description('Select to enable Application Insights for the Function App. This will allow you to monitor the status of the Function App for any errors. The Log Analytics Workspace specified in the "Log Analytics Resource Id" Parameter will be used to store the Application Insights data.')
param DeployApplicationInsights bool = true
@description('Name for the Applications Insights resource that will be used by the Function App if enabled in the DeployApplicationInsights parameter.')
param AppInsightsName string = 'ai-mdvm-${uniqueString(resourceGroup().id)}'
@description('Uri where the Function App package is located. Use default value unless you are hosting the package somewhere else.')
param FunctionAppPackageUri string = 'https://raw.githubusercontent.com/anders-alex/Azure-Sentinel/DataConnector-M365Defender-VulnerabilityManagement/DataConnectors/M365Defender-VulnerabilityManagement/functionPackage.zip'
@description('Uri where the post deployment script is located. This is used to publish the Function App code after the resources have been deployed. Use default value unless you are hosting the script somewhere else.')
param DeploymentScriptUri string = 'https://raw.githubusercontent.com/anders-alex/Azure-Sentinel/DataConnector-M365Defender-VulnerabilityManagement/DataConnectors/M365Defender-VulnerabilityManagement/deploymentScript.ps1'
@description('Name for App Service Plan resource that will be deployed. This is where the Function App will run.')
param AppServicePlanName string = 'asp-mdvm-${uniqueString(resourceGroup().id)}'
@description('Globally unique name for the Storage Account used by the Function App.')
param StorageAccountName string = 'samdvm${uniqueString(resourceGroup().id)}'
@description('Name for Data Collection Endpoint used to ingest data into Log Analytics workspace.')
param DataCollectionEndpointName string = 'dce-mdvm-${uniqueString(resourceGroup().id)}'
// Default fixed from 'dcr-mdmv-' (typo) to 'dcr-mdvm-' to match the naming
// convention used by every other resource here and by the network-restricted template.
@description('Name for Data Collection Rule used to ingest data into Log Analytics workspace.')
param DataCollectionRuleName string = 'dcr-mdvm-${uniqueString(resourceGroup().id)}'
@description('Azure Resource Id of the Log Analytics Workspace where you like the MDVM and optional Function App Application Insights data to reside. The format is: "/subscriptions/xxxxxxxx-xxxxxxxx-xxxxxxxx-xxxxxxxx-xxxxxxxx/resourcegroups/xxxxxxxx/providers/microsoft.operationalinsights/workspaces/xxxxxxxx"')
param LogAnalyticsWorkspaceResourceId string
@description('Azure location/region of the Log Analytics Workspace referenced in the LogAnalyticsWorkspaceResourceId parameter.')
@allowed(
[
'asia'
'asiapacific'
'australia'
'australiacentral'
'australiacentral2'
'australiaeast'
'australiasoutheast'
'brazil'
'brazilsouth'
'brazilsoutheast'
'canada'
'canadacentral'
'canadaeast'
'centralindia'
'centralus'
'centraluseuap'
'eastasia'
'eastus'
'eastus2'
'eastus2euap'
'europe'
'france'
'francecentral'
'francesouth'
'germany'
'germanynorth'
'germanywestcentral'
'global'
'india'
'japan'
'japaneast'
'japanwest'
'korea'
'koreacentral'
'koreasouth'
'northcentralus'
'northeurope'
'norway'
'norwayeast'
'norwaywest'
'qatarcentral'
'southafrica'
'southafricanorth'
'southafricawest'
'southcentralus'
'southeastasia'
'southindia'
'swedencentral'
'switzerland'
'switzerlandnorth'
'switzerlandwest'
'uaecentral'
'uaenorth'
'uksouth'
'ukwest'
'unitedstates'
'westcentralus'
'westeurope'
'westindia'
'westus'
'westus2'
'westus3'
]
)
param LogAnalyticsWorkspaceLocation string
var Location = resourceGroup().location
resource userAssignedMi 'Microsoft.ManagedIdentity/userAssignedIdentities@2022-01-31-preview' = {
name: UserAssignedManagedIdentityName
location: Location
}
resource storageAccount 'Microsoft.Storage/storageAccounts@2021-08-01' = {
name: StorageAccountName
location: Location
sku: {
name: 'Standard_LRS'
}
kind: 'StorageV2'
properties: {
allowBlobPublicAccess: false
}
}
resource fileShare 'Microsoft.Storage/storageAccounts/fileServices/shares@2022-09-01' = {
name: '${storageAccount.name}/default/${toLower(FunctionAppName)}'
}
resource keyVault 'Microsoft.KeyVault/vaults@2022-07-01' = {
name: KeyVaultName
location: Location
properties: {
sku: {
family: 'A'
name: 'premium'
}
tenantId: subscription().tenantId
accessPolicies: [
{
objectId: userAssignedMi.properties.principalId
permissions: {
secrets: [
'get'
'set'
'list'
'delete'
]
}
tenantId: subscription().tenantId
}
]
}
}
// Re-applies the Key Vault configuration with a locked-down network ACL once the
// Function App exists, allowing only the app's possible outbound IPs. This must run
// as a second pass because the outbound IPs are unknown until the app is created.
// Symbolic name fixed from 'keyVaultUpdateNetworAcl' (missing "k"); nothing else
// in this template references the symbol, so the rename is safe.
module keyVaultUpdateNetworkAcl 'modules/keyVault.bicep' = {
  name: 'keyVaultUpdateNetworkAcl'
  params: {
    kvName: keyVault.name
    location: keyVault.location
    skuFamily: keyVault.properties.sku.family
    skuName: keyVault.properties.sku.name
    principalId: userAssignedMi.properties.principalId
    aclBypass: 'None'
    aclDefaultAction: 'Deny'
    // Comma-separated list; the module converts it into ipRules entries.
    aclIpRules: functionApp.properties.possibleOutboundIpAddresses
    secretPermissions: keyVault.properties.accessPolicies[0].permissions.secrets
  }
}
resource keyVaultSecretStorageAccountConnectionString 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = {
parent: keyVault
name: 'StorageAccountConnectionString'
properties: {
value: 'DefaultEndpointsProtocol=https;AccountName=${StorageAccountName};EndpointSuffix=${environment().suffixes.storage};AccountKey=${storageAccount.listKeys().keys[0].value}'
}
}
resource hostingPlan 'Microsoft.Web/serverfarms@2021-03-01' = {
name: AppServicePlanName
location: Location
sku: {
name: 'P1v2'
tier: 'PremiumV2'
}
kind: 'linux'
properties: {
reserved: true
}
}
resource functionApp 'Microsoft.Web/sites@2022-03-01' = {
name: FunctionAppName
location: Location
dependsOn: [
keyVaultSecretStorageAccountConnectionString
fileShare
]
kind: 'functionapp'
identity: {
type: 'UserAssigned'
userAssignedIdentities: {
'${userAssignedMi.id}': {}
}
}
properties: {
serverFarmId: hostingPlan.id
keyVaultReferenceIdentity: userAssignedMi.id
httpsOnly: true
clientCertEnabled: true
clientCertMode: 'OptionalInteractiveUser'
siteConfig: {
alwaysOn: true
linuxFxVersion: 'PowerShell|7.2'
use32BitWorkerProcess: false
ftpsState: 'Disabled'
minTlsVersion: '1.2'
appSettings: [
{
name: 'AzureWebJobsStorage'
value: '@Microsoft.KeyVault(VaultName=${KeyVaultName};SecretName=StorageAccountConnectionString)'
}
{
name: 'WEBSITE_CONTENTAZUREFILECONNECTIONSTRING'
value: '@Microsoft.KeyVault(VaultName=${KeyVaultName};SecretName=StorageAccountConnectionString)'
}
{
name: 'AzureWebJobsSecretStorageType'
value: 'keyvault'
}
{
name: 'AzureWebJobsSecretStorageKeyVaultUri'
value: 'https://${KeyVaultName}.vault.azure.net/'
}
{
name: 'AzureWebJobsSecretStorageKeyVaultClientId'
value: userAssignedMi.properties.clientId
}
{
name: 'WEBSITE_CONTENTSHARE'
value: toLower(FunctionAppName)
}
{
name: 'WEBSITE_SKIP_CONTENTSHARE_VALIDATION'
value: '1'
}
{
name: 'FUNCTIONS_EXTENSION_VERSION'
value: '~4'
}
{
name: 'APPINSIGHTS_INSTRUMENTATIONKEY'
value: DeployApplicationInsights == true ? applicationInsights.properties.InstrumentationKey : ''
}
{
name: 'FUNCTIONS_WORKER_RUNTIME'
value: 'powershell'
}
{
name: 'WEBSITE_RUN_FROM_PACKAGE'
value: '1'
}
{
name: 'LawResourceId'
value: LogAnalyticsWorkspaceResourceId
}
{
name: 'DcrImmutableId'
value: createCustomTables.outputs.DcrImmutableId
}
{
name: 'DceUri'
value: createCustomTables.outputs.DceUri
}
{
name: 'UamiClientId'
value: userAssignedMi.properties.clientId
}
{
name: 'FullImport'
value: '0'
}
]
}
}
}
resource applicationInsights 'Microsoft.Insights/components@2020-02-02' = if (DeployApplicationInsights == true) {
name: AppInsightsName
location: Location
kind: 'web'
properties: {
Application_Type: 'web'
Request_Source: 'rest'
WorkspaceResourceId: LogAnalyticsWorkspaceResourceId
}
}
module createCustomTables 'modules/customDcrTables.bicep' = {
name: 'createCustomTables'
params: {
LogAnalyticsWorkspaceLocation: LogAnalyticsWorkspaceLocation
LogAnalyticsWorkspaceResourceId: LogAnalyticsWorkspaceResourceId
DataCollectionEndpointName: DataCollectionEndpointName
DataCollectionRuleName: DataCollectionRuleName
ServicePrincipalId: userAssignedMi.properties.principalId
}
}
module roleAssignmentLaw 'modules/lawRoleAssignment.bicep' = {
scope: resourceGroup(split(LogAnalyticsWorkspaceResourceId, '/')[2], split(LogAnalyticsWorkspaceResourceId, '/')[4])
name: 'rbacAssignmentLaw'
params: {
principalId: userAssignedMi.properties.principalId
roleDefId: '/providers/Microsoft.Authorization/roleDefinitions/43d0d8ad-25c7-4714-9337-8ba259a9fe05'
scopedResourceName: split(LogAnalyticsWorkspaceResourceId, '/')[8]
}
}
resource roleAssignmentFa 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
name: guid(subscription().id, resourceGroup().id, functionApp.id)
scope: functionApp
properties: {
principalId: userAssignedMi.properties.principalId
roleDefinitionId: '/providers/Microsoft.Authorization/roleDefinitions/8e3af657-a8ff-443c-a75c-2fe8c4bcb635'
principalType: 'ServicePrincipal'
}
}
resource deploymentScript 'Microsoft.Resources/deploymentScripts@2020-10-01' = {
name: 'deployCode'
location: Location
kind: 'AzurePowerShell'
identity: {
type: 'UserAssigned'
userAssignedIdentities: {
'${userAssignedMi.id}': {}
}
}
properties: {
azPowerShellVersion: '8.3'
retentionInterval: 'PT1H'
timeout: 'PT5M'
cleanupPreference: 'Always'
primaryScriptUri: DeploymentScriptUri
arguments: '-PackageUri ${FunctionAppPackageUri} -SubscriptionId ${split(subscription().id, '/')[2]} -ResourceGroupName ${resourceGroup().name} -FunctionAppName ${functionApp.name} -FAScope ${functionApp.id} -UAMIPrincipalId ${userAssignedMi.properties.principalId}'
}
}
output UserAssignedManagedIdentityPrincipalId string = userAssignedMi.properties.principalId
output UserAssignedManagedIdentityPrincipalName string = userAssignedMi.name

Просмотреть файл

@ -0,0 +1,576 @@
@description('Globally unique name for Key Vault used to store Function App secrets.')
param KeyVaultName string = 'kv-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for Key Vault Private Endpoint')
param KeyVaultPrivateEndpointName string = 'pe-kv-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for Function App resource that will be deployed.')
param FunctionAppName string = 'fa-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for User Assigned Managed Identity that the Function App will use to authenticate to the Defender API and other Azure resources.')
param UserAssignedManagedIdentityName string = 'uami-mdvm-${uniqueString(resourceGroup().id)}'
@description('Select to enable Application Insights for the Function App. This will allow you to monitor the status of the Function App for any errors. The Log Analytics Workspace specified in the "Log Analytics Resource Id" Parameter will be used to store the Application Insights data.')
param DeployApplicationInsights bool = true
@description('Name for the Applications Insights resource that will be used by the Function App if enabled in the DeployApplicationInsights parameter.')
param AppInsightsName string = 'ai-mdvm-${uniqueString(resourceGroup().id)}'
@description('Uri where the Function App package is located. Use default value unless you are hosting the package somewhere else.')
param FunctionAppPackageUri string = 'https://raw.githubusercontent.com/anders-alex/Azure-Sentinel/DataConnector-M365Defender-VulnerabilityManagement/DataConnectors/M365Defender-VulnerabilityManagement/functionPackage.zip'
@description('Uri where the post deployment script is located. This is used to publish the Function App code after the resources have been deployed. Use default value unless you are hosting the script somewhere else.')
param DeploymentScriptUri string = 'https://raw.githubusercontent.com/anders-alex/Azure-Sentinel/DataConnector-M365Defender-VulnerabilityManagement/DataConnectors/M365Defender-VulnerabilityManagement/deploymentScript.ps1'
@description('Name for App Service Plan resource that will be deployed. This is where the Function App will run.')
param AppServicePlanName string = 'asp-mdvm-${uniqueString(resourceGroup().id)}'
@description('Globally unique name for the Storage Account used by the Function App.')
param StorageAccountName string = 'samdvm${uniqueString(resourceGroup().id)}'
@description('Name for Storage Account - Blob Private Endpoint')
param StorageAccountBlobPrivateEndpointName string = 'pe-blob-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for Storage Account - File Private Endpoint')
param StorageAccountFilePrivateEndpointName string = 'pe-file-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for Data Collection Endpoint used to ingest data into Log Analytics workspace.')
param DataCollectionEndpointName string = 'dce-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for Data Collection Rule used to ingest data into Log Analytics workspace.')
param DataCollectionRuleName string = 'dcr-mdvm-${uniqueString(resourceGroup().id)}'
@description('Azure Resource Id of the Log Analytics Workspace where you like the MDVM and optional Function App Application Insights data to reside. The format is: "/subscriptions/xxxxxxxx-xxxxxxxx-xxxxxxxx-xxxxxxxx-xxxxxxxx/resourcegroups/xxxxxxxx/providers/microsoft.operationalinsights/workspaces/xxxxxxxx"')
param LogAnalyticsWorkspaceResourceId string
@description('Azure location/region of the Log Analytics Workspace referenced in the LogAnalyticsWorkspaceResourceId parameter.')
@allowed(
[
'asia'
'asiapacific'
'australia'
'australiacentral'
'australiacentral2'
'australiaeast'
'australiasoutheast'
'brazil'
'brazilsouth'
'brazilsoutheast'
'canada'
'canadacentral'
'canadaeast'
'centralindia'
'centralus'
'centraluseuap'
'eastasia'
'eastus'
'eastus2'
'eastus2euap'
'europe'
'france'
'francecentral'
'francesouth'
'germany'
'germanynorth'
'germanywestcentral'
'global'
'india'
'japan'
'japaneast'
'japanwest'
'korea'
'koreacentral'
'koreasouth'
'northcentralus'
'northeurope'
'norway'
'norwayeast'
'norwaywest'
'qatarcentral'
'southafrica'
'southafricanorth'
'southafricawest'
'southcentralus'
'southeastasia'
'southindia'
'swedencentral'
'switzerland'
'switzerlandnorth'
'switzerlandwest'
'uaecentral'
'uaenorth'
'uksouth'
'ukwest'
'unitedstates'
'westcentralus'
'westeurope'
'westindia'
'westus'
'westus2'
'westus3'
]
)
param LogAnalyticsWorkspaceLocation string
@description('Specify a comma separated list of CIDR formatted IP address ranges to restrict connecting to the Function App from (i.e. 192.168.1.0/24,172.16.2.5/32).')
param TrustedIPAddressRanges string = '0.0.0.0/0'
@description('Name for Virtual Network resource that will be deployed.')
param VirtualNetworkName string = 'vnet-mdvm-${uniqueString(resourceGroup().id)}'
@description('Name for Virtual Network resource that will be deployed.')
param VirtualNetworkIPAddressPrefix string = '10.0.0.0/16'
@description('Azure Resource Id of the Virtual Network to place private endpoints and Function App VNet integration.')
param PrivateEndpointSubnetIPAddressPrefix string = '10.0.0.0/24'
@description('Azure Resource Id of the Virtual Network to place private endpoints and Function App VNet integration.')
param VNetIntegrationSubnetIPAddressPrefix string = '10.0.1.0/24'
var Location = resourceGroup().location
resource userAssignedMi 'Microsoft.ManagedIdentity/userAssignedIdentities@2022-01-31-preview' = {
name: UserAssignedManagedIdentityName
location: Location
}
resource storageAccount 'Microsoft.Storage/storageAccounts@2021-08-01' = {
name: StorageAccountName
location: Location
sku: {
name: 'Standard_LRS'
}
kind: 'StorageV2'
properties: {
allowBlobPublicAccess: false
networkAcls: {
defaultAction: 'Deny'
bypass: 'None'
}
publicNetworkAccess: 'Disabled'
}
}
resource fileShare 'Microsoft.Storage/storageAccounts/fileServices/shares@2022-09-01' = {
name: '${storageAccount.name}/default/${toLower(FunctionAppName)}'
}
resource keyVault 'Microsoft.KeyVault/vaults@2022-07-01' = {
name: KeyVaultName
location: Location
properties: {
sku: {
family: 'A'
name: 'premium'
}
tenantId: subscription().tenantId
accessPolicies: [
{
objectId: userAssignedMi.properties.principalId
permissions: {
secrets: [
'get'
'set'
'list'
'delete'
]
}
tenantId: subscription().tenantId
}
]
networkAcls: {
bypass: 'None'
defaultAction: 'Deny'
}
publicNetworkAccess: 'Disabled'
}
}
resource keyVaultSecretStorageAccountConnectionString 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = {
parent: keyVault
name: 'StorageAccountConnectionString'
properties: {
value: 'DefaultEndpointsProtocol=https;AccountName=${StorageAccountName};EndpointSuffix=${environment().suffixes.storage};AccountKey=${storageAccount.listKeys().keys[0].value}'
}
}
resource hostingPlan 'Microsoft.Web/serverfarms@2021-03-01' = {
name: AppServicePlanName
location: Location
sku: {
name: 'P1v2'
tier: 'PremiumV2'
}
kind: 'linux'
properties: {
reserved: true
}
}
resource functionApp 'Microsoft.Web/sites@2022-03-01' = {
name: FunctionAppName
location: Location
dependsOn: [
keyVaultSecretStorageAccountConnectionString
fileShare
peBlob
peFile
peKeyVault
privateDnsZoneBlob
privateDnsZoneFile
privateDnsZoneKeyVault
]
kind: 'functionapp'
identity: {
type: 'UserAssigned'
userAssignedIdentities: {
'${userAssignedMi.id}': {}
}
}
properties: {
serverFarmId: hostingPlan.id
keyVaultReferenceIdentity: userAssignedMi.id
httpsOnly: true
clientCertEnabled: true
clientCertMode: 'OptionalInteractiveUser'
virtualNetworkSubnetId: virtualNetwork.properties.subnets[1].id
siteConfig: {
alwaysOn: true
linuxFxVersion: 'PowerShell|7.2'
use32BitWorkerProcess: false
ftpsState: 'Disabled'
minTlsVersion: '1.2'
vnetRouteAllEnabled: true
http20Enabled: true
appSettings: [
{
name: 'AzureWebJobsStorage'
value: '@Microsoft.KeyVault(VaultName=${KeyVaultName};SecretName=StorageAccountConnectionString)'
}
{
name: 'WEBSITE_CONTENTAZUREFILECONNECTIONSTRING'
value: '@Microsoft.KeyVault(VaultName=${KeyVaultName};SecretName=StorageAccountConnectionString)'
}
{
name: 'AzureWebJobsSecretStorageType'
value: 'keyvault'
}
{
name: 'AzureWebJobsSecretStorageKeyVaultUri'
value: 'https://${KeyVaultName}.vault.azure.net/'
}
{
name: 'AzureWebJobsSecretStorageKeyVaultClientId'
value: userAssignedMi.properties.clientId
}
{
name: 'WEBSITE_CONTENTSHARE'
value: toLower(FunctionAppName)
}
{
name: 'WEBSITE_SKIP_CONTENTSHARE_VALIDATION'
value: '1'
}
{
name: 'WEBSITE_DNS_SERVER'
value: '168.63.129.16'
}
{
name: 'WEBSITE_CONTENTOVERVNET'
value: '1'
}
{
name: 'FUNCTIONS_EXTENSION_VERSION'
value: '~4'
}
{
name: 'APPINSIGHTS_INSTRUMENTATIONKEY'
value: DeployApplicationInsights == true ? applicationInsights.properties.InstrumentationKey : ''
}
{
name: 'FUNCTIONS_WORKER_RUNTIME'
value: 'powershell'
}
{
name: 'WEBSITE_RUN_FROM_PACKAGE'
value: '1'
}
{
name: 'LawResourceId'
value: LogAnalyticsWorkspaceResourceId
}
{
name: 'DcrImmutableId'
value: createCustomTables.outputs.DcrImmutableId
}
{
name: 'DceUri'
value: createCustomTables.outputs.DceUri
}
{
name: 'UamiClientId'
value: userAssignedMi.properties.clientId
}
{
name: 'FullImport'
value: '0'
}
]
}
}
}
resource applicationInsights 'Microsoft.Insights/components@2020-02-02' = if (DeployApplicationInsights == true) {
name: AppInsightsName
location: Location
kind: 'web'
properties: {
Application_Type: 'web'
Request_Source: 'rest'
WorkspaceResourceId: LogAnalyticsWorkspaceResourceId
}
}
resource virtualNetwork 'Microsoft.Network/virtualNetworks@2022-07-01' = {
name: VirtualNetworkName
location: Location
properties: {
addressSpace: {
addressPrefixes: [
VirtualNetworkIPAddressPrefix
]
}
subnets: [
{
name: 'privateEndpoints'
properties: {
addressPrefix: PrivateEndpointSubnetIPAddressPrefix
}
}
{
name: 'functionAppVnetIntegration'
properties: {
addressPrefix: VNetIntegrationSubnetIPAddressPrefix
delegations: [
{
name: 'delegation'
properties: {
serviceName: 'Microsoft.Web/serverFarms'
}
}
]
}
}
]
}
}
resource peKeyVault 'Microsoft.Network/privateEndpoints@2022-07-01' = {
name: KeyVaultPrivateEndpointName
location: Location
properties: {
subnet: {
id: virtualNetwork.properties.subnets[0].id
}
privateLinkServiceConnections: [
{
name: KeyVaultPrivateEndpointName
properties: {
privateLinkServiceId: keyVault.id
groupIds: [
'vault'
]
}
}
]
}
}
resource peBlob 'Microsoft.Network/privateEndpoints@2022-07-01' = {
name: StorageAccountBlobPrivateEndpointName
location: Location
properties: {
subnet: {
id: virtualNetwork.properties.subnets[0].id
}
privateLinkServiceConnections: [
{
name: StorageAccountBlobPrivateEndpointName
properties: {
privateLinkServiceId: storageAccount.id
groupIds: [
'blob'
]
}
}
]
}
}
resource peFile 'Microsoft.Network/privateEndpoints@2022-07-01' = {
name: StorageAccountFilePrivateEndpointName
location: Location
properties: {
subnet: {
id: virtualNetwork.properties.subnets[0].id
}
privateLinkServiceConnections: [
{
name: StorageAccountFilePrivateEndpointName
properties: {
privateLinkServiceId: storageAccount.id
groupIds: [
'file'
]
}
}
]
}
}
resource privateDnsZoneBlob 'Microsoft.Network/privateDnsZones@2020-06-01' = {
name: 'privatelink.blob.core.windows.net'
location: 'global'
dependsOn: [
virtualNetwork
]
}
resource privateDnsZoneFile 'Microsoft.Network/privateDnsZones@2020-06-01' = {
name: 'privatelink.file.core.windows.net'
location: 'global'
dependsOn: [
virtualNetwork
]
}
resource privateDnsZoneKeyVault 'Microsoft.Network/privateDnsZones@2020-06-01' = {
name: 'privatelink.vaultcore.azure.net'
location: 'global'
dependsOn: [
virtualNetwork
]
}
resource privateDnsZoneLinkBlob 'Microsoft.Network/privateDnsZones/virtualNetworkLinks@2020-06-01' = {
name: '${privateDnsZoneBlob.name}-link'
parent: privateDnsZoneBlob
location: 'global'
properties: {
registrationEnabled: false
virtualNetwork: {
id: virtualNetwork.id
}
}
}
resource privateDnsZoneLinkFile 'Microsoft.Network/privateDnsZones/virtualNetworkLinks@2020-06-01' = {
name: '${privateDnsZoneFile.name}-link'
parent: privateDnsZoneFile
location: 'global'
properties: {
registrationEnabled: false
virtualNetwork: {
id: virtualNetwork.id
}
}
}
resource privateDnsZoneLinkKeyVault 'Microsoft.Network/privateDnsZones/virtualNetworkLinks@2020-06-01' = {
name: '${privateDnsZoneKeyVault.name}-link'
parent: privateDnsZoneKeyVault
location: 'global'
properties: {
registrationEnabled: false
virtualNetwork: {
id: virtualNetwork.id
}
}
}
resource peDnsGroupBlob 'Microsoft.Network/privateEndpoints/privateDnsZoneGroups@2022-07-01' = {
name: '${peBlob.name}/dnsGroup'
properties: {
privateDnsZoneConfigs: [
{
name: 'config1'
properties: {
privateDnsZoneId: privateDnsZoneBlob.id
}
}
]
}
}
resource peDnsGroupFile 'Microsoft.Network/privateEndpoints/privateDnsZoneGroups@2022-07-01' = {
name: '${peFile.name}/dnsGroup'
properties: {
privateDnsZoneConfigs: [
{
name: 'config1'
properties: {
privateDnsZoneId: privateDnsZoneFile.id
}
}
]
}
}
resource peDnsGroupKeyVault 'Microsoft.Network/privateEndpoints/privateDnsZoneGroups@2022-07-01' = {
name: '${peKeyVault.name}/dnsGroup'
properties: {
privateDnsZoneConfigs: [
{
name: 'config1'
properties: {
privateDnsZoneId: privateDnsZoneKeyVault.id
}
}
]
}
}
module createCustomTables 'modules/customDcrTables.bicep' = {
name: 'createCustomTables'
params: {
LogAnalyticsWorkspaceLocation: LogAnalyticsWorkspaceLocation
LogAnalyticsWorkspaceResourceId: LogAnalyticsWorkspaceResourceId
DataCollectionEndpointName: DataCollectionEndpointName
DataCollectionRuleName: DataCollectionRuleName
ServicePrincipalId: userAssignedMi.properties.principalId
}
}
module roleAssignmentLaw 'modules/lawRoleAssignment.bicep' = {
scope: resourceGroup(split(LogAnalyticsWorkspaceResourceId, '/')[2], split(LogAnalyticsWorkspaceResourceId, '/')[4])
name: 'rbacAssignmentLaw'
params: {
principalId: userAssignedMi.properties.principalId
roleDefId: '/providers/Microsoft.Authorization/roleDefinitions/43d0d8ad-25c7-4714-9337-8ba259a9fe05'
scopedResourceName: split(LogAnalyticsWorkspaceResourceId, '/')[8]
}
}
resource roleAssignmentFa 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
name: guid(subscription().id, resourceGroup().id, functionApp.id)
scope: functionApp
properties: {
principalId: userAssignedMi.properties.principalId
roleDefinitionId: '/providers/Microsoft.Authorization/roleDefinitions/8e3af657-a8ff-443c-a75c-2fe8c4bcb635'
principalType: 'ServicePrincipal'
}
}
resource roleAssignmentVnet 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
name: guid(subscription().id, resourceGroup().id, virtualNetwork.id)
scope: virtualNetwork
properties: {
principalId: userAssignedMi.properties.principalId
roleDefinitionId: '/providers/Microsoft.Authorization/roleDefinitions/8e3af657-a8ff-443c-a75c-2fe8c4bcb635'
principalType: 'ServicePrincipal'
}
}
resource deploymentScript 'Microsoft.Resources/deploymentScripts@2020-10-01' = {
name: 'deployCode'
location: Location
kind: 'AzurePowerShell'
identity: {
type: 'UserAssigned'
userAssignedIdentities: {
'${userAssignedMi.id}': {}
}
}
properties: {
azPowerShellVersion: '8.3'
retentionInterval: 'PT1H'
timeout: 'PT5M'
cleanupPreference: 'Always'
primaryScriptUri: DeploymentScriptUri
arguments: '-PackageUri ${FunctionAppPackageUri} -SubscriptionId ${split(subscription().id, '/')[2]} -ResourceGroupName ${resourceGroup().name} -FunctionAppName ${functionApp.name} -FAScope ${functionApp.id} -VnetScope ${virtualNetwork.id} -UAMIPrincipalId ${userAssignedMi.properties.principalId} -RestrictedIPs ${TrustedIPAddressRanges}'
}
}
output UserAssignedManagedIdentityPrincipalId string = userAssignedMi.properties.principalId
output UserAssignedManagedIdentityPrincipalName string = userAssignedMi.name

Просмотреть файл

@ -0,0 +1,6 @@
# Build helper: packages the Function App code and transpiles every Bicep template
# to the ARM JSON consumed by the "Deploy to Azure" buttons. Run from repo root.
# Zip the function source into the deployable package (overwrites any previous zip).
Compress-Archive -Path .\DataConnectors\M365Defender-VulnerabilityManagement\functionPackage\* -DestinationPath .\DataConnectors\M365Defender-VulnerabilityManagement\functionPackage.zip -Force
# Public-network deployment template.
bicep build .\DataConnectors\M365Defender-VulnerabilityManagement\main.bicep --outfile .\DataConnectors\M365Defender-VulnerabilityManagement\azureDeploy.json
# Network-restricted (private endpoint) deployment template.
bicep build .\DataConnectors\M365Defender-VulnerabilityManagement\mainNetworkRestricted.bicep --outfile .\DataConnectors\M365Defender-VulnerabilityManagement\azureDeployNetworkRestricted.json
# Standalone table/DCR template used for maintenance re-deployments.
bicep build .\DataConnectors\M365Defender-VulnerabilityManagement\modules\customDcrTables.bicep --outfile .\DataConnectors\M365Defender-VulnerabilityManagement\maintenance\customDcrTables.json
# Workbooks template.
bicep build .\DataConnectors\M365Defender-VulnerabilityManagement\workbooks\main.bicep --outfile .\DataConnectors\M365Defender-VulnerabilityManagement\workbooks\azureDeploy.json

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,8 @@
# Downloads the prebuilt Function App package and publishes it to the target app.
# NOTE(review): the Bicep deploymentScript resources in this PR pass additional
# arguments (-SubscriptionId, -FAScope, -UAMIPrincipalId, -VnetScope, -RestrictedIPs)
# that are not declared here — confirm this is the script version those templates
# are meant to fetch, or that the extra arguments are intentionally ignored.
param(
[string] $PackageUri = 'https://raw.githubusercontent.com/anders-alex/Azure-Sentinel/DataConnector-M365Defender-VulnerabilityManagement/DataConnectors/M365Defender-VulnerabilityManagement/functionPackage.zip',
[string] $ResourceGroupName,
[string] $FunctionAppName
)
# Fetch the zip into the script's working directory, then push it as the app content.
Invoke-WebRequest -Uri $PackageUri -OutFile functionPackage.zip
Publish-AzWebapp -ResourceGroupName $ResourceGroupName -Name $FunctionAppName -ArchivePath functionPackage.zip -Force

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,35 @@
// Module: (re)deploys a Key Vault with a single access policy and network ACLs.
// Used by the main template as a second pass to lock the vault down to the
// Function App's outbound IPs once those are known.
@description('Name of the Key Vault to create or update.')
param kvName string
@description('Azure region for the vault.')
param location string
@description('Key Vault SKU family (e.g. "A").')
param skuFamily string
@description('Key Vault SKU name (e.g. "standard" or "premium").')
param skuName string
@description('Object id of the principal granted the secret permissions below.')
param principalId string
@description('List of secret permissions (e.g. get/set/list/delete) for the principal.')
param secretPermissions array
@description('Comma-separated CIDR/IP list allowed through the firewall; empty means no IP rules.')
param aclIpRules string = ''
@description('Traffic allowed to bypass the firewall: "AzureServices" or "None".')
param aclBypass string = 'None'
// Fixed: the previous default 'AzureServices' is a valid value for networkAcls.bypass
// but NOT for defaultAction, which only accepts 'Allow' or 'Deny'. A caller relying
// on the default would have failed deployment validation.
@description('Default firewall action when no rule matches: "Allow" or "Deny".')
param aclDefaultAction string = 'Allow'
resource keyVault 'Microsoft.KeyVault/vaults@2022-07-01' = {
  name: kvName
  location: location
  properties: {
    sku: {
      family: skuFamily
      name: skuName
    }
    tenantId: subscription().tenantId
    accessPolicies: [
      {
        objectId: principalId
        permissions: {
          secrets: secretPermissions
        }
        tenantId: subscription().tenantId
      }
    ]
    networkAcls: {
      bypass: aclBypass
      defaultAction: aclDefaultAction
      // Turn the comma-separated string "a,b,c" into [{value:"a"},{value:"b"},{value:"c"}]
      // by wrapping/joining with JSON fragments, then parsing the result.
      ipRules: aclIpRules == '' ? [] : json('${'[{"value": "'}${replace(aclIpRules, ',', '"},{"value": "')}${'"}]'}')
    }
  }
}

Просмотреть файл

@ -0,0 +1,22 @@
// Module: creates a custom table in an existing Log Analytics workspace.
@description('Name of the existing Log Analytics workspace.')
param lawName string
@description('Name of the custom table to create (must end in _CL for custom logs).')
param tableName string
@description('Table plan: "Analytics" or "Basic".')
param plan string
@description('Column definitions ({ name, type } objects) for the table schema.')
param columns array
@description('Retention in days; -1 inherits the workspace default retention.')
param retention int = -1
resource logAnalyticsWorkspace 'Microsoft.OperationalInsights/workspaces@2022-10-01' existing = {
  name: lawName
}
resource table 'Microsoft.OperationalInsights/workspaces/tables@2022-10-01' = {
  parent: logAnalyticsWorkspace
  name: tableName
  properties: {
    schema: {
      name: tableName
      columns: columns
    }
    plan: plan
    // Fixed: retentionInDays is an integer property, so the previous '' (empty
    // string) sentinel was invalid; null omits the property and inherits the
    // workspace default retention.
    retentionInDays: retention != -1 ? retention : null
  }
}

Просмотреть файл

@ -0,0 +1,17 @@
// Module: assigns an RBAC role on an existing Log Analytics workspace.
// Deployed into the workspace's resource group (which may differ from the main
// deployment's), since role assignments must be created in the scope's group.
@description('Name of the existing Log Analytics workspace to scope the assignment to.')
param scopedResourceName string
@description('Full role definition resource id (/providers/Microsoft.Authorization/roleDefinitions/<guid>).')
param roleDefId string
@description('Object id of the service principal receiving the role.')
param principalId string
resource scopedResource 'Microsoft.OperationalInsights/workspaces@2022-10-01' existing = {
  name: scopedResourceName
}
resource roleAssignment 'Microsoft.Authorization/roleAssignments@2020-10-01-preview' = {
  // Deterministic guid keyed on scope + role + principal makes redeployment idempotent.
  name: guid(scopedResource.id, roleDefId, principalId)
  scope: scopedResource
  properties: {
    roleDefinitionId: roleDefId
    principalId: principalId
    principalType: 'ServicePrincipal'
  }
}

Просмотреть файл

@ -0,0 +1,82 @@
# Microsoft Defender Vulnerability Management Sentinel Data Connector (Preview)
Author: Alex Anders
This custom data connector uses a Function App to pull Microsoft Defender Vulnerability Management (MDVM) data from the M365 Defender API and ingests it into the selected Log Analytics workspace via the Azure Monitor DCR API. Public NIST CVE information is also ingested to enrich the MDVM data. Six custom tables are created in the workspace:
- *MDVMCVEKB_CL* - MDVM CVE knowledge base.
- *MDVMNISTCVEKB_CL* - NIST CVE knowledge base
- *MDVMNISTConfigurations_CL* - NIST CVE knowledge base: known vulnerable software configurations.
- *MDVMRecommendations_CL* - MDVM recommendations.
- *MDVMSecureConfigurationsByDevice_CL* - Secure configuration assessment details for each device.
- *MDVMVulnerabilitiesByDevice_CL* - Vulnerability assessment details for each device.
## **Pre-requisites**
1. An Azure Subscription
2. An Azure Sentinel/Log Analytics workspace
3. Permissions required to deploy resources:
- Owner permissions on the target resource group.
- Log Analytics Contributor or higher permissions on the destination Log Analytics workspace.
4. Permissions required for assigning the needed permissions post deployment:
- Global Admin or Application Administrator privileges on Defender Azure AD tenant. This is to give the solution access to the Defender API.
- Owner or User Access Administrator access to subscriptions containing Virtual Machines or Arc Server resources. This is to provide the solution reader access.
## **Deployment Process**
## 1. Deploy Azure Resources
1. Click the appropriate **Deploy to Azure** button below.
2. Once in the Azure Portal, select the **Subscription** and **Resource Group** to deploy the resources into.
3. Populate the required **Log Analytics Workspace ID** and **Location** parameters. Modify the default parameters as needed but most users can leave these alone.
4. Click **Review and Create**.
5. Click **Create**.
6. When the deployment has completed, grab the UserAssignedManagedIdentityPrincipalId and UserAssignedManagedIdentityPrincipalName values from the deployment Outputs section. These will be used in the next step.
## 2. Assign Needed Permissions
After the resources have been deployed, we need to assign the appropriate M365 Defender API and Azure permissions to the newly created User Assigned Managed Identity by doing the following:
1. From a PowerShell prompt, connect to Azure via [Connect-AzAccount -TenantId [The Tenant ID your Defender instance resides in.]](https://learn.microsoft.com/en-us/powershell/module/az.accounts/connect-azaccount?view=azps-9.2.0) with an account that has the Global Admin or Application Administrator role assigned. Then, run the following PowerShell commands:
```PowerShell
#UserAssignedManagedIdentityPrincipalId value Copied from step 1.6 above. INSERT THE VALUE BETWEEN THE SINGLE QUOTES BELOW.
$managedIdentityPrincipalId = ''
$permissions = "SecurityRecommendation.Read.All", "Vulnerability.Read.All"
#Lookup Resource and App Roles (permissions).
$resource = Get-AzADServicePrincipal -Filter "DisplayName eq 'WindowsDefenderATP'"
$appRoles = $resource.AppRole | Where-Object Value -in $permissions
#Assign App Roles to Managed Identity.
foreach ($appRole in $appRoles) {
$body = @{
principalId = $ManagedIdentityPrincipalId
resourceId = $resource.Id
appRoleId = $appRole.id
}
(Invoke-AzRestMethod -Method POST -Uri ("https://graph.microsoft.com/v1.0/servicePrincipals/" + $resource.Id + "/appRoleAssignedTo") -Payload (ConvertTo-Json $body)).Content | ConvertFrom-Json
}
```
2. Assign the User Assigned Managed Identity Reader access to all management groups/subscriptions that contain Virtual Machine or Arc Server resources. Do this by:
1. Navigate to the appropriate management group/subscription in the Azure portal.
2. Select the **Access Control (IAM)** menu.
3. Select **Add** => **Add Role Assignment**.
4. Select the **Reader** Role and click Next.
   5. Select **Managed Identity**, Select **Members**, and search for the **User Assigned Managed Identity** created during the deployment. The name was captured in step 1.6 above.
6. Click Next, then Review and Assign.
<br>
### Non-Network Restricted Deployment
No virtual network or Private Endpoints are deployed and public network access to the Function App and Storage Account is unrestricted. The Key Vault is restricted to only allow access from Function App public IP addresses. Use this for test environments or if you prefer to implement network restrictions yourself after deployment.
[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fanders-alex%2FAzure-Sentinel%2FDataConnector-M365Defender-VulnerabilityManagement%2FDataConnectors%2FM365Defender-VulnerabilityManagement%2FazureDeploy.json)
### Network Restricted Deployment
Function App public access is restricted and a virtual network along with the appropriate Private DNS Zones are created to provide out of the box Private Endpoint connectivity between the Function App and its dependencies (Key Vault and Storage Account).
[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fanders-alex%2FAzure-Sentinel%2FDataConnector-M365Defender-VulnerabilityManagement%2FDataConnectors%2FM365Defender-VulnerabilityManagement%2FazureDeployNetworkRestricted.json)
### Workbook Deployment
A modified version of the Defender for Cloud "Vulnerability Assessment Findings" workbook to include the MDVM data collected by this connector.
[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fanders-alex%2FAzure-Sentinel%2FDataConnector-M365Defender-VulnerabilityManagement%2FDataConnectors%2FM365Defender-VulnerabilityManagement%2Fworkbooks%2FazureDeploy.json)
![image](https://user-images.githubusercontent.com/50784041/232255325-974cce56-b0ca-41df-827e-f97f65589e33.png)
![image](https://user-images.githubusercontent.com/50784041/232255372-23ec5de4-8970-4ee3-9445-dfdf520fe1bc.png)

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

Просмотреть файл

@ -0,0 +1,11 @@
# Microsoft Defender Vulnerability Management Workbooks (Preview)
Author: Alex Anders
This set of Azure Workbooks can be used to visualize and filter the MDVM data ingested.
### Workbook Deployment
Click the below Deploy to Azure button to deploy all workbooks into the specified resource group.
[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fanders-alex%2FAzure-Sentinel%2FDataConnector-M365Defender-VulnerabilityManagement%2FDataConnectors%2FM365Defender-VulnerabilityManagement%2Fworkbooks%2FazureDeploy.json)

Просмотреть файл

@ -23,20 +23,37 @@ query: |
let threshold = 2;
imAuthentication
| where TimeGenerated > timeframe
| where EventType=='Logon' and EventResult=='Success'
| where EventType == 'Logon'
and EventResult == 'Success'
| where isnotempty(SrcGeoCountry)
| summarize StartTime = min(TimeGenerated), EndTime = max(TimeGenerated), Vendors=make_set(EventVendor), Products=make_set(EventProduct)
, NumOfCountries = dcount(SrcGeoCountry)
by TargetUserId, TargetUsername, TargetUserType
| summarize
StartTime = min(TimeGenerated)
, EndTime = max(TimeGenerated)
, Vendors = make_set(EventVendor, 128)
, Products = make_set(EventProduct, 128)
, NumOfCountries = dcount(SrcGeoCountry)
, Countries = make_set(SrcGeoCountry, 128)
by TargetUserId, TargetUsername, TargetUserType
| where NumOfCountries >= threshold
| extend timestamp = StartTime, AccountCustomEntity = TargetUsername
| extend
Name = iif(
TargetUsername contains "@"
, tostring(split(TargetUsername, '@', 0)[0])
, TargetUsername
),
UPNSuffix = iif(
TargetUsername contains "@"
, tostring(split(TargetUsername, '@', 1)[0])
, ""
)
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
version: 1.2.2
- identifier: Name
columnName: Name
- identifier: UPNSuffix
columnName: UPNSuffix
version: 1.2.3
kind: Scheduled
metadata:
source:
@ -46,4 +63,4 @@ metadata:
support:
tier: Community
categories:
domains: [ "Security - Network" ]
domains: [ "Security - Network" ]

Просмотреть файл

@ -28,23 +28,23 @@ tactics:
relevantTechniques:
- T1566
query: |
//Finding MDO Security alerts and extracting the Entities user, Domain, Ip, and URL.
//Finding MDO Security alerts and extracting the Entities user, Domain, Ip, and URL.
let Alert_List= dynamic([
"Phishing link click observed in Network Traffic",
"Phishing link click observed in Network Traffic",
"Phish delivered due to an IP allow policy",
"A potentially malicious URL click was detected",
"High Risk Sign-in Observed in Network Traffic",
"A user clicked through to a potentially malicious URL",
"Suspicious network connection to AitM phishing site",
"Messages containing malicious entity not removed after delivery",
"Suspicious network connection to AitM phishing site",
"Messages containing malicious entity not removed after delivery",
"Email messages containing malicious URL removed after delivery",
"Email reported by user as malware or phish",
"Phish delivered due to an ETR override",
"Phish not zapped because ZAP is disabled"]);
SecurityAlert
|where ProviderName in~ ("Office 365 Advanced Threat Protection", "OATP")
| where AlertName in~ (Alert_List)
//extracting Alert Entities
| where AlertName in~ (Alert_List)
//extracting Alert Entities
| extend Entities = parse_json(Entities)
| mv-apply Entity = Entities on
(
@ -57,7 +57,7 @@ query: |
| extend EntityUrl = tostring(Entity.Url)
)
| summarize AccountUpn=tolower(tostring(take_any(EntityUPN))),Url=tostring(tolower(take_any(EntityUrl))),AlertTime= min(TimeGenerated)by SystemAlertId, ProductName
// filtering 3pnetwork devices
// filtering 3pnetwork devices
| join kind= inner (CommonSecurityLog
| where DeviceVendor has_any ("Palo Alto Networks", "Fortinet", "Check Point", "Zscaler")
| where DeviceAction != "Block"
@ -80,7 +80,7 @@ query: |
IndicatorThreatType,
ThreatSeverity,AdditionalExtensions,
ThreatConfidence)on $left.Url == $right.RequestURL and $left.AccountUpn == $right.SourceUserName
// Applied the condition where alert trigger 1st and then the 3p Network activity execution
// Applied the condition where alert trigger 1st and then the 3p Network activity execution
| where AlertTime between ((3plogTime - 1h) .. (3plogTime + 1h))
entityMappings:
- entityType: Account
@ -96,6 +96,6 @@ entityMappings:
- entityType: DNS
fieldMappings:
- identifier: DomainName
columnName: DestinationHostName
columnName: DestinationHostName
kind: Scheduled
version: 1.0.1
version: 1.0.2

Просмотреть файл

@ -1,12 +1,12 @@
id: 042f2801-a375-4cfd-bd29-041fc7ed88a0
name: Risky user signin observed in non-Microsoft network device
name: Risky user signin observed in non-Microsoft network device
description: |
'This content is utilized to identify instances of successful login by risky users, who have been observed engaging in potentially suspicious network activity on non-Microsoft network devices.'
severity: Medium
requiredDataConnectors:
- connectorId: AzureActiveDirectory
dataTypes:
- SigninLogs
- SigninLogs
- connectorId: PaloAltoNetworks
dataTypes:
- CommonSecurityLog (PaloAlto)
@ -84,10 +84,10 @@ entityMappings:
- entityType: DNS
fieldMappings:
- identifier: DomainName
columnName: DestinationHostName
columnName: DestinationHostName
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: SourceSystem
kind: Scheduled
version: 1.0.1
version: 1.0.2

Просмотреть файл

@ -2,7 +2,7 @@ id: 779731f7-8ba0-4198-8524-5701b7defddc
name: M365D Alerts Correlation to non-Microsoft Network device network activity involved in successful sign-in Activity
description: |
'This content is employed to correlate with Microsoft 365 Defender phishing-related alerts. It focuses on instances where a user successfully connects to a phishing URL from a non-Microsoft network device and subsequently makes successful sign-in attempts from the phishing IP address.'
severity: Medium
severity: Medium
requiredDataConnectors:
- connectorId: OfficeATP
dataTypes:
@ -18,7 +18,7 @@ requiredDataConnectors:
- CommonSecurityLog (CheckPoint)
- connectorId: Zscaler
dataTypes:
- CommonSecurityLog (Zscaler)
- CommonSecurityLog (Zscaler)
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
@ -29,13 +29,13 @@ relevantTechniques:
- T1078
query: |
let Alert_List= dynamic([
"Phishing link click observed in Network Traffic",
"Phishing link click observed in Network Traffic",
"Phish delivered due to an IP allow policy",
"A potentially malicious URL click was detected",
"High Risk Sign-in Observed in Network Traffic",
"A user clicked through to a potentially malicious URL",
"Suspicious network connection to AitM phishing site",
"Messages containing malicious entity not removed after delivery",
"Suspicious network connection to AitM phishing site",
"Messages containing malicious entity not removed after delivery",
"Email messages containing malicious URL removed after delivery",
"Email reported by user as malware or phish",
"Phish delivered due to an ETR override",
@ -44,7 +44,7 @@ query: |
| where AlertName in~ (Alert_List)
//Findling Alerts which has the URL
| where Entities has "url"
//extracting Entities
//extracting Entities
| extend Entities = parse_json(Entities)
| mv-apply Entity = Entities on
(
@ -56,10 +56,10 @@ query: |
AlertTime= min(TimeGenerated),
make_set(SystemAlertId, 100)
by ProductName, AlertName
// matching with 3rd party network logs and 3p Alerts
// matching with 3rd party network logs and 3p Alerts
| join kind= inner (CommonSecurityLog
| where DeviceVendor has_any ("Palo Alto Networks", "Fortinet", "Check Point", "Zscaler")
| where DeviceProduct startswith "FortiGate" or DeviceProduct startswith "PAN" or DeviceProduct startswith "VPN" or DeviceProduct startswith "FireWall" or DeviceProduct startswith "NSSWeblog" or DeviceProduct startswith "URL"
| where DeviceProduct startswith "FortiGate" or DeviceProduct startswith "PAN" or DeviceProduct startswith "VPN" or DeviceProduct startswith "FireWall" or DeviceProduct startswith "NSSWeblog" or DeviceProduct startswith "URL"
| where DeviceAction != "Block"
| where isnotempty(RequestURL)
| project
@ -78,7 +78,7 @@ query: |
SourceUserID,
SourceHostName)
on $left.Url == $right.RequestURL
// matching successful Login from suspicious IP
// matching successful Login from suspicious IP
| join kind=inner (SigninLogs
//filtering the Successful Login
| where ResultType == 0
@ -98,8 +98,8 @@ query: |
UserPrincipalName=tostring(tolower(UserPrincipalName)),
Name = tostring(split(UserPrincipalName, "@")[0]),
UPNSuffix =tostring(split(UserPrincipalName, "@")[1]))
on $left.DestinationIP == $right.IPAddress and $left.SourceUserName == $right.UserPrincipalName
| where SigniningTime between ((AlertTime - 6h) .. (AlertTime + 6h)) and 3plogTime between ((AlertTime - 6h) .. (AlertTime + 6h))
on $left.DestinationIP == $right.IPAddress and $left.SourceUserName == $right.UserPrincipalName
| where SigniningTime between ((AlertTime - 6h) .. (AlertTime + 6h)) and 3plogTime between ((AlertTime - 6h) .. (AlertTime + 6h))
entityMappings:
- entityType: Account
fieldMappings:
@ -114,7 +114,7 @@ entityMappings:
- entityType: DNS
fieldMappings:
- identifier: DomainName
columnName: DestinationHostName
columnName: DestinationHostName
- entityType: Host
fieldMappings:
- identifier: FullName
@ -124,4 +124,4 @@ entityMappings:
- identifier: Url
columnName: RequestURL
kind: Scheduled
version: 1.0.1
version: 1.0.2

6
Logos/BHE_Logo.svg Normal file
Просмотреть файл

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-16"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" width="2743px" height="2009px" version="1.1" shape-rendering="geometricPrecision" text-rendering="geometricPrecision" image-rendering="optimizeQuality" fill-rule="evenodd" clip-rule="evenodd"
viewBox="0 0 2743 2009">
<path fill="#413B96" d="M1502 473c-88,0 -164,47 -205,118l-410 0c-178,0 -333,-99 -414,-245l-63 -110c364,0 728,0 1092,0 175,0 328,96 409,237 158,42 316,84 473,127l-291 503 -436 165 -155 269 -274 -473 -273 0 546 945 329 -569 437 -165 476 -824 -687 -184c-151,-167 -311,-263 -541,-267l-1515 0 275 476c134,230 377,356 626,352l396 0c41,70 117,118 205,118 130,0 236,-106 236,-237 0,-130 -106,-236 -236,-236z"/>
</svg>

После

Ширина:  |  Высота:  |  Размер: 829 B

Просмотреть файл

@ -25,7 +25,7 @@
"type": "string"
},
"Reporting Quantity": {
"defaultValue": "Reporting Quantity (GB) (type integer, 10)",
"defaultValue": "Reporting Quantity (MB) (type integer, 10)",
"type": "int"
},
"Lookback": {

Просмотреть файл

@ -0,0 +1,54 @@
[
{
"domain_sid": "S-1-5-21-3130019616-2776909439-2417379567",
"exposure_index": 1,
"tier_zero_count": 1,
"critical_risk_count": 1,
"id": 17999,
"created_at": "2023-05-01T15:35:55.830691Z",
"updated_at": "2023-05-01T15:35:55.830691Z",
"deleted_at": {
"Time": "0001-01-01T00:00:00Z",
"Valid": false
},
"domain_id": "S-1-5-21-3130019616-2776909439-2417379567",
"domain_impact_value": 100,
"domain_name": "TESTLAB.LOCAL",
"domain_type": "active-directory",
"exposure": "100",
"data_type": "posture"
},
{
"finding_id": "T0FindingId",
"domain_id": "S-1-5-21-3130019616-2776909439-2417379567",
"path_title": "Example Path Title",
"path_type": "Tier Zero Attack Paths",
"exposure": 0,
"finding_count": 1,
"principal_count": 1,
"id": 949999,
"created_at": "2023-05-01T15:35:43.231504Z",
"updated_at": "2023-05-01T15:35:43.231504Z",
"deleted_at": {
"Time": "0001-01-01T00:00:00Z",
"Valid": false
},
"severity": "Low",
"domain_impact_value": 100,
"domain_name": "TESTLAB.LOCAL",
"domain_type": "active-directory",
"data_type": "paths"
},
{
"domain_id": "S-1-5-21-3130019616-2776909439-2417379567",
"domain_name": "TESTLAB.LOCAL",
"path_id": "T0FindingId",
"path_title": "Example Path Title",
"group": null,
"principal": null,
"non_tier_zero_principal": "NON TIER ZERO_TESTLAB.LOCAL",
"tier_zero_principal": "TIER ZERO_TESTLAB.LOCAL",
"user": null,
"data_type": "path_principals"
}
]

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

Просмотреть файл

@ -1,6 +1,6 @@
{
"id": "AbnormalSecurity",
"title": "AbnormalSecurity (using Azure Function)",
"title": "AbnormalSecurity ",
"publisher": "AbnormalSecurity",
"descriptionMarkdown": "The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)",
"graphQueries": [

Просмотреть файл

@ -7,7 +7,7 @@
"Data Connectors/AbnormalSecurity_API_FunctionApp.json"
],
"BasePath": "C:\\GitHub\\Azure-Sentinel\\Solutions\\AbnormalSecurity",
"Version": "2.0.2",
"Version": "3.0.0",
"Metadata": "SolutionMetadata.json",
"TemplateSpec": true,
"Is1PConnector": false

Двоичные данные
Solutions/AbnormalSecurity/Package/3.0.0.zip Normal file

Двоичный файл не отображается.

Просмотреть файл

@ -6,7 +6,7 @@
"config": {
"isWizard": false,
"basics": {
"description": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/AbnormalSecurity/Data%20Connectors/Logo/abnormalsecurity.svg\" width=\"60px\" height=\"60px\">\n\n**Note:** _There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing._\n\nThe Abnormal Security Events solution provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API](https://app.swaggerhub.com/apis/abnormal-security/abx/).\r \n \r \n **Underlying Microsoft Technologies used:** \r \n \r \n This solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:\r \n \r \n a. [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api) \r \n \r \n b. [Azure Functions](https://azure.microsoft.com/services/functions/#overview)\n\n**Data Connectors:** 1\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)",
"description": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/AbnormalSecurity/Data%20Connectors/Logo/abnormalsecurity.svg\" width=\"60px\" height=\"60px\">\n\n**Note:** Please refer to the following before installing the solution: \r \n • Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/AbnormalSecurity/ReleaseNotes.md)\r \n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution.\n\nThe Abnormal Security Events solution provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API](https://app.swaggerhub.com/apis/abnormal-security/abx/).\r \n \r \n **Underlying Microsoft Technologies used:** \r \n \r \n This solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:\r \n \r \n a. [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api) \r \n \r \n b. [Azure Functions](https://azure.microsoft.com/services/functions/#overview)\n\n**Data Connectors:** 1\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)",
"subscription": {
"resourceProviders": [
"Microsoft.OperationsManagement/solutions",
@ -60,7 +60,7 @@
"name": "dataconnectors1-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "This solution installs the data connector for ingesting Abnormal Security Events logs into Microsoft Sentinel, using the Abnormal Security Events API. After installing the solution, configure and enable this data connector by following guidance in Manage solution view."
"text": "This Solution installs the data connector for Abnormal Security. You can get Abnormal Security custom log data in your Microsoft Sentinel workspace. After installing the solution, configure and enable this data connector by following guidance in Manage solution view."
}
},
{

Просмотреть файл

@ -30,49 +30,34 @@
}
},
"variables": {
"solutionId": "abnormalsecuritycorporation1593011233180.fe1b4806-215b-4610-bf95-965a7a65579c",
"_solutionId": "[variables('solutionId')]",
"email": "support@abnormalsecurity.com",
"_email": "[variables('email')]",
"workspaceResourceId": "[resourceId('microsoft.OperationalInsights/Workspaces', parameters('workspace'))]",
"_solutionName": "AbnormalSecurity",
"_solutionVersion": "3.0.0",
"solutionId": "abnormalsecuritycorporation1593011233180.fe1b4806-215b-4610-bf95-965a7a65579c",
"_solutionId": "[variables('solutionId')]",
"uiConfigId1": "AbnormalSecurity",
"_uiConfigId1": "[variables('uiConfigId1')]",
"dataConnectorContentId1": "AbnormalSecurity",
"_dataConnectorContentId1": "[variables('dataConnectorContentId1')]",
"dataConnectorId1": "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectors', variables('_dataConnectorContentId1'))]",
"_dataConnectorId1": "[variables('dataConnectorId1')]",
"dataConnectorTemplateSpecName1": "[concat(parameters('workspace'),'-dc-',uniquestring(variables('_dataConnectorContentId1')))]",
"dataConnectorVersion1": "1.0.0"
"dataConnectorTemplateSpecName1": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(concat(parameters('workspace'),'-dc-',uniquestring(variables('_dataConnectorContentId1'))),variables('dataConnectorVersion1')))]",
"dataConnectorVersion1": "1.0.0",
"_dataConnectorcontentProductId1": "[concat(take(variables('_solutionId'),50),'-','dc','-', uniqueString(concat(variables('_solutionId'),'-','DataConnector','-',variables('_dataConnectorContentId1'),'-', variables('dataConnectorVersion1'))))]",
"_solutioncontentProductId": "[concat(take(variables('_solutionId'),50),'-','sl','-', uniqueString(concat(variables('_solutionId'),'-','Solution','-',variables('_solutionId'),'-', variables('_solutionVersion'))))]"
},
"resources": [
{
"type": "Microsoft.Resources/templateSpecs",
"apiVersion": "2021-05-01",
"type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates",
"apiVersion": "2023-04-01-preview",
"name": "[variables('dataConnectorTemplateSpecName1')]",
"location": "[parameters('workspace-location')]",
"tags": {
"hidden-sentinelWorkspaceId": "[variables('workspaceResourceId')]",
"hidden-sentinelContentType": "DataConnector"
},
"properties": {
"description": "AbnormalSecurity data connector with template",
"displayName": "AbnormalSecurity template"
}
},
{
"type": "Microsoft.Resources/templateSpecs/versions",
"apiVersion": "2021-05-01",
"name": "[concat(variables('dataConnectorTemplateSpecName1'),'/',variables('dataConnectorVersion1'))]",
"location": "[parameters('workspace-location')]",
"tags": {
"hidden-sentinelWorkspaceId": "[variables('workspaceResourceId')]",
"hidden-sentinelContentType": "DataConnector"
},
"dependsOn": [
"[resourceId('Microsoft.Resources/templateSpecs', variables('dataConnectorTemplateSpecName1'))]"
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]"
],
"properties": {
"description": "AbnormalSecurity data connector with template version 2.0.2",
"description": "AbnormalSecurity data connector with template version 3.0.0",
"mainTemplate": {
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "[variables('dataConnectorVersion1')]",
@ -88,7 +73,7 @@
"properties": {
"connectorUiConfig": {
"id": "[variables('_uiConfigId1')]",
"title": "AbnormalSecurity (using Azure Function) (using Azure Function)",
"title": "AbnormalSecurity (using Azure Functions)",
"publisher": "AbnormalSecurity",
"descriptionMarkdown": "The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)",
"graphQueries": [
@ -133,7 +118,7 @@
{
"type": "IsConnectedQuery",
"value": [
"ABNORMAL_THREAT_LOG_C\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
"ABNORMAL_THREAT_MESSAGES_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
]
}
],
@ -266,7 +251,18 @@
}
}
]
}
},
"packageKind": "Solution",
"packageVersion": "[variables('_solutionVersion')]",
"packageName": "[variables('_solutionName')]",
"packageId": "[variables('_solutionId')]",
"contentSchemaVersion": "3.0.0",
"contentId": "[variables('_dataConnectorContentId1')]",
"contentKind": "DataConnector",
"displayName": "AbnormalSecurity (using Azure Functions)",
"contentProductId": "[variables('_dataConnectorcontentProductId1')]",
"id": "[variables('_dataConnectorcontentProductId1')]",
"version": "[variables('dataConnectorVersion1')]"
}
},
{
@ -306,7 +302,7 @@
"kind": "GenericUI",
"properties": {
"connectorUiConfig": {
"title": "AbnormalSecurity (using Azure Function) (using Azure Function)",
"title": "AbnormalSecurity (using Azure Functions)",
"publisher": "AbnormalSecurity",
"descriptionMarkdown": "The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)",
"graphQueries": [
@ -341,7 +337,7 @@
{
"type": "IsConnectedQuery",
"value": [
"ABNORMAL_THREAT_LOG_C\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
"ABNORMAL_THREAT_MESSAGES_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
]
}
],
@ -443,13 +439,20 @@
}
},
{
"type": "Microsoft.OperationalInsights/workspaces/providers/metadata",
"apiVersion": "2022-01-01-preview",
"type": "Microsoft.OperationalInsights/workspaces/providers/contentPackages",
"apiVersion": "2023-04-01-preview",
"location": "[parameters('workspace-location')]",
"properties": {
"version": "2.0.2",
"version": "3.0.0",
"kind": "Solution",
"contentSchemaVersion": "2.0.0",
"contentSchemaVersion": "3.0.0",
"displayName": "AbnormalSecurity",
"publisherDisplayName": "Abnormal Security",
"descriptionHtml": "<p><strong>Note:</strong> <em>There may be <a href=\"https://aka.ms/sentinelsolutionsknownissues\">known issues</a> pertaining to this Solution, please refer to them before installing.</em></p>\n<p>The Abnormal Security Events solution provides the capability to ingest threat and case logs into Microsoft Sentinel using the <a href=\"https://app.swaggerhub.com/apis/abnormal-security/abx/\">Abnormal Security Rest API</a>.</p>\n<p><strong>Underlying Microsoft Technologies used:</strong></p>\n<p>This solution takes a dependency on the following technologies, and some of these dependencies either may be in <a href=\"https://azure.microsoft.com/support/legal/preview-supplemental-terms/\">Preview</a> state or might result in additional ingestion or operational costs:</p>\n<ol type=\"a\">\n<li><p><a href=\"https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api\">Azure Monitor HTTP Data Collector API</a></p>\n</li>\n<li><p><a href=\"https://azure.microsoft.com/services/functions/#overview\">Azure Functions</a></p>\n</li>\n</ol>\n<p><strong>Data Connectors:</strong> 1</p>\n<p><a href=\"https://aka.ms/azuresentinel\">Learn more about Microsoft Sentinel</a> | <a href=\"https://aka.ms/azuresentinelsolutionsdoc\">Learn more about Solutions</a></p>\n",
"contentKind": "Solution",
"contentProductId": "[variables('_solutioncontentProductId')]",
"id": "[variables('_solutioncontentProductId')]",
"icon": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/AbnormalSecurity/Data%20Connectors/Logo/abnormalsecurity.svg\" width=\"60px\" height=\"60px\">",
"contentId": "[variables('_solutionId')]",
"parentId": "[variables('_solutionId')]",
"source": {

Просмотреть файл

@ -0,0 +1,3 @@
| **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** |
|-------------|--------------------------------|---------------------------------------------|
| 3.0.0 | 29-06-2023 | Updating Azure Function to Azure Functions in **Data Connector** Description |

Просмотреть файл

@ -11,7 +11,7 @@
],
"Metadata": "SolutionMetadata.json",
"BasePath": "C:\\GitHub\\Azure-Sentinel\\Solutions\\AtlassianConfluenceAudit",
"Version": "2.0.5",
"Version": "3.0.0",
"TemplateSpec": true,
"Is1PConnector": false
}

Двоичные данные
Solutions/AtlassianConfluenceAudit/Package/3.0.0.zip Normal file

Двоичный файл не отображается.

Просмотреть файл

@ -6,7 +6,7 @@
"config": {
"isWizard": false,
"basics": {
"description": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Workbooks/Images/Logos/Azure_Sentinel.svg\"width=\"75px\"height=\"75px\">\n\n**Note:** _There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing._\n\nThe [Atlassian Confluence Audit](https://www.atlassian.com/software/confluence) solution provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) into Microsoft Sentinel.\n\n**Underlying Microsoft Technologies used:**\n\nThis solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:\n\na. [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api)\n\nb. [Azure Functions](https://azure.microsoft.com/services/functions/#overview)\n\n**Data Connectors:** 1, **Parsers:** 1\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)",
"description": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Workbooks/Images/Logos/Azure_Sentinel.svg\"width=\"75px\"height=\"75px\">\n\n**Note:** Please refer to the following before installing the solution: \r \n • Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/ReleaseNotes.md)\r \n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution.\n\nThe [Atlassian Confluence Audit](https://www.atlassian.com/software/confluence) solution provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) into Microsoft Sentinel.\n\n**Underlying Microsoft Technologies used:**\n\nThis solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:\n\na. [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api)\n\nb. [Azure Functions](https://azure.microsoft.com/services/functions/#overview)\n\n**Data Connectors:** 1, **Parsers:** 1\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)",
"subscription": {
"resourceProviders": [
"Microsoft.OperationsManagement/solutions",

Просмотреть файл

@ -30,57 +30,43 @@
}
},
"variables": {
"solutionId": "azuresentinel.azure-sentinel-solution-atlassianconfluenceaudit",
"_solutionId": "[variables('solutionId')]",
"email": "support@microsoft.com",
"_email": "[variables('email')]",
"workspaceResourceId": "[resourceId('microsoft.OperationalInsights/Workspaces', parameters('workspace'))]",
"_solutionName": "AtlassianConfluenceAudit",
"_solutionVersion": "3.0.0",
"solutionId": "azuresentinel.azure-sentinel-solution-atlassianconfluenceaudit",
"_solutionId": "[variables('solutionId')]",
"uiConfigId1": "ConfluenceAuditAPI",
"_uiConfigId1": "[variables('uiConfigId1')]",
"dataConnectorContentId1": "ConfluenceAuditAPI",
"_dataConnectorContentId1": "[variables('dataConnectorContentId1')]",
"dataConnectorId1": "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectors', variables('_dataConnectorContentId1'))]",
"_dataConnectorId1": "[variables('dataConnectorId1')]",
"dataConnectorTemplateSpecName1": "[concat(parameters('workspace'),'-dc-',uniquestring(variables('_dataConnectorContentId1')))]",
"dataConnectorTemplateSpecName1": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(concat(parameters('workspace'),'-dc-',uniquestring(variables('_dataConnectorContentId1'))),variables('dataConnectorVersion1')))]",
"dataConnectorVersion1": "1.0.0",
"parserVersion1": "1.0.0",
"parserContentId1": "ConfluenceAudit-Parser",
"_parserContentId1": "[variables('parserContentId1')]",
"_dataConnectorcontentProductId1": "[concat(take(variables('_solutionId'),50),'-','dc','-', uniqueString(concat(variables('_solutionId'),'-','DataConnector','-',variables('_dataConnectorContentId1'),'-', variables('dataConnectorVersion1'))))]",
"parserName1": "ConfluenceAudit",
"_parserName1": "[concat(parameters('workspace'),'/',variables('parserName1'))]",
"parserId1": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), variables('parserName1'))]",
"_parserId1": "[variables('parserId1')]",
"parserTemplateSpecName1": "[concat(parameters('workspace'),'-pr-',uniquestring(variables('_parserContentId1')))]"
"parserTemplateSpecName1": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(concat(parameters('workspace'),'-pr-',uniquestring(variables('_parserContentId1'))),variables('parserVersion1')))]",
"parserVersion1": "1.0.0",
"parserContentId1": "ConfluenceAudit-Parser",
"_parserContentId1": "[variables('parserContentId1')]",
"_parsercontentProductId1": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('_parserContentId1'),'-', variables('parserVersion1'))))]",
"_solutioncontentProductId": "[concat(take(variables('_solutionId'),50),'-','sl','-', uniqueString(concat(variables('_solutionId'),'-','Solution','-',variables('_solutionId'),'-', variables('_solutionVersion'))))]"
},
"resources": [
{
"type": "Microsoft.Resources/templateSpecs",
"apiVersion": "2022-02-01",
"type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates",
"apiVersion": "2023-04-01-preview",
"name": "[variables('dataConnectorTemplateSpecName1')]",
"location": "[parameters('workspace-location')]",
"tags": {
"hidden-sentinelWorkspaceId": "[variables('workspaceResourceId')]",
"hidden-sentinelContentType": "DataConnector"
},
"properties": {
"description": "AtlassianConfluenceAudit data connector with template",
"displayName": "AtlassianConfluenceAudit template"
}
},
{
"type": "Microsoft.Resources/templateSpecs/versions",
"apiVersion": "2022-02-01",
"name": "[concat(variables('dataConnectorTemplateSpecName1'),'/',variables('dataConnectorVersion1'))]",
"location": "[parameters('workspace-location')]",
"tags": {
"hidden-sentinelWorkspaceId": "[variables('workspaceResourceId')]",
"hidden-sentinelContentType": "DataConnector"
},
"dependsOn": [
"[resourceId('Microsoft.Resources/templateSpecs', variables('dataConnectorTemplateSpecName1'))]"
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]"
],
"properties": {
"description": "AtlassianConfluenceAudit data connector with template version 2.0.5",
"description": "AtlassianConfluenceAudit data connector with template version 3.0.0",
"mainTemplate": {
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "[variables('dataConnectorVersion1')]",
@ -217,7 +203,7 @@
},
{
"type": "Microsoft.OperationalInsights/workspaces/providers/metadata",
"apiVersion": "2022-01-01-preview",
"apiVersion": "2023-04-01-preview",
"name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('DataConnector-', last(split(variables('_dataConnectorId1'),'/'))))]",
"properties": {
"parentId": "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectors', variables('_dataConnectorContentId1'))]",
@ -242,12 +228,23 @@
}
}
]
}
},
"packageKind": "Solution",
"packageVersion": "[variables('_solutionVersion')]",
"packageName": "[variables('_solutionName')]",
"packageId": "[variables('_solutionId')]",
"contentSchemaVersion": "3.0.0",
"contentId": "[variables('_dataConnectorContentId1')]",
"contentKind": "DataConnector",
"displayName": "Atlassian Confluence Audit (using Azure Functions)",
"contentProductId": "[variables('_dataConnectorcontentProductId1')]",
"id": "[variables('_dataConnectorcontentProductId1')]",
"version": "[variables('dataConnectorVersion1')]"
}
},
{
"type": "Microsoft.OperationalInsights/workspaces/providers/metadata",
"apiVersion": "2022-01-01-preview",
"apiVersion": "2023-04-01-preview",
"name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('DataConnector-', last(split(variables('_dataConnectorId1'),'/'))))]",
"dependsOn": [
"[variables('_dataConnectorId1')]"
@ -404,33 +401,15 @@
}
},
{
"type": "Microsoft.Resources/templateSpecs",
"apiVersion": "2022-02-01",
"type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates",
"apiVersion": "2023-04-01-preview",
"name": "[variables('parserTemplateSpecName1')]",
"location": "[parameters('workspace-location')]",
"tags": {
"hidden-sentinelWorkspaceId": "[variables('workspaceResourceId')]",
"hidden-sentinelContentType": "Parser"
},
"properties": {
"description": "ConfluenceAudit Data Parser with template",
"displayName": "ConfluenceAudit Data Parser template"
}
},
{
"type": "Microsoft.Resources/templateSpecs/versions",
"apiVersion": "2022-02-01",
"name": "[concat(variables('parserTemplateSpecName1'),'/',variables('parserVersion1'))]",
"location": "[parameters('workspace-location')]",
"tags": {
"hidden-sentinelWorkspaceId": "[variables('workspaceResourceId')]",
"hidden-sentinelContentType": "Parser"
},
"dependsOn": [
"[resourceId('Microsoft.Resources/templateSpecs', variables('parserTemplateSpecName1'))]"
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]"
],
"properties": {
"description": "ConfluenceAudit Data Parser with template version 2.0.5",
"description": "ConfluenceAudit Data Parser with template version 3.0.0",
"mainTemplate": {
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "[variables('parserVersion1')]",
@ -439,7 +418,7 @@
"resources": [
{
"name": "[variables('_parserName1')]",
"apiVersion": "2020-08-01",
"apiVersion": "2022-10-01",
"type": "Microsoft.OperationalInsights/workspaces/savedSearches",
"location": "[parameters('workspace-location')]",
"properties": {
@ -448,6 +427,7 @@
"category": "Samples",
"functionAlias": "ConfluenceAudit",
"query": "\nlet Confluence_Audit_view = view () { \r\n Confluence_Audit_CL\r\n | extend \r\n EventVendor=\"Atlassian\",\r\n EventProduct=\"Confluence\",\r\n AuthorUsername=column_ifexists('author_username_s', ''),\r\n\t\t AuthorUserKey=column_ifexists('author_userKey_g', ''),\r\n AuthorAccountId=column_ifexists('author_accountId_s', ''),\r\n AuthorType=column_ifexists('author_type_s', ''),\r\n AuthorDisplayName=column_ifexists('author_displayName_s', ''),\r\n AuthorIsExternalCollaborator=column_ifexists('author_isExternalCollaborator_b', ''),\r\n AuthorAccountType=column_ifexists('author_accountType_s', ''),\r\n AuthorPublicName=column_ifexists('author_publicName_s', ''),\r\n AuthorExternalCollaborator=column_ifexists('author_externalCollaborator_b', ''),\r\n RemoteAddress=column_ifexists('remoteAddress_s', ''),\r\n CreationDate=column_ifexists('creationDate_d', ''),\r\n Summary=column_ifexists('summary_s', ''),\r\n Description=column_ifexists('description_s', ''),\r\n Category=column_ifexists('Category', ''),\r\n SysAdmin=column_ifexists('sysAdmin_b', ''),\r\n SuperAdmin=column_ifexists('superAdmin_b', ''),\r\n AffectedObjectName=column_ifexists('affectedObject_name_s', ''),\r\n AffectedObjectObjectType=column_ifexists('affectedObject_objectType_s', ''),\r\n ChangedValues=column_ifexists('changedValues_s', ''),\r\n AssociatedObjects=column_ifexists('associatedObjects_s', ''),\r\n UserIdentity=column_ifexists('author_accountId_s', ''),\r\n SrcUserName=column_ifexists('author_displayName_s', ''),\r\n DstUserSid=column_ifexists('author_userKey_s', ''),\r\n SrcIpAddr=column_ifexists('remoteAddress_s', ''),\r\n EventCreationTime=column_ifexists('creationDate_d', ''),\r\n EventMessage=column_ifexists('summary_s', ''),\r\n EventCategoryType =column_ifexists('Category', '') \r\n | project\r\n TimeGenerated, \r\n EventVendor,\r\n EventProduct,\r\n AuthorUsername,\r\n AuthorAccountId,\r\n AuthorType,\r\n AuthorDisplayName,\r\n AuthorIsExternalCollaborator,\r\n 
AuthorUserKey,\r\n AuthorAccountType,\r\n AuthorPublicName,\r\n AuthorExternalCollaborator,\r\n RemoteAddress,\r\n CreationDate,\r\n Summary,\r\n Description,\r\n Category,\r\n SysAdmin,\r\n SuperAdmin,\r\n AffectedObjectName,\r\n AffectedObjectObjectType,\r\n ChangedValues,\r\n AssociatedObjects,\r\n UserIdentity,\r\n SrcUserName,\r\n DstUserSid,\r\n SrcIpAddr,\r\n EventCreationTime,\r\n EventMessage,\r\n EventCategoryType \r\n};\r\nConfluence_Audit_view\r\n",
"functionParameters": "",
"version": 1,
"tags": [
{
@ -487,12 +467,23 @@
}
}
]
}
},
"packageKind": "Solution",
"packageVersion": "[variables('_solutionVersion')]",
"packageName": "[variables('_solutionName')]",
"packageId": "[variables('_solutionId')]",
"contentSchemaVersion": "3.0.0",
"contentId": "[variables('_parserContentId1')]",
"contentKind": "Parser",
"displayName": "ConfluenceAudit",
"contentProductId": "[variables('_parsercontentProductId1')]",
"id": "[variables('_parsercontentProductId1')]",
"version": "[variables('parserVersion1')]"
}
},
{
"type": "Microsoft.OperationalInsights/workspaces/savedSearches",
"apiVersion": "2021-06-01",
"apiVersion": "2022-10-01",
"name": "[variables('_parserName1')]",
"location": "[parameters('workspace-location')]",
"properties": {
@ -501,7 +492,14 @@
"category": "Samples",
"functionAlias": "ConfluenceAudit",
"query": "\nlet Confluence_Audit_view = view () { \r\n Confluence_Audit_CL\r\n | extend \r\n EventVendor=\"Atlassian\",\r\n EventProduct=\"Confluence\",\r\n AuthorUsername=column_ifexists('author_username_s', ''),\r\n\t\t AuthorUserKey=column_ifexists('author_userKey_g', ''),\r\n AuthorAccountId=column_ifexists('author_accountId_s', ''),\r\n AuthorType=column_ifexists('author_type_s', ''),\r\n AuthorDisplayName=column_ifexists('author_displayName_s', ''),\r\n AuthorIsExternalCollaborator=column_ifexists('author_isExternalCollaborator_b', ''),\r\n AuthorAccountType=column_ifexists('author_accountType_s', ''),\r\n AuthorPublicName=column_ifexists('author_publicName_s', ''),\r\n AuthorExternalCollaborator=column_ifexists('author_externalCollaborator_b', ''),\r\n RemoteAddress=column_ifexists('remoteAddress_s', ''),\r\n CreationDate=column_ifexists('creationDate_d', ''),\r\n Summary=column_ifexists('summary_s', ''),\r\n Description=column_ifexists('description_s', ''),\r\n Category=column_ifexists('Category', ''),\r\n SysAdmin=column_ifexists('sysAdmin_b', ''),\r\n SuperAdmin=column_ifexists('superAdmin_b', ''),\r\n AffectedObjectName=column_ifexists('affectedObject_name_s', ''),\r\n AffectedObjectObjectType=column_ifexists('affectedObject_objectType_s', ''),\r\n ChangedValues=column_ifexists('changedValues_s', ''),\r\n AssociatedObjects=column_ifexists('associatedObjects_s', ''),\r\n UserIdentity=column_ifexists('author_accountId_s', ''),\r\n SrcUserName=column_ifexists('author_displayName_s', ''),\r\n DstUserSid=column_ifexists('author_userKey_s', ''),\r\n SrcIpAddr=column_ifexists('remoteAddress_s', ''),\r\n EventCreationTime=column_ifexists('creationDate_d', ''),\r\n EventMessage=column_ifexists('summary_s', ''),\r\n EventCategoryType =column_ifexists('Category', '') \r\n | project\r\n TimeGenerated, \r\n EventVendor,\r\n EventProduct,\r\n AuthorUsername,\r\n AuthorAccountId,\r\n AuthorType,\r\n AuthorDisplayName,\r\n AuthorIsExternalCollaborator,\r\n 
AuthorUserKey,\r\n AuthorAccountType,\r\n AuthorPublicName,\r\n AuthorExternalCollaborator,\r\n RemoteAddress,\r\n CreationDate,\r\n Summary,\r\n Description,\r\n Category,\r\n SysAdmin,\r\n SuperAdmin,\r\n AffectedObjectName,\r\n AffectedObjectObjectType,\r\n ChangedValues,\r\n AssociatedObjects,\r\n UserIdentity,\r\n SrcUserName,\r\n DstUserSid,\r\n SrcIpAddr,\r\n EventCreationTime,\r\n EventMessage,\r\n EventCategoryType \r\n};\r\nConfluence_Audit_view\r\n",
"version": 1
"functionParameters": "",
"version": 1,
"tags": [
{
"name": "description",
"value": "ConfluenceAudit"
}
]
}
},
{
@ -535,13 +533,20 @@
}
},
{
"type": "Microsoft.OperationalInsights/workspaces/providers/metadata",
"apiVersion": "2022-01-01-preview",
"type": "Microsoft.OperationalInsights/workspaces/providers/contentPackages",
"apiVersion": "2023-04-01-preview",
"location": "[parameters('workspace-location')]",
"properties": {
"version": "2.0.5",
"version": "3.0.0",
"kind": "Solution",
"contentSchemaVersion": "2.0.0",
"contentSchemaVersion": "3.0.0",
"displayName": "AtlassianConfluenceAudit",
"publisherDisplayName": "Microsoft Sentinel, Microsoft Corporation",
"descriptionHtml": "<p><strong>Note:</strong> <em>There may be <a href=\"https://aka.ms/sentinelsolutionsknownissues\">known issues</a> pertaining to this Solution, please refer to them before installing.</em></p>\n<p>The <a href=\"https://www.atlassian.com/software/confluence\">Atlassian Confluence Audit</a> solution provides the capability to ingest <a href=\"https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/\">Confluence Audit Records</a> into Microsoft Sentinel.</p>\n<p><strong>Underlying Microsoft Technologies used:</strong></p>\n<p>This solution takes a dependency on the following technologies, and some of these dependencies either may be in <a href=\"https://azure.microsoft.com/support/legal/preview-supplemental-terms/\">Preview</a> state or might result in additional ingestion or operational costs:</p>\n<ol type=\"a\">\n<li><p><a href=\"https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api\">Azure Monitor HTTP Data Collector API</a></p>\n</li>\n<li><p><a href=\"https://azure.microsoft.com/services/functions/#overview\">Azure Functions</a></p>\n</li>\n</ol>\n<p><strong>Data Connectors:</strong> 1, <strong>Parsers:</strong> 1</p>\n<p><a href=\"https://aka.ms/azuresentinel\">Learn more about Microsoft Sentinel</a> | <a href=\"https://aka.ms/azuresentinelsolutionsdoc\">Learn more about Solutions</a></p>\n",
"contentKind": "Solution",
"contentProductId": "[variables('_solutioncontentProductId')]",
"id": "[variables('_solutioncontentProductId')]",
"icon": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Workbooks/Images/Logos/Azure_Sentinel.svg\"width=\"75px\"height=\"75px\">",
"contentId": "[variables('_solutionId')]",
"parentId": "[variables('_solutionId')]",
"source": {

Просмотреть файл

@ -0,0 +1,3 @@
| **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** |
|-------------|--------------------------------|---------------------------------------------|
| 3.0.0 | 19-07-2023 |Initial Release |

Просмотреть файл

@ -28,7 +28,7 @@ query: |
table(tableName)
| where ResultType == "0"
| where AppDisplayName !in ("Office 365 Exchange Online", "Skype for Business Online") // To remove false-positives, add more Apps to this array
| project SuccessLogonTime = TimeGenerated, UserPrincipalName, SuccessIPAddress = IPAddress, AppDisplayName, SuccessIPBlock = strcat(split(IPAddress, ".")[0], ".", split(IPAddress, ".")[1]), Type
| project SuccessLogonTime = TimeGenerated, UserPrincipalName, SuccessIPAddress = IPAddress, AppDisplayName, SuccessIPBlock = iff(IPAddress contains ":", strcat(split(IPAddress, ":")[0], ":", split(IPAddress, ":")[1]), strcat(split(IPAddress, ".")[0], ".", split(IPAddress, ".")[1])), Type
| join kind= inner (
table(tableName)
| where ResultType !in ("0", "50140")
@ -59,5 +59,5 @@ entityMappings:
fieldMappings:
- identifier: Address
columnName: FailedIPAddress
version: 1.1.2
version: 1.1.3
kind: Scheduled

Просмотреть файл

@ -27,7 +27,8 @@ query: |
| mv-apply TargetResource = TargetResources on
(
where TargetResource.type =~ "User"
| extend Target = tostring(TargetResource.userPrincipalName),
| extend Target = tostring(TargetResource.userPrincipalName)
| extend Target = iff(TargetResources.type == "ServicePrincipal", tostring(TargetResources.displayName), Target),
props = TargetResource.modifiedProperties
)
| mv-apply Property = props on
@ -55,5 +56,5 @@ entityMappings:
columnName: InitiatorName
- identifier: UPNSuffix
columnName: InitiatorUPNSuffix
version: 1.0.4
version: 1.0.5
kind: Scheduled

Двоичные данные
Solutions/Azure Active Directory/Package/3.0.0.zip Normal file

Двоичный файл не отображается.

Просмотреть файл

@ -6,7 +6,7 @@
"config": {
"isWizard": false,
"basics": {
"description": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Workbooks/Images/Logos/azureactivedirectory_logo.svg\"width=\"75px\" height=\"75px\">\n\n**Note:** _There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing._\n\nThe [ Azure Active Directory](https://docs.microsoft.com/azure/active-directory/fundamentals/active-directory-whatis) solution for Microsoft Sentinel enables you to ingest Azure Active Directory [Audit](https://docs.microsoft.com/azure/active-directory/reports-monitoring/concept-audit-logs), [Sign-in](https://docs.microsoft.com/azure/active-directory/reports-monitoring/concept-sign-ins), [Provisioning](https://docs.microsoft.com/azure/active-directory/reports-monitoring/concept-provisioning-logs), [Risk Events and Risky User/Service Principal](https://docs.microsoft.com/azure/active-directory/identity-protection/howto-identity-protection-investigate-risk#risky-users) logs using Diagnostic Settings into Microsoft Sentinel.\n\n**Data Connectors:** 1, **Workbooks:** 2, **Analytic Rules:** 59, **Playbooks:** 11\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)",
"description": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Workbooks/Images/Logos/azureactivedirectory_logo.svg\"width=\"75px\" height=\"75px\">\n\n**Note:** Please refer to the following before installing the solution: \r \n • Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Active%20Directory/ReleaseNotes.md)\r \n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution.\n\nThe [ Azure Active Directory](https://docs.microsoft.com/azure/active-directory/fundamentals/active-directory-whatis) solution for Microsoft Sentinel enables you to ingest Azure Active Directory [Audit](https://docs.microsoft.com/azure/active-directory/reports-monitoring/concept-audit-logs), [Sign-in](https://docs.microsoft.com/azure/active-directory/reports-monitoring/concept-sign-ins), [Provisioning](https://docs.microsoft.com/azure/active-directory/reports-monitoring/concept-provisioning-logs), [Risk Events and Risky User/Service Principal](https://docs.microsoft.com/azure/active-directory/identity-protection/howto-identity-protection-investigate-risk#risky-users) logs using Diagnostic Settings into Microsoft Sentinel.\n\n**Data Connectors:** 1, **Workbooks:** 2, **Analytic Rules:** 59, **Playbooks:** 11\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)",
"subscription": {
"resourceProviders": [
"Microsoft.OperationsManagement/solutions",

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

Просмотреть файл

@ -0,0 +1,4 @@
| **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** |
|-------------|--------------------------------|--------------------------------------------------------------------------|
| 3.0.0       | 19-07-2023                     | 2 **Analytic Rules** updated in the solution                             |

Просмотреть файл

@ -0,0 +1,26 @@
id: df292d06-f348-41ad-b780-0abb5acfe9ab
name: BloodHound Enterprise - Number of critical attack paths increase
description: |
'The number of critical attack paths has increased over the past 7 days.'
severity: Medium
status: Available
requiredDataConnectors:
- connectorId: BloodHoundEnterprise
dataTypes:
- BloodHoundEnterprise
queryFrequency: 7d
queryPeriod: 7d
triggerOperator: gt
triggerThreshold: 0
tactics: []
relevantTechniques: []
query: |
BloodHoundEnterprise
| where data_type == "posture"
| where created_at > ago (7d)
| summarize min_critical_risk_count = min(critical_risk_count), arg_max(created_at, current_critical_risk_count = critical_risk_count) by domain_name
| extend difference = current_critical_risk_count - min_critical_risk_count
| where difference > 0
entityMappings: null
version: 1.0.0
kind: Scheduled

Просмотреть файл

@ -0,0 +1,26 @@
id: b1f6aed2-ebb9-4fe4-bd7c-6657d02a0cc8
name: BloodHound Enterprise - Exposure increase
description: |
'The exposure for a domain has increased by more than 5% over the past 7 days.'
severity: High
status: Available
requiredDataConnectors:
- connectorId: BloodHoundEnterprise
dataTypes:
- BloodHoundEnterprise
queryFrequency: 7d
queryPeriod: 7d
triggerOperator: gt
triggerThreshold: 0
tactics: []
relevantTechniques: []
query: |
BloodHoundEnterprise
| where data_type == "posture"
| where created_at > ago (7d)
| summarize min(exposure_index), arg_max(created_at, exposure_index) by domain_name
| extend min_exposure = min_exposure_index * 100, latest_exposure = exposure_index * 100
| where latest_exposure - min_exposure > 5
entityMappings: null
version: 1.0.0
kind: Scheduled

Просмотреть файл

@ -0,0 +1,26 @@
id: 13424be6-aed7-448b-afe5-c03d8b29b4fe
name: BloodHound Enterprise - Number of Tier Zero assets increase
description: |
'The number of Tier Zero assets has increased by more than 5% over the past 7 days.'
severity: Medium
status: Available
requiredDataConnectors:
- connectorId: BloodHoundEnterprise
dataTypes:
- BloodHoundEnterprise
queryFrequency: 7d
queryPeriod: 7d
triggerOperator: gt
triggerThreshold: 0
tactics: []
relevantTechniques: []
query: |
BloodHoundEnterprise
| where data_type == "posture"
| where created_at > ago (7d)
| summarize min_tier_zero = min(tier_zero_count), max_tier_zero = arg_max(created_at, current_tier_zero = tier_zero_count) by domain_name
| extend percent_difference = ((current_tier_zero - min_tier_zero) / min_tier_zero) * 100
| where percent_difference > 5
entityMappings: null
version: 1.0.0
kind: Scheduled

Просмотреть файл

@ -0,0 +1,191 @@
#!/usr/bin/env python
import os
import sys
import datetime
import logging
import re
import azure.functions as func
from .sentinel_connector import AzureSentinelConnector
from .state_manager import StateManager
# BHE client import
from .bhe_client import *
# Log Analytics Workspace Info.
# Required app settings — a missing key raises KeyError at import, failing fast.
WORKSPACE_ID = os.environ['WorkspaceID']
SHARED_KEY = os.environ['WorkspaceKey']
logAnalyticsUri = os.environ.get('logAnalyticsUri')
# Custom log type: events land in the 'bloodhoundEnterprise' custom table.
LOG_TYPE = 'bloodhoundEnterprise'
# Azure Blob storage connection string
file_storage_connection_string = os.environ['AzureWebJobsStorage']
# Validate Log Analytics URI
if not logAnalyticsUri or str(logAnalyticsUri).isspace():
    # Default to the per-workspace public-cloud ODS ingestion endpoint.
    logAnalyticsUri = 'https://' + WORKSPACE_ID + '.ods.opinsights.azure.com'
# Accept only *.ods.opinsights.azure.<tld> endpoints; anything else aborts startup.
pattern = r"https:\/\/([\w\-]+)\.ods\.opinsights\.azure.([a-zA-Z\.]+)$"
match = re.match(pattern,str(logAnalyticsUri))
if(not match):
    raise Exception("Invalid Log Analytics Uri.")
# BHE instance info (BloodHound Enterprise tenant hostname and API token pair)
bhe_domain = os.environ['BHEDomain']
token_id = os.environ['BHETokenId']
token_key = os.environ['BHETokenKey']
def stream_events(last_data_stream, timestamp_now):
    """Pull BloodHound Enterprise attack-path and posture data for the window
    (last_data_stream, timestamp_now] and forward it to Microsoft Sentinel.

    :param last_data_stream: ISO-8601 timestamp of the previous checkpoint.
    :param timestamp_now: ISO-8601 timestamp marking the end of this pull.
    :raises ValueError: if the BHE API is unreachable or the credentials
        are rejected.
    """
    # Connection to BHE domain
    credentials = Credentials(token_id, token_key)
    client = BHEClient(scheme='https', host=bhe_domain, port=443, credentials=credentials)
    sentinel = AzureSentinelConnector(workspace_id=WORKSPACE_ID, logAnalyticsUri = logAnalyticsUri, shared_key=SHARED_KEY, log_type=LOG_TYPE, queue_size=10000, bulks_number=10)
    # Check if BHE domain is reachable and creds are working
    try:
        status_code = client.get_api_version().status_code
    except Exception as err:
        # Fix: the previous bare "except:" also trapped SystemExit /
        # KeyboardInterrupt and hid the underlying cause; catch Exception
        # and include the reason in the log.
        logging.info("BHE Cannot reach domain: %s (%s)" % (bhe_domain, err))
        raise ValueError("Cannot reach domain: %s" % bhe_domain)
    else:
        if status_code == 200:
            logging.info("BHE API creds validated")
        else:
            logging.info("BHE Cannot log in using API keys. Status code: %s" % status_code)
            raise ValueError("Cannot log in using API keys. Status code: %s" % status_code)
    # Get available domains
    domains = client.get_domains()
    logging.info("BHE Number of domains %s" % len(domains))
    for domain in domains:
        # Only domains that have completed a collection have path data.
        if domain['collected']:
            # Get paths for domain
            attack_paths = client.get_paths(domain)
            logging.info(("BHE Processing %s attack paths for domain %s" % (len(attack_paths), domain['name'])))
            for attack_path in attack_paths:
                logging.info("BHE Processing attack path %s for domain %s" % (attack_path.id, domain['name']))
                # Add attack path principals to kv store
                path_principals = client.get_path_principals(attack_path)
                for principal_set in path_principals.impacted_principals:
                    # Create generic record; exactly one of the principal
                    # field groups below is populated per record.
                    path_record = {
                        "domain_id": path_principals.domain_id,
                        "domain_name": path_principals.domain_name,
                        "path_id": path_principals.id,
                        "path_title": path_principals.title,
                        "group": None,
                        "principal": None,
                        "non_tier_zero_principal": None,
                        "tier_zero_principal": None,
                        "user": None,
                        "data_type": 'path_principals'
                    }
                    # Populate generic record and insert
                    if (path_principals.id.startswith('LargeDefault')):
                        path_record['group'] = principal_set['Group']
                        path_record['principal'] = principal_set['Principal']
                        sentinel.send(path_record)
                    elif 'Tier Zero Principal' in principal_set:
                        path_record['non_tier_zero_principal'] = principal_set['Non Tier Zero Principal']
                        path_record['tier_zero_principal'] = principal_set['Tier Zero Principal']
                        sentinel.send(path_record)
                    else:
                        path_record['user'] = principal_set['User']
                        sentinel.send(path_record)
                # Exposure-over-time (sparkline) events for this path.
                path_events = client.get_path_timeline(
                    path = attack_path,
                    from_timestamp = last_data_stream,
                    to_timestamp = timestamp_now
                )
                for path_event in path_events:
                    path_event['domain_id'] = domain['id']
                    path_event['domain_impact_value'] = domain['impactValue']
                    path_event['domain_name'] = domain['name']
                    path_event['domain_type'] = domain['type']
                    path_event['data_type'] = 'paths'
                    sentinel.send(path_event)
                logging.info("BHE Processing attack path %s done" % attack_path.id)
    # Get posture data (tenant-wide, fetched once for the whole window).
    posture_events = client.get_posture(
        from_timestamp = last_data_stream,
        to_timestamp = timestamp_now
    )
    logging.info("BHE Processing %s events of posture data" % len(posture_events))
    # Create posture events in Sentinel
    for posture_event in posture_events:
        # Lookup domain name and type by SID; raises StopIteration if the
        # posture event references a domain not in the available-domains list.
        domain = next(x for x in domains if x['id'] == posture_event['domain_sid'])
        posture_event['domain_id'] = domain['id']
        posture_event['domain_impact_value'] = domain['impactValue']
        posture_event['domain_name'] = domain['name']
        posture_event['domain_type'] = domain['type']
        # Render exposure_index (0..1 float) as an integer percentage string.
        posture_event['exposure'] = str(int(float(posture_event["exposure_index"]) * 100))
        posture_event['data_type'] = 'posture'
        sentinel.send(posture_event)
    logging.info("BHE Flushing the Sentinel queue")
    sentinel.flush()
    logging.info("BHE Streaming events done")
def generate_date():
    """Return (last_checkpoint, now) as millisecond-precision UTC ISO-8601
    strings ('%Y-%m-%dT%H:%M:%S.mmmZ').

    The checkpoint is read from blob storage via StateManager; when no
    checkpoint exists yet, the Unix epoch is used so the first run pulls
    everything.
    """
    fmt = '%Y-%m-%dT%H:%M:%S.%f'
    # strftime gives microseconds; trim the last three digits to milliseconds.
    current_time = datetime.datetime.now(datetime.timezone.utc).strftime(fmt)[:-3] + 'Z'
    state = StateManager(connection_string=file_storage_connection_string)
    past_time = state.get()
    if past_time is None:
        logging.info("BHE There is no last data stream, getting event from the beginning of time")
        past_time = "1970-01-01T00:00:00.000Z"
    else:
        logging.info("BHE The last time point is: {}".format(past_time))
    return (past_time, current_time)
def update_date():
    """Persist the current UTC time (millisecond precision, ISO-8601) as the
    new checkpoint in blob storage via StateManager.

    Bug fix: `tformat` was only defined inside generate_date()'s local scope,
    so this function raised NameError at runtime; the format string is now
    defined locally.
    """
    tformat = '%Y-%m-%dT%H:%M:%S.%f'
    # strftime gives microseconds; trim the last three digits to milliseconds.
    current_time = datetime.datetime.now(datetime.timezone.utc).strftime(tformat)[:-3] + 'Z'
    state = StateManager(connection_string=file_storage_connection_string)
    logging.info("BHE Setting last time point to: {}".format(current_time))
    state.post(current_time)
def main(mytimer: func.TimerRequest) -> None:
    """Timer-triggered entry point: stream BHE events since the last
    checkpoint into Sentinel, then advance the checkpoint.

    :param mytimer: Azure Functions timer binding (see function.json schedule).
    """
    # Timezone-aware "now"; datetime.utcnow() is deprecated and returns a
    # naive datetime — now(timezone.utc).isoformat() yields the same string
    # the old .replace(tzinfo=...) dance produced.
    utc_timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()
    if mytimer.past_due:
        logging.info('The timer is past due!')
    logging.info('Python timer trigger function ran at %s', utc_timestamp)
    last_data_stream, timestamp_now = generate_date()
    logging.info("BHE Last data stream %s" % last_data_stream)
    stream_events(last_data_stream = last_data_stream, timestamp_now = timestamp_now)
    # Only move the checkpoint forward after a successful stream.
    update_date()

Просмотреть файл

@ -0,0 +1,263 @@
#!/usr/bin/env python
import hmac
import hashlib
import base64
import requests
import datetime
from typing import Optional
# Timestamp format used for millisecond-precision ISO-8601 strings.
tformat = '%Y-%m-%dT%H:%M:%S.%f'
# Mapping of human-readable path categories to the BHE finding ids they cover.
pathTypes = {
    'Tier Zero Attack Paths': [
        'NonT0DCSyncers',
        'T0AddAllowedToAct',
        'T0AddKeyCredentialLink',
        'T0AddMember',
        'T0AddSelf',
        'T0Admins',
        'T0AllExtendedRights',
        'T0AllowedToAct',
        'T0AllowedToDelegate',
        'T0DCOM',
        'T0ForceChangePassword',
        'T0GenericAll',
        'T0GenericWrite',
        'T0HasSIDHistory',
        'T0Logins',
        'T0Owns',
        'T0PSRemote',
        'T0RDP',
        'T0ReadGMSA',
        'T0ReadLAPS',
        'T0SQLAdmin',
        'T0WriteDACL',
        'T0WriteOwner',
        'T0WriteSPN',
        'T0WriteAccountRestrictions',
        'T0SyncLAPSPassword'
    ],
    'Abusable Kerberos Configurations': [
        'ASREPRoasting',
        'T0MarkSensitive',
        'Kerberoasting',
        'UnconstrainedAddKeyCredentialLink',
        'UnconstrainedAdmins',
        'UnconstrainedAllowedToDelegate',
        'UnconstrainedDCOM',
        'UnconstrainedForceChangePassword',
        'UnconstrainedGenericAll',
        'UnconstrainedGenericWrite',
        'UnconstrainedOwns',
        'UnconstrainedPSRemote',
        'UnconstrainedRDP',
        'UnconstrainedReadLAPS',
        'UnconstrainedSQLAdmin',
        'UnconstrainedWriteDACL',
        'UnconstrainedWriteOwner',
        'UnconstrainedWriteAccountRestrictions',
        'UnconstrainedSyncLAPSPassword'
    ],
    'Least Privilege Enforcement': [
        'LargeDefaultGroupsOwns',
        'LargeDefaultGroupsAddAllowedToAct',
        'LargeDefaultGroupsAddKeyCredentialLink',
        'LargeDefaultGroupsAddMember',
        'LargeDefaultGroupsAddSelf',
        'LargeDefaultGroupsAdmins',
        'LargeDefaultGroupsAllExtendedRights',
        'LargeDefaultGroupsDCOM',
        'LargeDefaultGroupsGenericAll',
        'LargeDefaultGroupsGenericWrite',
        'LargeDefaultGroupsPSRemote',
        'LargeDefaultGroupsRDP',
        'LargeDefaultGroupsReadGMSA',
        'LargeDefaultGroupsReadLAPS',
        'LargeDefaultGroupsSQLAdmin',
        'LargeDefaultGroupsWriteDacl',
        'LargeDefaultGroupsWriteOwner',
        'LargeDefaultGroupsWriteSPN',
        'LargeDefaultGroupsForceChangePassword',
        'LargeDefaultGroupsWriteAccountRestrictions',
        'LargeDefaultGroupsSyncLAPSPassword'
    ]
}
def get_path_type(path_id) -> str:
    """Return the category name for a BHE finding id, or 'Unknown'."""
    for category, finding_ids in pathTypes.items():
        if path_id in finding_ids:
            return category
    return 'Unknown'
class Credentials:
    """Holds a BloodHound Enterprise API token pair (id + secret key)."""

    def __init__(self, token_id: str, token_key: str) -> None:
        # token_key is the HMAC signing secret; token_id identifies it server-side.
        self.token_key = token_key
        self.token_id = token_id
class AttackPath(object):
    """One BHE attack-path finding, tied to the domain it was found in."""
    def __init__(self, id: str, title: str, domain) -> None:
        self.id = id
        self.title = title
        # Categorize the finding id via the module-level pathTypes mapping.
        self.type = get_path_type(id)
        self.domain_id = domain['id']
        self.domain_name = domain['name'].strip()
    def __lt__(self, other):
        # NOTE(review): sorts on self.exposure, which __init__ never sets —
        # presumably assigned by callers before any sort; confirm, otherwise
        # comparing unprepared instances raises AttributeError.
        return self.exposure < other.exposure
class BHEClient(object):
def __init__(self, scheme: str, host: str, port: int, credentials: Credentials) -> None:
self._scheme = scheme
self._host = host
self._port = port
self._credentials = credentials
def _format_url(self, uri: str) -> str:
formatted_uri = uri
if uri.startswith('/'):
formatted_uri = formatted_uri[1:]
return f'{self._scheme}://{self._host}:{self._port}/{formatted_uri}'
def _request(self, method: str, uri: str, body: Optional[bytes] = None) -> requests.Response:
digester = hmac.new(self._credentials.token_key.encode(), None, hashlib.sha256)
digester.update(f'{method}{uri}'.encode())
digester = hmac.new(digester.digest(), None, hashlib.sha256)
datetime_formatted = datetime.datetime.now().astimezone().isoformat('T')
digester.update(datetime_formatted[:13].encode())
digester = hmac.new(digester.digest(), None, hashlib.sha256)
if body is not None:
digester.update(body)
# Perform the request with the signed and expected headers
return requests.request(
method=method,
url=self._format_url(uri),
headers={
'User-Agent': 'bhe-sentinel-integration v0.0.1',
'Authorization': f'bhesignature {self._credentials.token_id}',
'RequestDate': datetime_formatted,
'Signature': base64.b64encode(digester.digest()),
'Content-Type': 'application/json',
},
data=body,
)
def get_api_version(self):
return self._request('GET', '/api/version')
def get_domains(self) -> list:
response = self._request('GET', '/api/v2/available-domains')
domain_data = response.json()['data']
return domain_data
def get_paths(self, domain) -> list:
response = self._request('GET', '/api/v2/domains/' + domain['id'] + '/available-types')
path_ids = response.json()['data']
paths = list()
for path_id in path_ids:
## Get nice title from API and strip newline
path_title = self._request('GET', '/ui/findings/' + path_id + '/title.md')
## Create attackpath object
path = AttackPath(path_id, path_title.text.strip(), domain)
paths.append(path)
return paths
def get_path_timeline(self, path, from_timestamp, to_timestamp):
## Sparkline data
response = self._request('GET', '/api/v2/domains/' + path.domain_id + '/sparkline?finding=' + path.id + '&from=' + from_timestamp + '&to=' + to_timestamp)
exposure_data = response.json()['data']
events = list()
for event in exposure_data:
e = {}
e['finding_id'] = path.id
e['domain_id'] = path.domain_id
e['path_title'] = path.title
e['path_type'] = path.type
e['exposure'] = event['CompositeRisk']
e['finding_count'] = event['FindingCount']
e['principal_count'] = event['ImpactedAssetCount']
e['id'] = event['id']
e['created_at'] = event['created_at']
e['updated_at'] = event['updated_at']
e['deleted_at'] = event['deleted_at']
## Determine severity from exposure
e['severity'] = self.get_severity(e['exposure'])
events.append(e)
return events
def get_path_principals(self, path: AttackPath) -> list:
# Get path details from API
response = self._request('GET', '/api/v2/domains/' + path.domain_id + '/details?finding=' + path.id + '&skip=0&limit=0&Accepted=eq:False')
payload = response.json()
# Build dictionary of impacted pricipals
if 'count' in payload:
path.impacted_principals = list()
for path_data in payload['data']:
# Check for both From and To to determine whether relational or configuration path
if (path.id.startswith('LargeDefault')):
# Get from and to principal names
if ('name' in path_data['FromPrincipalProps']):
from_principal = path_data['FromPrincipalProps']['name']
else:
from_principal = path_data['FromPrincipal']
if ('name' in path_data['ToPrincipalProps']):
to_principal = path_data['ToPrincipalProps']['name']
else:
to_principal = path_data['ToPrincipal']
principals = {
'Group': from_principal,
'Principal': to_principal
}
elif ('FromPrincipalProps' in path_data) and ('ToPrincipalProps' in path_data):
# Get from and to principal names
if ('name' in path_data['FromPrincipalProps']):
from_principal = path_data['FromPrincipalProps']['name']
else:
from_principal = path_data['FromPrincipal']
if ('name' in path_data['ToPrincipalProps']):
to_principal = path_data['ToPrincipalProps']['name']
else:
to_principal = path_data['ToPrincipal']
principals = {
'Non Tier Zero Principal': from_principal,
'Tier Zero Principal': to_principal
}
else:
principals = {
'User': path_data['Props']['name']
}
path.impacted_principals.append(principals)
path.principal_count = payload['count']
else:
path.principal_count = 0
return path
def get_posture(self, from_timestamp, to_timestamp) -> list:
    """Return the posture statistics ("data" payload) for the given time window.

    Both timestamps are passed straight through to the BloodHound Enterprise
    ``posture-stats`` endpoint as the ``from``/``to`` query parameters.
    """
    url = '/api/v2/posture-stats?from=' + from_timestamp + '&to=' + to_timestamp
    response = self._request('GET', url)
    return response.json()["data"]
def get_severity(self, exposure) -> str:
    """Map a numeric exposure score onto a severity label.

    Thresholds are exclusive: the score must strictly exceed the bound
    to escalate to the next severity.
    """
    if exposure > 95:
        return 'Critical'
    if exposure > 80:
        return 'High'
    if exposure > 40:
        return 'Moderate'
    return 'Low'

Просмотреть файл

@ -0,0 +1,11 @@
{
"scriptFile": "__init__.py",
"bindings": [
{
"name": "mytimer",
"type": "timerTrigger",
"direction": "in",
"schedule": "0 40 */4 * * *"
}
]
}

Просмотреть файл

@ -0,0 +1,115 @@
import requests
import datetime
import logging
import json
import hashlib
import hmac
import base64
from threading import Thread
class AzureSentinelConnector:
    """Buffers events and ships them to Azure Sentinel (Log Analytics) via the
    HTTP Data Collector API.

    Events accumulate in an in-memory queue; when the queue reaches
    ``queue_size`` entries it is staged as a bulk, and once ``bulks_number``
    bulks are staged (or a flush is forced) every staged bulk is POSTed.
    Payloads larger than ``queue_size_bytes`` are halved recursively before
    sending, with the parts posted on worker threads.
    """

    def __init__(self, workspace_id, logAnalyticsUri, shared_key, log_type, queue_size=200, bulks_number=10, queue_size_bytes=25 * (2**20)):
        self.workspace_id = workspace_id
        self.logAnalyticsUri = logAnalyticsUri
        self.shared_key = shared_key
        self.log_type = log_type
        self.queue_size = queue_size
        self.bulks_number = bulks_number
        self.queue_size_bytes = queue_size_bytes
        self._queue = []
        self._bulks_list = []
        # Counters callers can inspect after flushing. Attribute names are
        # kept as-is (including the historical misspelling) for compatibility.
        self.successfull_sent_events_number = 0
        self.failed_sent_events_number = 0

    def send(self, event):
        """Queue a single event; stages a bulk once the queue is full."""
        self._queue.append(event)
        if len(self._queue) >= self.queue_size:
            self.flush(force=False)

    def flush(self, force=True):
        """Stage the current queue as a bulk and send staged bulks.

        When ``force`` is True all staged bulks are sent immediately;
        otherwise they are sent only once ``bulks_number`` bulks are staged.
        Bugfix: the queue is now cleared on a forced flush as well —
        previously a forced flush left the events in ``self._queue``, so a
        subsequent flush would stage and send them a second time.
        """
        self._bulks_list.append(self._queue)
        self._queue = []
        if force or len(self._bulks_list) >= self.bulks_number:
            self._flush_bulks()

    def _flush_bulks(self):
        """Send every staged bulk, posting oversized bulks on worker threads."""
        jobs = []
        for queue in self._bulks_list:
            if queue:
                queue_list = self._split_big_request(queue)
                if len(queue_list) == 1:
                    self._post_data(self.workspace_id, self.shared_key, queue_list[0], self.log_type)
                else:
                    for q in queue_list:
                        jobs.append(Thread(target=self._post_data, args=(self.workspace_id, self.shared_key, q, self.log_type, )))
        for job in jobs:
            job.start()
        for job in jobs:
            job.join()
        self._bulks_list = []

    def __enter__(self):
        # Bugfix: return the connector so ``with AzureSentinelConnector(...) as c``
        # binds the instance instead of None.
        return self

    def __exit__(self, type, value, traceback):
        # Ensure anything still queued is sent when the context exits.
        self.flush()

    def _build_signature(self, workspace_id, shared_key, date, content_length, method, content_type, resource):
        """Build the SharedKey authorization header for the Data Collector API."""
        x_headers = 'x-ms-date:' + date
        string_to_hash = method + "\n" + str(content_length) + "\n" + content_type + "\n" + x_headers + "\n" + resource
        bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
        # The workspace shared key is base64; decode it before HMAC-signing.
        decoded_key = base64.b64decode(shared_key)
        encoded_hash = base64.b64encode(hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
        authorization = "SharedKey {}:{}".format(workspace_id, encoded_hash)
        return authorization

    def _post_data(self, workspace_id, shared_key, body, log_type):
        """POST one batch of events and update the success/failure counters."""
        events_number = len(body)
        body = json.dumps(body)
        method = 'POST'
        content_type = 'application/json'
        resource = '/api/logs'
        rfc1123date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
        content_length = len(body)
        signature = self._build_signature(workspace_id, shared_key, rfc1123date, content_length, method, content_type, resource)
        uri = self.logAnalyticsUri + resource + '?api-version=2016-04-01'
        headers = {
            'content-type': content_type,
            'Authorization': signature,
            'Log-Type': log_type,
            'x-ms-date': rfc1123date
        }
        try:
            response = requests.post(uri, data=body, headers=headers)
        except Exception as err:
            logging.error("Error during sending events to Azure Sentinel: {}".format(err))
            self.failed_sent_events_number += events_number
        else:
            if (response.status_code >= 200 and response.status_code <= 299):
                logging.info('{} events have been successfully sent to Azure Sentinel'.format(events_number))
                self.successfull_sent_events_number += events_number
            else:
                logging.error("Error during sending events to Azure Sentinel. Response code: {}".format(response.status_code))
                self.failed_sent_events_number += events_number

    def _check_size(self, queue):
        """Return True when the JSON-encoded queue fits under the size limit."""
        data_bytes_len = len(json.dumps(queue).encode())
        return data_bytes_len < self.queue_size_bytes

    def _split_big_request(self, queue):
        """Recursively halve the queue until each part fits under the size limit."""
        if self._check_size(queue):
            return [queue]
        else:
            middle = int(len(queue) / 2)
            queues_list = [queue[:middle], queue[middle:]]
            return self._split_big_request(queues_list[0]) + self._split_big_request(queues_list[1])

Просмотреть файл

@ -0,0 +1,22 @@
from azure.storage.fileshare import ShareClient
from azure.storage.fileshare import ShareFileClient
from azure.core.exceptions import ResourceNotFoundError
class StateManager:
    """Persists a small text marker (e.g. a last-run timestamp) in an Azure
    Files share so that state survives between Azure Function invocations."""
    def __init__(self, connection_string, share_name='funcstatemarkershare', file_path='funcstatemarkerfile'):
        # One client for the share itself (used to create it on first run)
        # and one for the single marker file inside it.
        self.share_cli = ShareClient.from_connection_string(conn_str=connection_string, share_name=share_name)
        self.file_cli = ShareFileClient.from_connection_string(conn_str=connection_string, share_name=share_name, file_path=file_path)
    def post(self, marker_text: str):
        """Store marker_text, creating the share on first use."""
        try:
            self.file_cli.upload_file(marker_text)
        except ResourceNotFoundError:
            # First run: the share does not exist yet, so create it and retry once.
            self.share_cli.create_share()
            self.file_cli.upload_file(marker_text)
    def get(self):
        """Return the stored marker text, or None if no marker has been written."""
        try:
            return self.file_cli.download_file().readall().decode()
        except ResourceNotFoundError:
            return None

Просмотреть файл

@ -0,0 +1,125 @@
{
"id": "BloodHoundEnterprise",
"title": "BloodHound Enterprise",
"publisher": "SpecterOps",
"descriptionMarkdown": "The BloodHound Enterprise data connector provides the capability to ingest events from your BloodHound Enterprise instance.",
"graphQueries": [
{
"metricName": "BloodHound Enterprise events",
"legend": "BloodHoundEnterprise",
"baseQuery": "BloodHoundEnterprise"
}
],
"sampleQueries": [
{
"description" : "Data types from BloodHound Enterprise",
"query": "BloodHoundEnterprise\n | summarize count() by data_type"
}
],
"dataTypes": [
{
"name": "BloodHoundEnterprise",
"lastDataReceivedQuery": "BloodHoundEnterprise\n | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"BloodHoundEnterprise\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(7d)"
]
}
],
"availability": {
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions on the workspace are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"write": true,
"read": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
],
"customs": [
{
"name": "Microsoft.Web/sites permissions",
"description": "Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/)."
},
{
"name": "BloodHound Enterprise API key pair",
"description": "A BloodHound Enterprise API key pair is requried. Refer to the documentation for more information: [Working with the BloodHound Enterprise API](https://support.bloodhoundenterprise.io/hc/en-us/articles/11311053342619-Working-with-the-BloodHound-Enterprise-API)."
}
]
},
"instructionSteps": [
{
"title": "",
"description": ">**NOTE:** This connector uses Azure Functions to connect to the BloodHound Enterprise instance to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details."
},
{
"title": "",
"description": "**STEP 1 - Configuration steps for the BloodHound Enterprise API**\n\nRefer to the documentation for more information to retreive API keys for your instance: [Working with the BloodHound Enterprise API](https://support.bloodhoundenterprise.io/hc/en-us/articles/11311053342619-Working-with-the-BloodHound-Enterprise-API)."
},
{
"title": "",
"description": "**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BloodHound Enterprise connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the BloodHound Enterprise API authorization key(s) or Token, readily available.",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Workspace ID"
},
"type": "CopyableLabel"
},
{
"parameters": {
"fillWith": [
"PrimaryKey"
],
"label": "Primary Key"
},
"type": "CopyableLabel"
}
]
},
{
"title": "",
"description": "**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the BloodHound Enterprise connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BloodHoundEnterprise-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **WorkspaceID**, **WorkspaceKey**, **BHETokenId**, **BHETokenKey**, and/or Other required fields.\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
},
{
"title": "",
"description": "**Option 2 - Manual Deployment of Azure Functions**\n\nUse the following step-by-step instructions to deploy the BloodHound Enterprise connector manually with Azure Functions."
},
{
"title": "",
"description": "**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-BloodHoundEnterprise-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. bloodhoundenterpriseXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration."
},
{
"title": "",
"description": "**2. Configure the Function App**\n\n 1. In the Function App, select the Function App Name and select **Configuration**.\n\n 2. In the **Application settings** tab, select ** New application setting**.\n\n 3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\t BHETokenId\n\t\t BHETokenKey\n\t\t WorkspaceID\n\t\t WorkspaceKey\n\t\t logAnalyticsUri (optional)\n\n 4. Once all application settings have been entered, click **Save**."
}
],
"metadata": {
"version": "1.0.0",
"kind": "dataConnector"
}
}

Просмотреть файл

@ -0,0 +1,302 @@
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"FunctionName": {
"defaultValue": "BloodHound",
"minLength": 1,
"maxLength": 11,
"type": "string"
},
"WorkspaceID": {
"type": "string",
"defaultValue": "<workspaceID>",
"metadata": {
"description": "Specifies the Log Analytics Workspace Id associated with Sentinel"
}
},
"WorkspaceKey": {
"type": "securestring",
"defaultValue": "",
"metadata": {
"description": "Specifies the Log Analytics Workspace Key associated with Sentinel"
}
},
"BHEDomain": {
"type": "string",
"defaultValue": "<abc.bloodhoundenterprise.io>"
},
"BHETokenId": {
"type": "string",
"defaultValue": "<BHE Token Id>",
"metadata": {
"description": "Token ID from the BloodHound Enterprise API"
}
},
"BHETokenKey": {
"type": "securestring",
"defaultValue": "",
"metadata": {
"description": "Token Key from the BloodHound Enterprise API"
}
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"StorageSuffix": "[environment().suffixes.storage]",
"LogAnaltyicsUri": "[replace(environment().portal, 'https://portal', concat('https://', toLower(parameters('WorkspaceID')), '.ods.opinsights'))]",
"KeyVaultName": "[substring(variables('FunctionName'), 0, 22)]",
"WorkspaceKey": "WorkspaceKey",
"BHETokenId": "BHETokenId",
"BHETokenKey": "BHETokenKey"
},
"resources": [
{
"type": "Microsoft.Insights/components",
"apiVersion": "2015-05-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"kind": "web",
"properties": {
"Application_Type": "web",
"ApplicationId": "[variables('FunctionName')]"
}
},
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"kind": "StorageV2",
"properties": {
"networkAcls": {
"bypass": "AzureServices",
"virtualNetworkRules": [],
"ipRules": [],
"defaultAction": "Allow"
},
"supportsHttpsTrafficOnly": true,
"encryption": {
"services": {
"file": {
"keyType": "Account",
"enabled": true
},
"blob": {
"keyType": "Account",
"enabled": true
}
},
"keySource": "Microsoft.Storage"
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": []
},
"deleteRetentionPolicy": {
"enabled": false
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": []
}
}
},
{
"type": "Microsoft.Web/sites",
"apiVersion": "2018-11-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
"kind": "functionapp,linux",
"identity": {
"type": "SystemAssigned"
},
"properties": {
"name": "[variables('FunctionName')]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true,
"reserved": true,
"siteConfig": {
"linuxFxVersion": "python|3.10"
}
},
"resources": [
{
"apiVersion": "2018-11-01",
"type": "config",
"name": "appsettings",
"dependsOn": [
"[concat('Microsoft.Web/sites/', variables('FunctionName'))]",
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('BHETokenId'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('WorkspaceKey'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('BHETokenKey'))]"
],
"properties": {
"FUNCTIONS_EXTENSION_VERSION": "~4",
"FUNCTIONS_WORKER_RUNTIME": "python",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"WorkspaceID": "[parameters('WorkspaceID')]",
"WorkspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('WorkspaceKey')).secretUriWithVersion, ')')]",
"BHEDomain": "[parameters('BHEDomain')]",
"BHETokenId": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('BHETokenId')).secretUriWithVersion, ')')]",
"BHETokenKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('BHETokenKey')).secretUriWithVersion, ')')]",
"logAnalyticsUri": "[variables('LogAnaltyicsUri')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/sentinel-BloodHoundEnterprise-functionapp"
}
}
]
},
{
"type": "Microsoft.KeyVault/vaults",
"apiVersion": "2016-10-01",
"name": "[variables('KeyVaultName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"sku": {
"family": "A",
"name": "Standard"
},
"tenantId": "[subscription().tenantId]",
"accessPolicies": [
{
"tenantId": "[subscription().tenantId]",
"objectId": "[reference(resourceId('Microsoft.Web/sites', variables('FunctionName')),'2019-08-01', 'full').identity.principalId]",
"permissions": {
"secrets": [ "get",
"list"
]
}
}
],
"enabledForDeployment": false,
"enabledForDiskEncryption": false,
"enabledForTemplateDeployment": true,
"enableSoftDelete": true
},
"resources": [
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('BHETokenId')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('BHETokenId')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
},
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('BHETokenKey')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('BHETokenKey')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
},
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('WorkspaceKey')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('WorkspaceKey')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
}
]
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"shareQuota": 5120
}
}
]
}

Двоичные данные
Solutions/BloodHound Enterprise/Data Connectors/bhe-funcapp.zip Normal file

Двоичный файл не отображается.

Просмотреть файл

@ -0,0 +1,15 @@
{
"version": "2.0",
"logging": {
"applicationInsights": {
"samplingSettings": {
"isEnabled": true,
"excludedTypes": "Request"
}
}
},
"extensionBundle": {
"id": "Microsoft.Azure.Functions.ExtensionBundle",
"version": "[3.*, 4.0.0)"
}
}

Просмотреть файл

@ -0,0 +1,4 @@
{
"$schema": "http://json.schemastore.org/proxies",
"proxies": {}
}

Просмотреть файл

@ -0,0 +1,7 @@
# DO NOT include azure-functions-worker in this file
# The Python Worker is managed by Azure Functions platform
# Manually managing azure-functions-worker may cause unexpected issues
azure-functions
requests
azure-storage-file-share

Просмотреть файл

@ -0,0 +1,31 @@
{
"Name": "BloodHound Enterprise",
"Author": "SpecterOps - support@specterops.io",
"Logo": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Logos/BHE_Logo.svg\" width=\"75px\" height=\"75px\">",
"Description": "The BloodHound Enterprise Microsoft Sentinel solution ingests your BloodHound Enterprise posture and attack paths into Microsoft Sentinel. Use the dashboards to track the Active Directory and Azure attack paths of your environment. Create alerts to detect when new attack paths emerge or new the exposure increases.",
"WorkbookBladeDescription": "This Microsoft Sentinel Solution installs workbooks. Workbooks provide a flexible canvas for data monitoring, analysis, and the creation of rich visual reports within the Azure portal. They allow you to tap into one or many data sources from Microsoft Sentinel and combine them into unified interactive experiences.",
"AnalyticalRuleBladeDescription": "This solution installs the following analytic rule templates. After installing the solution, create and enable analytic rules in Manage solution view. ",
"Workbooks": [
"Workbooks/BloodHoundEnterpriseAttackPath.json",
"Workbooks/BloodHoundEnterprisePosture.json"
],
"Analytic Rules": [
"Analytic Rules/BloodHoundEnterpriseCriticalAttackPaths.yaml",
"Analytic Rules/BloodHoundEnterpriseExposure.yaml",
"Analytic Rules/BloodHoundEnterpriseTierZeroAssets.yaml"
],
"Parsers": [
"Parsers/BloodHoundEnterprise.txt"
],
"Data Connectors": [
"Data Connectors/BloodHoundEnterprise_API_FunctionApp.json"
],
"Playbooks": [],
"Hunting Queries": [],
"Watchlists": [],
"BasePath": "C:\\One\\Azure-Sentinel\\Solutions\\BloodHound Enterprise",
"Version": "2.0.0",
"Metadata": "SolutionMetadata.json",
"TemplateSpec": true,
"Is1PConnector": false
}

Двоичные данные
Solutions/BloodHound Enterprise/Package/3.0.0.zip Normal file

Двоичный файл не отображается.

Просмотреть файл

@ -0,0 +1,218 @@
{
"$schema": "https://schema.management.azure.com/schemas/0.1.2-preview/CreateUIDefinition.MultiVm.json#",
"handler": "Microsoft.Azure.CreateUIDef",
"version": "0.1.2-preview",
"parameters": {
"config": {
"isWizard": false,
"basics": {
"description": "<img src=\"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Logos/BHE_Logo.svg\" width=\"75px\" height=\"75px\">\n\n**Note:** Please refer to the following before installing the solution: \r \n • Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise/ReleaseNotes.md)\r \n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution.\n\nThe BloodHound Enterprise Microsoft Sentinel solution ingests your BloodHound Enterprise posture and attack paths into Microsoft Sentinel. Use the dashboards to track the Active Directory and Azure attack paths of your environment. Create alerts to detect when new attack paths emerge or new the exposure increases.\n\n**Data Connectors:** 1, **Parsers:** 1, **Workbooks:** 2, **Analytic Rules:** 3\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)",
"subscription": {
"resourceProviders": [
"Microsoft.OperationsManagement/solutions",
"Microsoft.OperationalInsights/workspaces/providers/alertRules",
"Microsoft.Insights/workbooks",
"Microsoft.Logic/workflows"
]
},
"location": {
"metadata": {
"hidden": "Hiding location, we get it from the log analytics workspace"
},
"visible": false
},
"resourceGroup": {
"allowExisting": true
}
}
},
"basics": [
{
"name": "getLAWorkspace",
"type": "Microsoft.Solutions.ArmApiControl",
"toolTip": "This filters by workspaces that exist in the Resource Group selected",
"condition": "[greater(length(resourceGroup().name),0)]",
"request": {
"method": "GET",
"path": "[concat(subscription().id,'/providers/Microsoft.OperationalInsights/workspaces?api-version=2020-08-01')]"
}
},
{
"name": "workspace",
"type": "Microsoft.Common.DropDown",
"label": "Workspace",
"placeholder": "Select a workspace",
"toolTip": "This dropdown will list only workspace that exists in the Resource Group selected",
"constraints": {
"allowedValues": "[map(filter(basics('getLAWorkspace').value, (filter) => contains(toLower(filter.id), toLower(resourceGroup().name))), (item) => parse(concat('{\"label\":\"', item.name, '\",\"value\":\"', item.name, '\"}')))]",
"required": true
},
"visible": true
}
],
"steps": [
{
"name": "dataconnectors",
"label": "Data Connectors",
"bladeTitle": "Data Connectors",
"elements": [
{
"name": "dataconnectors1-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "This Solution installs the data connector for BloodHound Enterprise. You can get BloodHound Enterprise custom log data in your Microsoft Sentinel workspace. After installing the solution, configure and enable this data connector by following guidance in Manage solution view."
}
},
{
"name": "dataconnectors-parser-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "The Solution installs a parser that transforms the ingested data into Microsoft Sentinel normalized format. The normalized format enables better correlation of different types of data from different data sources to drive end-to-end outcomes seamlessly in security monitoring, hunting, incident investigation and response scenarios in Microsoft Sentinel."
}
},
{
"name": "dataconnectors-link2",
"type": "Microsoft.Common.TextBlock",
"options": {
"link": {
"label": "Learn more about connecting data sources",
"uri": "https://docs.microsoft.com/azure/sentinel/connect-data-sources"
}
}
}
]
},
{
"name": "workbooks",
"label": "Workbooks",
"subLabel": {
"preValidation": "Configure the workbooks",
"postValidation": "Done"
},
"bladeTitle": "Workbooks",
"elements": [
{
"name": "workbooks-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "This Microsoft Sentinel Solution installs workbooks. Workbooks provide a flexible canvas for data monitoring, analysis, and the creation of rich visual reports within the Azure portal. They allow you to tap into one or many data sources from Microsoft Sentinel and combine them into unified interactive experiences."
}
},
{
"name": "workbooks-link",
"type": "Microsoft.Common.TextBlock",
"options": {
"link": {
"label": "Learn more",
"uri": "https://docs.microsoft.com/azure/sentinel/tutorial-monitor-your-data"
}
}
},
{
"name": "workbook1",
"type": "Microsoft.Common.Section",
"label": "BloodHound Enterprise Attack Paths",
"elements": [
{
"name": "workbook1-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "Gain insights into BloodHound Enterprise attack paths."
}
}
]
},
{
"name": "workbook2",
"type": "Microsoft.Common.Section",
"label": "BloodHound Enterprise Posture",
"elements": [
{
"name": "workbook2-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "Gain insights into BloodHound Enterprise domain posture."
}
}
]
}
]
},
{
"name": "analytics",
"label": "Analytics",
"subLabel": {
"preValidation": "Configure the analytics",
"postValidation": "Done"
},
"bladeTitle": "Analytics",
"elements": [
{
"name": "analytics-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "This solution installs the following analytic rule templates. After installing the solution, create and enable analytic rules in Manage solution view. "
}
},
{
"name": "analytics-link",
"type": "Microsoft.Common.TextBlock",
"options": {
"link": {
"label": "Learn more",
"uri": "https://docs.microsoft.com/azure/sentinel/tutorial-detect-threats-custom?WT.mc_id=Portal-Microsoft_Azure_CreateUIDef"
}
}
},
{
"name": "analytic1",
"type": "Microsoft.Common.Section",
"label": "BloodHound Enterprise - Number of critical attack paths increase",
"elements": [
{
"name": "analytic1-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "The number of critical attack paths has increased over the past 7 days."
}
}
]
},
{
"name": "analytic2",
"type": "Microsoft.Common.Section",
"label": "BloodHound Enterprise - Exposure increase",
"elements": [
{
"name": "analytic2-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "The exposure for a domain has increased by more than 5% over the past 7 days."
}
}
]
},
{
"name": "analytic3",
"type": "Microsoft.Common.Section",
"label": "BloodHound Enterprise - Number of Tier Zero assets increase",
"elements": [
{
"name": "analytic3-text",
"type": "Microsoft.Common.TextBlock",
"options": {
"text": "The number of Tier Zero assets has increased by more than 5% over the past 7 days."
}
}
]
}
]
}
],
"outputs": {
"workspace-location": "[first(map(filter(basics('getLAWorkspace').value, (filter) => and(contains(toLower(filter.id), toLower(resourceGroup().name)),equals(filter.name,basics('workspace')))), (item) => item.location))]",
"location": "[location()]",
"workspace": "[basics('workspace')]"
}
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,35 @@
// BloodHound Enterprise parser.
// Normalizes the raw bloodhoundEnterprise_CL custom log table: maps the
// type-suffixed columns produced by the Data Collector API (_s string,
// _d double, _t datetime, _b bool, _g guid) onto friendly names, using
// column_ifexists() so events that lack a given column still parse, then
// drops the original suffixed columns.
let BloodHound_view = view () {
bloodhoundEnterprise_CL
| extend
EventVendor="SpecterOps",
EventProduct="BloodHound Enterprise",
// Some ids arrive as guid (_g) and some as string (_s) depending on the
// event; coalesce picks whichever column is present.
domain_sid=coalesce(column_ifexists('domain_sid_s', ''), column_ifexists('domain_sid_g', '')),
exposure_index=column_ifexists('exposure_index_d', ''),
tier_zero_count=column_ifexists('tier_zero_count_d', ''),
critical_risk_count=column_ifexists('critical_risk_count_d', ''),
domain_id=coalesce(column_ifexists('domain_id_g', ''), column_ifexists('domain_id_s', '')),
non_tier_zero_principal=column_ifexists('non_tier_zero_principal_s', ''),
tier_zero_principal=column_ifexists('tier_zero_principal_s', ''),
group=column_ifexists('group_s', ''),
principal=column_ifexists('principal_s', ''),
path_id=column_ifexists('path_id_s', ''),
user=column_ifexists('user_s', ''),
finding_id=column_ifexists('finding_id_s', ''),
path_title=column_ifexists('path_title_s', ''),
path_type=column_ifexists('path_type_s', ''),
exposure=column_ifexists('exposure_d', ''),
finding_count=column_ifexists('finding_count_d', ''),
principal_count=column_ifexists('principal_count_d', ''),
id=column_ifexists('id_d', ''),
created_at=coalesce(column_ifexists('created_at_t', ''), now()),
updated_at=column_ifexists('updated_at_t', ''),
deleted_at=column_ifexists('deleted_at_Time_t', ''),
deleted_at_v=column_ifexists('deleted_at_Valid_b', ''),
severity=column_ifexists('severity_s', ''),
domain_impact_value=column_ifexists('domain_impact_value_d', ''),
domain_name=column_ifexists('domain_name_s', ''),
domain_type=column_ifexists('domain_type_s', ''),
data_type=column_ifexists('data_type_s', '')
// Drop the raw type-suffixed source columns now that friendly names exist.
| project-away *_s, *_t, *_d, *_b, *_g
};
BloodHound_view

Просмотреть файл

@ -0,0 +1,3 @@
| **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** |
|-------------|--------------------------------|---------------------------------------------|
| 3.0.0 | 20-07-2023 | Initial solution release |

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше