Merge branch 'Corelight' of github.com:socprime/Azure-Sentinel into Corelight

This commit is contained in:
Alex Verbniak 2021-03-16 10:19:58 +02:00
Родитель 41c8c38e0f d2f3de0bda
Коммит ce7bc2dbe4
196 изменённых файлов: 12428 добавлений и 970 удалений

Просмотреть файл

@ -67,7 +67,7 @@ function getConnectorCategory(dataTypes : any, instructionSteps:[])
}
let fileTypeSuffixes = ["json"];
let filePathFolderPrefixes = ["DataConnectors"];
let filePathFolderPrefixes = ["DataConnectors","Solutions"];
let fileKinds = ["Added", "Modified"];
let CheckOptions = {
onCheckFile: (filePath: string) => {

Просмотреть файл

@ -30,7 +30,7 @@ export async function IsIdHasChanged(filePath: string): Promise<ExitCode> {
let fileKinds = ["Modified"];
let fileTypeSuffixes = ["yaml", "yml", "json"];
let filePathFolderPrefixes = ["Detections"];
let filePathFolderPrefixes = ["Detections","Solutions"];
let CheckOptions = {
onCheckFile: (filePath: string) => {
return IsIdHasChanged(filePath);

Просмотреть файл

@ -1,45 +0,0 @@
{
"Name": "AppServiceAntivirusScanAuditLogs",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "TenantId",
"Type": "String"
},
{
"Name": "ScanStatus",
"Type": "String"
},
{
"Name": "TotalFilesScanned",
"Type": "Long"
},
{
"Name": "NumberOfInfectedFiles",
"Type": "Long"
},
{
"Name": "ListOfInfectedFiles",
"Type": "String"
},
{
"Name": "ErrorMessage",
"Type": "String"
},
{
"Name": "SourceSystem",
"Type": "String"
},
{
"Name": "Type",
"Type": "String"
},
{
"Name": "_ResourceId",
"Type": "String"
}
]
}

Просмотреть файл

@ -1,97 +0,0 @@
{
"Name": "AppServiceHTTPLogs",
"Properties": [
{
"Name": "TenantId",
"Type": "string"
},
{
"Name": "TimeGenerated",
"Type": "datetime"
},
{
"Name": "Category",
"Type": "string"
},
{
"Name": "CsMethod",
"Type": "string"
},
{
"Name": "CsUriStem",
"Type": "string"
},
{
"Name": "SPort",
"Type": "string"
},
{
"Name": "CIp",
"Type": "string"
},
{
"Name": "UserAgent",
"Type": "string"
},
{
"Name": "CsHost",
"Type": "string"
},
{
"Name": "ScStatus",
"Type": "long"
},
{
"Name": "ScSubStatus",
"Type": "string"
},
{
"Name": "ScWin32Status",
"Type": "string"
},
{
"Name": "ScBytes",
"Type": "long"
},
{
"Name": "CsBytes",
"Type": "long"
},
{
"Name": "TimeTaken",
"Type": "long"
},
{
"Name": "Result",
"Type": "string"
},
{
"Name": "Cookie",
"Type": "string"
},
{
"Name": "CsUriQuery",
"Type": "string"
},
{
"Name": "CsUsername",
"Type": "string"
},
{
"Name": "Referer",
"Type": "string"
},
{
"Name": "SourceSystem",
"Type": "string"
},
{
"Name": "Type",
"Type": "string"
},
{
"Name": "_ResourceId",
"Type": "string"
}
]
}

Просмотреть файл

@ -4,6 +4,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using Xunit;
using YamlDotNet.Serialization;
@ -37,8 +38,31 @@ namespace Kqlvalidations.Tests
{
return;
}
var lines = Regex.Split(queryStr, @"\n\r?");
var validationRes = _queryValidator.ValidateSyntax(queryStr);
Assert.True(validationRes.IsValid, validationRes.IsValid ? string.Empty : $"Template Id:{id} is not valid Errors:{validationRes.Diagnostics.Select(d => d.ToString()).ToList().Aggregate((s1, s2) => s1 + "," + s2)}");
var firstErrorLocation = (Line: 0, Col: 0);
if (!validationRes.IsValid)
{
firstErrorLocation = GetLocationInQuery(queryStr, validationRes.Diagnostics.First(d => d.Severity == "Error").Start);
}
Assert.True(validationRes.IsValid, validationRes.IsValid ? string.Empty : $"Template Id:{id} is not valid in Line:{firstErrorLocation.Line} col:{firstErrorLocation.Col} Errors:{validationRes.Diagnostics.Select(d => d.ToString()).ToList().Aggregate((s1, s2) => s1 + "," + s2)}");
}
private (int Line, int Col) GetLocationInQuery(string queryStr, int pos)
{
var lines = Regex.Split(queryStr, "\n");
var curlineIndex = 0;
var curPos = 0;
while (lines.Length > curlineIndex && pos > curPos + lines[curlineIndex].Length + 1)
{
curPos += lines[curlineIndex].Length + 1;
curlineIndex++;
}
var col = (pos - curPos + 1);
return (curlineIndex + 1, col);
}
}

Просмотреть файл

@ -12,7 +12,7 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="YamlDotNet" Version="6.0.0" />
<PackageReference Include="Microsoft.Azure.Sentinel.KustoServices" Version="1.0.11" />
<PackageReference Include="Microsoft.Azure.Sentinel.KustoServices" Version="1.0.12" />
</ItemGroup>
</Project>

Двоичный файл не отображается.

Двоичный файл не отображается.

Просмотреть файл

@ -67,6 +67,33 @@ namespace Kqlvalidations.Tests
var isValid = connectorIds.Count() == 0;
Assert.True(isValid, isValid ? string.Empty : $"Template Id:'{id}' doesn't have valid connectorIds:'{string.Join(",", connectorIds)}'. If a new connector is used and already configured in the Portal, please add it's Id to the list in 'ValidConnectorIds.json' file.");
}
[Fact]
public void Validate_DetectionTemplates_AllFilesAreYamls()
{
string detectionPath = DetectionsYamlFilesTestData.GetDetectionPath();
var yamlFiles = Directory.GetFiles(detectionPath, "*.yaml", SearchOption.AllDirectories).ToList();
var AllFiles = Directory.GetFiles(detectionPath,"*", SearchOption.AllDirectories).ToList();
var numberOfNotYamlFiles = 1; //This is the readme.md file in the directory
Assert.True(AllFiles.Count == yamlFiles.Count + numberOfNotYamlFiles, "All the files in detections folder are supposed to end with .yaml");
}
[Fact]
public void Validate_DetectionTemplates_NoSameTemplateIdTwice()
{
string detectionPath = DetectionsYamlFilesTestData.GetDetectionPath();
var yamlFiles = Directory.GetFiles(detectionPath, "*.yaml", SearchOption.AllDirectories);
var templatesAsStrings = yamlFiles.Select(yaml => GetYamlFileAsString(Path.GetFileName(yaml)));
var templatesAsObjects = templatesAsStrings.Select(yaml => JObject.Parse(ConvertYamlToJson(yaml)));
var duplicationsById = templatesAsObjects.GroupBy(a => a["id"]).Where(group => group.Count() > 1); //Finds duplications -> ids that there are more than 1 template from
var duplicatedId = "";
if (duplicationsById.Count() > 0){
duplicatedId = duplicationsById.Last().Select(x => x["id"]).First().ToString();
}
Assert.True(duplicationsById.Count() == 0, $"There should not be 2 templates with the same ID, but the id {duplicatedId} is duplicated.");
}
private string GetYamlFileAsString(string detectionsYamlFileName)
{

Просмотреть файл

@ -2,8 +2,6 @@
{
public enum AttackTactic
{
Reconnaissance,
ResourceDevelopment,
InitialAccess,
Execution,
Persistence,
@ -15,6 +13,7 @@
Collection,
Exfiltration,
CommandAndControl,
Impact
Impact,
PreAttack
}
}

Просмотреть файл

@ -24,7 +24,7 @@ export async function IsValidWorkbookTemplate(filePath: string): Promise<ExitCod
}
let fileTypeSuffixes = [".json"];
let filePathFolderPrefixes = ["Workbooks"];
let filePathFolderPrefixes = ["Workbooks","Solutions"];
let fileKinds = ["Added", "Modified"];
let CheckOptions = {
onCheckFile: (filePath: string) => {

Просмотреть файл

@ -3,7 +3,7 @@
# the last matching pattern has the most precedence.
# Core team members
* @liemilyg @mgladi @orco365 @shalinoid @KobyKoren @shainw @ianhelle @timbMSFT @juliango2100 @dicolanl @Amitbergman @sagamzu @YaronFruchtmann @preetikr @Yaniv-Shasha @sarah-yo @nazang @ehudk-msft @oshvartz @Liatlishams @NoamLandress @laithhisham
* @liemilyg @mgladi @orco365 @shalinoid @KobyKoren @shainw @ianhelle @timbMSFT @juliango2100 @dicolanl @Amitbergman @sagamzu @YaronFruchtmann @preetikr @Yaniv-Shasha @sarah-yo @nazang @ehudk-msft @oshvartz @Liatlishams @NoamLandress @laithhisham @petebryan
# This is copied from here: https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners

Просмотреть файл

@ -1,5 +1,5 @@
{
"id": "DarktraceDarktrace",
"id": "Darktrace",
"title": "AI Analyst Darktrace",
"publisher": "Darktrace",
"descriptionMarkdown": "The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Azure Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Azure Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.",
@ -111,4 +111,4 @@
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
}
}

Просмотреть файл

@ -122,5 +122,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "2de7b355-5f0b-4eb1-a264-629314ef86e5",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Vectra AI"
},
"support": {
"name": "Vectra AI",
"link": "https://www.vectra.ai/support",
"tier": "developer"
}
}
}

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 78 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 34 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 52 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 56 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 37 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 70 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 83 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 139 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 116 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 134 KiB

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 76 KiB

Просмотреть файл

@ -0,0 +1,136 @@
# AWS Lambda Function to import CloudTrail Logs to Azure Sentinel
Author: Sreedhar Ande
This Lambda function is designed to ingest AWS CloudTrail Events/S3 Events and send them to Azure Log Analytics workspace using the Log Analytics API.
AWS CloudTrail logs are audit-type events from all/any AWS resources in a tenancy. Each AWS resource has a unique set of Request and Response Parameters. Azure Log Analytics has a column-per-table limit of 500 (plus some system columns); the aggregate of AWS parameter fields will exceed this limit quickly, leading to potential loss of event records.
Code does the following things with the logs it processes.
1. Takes the core fields of the record. i.e. all fields except for the Request and Response associated fields and puts them in a LogAnalyticsTableName_ALL. Providing a single table with all records with core event information.
2. Looks at each event and puts it into a table with an extension <AWSResourceType>, i.e. LogAnalyticsTableName_S3
3. Exception to 2 above is for EC2 events, the volume of fields for EC2 Request and Response parameters exceeds 500 columns. EC2 data is split into 3 tables, Header, Request & Response.
Ex: LogAnalyticsTableName_EC2_Header
4. In future if other AWS datatypes exceed 500 columns a similar split may be required for them as well.
**Credits**
This Data connector uses a PowerShell logic authored by [Chris Abberley](https://github.com/cabberley)
**Note**
To avoid additional billing and duplication: **CloudTrail Logs only**
1. You can turn off LogAnalyticsTableName_ALL using additional Environment Variable **CoreFieldsAllTable** to **true/false**
2. You can turn off LogAnalyticsTableName_AWSREsourceType using additional Environment Variable **SplitAWSResourceTypeTables** to **true/false**
**Either CoreFieldsAllTable or SplitAWSResourceTypeTables must be true or both can be true**
## **Function Flow process**
# **SNS Lambda Trigger:**
**CloudTrail/CloudWatch/GuardDuty/SecurityHub Logs --> AWS S3 --> AWS SNS Topic --> AWS Lambda --> Azure Log Analytics**
![Picture9](./Graphics/Picture9.png)
# **SQS Lambda Trigger:**
**CloudTrail/CloudWatch/GuardDuty/SecurityHub Logs --> AWS S3 --> AWS SQS --> AWS Lambda --> Azure Log Analytics**
![Picture9](./Graphics/Picture11.png)
**Note**
Data parsing is applicable only to CloudTrail Logs. CloudWatch/GuardDuty/SecurityHub Logs will be ingested to **CoreFieldsAllTable**
## Installation / Setup Guide
## **Pre-requisites**
This function requires AWS Secrets Manager to store Azure Log Analytics WorkspaceId and WorkspaceKey
![Picture10](./Graphics/Picture10.png)
### **Option 1**
### Machine Setup
To deploy this, you will need a machine prepared with the following:
- PowerShell Core – I recommend PowerShell 7 [found here](https://github.com/PowerShell/PowerShell/releases)
- .Net Core 3.1 SDK [found here](https://dotnet.microsoft.com/download)
- AWSLambdaPSCore module – You can install this either from the [PowerShell Gallery](https://www.powershellgallery.com/packages?q=AWSLambdaPSCore), or you can install it by using the following PowerShell Core shell command:
```powershell
Install-Module AWSLambdaPSCore -Scope CurrentUser
```
See the documentation here https://docs.aws.amazon.com/lambda/latest/dg/powershell-devenv.html
I recommend you review https://docs.aws.amazon.com/lambda/latest/dg/powershell-package.html to review the cmdlets that are part of AWSLambdaPSCore.
Note: If the environment uses a proxy, you may need to add the following to VSCode profile
```powershell
Added to VS Code profile:
$webclient=New-Object System.Net.WebClient
$webclient.Proxy.Credentials = [System.Net.CredentialCache]::DefaultNetworkCredentials
```
### Create the Lambda Function
1. ```Get-AWSPowerShellLambdaTemplate```
2. ```New-AWSPowerShellLambda -ScriptName IngestCloudTrailEventsToSentinel -Template S3EventToSNS```
OR
```New-AWSPowerShellLambda -ScriptName IngestCloudTrailEventsToSentinel -Template S3EventToSQS```
3. Copy the PowerShell code from https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AWS-CloudTrail-Ingestion-Lambda/SNS-Lambda-Trigge/IngestCloudTrailEventsToSentinel.ps1
4. Paste in your code file created from Step #2
5. Go to script file folder
6. ```Publish-AWSPowerShellLambda -Name YourLambdaNameHere -ScriptPath <path>/IngestCloudTrailEventsToSentinel.ps1 -Region <region> -IAMRoleArn <arn of role created earlier> -ProfileName <profile>```
Ex: ```Publish-AWSPowerShellLambda -ScriptPath .\IngestCloudTrailEventsToSentinel.ps1 -Name IngestCloudTrailEventsToSentinel -Region us-east-2```
You might need –ProfileName if your configuration of .aws/credentials file doesn't contain a default. See this [document](https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/setup-credentials.html) for information on setting up your AWS credentials.
### **Option 2**
1. Create a new AWS Lambda and select "Author from scratch"
2. Give Function Name and select Runtime ".NET Core 2.1 (C#/PowerShell)" and click Create function
3. After successful creation, now you can change its code and configuration
4. Under Function code, click on Actions --> Upload a .zip file (/aws-data-connector-az-sentinel/blob/main/IngestCloudTrailEventsToSentinel.zip)
5. Follow the steps in "### Lambda Configuration" from step 2
### **Note: Either you choose Option 1/Option 2, the following configuration steps are mandatory.**
### **Lambda Configuration**
1. Once created, login to the AWS console. In Find services, search for Lambda. Click on Lambda.
![Picture1](./Graphics/Picture1.png)
2. Click on the lambda function name you used with the cmdlet. Click Environment Variables and add the following
```
SecretName
LogAnalyticsTableName
CoreFieldsAllTable --> Boolean
SplitAWSResourceTypeTables --> Boolean
```
![Picture4](./Graphics/Picture4.png)
3. Click on the lambda function name you used with the cmdlet. Click Add Trigger
![Picture2](./Graphics/Picture2.png)
4. Select SNS. Select the SNS Name. Click Add.
![Picture3](./Graphics/Picture3.png)
5. Create AWS Role : The Lambda function will need an execution role defined that grants access to the S3 bucket and CloudWatch logs. To create an execution role:
1. Open the [roles](https://console.aws.amazon.com/iam/home#/roles) page in the IAM console.
2. Choose Create role.
3. Create a role with the following properties.
- Trusted entity – AWS Lambda.
- Role name – AWSSNStoAzureSentinel.
- Permissions – AWSLambdaBasicExecutionRole & AmazonS3ReadOnlyAccess & secretsmanager:GetSecretValue & kms:Decrypt - required only if you use a customer-managed AWS KMS key to encrypt the secret. You do not need this permission to use the account's default AWS managed CMK for Secrets Manager
The AWSLambdaExecute policy has the permissions that the function needs to manage objects in Amazon S3 and write logs to CloudWatch Logs. Copy the arn of the role created as you will need it for the next step.
6. Your lambda function is ready to send data to Log Analytics.
### **Test the function**
1. To test your function, perform some actions like Start EC2, Stop EC2, Login into EC2, etc.
2. To see the logs, go to the Lambda function. Click Monitoring tab. Click view logs in CloudWatch.
![Picture5](./Graphics/Picture5.png)
3. In CloudWatch, you will see each log stream from the runs. Select the latest.
![Picture6](./Graphics/Picture6.png)
4. Here you can see anything from the script from the Write-Host cmdlet.
![Picture7](./Graphics/Picture7.png)
5. Go to portal.azure.com and verify your data is in the custom log.
![Picture8](./Graphics/Picture8.png)

Просмотреть файл

@ -0,0 +1,347 @@
# PowerShell script file to be executed as a AWS Lambda function.
#
# When executing in Lambda the following variables will be predefined.
# $LambdaInput - A PSObject that contains the Lambda function input data.
# $LambdaContext - An Amazon.Lambda.Core.ILambdaContext object that contains information about the currently running Lambda environment.
#
# The last item in the PowerShell pipeline will be returned as the result of the Lambda function.
#
# To include PowerShell modules with your Lambda function, like the AWS.Tools.S3 module, add a "#Requires" statement
# indicating the module and version. If using an AWS.Tools.* module the AWS.Tools.Common module is also required.
#
# The following link contains documentation describing the structure of the S3 event object.
# https://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
#
# This example demonstrates how to process an S3 Event that follows the process:
# S3 Event -> SNS Topic -> Lambda Function
#Requires -Modules @{ModuleName='AWS.Tools.Common';ModuleVersion='4.1.5.0'}
#Requires -Modules @{ModuleName='AWS.Tools.S3';ModuleVersion='4.1.5.0'}
#Requires -Modules @{ModuleName='AWS.Tools.SecretsManager';ModuleVersion='4.1.5.0'}
# Uncomment to send the input event to CloudWatch Logs
#Write-Host (ConvertTo-Json -InputObject $LambdaInput -Compress -Depth 5)
#$PSVersionTable
# Get the current universal time in the default string format.
$currentUTCtime = (Get-Date).ToUniversalTime()
# Code to retrieve credentials from AWS Secrets Manager.
# The secret named by the SecretName environment variable must expose the
# Log Analytics workspace id and key as the LAWID / LAWKEY properties.
$secretName = $env:SecretName
$secretValue = ConvertFrom-Json (Get-SECSecretValue -SecretId $secretName -ErrorAction Stop -Verbose).SecretString -ErrorAction Stop
$workspaceId = $secretValue.LAWID
$workspaceKey = $secretValue.LAWKEY
# Destination table-name prefix plus the two routing toggles, all supplied as
# Lambda environment variables (the toggles are compared as strings "true"/"false").
$LATableName = $env:LogAnalyticsTableName
$IsCoreFieldsAllTable = $env:CoreFieldsAllTable
$IsSplitAWSResourceTypes = $env:SplitAWSResourceTypeTables
# Optional Azure resource id stamped on ingested records; empty by default.
$ResourceID = ''
# The $eventobjectlist is the Json Parameter field names that form the core of the Json message that we want in the ALL Table in Log Analytics
$eventobjectlist = @('eventTime', 'eventVersion', 'userIdentity', 'eventSource', 'eventName', 'awsRegion', 'sourceIPAddress', 'userAgent', 'errorCode', 'errorMessage', 'requestID', 'eventID', 'eventType', 'apiVersion', 'managementEvent', 'readOnly', 'resources', 'recipientAccountId', 'serviceEventDetails', 'sharedEventID', 'vpcEndpointId', 'eventCategory', 'additionalEventData')
Function Expand-GZipFile {
    <#
    .SYNOPSIS
    Decompresses a gzip file on disk to a new output file.
    .PARAMETER infile
    Path of the .gz file to read.
    .PARAMETER outfile
    Path the decompressed bytes are written to (created/overwritten).
    #>
    Param(
        $infile,
        $outfile
    )
    Write-Host "Processing Expand-GZipFile for: infile = $infile, outfile = $outfile"
    $srcStream = New-Object System.IO.FileStream $infile, ([IO.FileMode]::Open), ([IO.FileAccess]::Read), ([IO.FileShare]::Read)
    $dstStream = New-Object System.IO.FileStream $outfile, ([IO.FileMode]::Create), ([IO.FileAccess]::Write), ([IO.FileShare]::None)
    $gz = New-Object System.IO.Compression.GzipStream $srcStream, ([IO.Compression.CompressionMode]::Decompress)
    # Pump the decompressed stream to the output file in 1 KiB chunks.
    $chunk = New-Object byte[](1024)
    $bytesRead = $gz.Read($chunk, 0, 1024)
    while ($bytesRead -gt 0) {
        $dstStream.Write($chunk, 0, $bytesRead)
        $bytesRead = $gz.Read($chunk, 0, 1024)
    }
    $gz.Close()
    $dstStream.Close()
    $srcStream.Close()
}
# Function to create the HTTP Authorization header value required to
# authenticate a post to the Azure Monitor HTTP Data Collector API.
Function New-BuildSignature {
    <#
    .SYNOPSIS
    Builds the 'SharedKey <workspaceId>:<hmac>' authorization header value.
    .DESCRIPTION
    Computes an HMAC-SHA256 over the canonical request string
    (method, content length, content type, x-ms-date header, resource path)
    using the base64-decoded workspace shared key, and base64-encodes the digest.
    #>
    param(
        $customerId,
        $sharedKey,
        $date,
        $contentLength,
        $method,
        $contentType,
        $resource )
    # Canonical string the service expects, fields joined by LF.
    $canonical = ($method, $contentLength, $contentType, ("x-ms-date:" + $date), $resource) -join "`n"
    $hmac = New-Object System.Security.Cryptography.HMACSHA256
    $hmac.Key = [Convert]::FromBase64String($sharedKey)
    $digest = $hmac.ComputeHash([Text.Encoding]::UTF8.GetBytes($canonical))
    return 'SharedKey {0}:{1}' -f $customerId, [Convert]::ToBase64String($digest)
}
# Function to create and post the request
Function Invoke-LogAnalyticsData {
    <#
    .SYNOPSIS
    Posts a JSON payload to the Azure Monitor HTTP Data Collector API.
    .DESCRIPTION
    Builds the SharedKey authorization header via New-BuildSignature and POSTs
    $Body to the workspace's /api/logs endpoint. On HTTP 429 (throttling) the
    call backs off for a random 10-80 seconds and retries until accepted.
    .PARAMETER CustomerId
    Log Analytics workspace id.
    .PARAMETER SharedKey
    Base64-encoded workspace shared key.
    .PARAMETER Body
    JSON payload to ingest.
    .PARAMETER LogTable
    Custom log table name sent as the Log-Type header.
    .PARAMETER TimeStampField
    Record property the service should use as TimeGenerated.
    .PARAMETER resourceId
    Azure resource id stamped on the ingested records (may be empty).
    .OUTPUTS
    The HTTP status code of the final request (200 on success).
    #>
    Param(
        $CustomerId,
        $SharedKey,
        $Body,
        $LogTable,
        $TimeStampField,
        $resourceId)
    $method = "POST"
    $contentType = "application/json"
    $resource = "/api/logs"
    $rfc1123date = [DateTime]::UtcNow.ToString("r")
    $contentLength = $Body.Length
    $signature = New-BuildSignature `
        -customerId $CustomerId `
        -sharedKey $SharedKey `
        -date $rfc1123date `
        -contentLength $contentLength `
        -method $method `
        -contentType $contentType `
        -resource $resource
    $uri = "https://" + $CustomerId + ".ods.opinsights.azure.com" + $resource + "?api-version=2016-04-01"
    $headers1 = @{
        "Authorization"        = $signature;
        "Log-Type"             = $LogTable;
        "x-ms-date"            = $rfc1123date;
        "x-ms-AzureResourceId" = $resourceId;
        "time-generated-field" = $TimeStampField;
    }
    $status = $false
    do {
        $response = Invoke-WebRequest -Uri $uri -Method $method -ContentType $contentType -Headers $headers1 -Body $Body
        # HTTP 429 (Too Many Requests) means the workspace is throttling; the
        # Retry-After semantics suggest waiting before retrying, so back off
        # for a random interval and loop.
        # BUG FIX: the original tested $reponse (typo) which is always $null,
        # so the retry branch could never fire.
        # NOTE(review): Invoke-WebRequest throws on non-2xx status by default,
        # so a 429 may surface as an exception before this check — confirm the
        # target PowerShell version / consider -SkipHttpErrorCheck.
        If ($response.StatusCode -eq 429) {
            $rand = get-random -minimum 10 -Maximum 80
            start-sleep -seconds $rand
        }
        else { $status = $true }
    } until ($status)
    Remove-variable -name Body
    return $response.StatusCode
}
Function Ingest-Core-Fields-Single-Table {
    <#
    .SYNOPSIS
    Sends the collected core-field records to the "<LATableName>_All" custom table.
    .DESCRIPTION
    Serializes $coreEvents to compact JSON and posts it via Invoke-LogAnalyticsData.
    Payloads over 28 MB are split into record slices sized to serialize near
    20 MB each and posted separately (the Data Collector API rejects oversized
    requests). Reads the script-level $workspaceId, $workspaceKey, $LATableName
    and $ResourceID variables.
    .PARAMETER coreEvents
    Array of hashtables holding the core fields of each event record.
    #>
    Param(
        $coreEvents)
    $coreJson = convertto-json $coreEvents -depth 5 -Compress
    $Table = "$LATableName" + "_All"
    IF (($corejson.Length) -gt 28MB) {
        Write-Host "Log length is greater than 28 MB, splitting and sending to Log Analytics"
        # Number of slices; +1 guarantees at least two and rounds coverage up.
        $bits = [math]::Round(($corejson.length) / 20MB) + 1
        $TotalRecords = $coreEvents.Count
        $RecSetSize = [math]::Round($TotalRecords / $bits) + 1
        $start = 0
        For ($x = 0; $x -lt $bits; $x++) {
            # Clamp the final slice to the end of the record array.
            IF ( ($start + $recsetsize) -gt $TotalRecords) {
                $finish = $totalRecords
            }
            ELSE {
                $finish = $start + $RecSetSize
            }
            $body = Convertto-Json ($coreEvents[$start..$finish]) -Depth 5 -Compress
            $result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $body -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
            if ($result -eq 200)
            {
                Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
            }
            $start = $finish + 1
        }
        $null = Remove-variable -name body
    }
    Else {
        # Payload fits in a single request.
        $result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $coreJson -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
        if ($result -eq 200)
        {
            Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
        }
    }
    $null = remove-variable -name coreEvents
    $null = remove-variable -name coreJson
}
Function Ingest-AWS-ResourceType-Multi-Tables {
    <#
    .SYNOPSIS
    Posts each per-resource-type event group to its own "<LATableName>_<type>" table.
    .DESCRIPTION
    For every key in $eventSources, serializes the matching group in
    $groupEvents and posts it via Invoke-LogAnalyticsData. Groups whose JSON
    exceeds 28 MB are split into slices targeting roughly 20 MB each, mirroring
    Ingest-Core-Fields-Single-Table. Reads the script-level $workspaceId,
    $workspaceKey, $LATableName and $ResourceID variables.
    .PARAMETER eventSources
    Keys identifying the groups (e.g. 's3', 'ec2_Header', 'ec2_Request').
    .PARAMETER groupEvents
    Hashtable mapping each key to its array of event records.
    #>
    Param(
        $eventSources,
        $groupEvents)
    # Running record count across all groups (diagnostic only; not emitted here).
    $RecCount = 0
    foreach ($d in $eventSources) {
        #$events = $groupevents[$d]
        $eventsJson = ConvertTo-Json $groupEvents[$d] -depth 5 -Compress
        $Table = $LATableName + '_' + $d
        $TotalRecords = $groupEvents[$d].Count
        $recCount += $TotalRecords
        IF (($eventsjson.Length) -gt 28MB) {
            #$events = Convertfrom-json $corejson
            # Slice count / size mirror the single-table splitter above.
            $bits = [math]::Round(($eventsjson.length) / 20MB) + 1
            $TotalRecords = $groupEvents[$d].Count
            $RecSetSize = [math]::Round($TotalRecords / $bits) + 1
            $start = 0
            For ($x = 0; $x -lt $bits; $x++) {
                # Clamp the final slice to the end of the group.
                IF ( ($start + $recsetsize) -gt $TotalRecords) {
                    $finish = $totalRecords
                }
                ELSE {
                    $finish = $start + $RecSetSize
                }
                $body = Convertto-Json ($groupEvents[$d][$start..$finish]) -Depth 5 -Compress
                $result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $body -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
                if ($result -eq 200)
                {
                    Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
                }
                $start = $finish + 1
            }
            $null = Remove-variable -name body
        }
        Else {
            #$logEvents = Convertto-Json $events -depth 20 -compress
            $result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $eventsJson -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
            if ($result -eq 200)
            {
                Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
            }
        }
    }
}
# Lambda entry flow: each SNS record wraps an S3 event notification pointing
# at a newly delivered log object. Download, parse and ingest each object.
foreach ($snsRecord in $LambdaInput.Records)
{
    $snsMessage = ConvertFrom-Json -InputObject $snsRecord.Sns.Message
    foreach ($s3Event in $snsMessage.Records)
    {
        $s3BucketName = $s3Event.s3.bucket.name
        $s3BucketKey = $s3Event.s3.object.key
        Write-Host "Processing event for: bucket = $s3BucketName, key = $s3BucketKey"
        IF ($Null -ne $s3BucketName -and $Null -ne $s3BucketKey) {
            # Restore URL-encoded ':' in the key, then make the final path
            # segment filesystem-safe before downloading into /tmp.
            $s3KeyPath = $s3BucketKey -Replace ('%3A', ':')
            $fileNameSplit = $s3KeyPath.split('/')
            $fileSplits = $fileNameSplit.Length - 1
            $fileName = $filenameSplit[$fileSplits].replace(':', '_')
            $downloadedFile = Read-S3Object -BucketName $s3BucketName -Key $s3BucketKey -File "/tmp/$filename"
            Write-Host "Object $s3BucketKey is $($downloadedFile.Size) bytes; Extension is $($downloadedFile.Extension)"
            IF ($downloadedFile.Extension -eq '.gz' ) {
                # Gzipped CloudTrail delivery: decompress, strip the JSON
                # envelope, then group events by originating AWS service.
                $infile = "/tmp/$filename"
                $outfile = "/tmp/" + $filename -replace ($downloadedFile.Extension, '')
                Expand-GZipFile $infile.Trim() $outfile.Trim()
                $null = Remove-Item -Path $infile -Force -Recurse -ErrorAction Ignore
                $filename = $filename -replace ($downloadedFile.Extension, '')
                $filename = $filename.Trim()
                $logEvents = Get-Content -Raw -LiteralPath ("/tmp/$filename" )
                # Drop the trailing '}' and the leading '{"Records":' so what
                # remains parses as a bare JSON array of events.
                # NOTE(review): assumes the exact CloudTrail envelope shape;
                # confirm for any other producers routed through this bucket.
                $logEvents = $LogEvents.Substring(0, ($LogEvents.length) - 1)
                $LogEvents = $LogEvents -Replace ('{"Records":', '')
                $loglength = $logEvents.Length
                $logevents = Convertfrom-json $LogEvents -AsHashTable
                $groupevents = @{}
                $coreEvents = @()
                $eventSources = @()
                Foreach ($log in $logevents) {
                    $Logdetails = @{}
                    $Logdetails1 = @{}
                    # Service key, e.g. 'ec2' from 'ec2.amazonaws.com' (dashes removed).
                    $b = ((($log.eventSource).split('.'))[0]) -replace ('-', '')
                    IF ($b -eq 'ec2') {
                        # EC2 request/response parameters would blow past the
                        # 500-column table limit, so EC2 is split into
                        # Header / Request / Response groups.
                        foreach ($col in $eventobjectlist) {
                            $logdetails1 += @{$col = $log.$col }
                        }
                        $ec2Header = $b + '_Header'
                        IF ($null -eq $groupevents[$ec2Header]) {
                            Add-Member -inputobject $groupevents -Name $b -MemberType NoteProperty -value @() -Force
                            $groupevents[$ec2Header] = @()
                            $eventSources += $ec2Header
                        }
                        $groupevents[$ec2Header] += $Logdetails1
                        $Ec2Request = $b + '_Request'
                        IF ($null -eq $groupevents[$Ec2Request]) {
                            Add-Member -inputobject $groupevents -Name $Ec2Request -MemberType NoteProperty -value @() -Force
                            $groupevents[$Ec2Request] = @()
                            $eventSources += $Ec2Request
                        }
                        # Request slice: identifying fields + requestParameters.
                        $ec2Events = @{}
                        $ec2Events += @{'eventID' = $log.eventID }
                        $ec2Events += @{'awsRegion' = $log.awsRegion }
                        $ec2Events += @{'requestID' = $log.requestID }
                        $ec2Events += @{'eventTime' = $log.eventTime }
                        $ec2Events += @{'requestParameters' = $log.requestParameters }
                        $groupevents[$Ec2Request] += $ec2Events
                        $Ec2Response = $b + '_Response'
                        IF ($null -eq $groupevents[$Ec2Response]) {
                            Add-Member -inputobject $groupevents -Name $Ec2Response -MemberType NoteProperty -value @() -Force
                            $groupevents[$Ec2Response] = @()
                            $eventSources += $Ec2Response
                        }
                        # Response slice: identifying fields + responseElements.
                        $ec2Events = @{}
                        $ec2Events += @{'eventID' = $log.eventID }
                        $ec2Events += @{'awsRegion' = $log.awsRegion }
                        $ec2Events += @{'requestID' = $log.requestID }
                        $ec2Events += @{'eventTime' = $log.eventTime }
                        $ec2Events += @{'responseElements' = $log.responseElements }
                        $groupevents[$Ec2Response] += $ec2Events
                    }
                    Else {
                        # Non-EC2 services keep the full record in one group.
                        IF ($null -eq $groupevents[$b]) {
                            Add-Member -inputobject $groupevents -Name $b -MemberType NoteProperty -value @() -Force
                            $groupevents[$b] = @()
                            $eventSources += $b
                        }
                        $groupevents[$b] += $log
                    }
                    # Core fields are always captured for the "_All" table.
                    foreach ($col in $eventobjectlist) {
                        $logdetails += @{$col = $log.$col }
                    }
                    $coreEvents += $Logdetails
                }
                # Environment-variable toggles decide which table layout(s) receive data.
                IF ($IsCoreFieldsAllTable -eq "true" -and $IsSplitAWSResourceTypes -eq "true") {
                    Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
                    Ingest-AWS-ResourceType-Multi-Tables -EventSources $eventSources -GroupEvents $groupevents
                }
                ELSEIF ($IsCoreFieldsAllTable -eq "true" -and $IsSplitAWSResourceTypes -eq "false"){
                    Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
                }
                ELSEIF ($IsCoreFieldsAllTable -eq "false" -and $IsSplitAWSResourceTypes -eq "true"){
                    Ingest-AWS-ResourceType-Multi-Tables -EventSources $eventSources -GroupEvents $groupevents
                }
                ELSE {
                    Write-Host "Make sure you have correct values supplied in Environment Variables for CoreFieldsAllTable and SplitAWSResourceTypeTables"
                }
                $null = Remove-Variable -Name groupevents
                $null = Remove-Variable -Name LogEvents
            }
            ELSEIF ($downloadedFile.Extension -eq '.json'){
                # Plain JSON delivery (e.g. CloudWatch/GuardDuty/SecurityHub):
                # ingested into the core "_All" table only.
                $coreEvents = Get-Content -Raw -LiteralPath ("/tmp/$filename") | ConvertFrom-Json
                Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
            }
            ELSEIF ($downloadedFile.Extension -eq '.csv'){
                # CSV delivery: rows ingested into the core "_All" table only.
                $coreEvents = import-csv "/tmp/$filename"
                Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
            }
        }
    }
}

Двоичный файл не отображается.

Просмотреть файл

@ -0,0 +1,348 @@
# PowerShell script file to be executed as a AWS Lambda function.
#
# When executing in Lambda the following variables will be predefined.
# $LambdaInput - A PSObject that contains the Lambda function input data.
# $LambdaContext - An Amazon.Lambda.Core.ILambdaContext object that contains information about the currently running Lambda environment.
#
# The last item in the PowerShell pipeline will be returned as the result of the Lambda function.
#
# To include PowerShell modules with your Lambda function, like the AWS.Tools.S3 module, add a "#Requires" statement
# indicating the module and version. If using an AWS.Tools.* module the AWS.Tools.Common module is also required.
#
# The following link contains documentation describing the structure of the S3 event object.
# https://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
#
# This example demonstrates how to process an S3 Event that follows the process:
# S3 Event -> SNS Topic -> Lambda Function
#Requires -Modules @{ModuleName='AWS.Tools.Common';ModuleVersion='4.1.5.0'}
#Requires -Modules @{ModuleName='AWS.Tools.S3';ModuleVersion='4.1.5.0'}
#Requires -Modules @{ModuleName='AWS.Tools.SecretsManager';ModuleVersion='4.1.5.0'}
# Uncomment to send the input event to CloudWatch Logs
#Write-Host (ConvertTo-Json -InputObject $LambdaInput -Compress -Depth 5)
#$PSVersionTable
# Get the current universal time in the default string format.
$currentUTCtime = (Get-Date).ToUniversalTime()
# Code to retrieve credentials from AWS Secrets Manager.
# The secret named by the SecretName environment variable must expose the
# Log Analytics workspace id and key as the LAWID / LAWKEY properties.
$secretName = $env:SecretName
$secretValue = ConvertFrom-Json (Get-SECSecretValue -SecretId $secretName -ErrorAction Stop -Verbose).SecretString -ErrorAction Stop
$workspaceId = $secretValue.LAWID
$workspaceKey = $secretValue.LAWKEY
# Destination table-name prefix plus the two routing toggles, all supplied as
# Lambda environment variables.
$LATableName = $env:LogAnalyticsTableName
$IsCoreFieldsAllTable = $env:CoreFieldsAllTable
$IsSplitAWSResourceTypes = $env:SplitAWSResourceTypeTables
# Optional Azure resource id stamped on ingested records; empty by default.
$ResourceID = ''
# The $eventobjectlist is the Json Parameter field names that form the core of the Json message that we want in the ALL Table in Log Analytics
$eventobjectlist = @('eventTime', 'eventVersion', 'userIdentity', 'eventSource', 'eventName', 'awsRegion', 'sourceIPAddress', 'userAgent', 'errorCode', 'errorMessage', 'requestID', 'eventID', 'eventType', 'apiVersion', 'managementEvent', 'readOnly', 'resources', 'recipientAccountId', 'serviceEventDetails', 'sharedEventID', 'vpcEndpointId', 'eventCategory', 'additionalEventData')
Function Expand-GZipFile {
    <#
    .SYNOPSIS
        Decompresses a GZip-compressed file to a destination file.
    .PARAMETER infile
        Path of the .gz file to read.
    .PARAMETER outfile
        Path the decompressed content is written to (created or overwritten).
    .NOTES
        FIX: the original closed its three streams only on the success path,
        leaking file handles if the copy threw; streams are now closed in
        try/finally. The manual 1 KB read/write loop is replaced by
        Stream.CopyTo, which is equivalent and uses a larger internal buffer.
    #>
    Param(
        $infile,
        $outfile
    )
    Write-Host "Processing Expand-GZipFile for: infile = $infile, outfile = $outfile"
    $inputfile = New-Object System.IO.FileStream $infile, ([IO.FileMode]::Open), ([IO.FileAccess]::Read), ([IO.FileShare]::Read)
    try {
        $output = New-Object System.IO.FileStream $outfile, ([IO.FileMode]::Create), ([IO.FileAccess]::Write), ([IO.FileShare]::None)
        try {
            $gzipStream = New-Object System.IO.Compression.GzipStream $inputfile, ([IO.Compression.CompressionMode]::Decompress)
            try {
                $gzipStream.CopyTo($output)
            }
            finally {
                $gzipStream.Close()
            }
        }
        finally {
            $output.Close()
        }
    }
    finally {
        $inputfile.Close()
    }
}
#function to create HTTP Header signature required to authenticate post
Function New-BuildSignature {
    <#
    .SYNOPSIS
        Builds the SharedKey authorization header value for the Azure Monitor
        HTTP Data Collector API.
    .DESCRIPTION
        Hashes "<method>`n<contentLength>`n<contentType>`nx-ms-date:<date>`n<resource>"
        with HMAC-SHA256 keyed by the Base64-decoded workspace shared key and
        returns "SharedKey <customerId>:<base64 hash>".
    .PARAMETER customerId
        Log Analytics workspace ID.
    .PARAMETER sharedKey
        Base64-encoded workspace shared key.
    .PARAMETER date
        RFC 1123 date string, must match the request's x-ms-date header.
    .NOTES
        FIX: the HMACSHA256 instance was never disposed; it now is.
    #>
    param(
        $customerId,
        $sharedKey,
        $date,
        $contentLength,
        $method,
        $contentType,
        $resource )
    $xHeaders = "x-ms-date:" + $date
    $stringToHash = $method + "`n" + $contentLength + "`n" + $contentType + "`n" + $xHeaders + "`n" + $resource
    $bytesToHash = [Text.Encoding]::UTF8.GetBytes($stringToHash)
    $keyBytes = [Convert]::FromBase64String($sharedKey)
    $sha256 = New-Object System.Security.Cryptography.HMACSHA256
    try {
        $sha256.Key = $keyBytes
        $calculatedHash = $sha256.ComputeHash($bytesToHash)
    }
    finally {
        $sha256.Dispose()
    }
    $encodedHash = [Convert]::ToBase64String($calculatedHash)
    $authorization = 'SharedKey {0}:{1}' -f $customerId, $encodedHash
    return $authorization
}
# Function to create and post the request
Function Invoke-LogAnalyticsData {
    <#
    .SYNOPSIS
        Posts a JSON payload to the Azure Monitor HTTP Data Collector API.
    .DESCRIPTION
        Builds the SharedKey authorization header via New-BuildSignature and
        POSTs $Body to https://<CustomerId>.ods.opinsights.azure.com/api/logs.
        On HTTP 429 (Too Many Requests) it sleeps a random 10-80 seconds and
        retries until the request is accepted.
    .PARAMETER CustomerId
        Log Analytics workspace ID.
    .PARAMETER SharedKey
        Base64-encoded workspace shared key.
    .PARAMETER Body
        JSON payload to ingest.
    .PARAMETER LogTable
        Custom log table name (sent as the Log-Type header).
    .PARAMETER TimeStampField
        Event property used as TimeGenerated (time-generated-field header).
    .PARAMETER resourceId
        Value for the x-ms-AzureResourceId header.
    .OUTPUTS
        The HTTP status code of the final request (200 on success).
    #>
    Param(
        $CustomerId,
        $SharedKey,
        $Body,
        $LogTable,
        $TimeStampField,
        $resourceId)
    $method = "POST"
    $contentType = "application/json"
    $resource = "/api/logs"
    $rfc1123date = [DateTime]::UtcNow.ToString("r")
    $contentLength = $Body.Length
    $signature = New-BuildSignature `
        -customerId $CustomerId `
        -sharedKey $SharedKey `
        -date $rfc1123date `
        -contentLength $contentLength `
        -method $method `
        -contentType $contentType `
        -resource $resource
    $uri = "https://" + $CustomerId + ".ods.opinsights.azure.com" + $resource + "?api-version=2016-04-01"
    $headers1 = @{
        "Authorization"        = $signature;
        "Log-Type"             = $LogTable;
        "x-ms-date"            = $rfc1123date;
        "x-ms-AzureResourceId" = $resourceId;
        "time-generated-field" = $TimeStampField;
    }
    $status = $false
    do {
        $response = Invoke-WebRequest -Uri $uri -Method $method -ContentType $contentType -Headers $headers1 -Body $Body
        # Requests arriving faster than the service allows receive HTTP 429
        # (Too Many Requests); back off for a random 10-80 seconds and retry.
        # BUGFIX: this check previously read "$reponse.StatusCode" (typo), which
        # is always $null, so throttled posts were never retried.
        If ($response.StatusCode -eq 429) {
            $rand = Get-Random -Minimum 10 -Maximum 80
            Start-Sleep -Seconds $rand
        }
        else { $status = $true }
    } until ($status)
    Remove-Variable -Name Body
    return $response.StatusCode
}
Function Ingest-Core-Fields-Single-Table {
<#
.SYNOPSIS
    Sends the core-field event set to a single Log Analytics custom table
    named "<LogAnalyticsTableName>_All".
.DESCRIPTION
    Serializes $coreEvents to compressed JSON and posts it through
    Invoke-LogAnalyticsData, using the module-level $workspaceId,
    $workspaceKey, $LATableName and $ResourceID. If the JSON exceeds 28 MB
    the events are split into batches of roughly TotalRecords/bits records
    (bits sized so each batch's JSON is ~20 MB) and posted one batch at a time.
.PARAMETER coreEvents
    Array of hashtables, each holding the core CloudTrail fields for one event.
#>
Param(
$coreEvents)
$coreJson = convertto-json $coreEvents -depth 5 -Compress
$Table = "$LATableName" + "_All"
IF (($corejson.Length) -gt 28MB) {
Write-Host "Log length is greater than 28 MB, splitting and sending to Log Analytics"
# Number of batches, sized so each batch serializes to roughly 20 MB.
$bits = [math]::Round(($corejson.length) / 20MB) + 1
$TotalRecords = $coreEvents.Count
$RecSetSize = [math]::Round($TotalRecords / $bits) + 1
$start = 0
For ($x = 0; $x -lt $bits; $x++) {
# Clamp the final batch to the last record index.
IF ( ($start + $recsetsize) -gt $TotalRecords) {
$finish = $totalRecords
}
ELSE {
$finish = $start + $RecSetSize
}
# Inclusive slice [$start..$finish]; the next batch starts at $finish + 1.
$body = Convertto-Json ($coreEvents[$start..$finish]) -Depth 5 -Compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $body -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
$start = $finish + 1
}
$null = Remove-variable -name body
}
Else {
# Payload is small enough to post in one request.
#$logEvents = Convertto-Json $events -depth 20 -compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $coreJson -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
}
# Free the (potentially large) event array and its JSON before returning.
$null = remove-variable -name coreEvents
$null = remove-variable -name coreJson
}
Function Ingest-AWS-ResourceType-Multi-Tables {
<#
.SYNOPSIS
    Sends each per-AWS-service event group to its own Log Analytics custom
    table named "<LogAnalyticsTableName>_<service>".
.DESCRIPTION
    For every key in $eventSources, serializes $groupEvents[<key>] to
    compressed JSON and posts it through Invoke-LogAnalyticsData (module-level
    $workspaceId/$workspaceKey/$LATableName/$ResourceID). Groups whose JSON
    exceeds 28 MB are split into batches whose JSON is roughly 20 MB each,
    mirroring Ingest-Core-Fields-Single-Table.
.PARAMETER eventSources
    List of group names (e.g. "ec2_Header", "s3") used as table suffixes.
.PARAMETER groupEvents
    Hashtable mapping each group name to its array of event records.
#>
Param(
$eventSources,
$groupEvents)
$RecCount = 0
foreach ($d in $eventSources) {
#$events = $groupevents[$d]
$eventsJson = ConvertTo-Json $groupEvents[$d] -depth 5 -Compress
$Table = $LATableName + '_' + $d
$TotalRecords = $groupEvents[$d].Count
# Running total of records processed across all groups.
$recCount += $TotalRecords
IF (($eventsjson.Length) -gt 28MB) {
#$events = Convertfrom-json $corejson
# Number of batches, sized so each batch serializes to roughly 20 MB.
$bits = [math]::Round(($eventsjson.length) / 20MB) + 1
$TotalRecords = $groupEvents[$d].Count
$RecSetSize = [math]::Round($TotalRecords / $bits) + 1
$start = 0
For ($x = 0; $x -lt $bits; $x++) {
# Clamp the final batch to the last record index.
IF ( ($start + $recsetsize) -gt $TotalRecords) {
$finish = $totalRecords
}
ELSE {
$finish = $start + $RecSetSize
}
# Inclusive slice [$start..$finish]; the next batch starts at $finish + 1.
$body = Convertto-Json ($groupEvents[$d][$start..$finish]) -Depth 5 -Compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $body -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
$start = $finish + 1
}
$null = Remove-variable -name body
}
Else {
# Group is small enough to post in one request.
#$logEvents = Convertto-Json $events -depth 20 -compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $eventsJson -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
}
}
}
# Lambda entry loop: each SQS record wraps an S3 event notification. Every
# referenced S3 object is downloaded to /tmp, decoded by file extension
# (.gz CloudTrail archive, plain .json, or .csv) and ingested into Log
# Analytics according to the CoreFieldsAllTable / SplitAWSResourceTypeTables
# environment switches.
foreach ($sqsRecord in $LambdaInput.Records)
{
$sqsRecordBody = ConvertFrom-Json -InputObject $sqsRecord.body
foreach ($s3Event in $sqsRecordBody.Records)
{
$s3BucketName = $s3Event.s3.bucket.name
$s3BucketKey = $s3Event.s3.object.key
Write-Host "Processing event for: bucket = $s3BucketName, key = $s3BucketKey"
IF ($Null -ne $s3BucketName -and $Null -ne $s3BucketKey) {
# S3 keys URL-encode ':' as '%3A'; restore it, take the last path segment
# as the local file name, and replace ':' with '_' for the filesystem.
$s3KeyPath = $s3BucketKey -Replace ('%3A', ':')
$fileNameSplit = $s3KeyPath.split('/')
$fileSplits = $fileNameSplit.Length - 1
$fileName = $filenameSplit[$fileSplits].replace(':', '_')
$downloadedFile = Read-S3Object -BucketName $s3BucketName -Key $s3BucketKey -File "/tmp/$filename"
Write-Host "Object $s3BucketKey is $($downloadedFile.Size) bytes; Extension is $($downloadedFile.Extension)"
IF ($downloadedFile.Extension -eq '.gz' ) {
# Decompress the archive, delete the .gz, then load the expanded JSON.
$infile = "/tmp/$filename"
$outfile = "/tmp/" + $filename -replace ($downloadedFile.Extension, '')
Expand-GZipFile $infile.Trim() $outfile.Trim()
$null = Remove-Item -Path $infile -Force -Recurse -ErrorAction Ignore
$filename = $filename -replace ($downloadedFile.Extension, '')
$filename = $filename.Trim()
$logEvents = Get-Content -Raw -LiteralPath ("/tmp/$filename" )
# CloudTrail files have the shape {"Records":[...]}; drop the trailing '}'
# and the '{"Records":' prefix to leave a bare JSON array of events.
$logEvents = $LogEvents.Substring(0, ($LogEvents.length) - 1)
$LogEvents = $LogEvents -Replace ('{"Records":', '')
$loglength = $logEvents.Length
$logevents = Convertfrom-json $LogEvents -AsHashTable
# $groupevents: per-service record sets keyed by group name;
# $coreEvents: core fields of every event for the "_All" table;
# $eventSources: ordered list of group names created below.
$groupevents = @{}
$coreEvents = @()
$eventSources = @()
Foreach ($log in $logevents) {
$Logdetails = @{}
$Logdetails1 = @{}
# Group key = first label of eventSource (e.g. "ec2" from
# "ec2.amazonaws.com") with '-' removed.
$b = ((($log.eventSource).split('.'))[0]) -replace ('-', '')
IF ($b -eq 'ec2') {
# EC2 events are split into three record sets: Header (core fields),
# Request (requestParameters) and Response (responseElements).
foreach ($col in $eventobjectlist) {
$logdetails1 += @{$col = $log.$col }
}
$ec2Header = $b + '_Header'
IF ($null -eq $groupevents[$ec2Header]) {
# NOTE(review): Add-Member on the hashtable looks redundant with the
# assignment on the next line - confirm before relying on it.
Add-Member -inputobject $groupevents -Name $b -MemberType NoteProperty -value @() -Force
$groupevents[$ec2Header] = @()
$eventSources += $ec2Header
}
$groupevents[$ec2Header] += $Logdetails1
$Ec2Request = $b + '_Request'
IF ($null -eq $groupevents[$Ec2Request]) {
Add-Member -inputobject $groupevents -Name $Ec2Request -MemberType NoteProperty -value @() -Force
$groupevents[$Ec2Request] = @()
$eventSources += $Ec2Request
}
# Correlation fields plus the raw request parameters.
$ec2Events = @{}
$ec2Events += @{'eventID' = $log.eventID }
$ec2Events += @{'awsRegion' = $log.awsRegion }
$ec2Events += @{'requestID' = $log.requestID }
$ec2Events += @{'eventTime' = $log.eventTime }
$ec2Events += @{'requestParameters' = $log.requestParameters }
$groupevents[$Ec2Request] += $ec2Events
$Ec2Response = $b + '_Response'
IF ($null -eq $groupevents[$Ec2Response]) {
Add-Member -inputobject $groupevents -Name $Ec2Response -MemberType NoteProperty -value @() -Force
$groupevents[$Ec2Response] = @()
$eventSources += $Ec2Response
}
# Correlation fields plus the raw response elements.
$ec2Events = @{}
$ec2Events += @{'eventID' = $log.eventID }
$ec2Events += @{'awsRegion' = $log.awsRegion }
$ec2Events += @{'requestID' = $log.requestID }
$ec2Events += @{'eventTime' = $log.eventTime }
$ec2Events += @{'responseElements' = $log.responseElements }
$groupevents[$Ec2Response] += $ec2Events
}
Else {
# Non-EC2 services keep the whole event in a single per-service group.
IF ($null -eq $groupevents[$b]) {
Add-Member -inputobject $groupevents -Name $b -MemberType NoteProperty -value @() -Force
$groupevents[$b] = @()
$eventSources += $b
}
$groupevents[$b] += $log
}
# Every event (EC2 or not) contributes its core fields to the "_All" set.
foreach ($col in $eventobjectlist) {
$logdetails += @{$col = $log.$col }
}
$coreEvents += $Logdetails
}
# Dispatch according to the two environment switches (string "true"/"false").
IF ($IsCoreFieldsAllTable -eq "true" -and $IsSplitAWSResourceTypes -eq "true") {
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
Ingest-AWS-ResourceType-Multi-Tables -EventSources $eventSources -GroupEvents $groupevents
}
ELSEIF ($IsCoreFieldsAllTable -eq "true" -and $IsSplitAWSResourceTypes -eq "false"){
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
ELSEIF ($IsCoreFieldsAllTable -eq "false" -and $IsSplitAWSResourceTypes -eq "true"){
Ingest-AWS-ResourceType-Multi-Tables -EventSources $eventSources -GroupEvents $groupevents
}
ELSE {
Write-Host "Make sure you have correct values supplied in Environment Variables for CoreFieldsAllTable and SplitAWSResourceTypeTables"
}
$null = Remove-Variable -Name groupevents
$null = Remove-Variable -Name LogEvents
}
ELSEIF ($downloadedFile.Extension -eq '.json'){
# Plain JSON files are ingested as-is into the "_All" table.
$coreEvents = Get-Content -Raw -LiteralPath ("/tmp/$filename") | ConvertFrom-Json
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
ELSEIF ($downloadedFile.Extension -eq '.csv'){
# CSV files are parsed row-by-row and ingested into the "_All" table.
$coreEvents = import-csv "/tmp/$filename"
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
}
}
}

Двоичный файл не отображается.

Просмотреть файл

@ -168,5 +168,21 @@
"title": "",
"description": "**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some inital failure messages."
}
]
],
"metadata" : {
"id": "152fa8d4-b84b-4370-8317-b63ed52f9fe3",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Agari"
},
"support": {
"name": "Agari",
"link": "https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support",
"tier": "developer"
}
}
}

Просмотреть файл

@ -152,5 +152,21 @@
"title": "",
"description": "> You should now be able to receive logs in the *AlsidForADLog_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates."
}
]
],
"metadata": {
"id": "12ff1831-b733-4861-a3e7-6115d20106f4",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Alsid"
},
"support": {
"name": "Alsid",
"link": "https://www.alsid.com/contact-us/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -28,7 +28,7 @@
}
],
"availability": {
"status": 1,
"status": 2,
"isPreview": true
},
"permissions": {

Просмотреть файл

@ -91,7 +91,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -125,5 +125,21 @@
}
]
}
]
],
"metadata": {
"id": "31f0ea52-dcd4-443b-9d04-a3e709addebc",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Better Mobile"
},
"support": {
"name": "Better Mobile",
"email": "support@better.mobi",
"tier": "developer"
}
}
}

Просмотреть файл

@ -97,7 +97,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -139,5 +139,21 @@
}
]
}
]
],
"metadata": {
"id": "3be993d4-3aa7-41de-8280-e62de7859eca",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Beyond Security"
},
"support": {
"name": "Beyond Security",
"link": "https://beyondsecurity.freshdesk.com/support/home",
"tier": "developer"
}
}
}

Просмотреть файл

@ -122,5 +122,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "7504f78d-1928-4399-a1ae-ba826c47c42d",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Citrix Systems"
},
"support": {
"name": "Citrix Systems",
"link": "https://www.citrix.com/support/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -68,12 +68,6 @@
"action": true
}
}
],
"customs": [
{
"name": "Include custom pre-requisites if the connectivity requires - else delete customs",
"description": "Description for any custom pre-requisite"
}
]
},
"instructionSteps": [

Просмотреть файл

@ -114,5 +114,21 @@
}
]
}
]
],
"metadata": {
"id": "47835227-715b-4000-892e-e1fff81023c0",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "WatchGuard"
},
"support": {
"name": "WatchGuard",
"link": "https://www.watchguard.com/wgrd-support/overview",
"tier": "developer"
}
}
}

Просмотреть файл

@ -96,5 +96,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machines security according to your organizations security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "1c45e738-21dd-4fcd-9449-e2c9478e9552",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Cyberark"
},
"support": {
"name": "Cyberark",
"link": "https://www.cyberark.com/customer-support/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -122,5 +122,21 @@
"title": "5. Forcepoint integration installation guide ",
"description": "To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/ngfw-sentinel)"
}
]
],
"metadata":{
"id": "e002d400-e0b0-4673-959a-eec31378d17c",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Forcepoint"
},
"support": {
"name": "Forcepoint",
"link": "https://support.forcepoint.com/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -116,6 +116,22 @@
},
{ "title": "5. Forcepoint integration installation guide ",
"description": "To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/casb-sentinel)"
}
],
"metadata": {
"id": "04f93db2-8f2a-4edc-bb78-9e1e7587faff",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Forcepoint"
},
"support": {
"name": "Forcepoint",
"link": "https://support.forcepoint.com",
"tier": "developer"
}
}
]
}

Просмотреть файл

@ -57,7 +57,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -91,5 +91,21 @@
}
]
}
]
],
"metadata": {
"id": "c4961e1e-45b1-4565-a096-6e14561c90b6",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Forcepoint"
},
"support": {
"name": "Forcepoint",
"link": "https://support.forcepoint.com/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -118,5 +118,21 @@
"title" : "4. Secure your machine ",
"description" : "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "cb5b9a69-5ab1-445c-8491-6b96a2ea3100",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "ForgeRock"
},
"support": {
"name": "ForgeRock",
"link": "https://www.forgerock.com/support",
"tier": "developer"
}
}
}

Двоичный файл не отображается.

Просмотреть файл

@ -3,7 +3,7 @@
Language: PowerShell
Version: 1.2
Author: Nicholas Dicola, Sreedhar Ande
Last Modified: 02/08/2021
Last Modified: 03/12/2021
DESCRIPTION
This Function App calls the GitHub REST API (https://api.github.com/) to pull the GitHub
@ -36,25 +36,21 @@ $AzureWebJobsStorage = $env:AzureWebJobsStorage
$personalAccessToken = $env:PersonalAccessToken
$workspaceId = $env:WorkspaceId
$workspaceKey = $env:WorkspaceKey
$LAURI = $env:LAURI
$storageAccountContainer = "github-repo-logs"
$AuditLogTable = $env:GitHubAuditLogsTableName
if ([string]::IsNullOrEmpty($AuditLogTable))
{
$AuditLogTable = "GitHub_CL"
}
$RepoLogTable = $env:GitHubRepoLogsTableName
if ([string]::IsNullOrEmpty($RepoLogTable))
{
$RepoLogTable = "GitHubRepoLogs_CL"
}
#The AzureTenant variable is used to specify other cloud environments like Azure Gov(.us) etc.,
$AzureTenant = $env:AZURE_TENANT
$AuditLogTable = "GitHub_CL"
$RepoLogTable = "GitHubRepoLogs_CL"
$currentStartTime = (get-date).ToUniversalTime() | get-date -Format yyyy-MM-ddTHH:mm:ss:ffffffZ
if (-Not [string]::IsNullOrEmpty($LAURI)){
if($LAURI.Trim() -notmatch 'https:\/\/([\w\-]+)\.ods\.opinsights\.azure.([a-zA-Z\.]+)$')
{
Write-Error -Message "DocuSign-SecurityEvents: Invalid Log Analytics Uri." -ErrorAction Stop
Exit
}
}
function Write-OMSLogfile {
<#
.SYNOPSIS
@ -129,12 +125,13 @@ function Write-OMSLogfile {
-contentType $ContentType `
-resource $resource
# Compatible with Commercial and Gov Tenants
if ([string]::IsNullOrEmpty($AzureTenant)){
$uri = "https://" + $CustomerId + ".ods.opinsights.azure.com" + $resource + "?api-version=2016-04-01"
# Compatible with previous version
if ([string]::IsNullOrEmpty($LAURI)){
$LAURI = "https://" + $CustomerId + ".ods.opinsights.azure.com" + $resource + "?api-version=2016-04-01"
}
else{
$uri = "https://" + $CustomerId + ".ods.opinsights.azure" +$AzureTenant + $resource + "?api-version=2016-04-01"
else
{
$LAURI = $LAURI + $resource + "?api-version=2016-04-01"
}
$headers = @{
@ -143,7 +140,7 @@ function Write-OMSLogfile {
"x-ms-date" = $rfc1123date
"time-generated-field" = $dateTime
}
$response = Invoke-WebRequest -Uri $uri -Method $method -ContentType $ContentType -Headers $headers -Body $Body -UseBasicParsing
$response = Invoke-WebRequest -Uri $LAURI -Method $method -ContentType $ContentType -Headers $headers -Body $Body -UseBasicParsing
Write-Verbose -message ('Post Function Return Code ' + $response.statuscode)
return $response.statuscode
}
@ -398,14 +395,23 @@ foreach($org in $githubOrgs){
$forkLogs | Add-Member -NotePropertyName LogType -NotePropertyValue Forks
#Send to log A
SendToLogA -gitHubData $forkLogs -customLogName $RepoLogTable
}
}
$uri = "https://api.github.com/repos/$orgName/$repoName/secret-scanning/alerts"
$secretscanningalerts = $null
$secretscanningalerts = Invoke-RestMethod -Method Get -Uri $uri -Headers $headers
if ($secretscanningalerts.Length -gt 0){
$secretscanningalerts | Add-Member -NotePropertyName OrgName -NotePropertyValue $orgName
$secretscanningalerts | Add-Member -NotePropertyName Repository -NotePropertyValue $repoName
$secretscanningalerts | Add-Member -NotePropertyName LogType -NotePropertyValue SecretScanningAlerts
#Send to log A
SendToLogA -gitHubData $secretscanningalerts -customLogName $RepoLogTable
}
}
else {
Write-Host "$repoName is empty"
Write-Verbose "$repoName is empty"
}
}
}
# get blobs for last run

Просмотреть файл

@ -1,3 +1,11 @@
## 1.3
- Added secret-scanning/alerts logs
- Updated ARM template to support both Commercial and Azure Gov
- Removed previously added logic
- Environment variables to provide additional support for users to supply their own values for Table names
## 1.2
- Fixed issues raised on Sentinel GitHub Repo on AuditLogs
- Updated logic to ingest each AuditLog as an individual record

Просмотреть файл

@ -118,7 +118,7 @@
},
{
"title": "Option 1 - Azure Resource Manager (ARM) Template",
"description": "This method provides an automated deployment of the GitHub Data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazurecomdeploy_dotcomtenants.json)\t[![Deploy To Azure Gov](https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/1-CONTRIBUTION-GUIDE/images/deploytoazuregov.png)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazuregovdeploy_dotustenants.json)\t(**.us Tenant**)\n\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Personal Access Token** \n> - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
"description": "This method provides an automated deployment of the GitHub Data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazuredeploy.json)\n\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Personal Access Token** \n> - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
},
{
"title": "Option 2 - Manual Deployment of Azure Functions",
@ -130,7 +130,7 @@
},
{
"title": "",
"description": "**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fandedevsecops%2FAzure-Sentinel%2Faz-func-github-dataconnector%2FDataConnectors%2FGithubFunction%2Fazuredeploy_GitHubData.json) and paste into the Function App `run.ps1` editor.\n5. Click **Save**."
"description": "**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fandedevsecops%2FAzure-Sentinel%2Faz-func-github-dataconnector%2FDataConnectors%2FGithubFunction%2Fazuredeploy.json) and paste into the Function App `run.ps1` editor.\n5. Click **Save**."
},
{
"title": "",

Просмотреть файл

@ -1,4 +1,5 @@
{
"org":"",
"lastContext": "",
"lastRun": ""
}

Просмотреть файл

@ -9,27 +9,24 @@
"description": "Specifies the name of the Function App."
}
},
"PersonalAccessToken": {
"defaultValue": "Enter the GitHub Personal Access Token (PAT)",
"type": "string",
"PersonalAccessToken": {
"type": "securestring",
"metadata": {
"description": "Specifies GitHub Enterprise Personal Access Token."
}
},
"WorkspaceId": {
"type": "string",
"defaultValue": "<WorkspaceId>",
"type": "string",
"metadata": {
"description": "Specifies the Log Analytics Workspace Id."
}
},
"WorkspaceKey": {
"type": "string",
"defaultValue": "<WorkspaceKey>",
"type": "securestring",
"metadata": {
"description": "Specifies the Log Analytics Workspace Key."
}
},
},
"FunctionSchedule": {
"type": "string",
"defaultValue": "0 */10 * * * *",
@ -40,10 +37,13 @@
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"KeyVaultName": "[tolower(concat('githubkv', uniqueString(resourceGroup().id, subscription().id)))]",
"StorageAccountName":"[concat(substring(variables('FunctionName'), 0, 20), 'sa')]",
"KeyVaultName": "[concat(substring(variables('FunctionName'), 0, 20), 'kv')]",
"GitAPIToken": "GitAPIToken",
"LogAnalyticsWorkspaceKey": "LogAnalyticsWorkspaceKey",
"StorageContainerName": "github-repo-logs"
"StorageContainerName": "github-repo-logs",
"StorageSuffix":"[environment().suffixes.storage]",
"LogAnaltyicsUri":"[replace(environment().portal, 'https://portal', concat('https://', toLower(parameters('WorkspaceId')), '.ods.opinsights'))]"
},
"resources": [
{
@ -60,7 +60,7 @@
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"name": "[variables('StorageAccountName')]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
@ -112,9 +112,9 @@
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"name": "[concat(variables('StorageAccountName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"sku": {
"name": "Standard_LRS",
@ -133,9 +133,9 @@
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"name": "[concat(variables('StorageAccountName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"sku": {
"name": "Standard_LRS",
@ -154,7 +154,7 @@
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]",
"[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
@ -167,7 +167,10 @@
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true
"alwaysOn": true,
"siteConfig": {
"powerShellVersion": "~7"
}
},
"resources": [
{
@ -185,16 +188,15 @@
"FUNCTIONS_WORKER_RUNTIME": "powershell",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('StorageAccountName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName')), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('StorageAccountName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName')), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"WEBSITE_CONTENTSHARE": "[toLower(variables('FunctionName'))]",
"PersonalAccessToken": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('GitAPIToken')).secretUriWithVersion, ')')]",
"TMPDIR": "D:\\local\\Temp",
"TMPDIR": "C:\\local\\Temp",
"WorkspaceId": "[parameters('WorkspaceId')]",
"WorkspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('LogAnalyticsWorkspaceKey')).secretUriWithVersion, ')')]",
"Schedule": "[parameters('FunctionSchedule')]",
"GitHubAuditLogsTableName": "GitHubAuditLogs",
"GitHubRepoLogsTableName": "GitHubRepoLogs",
"LAURI": "[variables('LogAnaltyicsUri')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/githubazurefunctionzip"
}
}
@ -263,26 +265,13 @@
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.net')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"name": "[concat(variables('StorageAccountName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('StorageAccountName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"properties": {
"publicAccess": "None"
@ -291,10 +280,10 @@
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"name": "[concat(variables('StorageAccountName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('StorageAccountName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"properties": {
"publicAccess": "None"
@ -303,10 +292,10 @@
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), concat('/default/', variables('StorageContainerName')))]",
"name": "[concat(variables('StorageAccountName'), concat('/default/', variables('StorageContainerName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('StorageAccountName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"properties": {
"publicAccess": "None"
@ -315,10 +304,10 @@
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"name": "[concat(variables('StorageAccountName'), '/default/', tolower(variables('StorageAccountName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('StorageAccountName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"properties": {
"shareQuota": 5120

Просмотреть файл

@ -1,329 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"FunctionName": {
"defaultValue": "GitHubLogs",
"type": "string",
"metadata": {
"description": "Specifies the name of the Function App."
}
},
"PersonalAccessToken": {
"defaultValue": "Enter the GitHub Personal Access Token (PAT)",
"type": "string",
"metadata": {
"description": "Specifies GitHub Enterprise Personal Access Token."
}
},
"WorkspaceId": {
"type": "string",
"defaultValue": "<WorkspaceId>",
"metadata": {
"description": "Specifies the Log Analytics Workspace Id."
}
},
"WorkspaceKey": {
"type": "string",
"defaultValue": "<WorkspaceKey>",
"metadata": {
"description": "Specifies the Log Analytics Workspace Key."
}
},
"FunctionSchedule": {
"type": "string",
"defaultValue": "0 */10 * * * *",
"metadata": {
        "description": "For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression) (see the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 1 hour is `0 0 * * * *`. This, in plain text, means: when seconds and minutes are both equal to 0, for any hour, day of the month, month, day of the week, or year"
}
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"KeyVaultName": "[tolower(concat('githubkv', uniqueString(resourceGroup().id, subscription().id)))]",
"GitAPIToken": "GitAPIToken",
"LogAnalyticsWorkspaceKey": "LogAnalyticsWorkspaceKey",
"StorageContainerName": "github-repo-logs"
},
"resources": [
{
"type": "Microsoft.Insights/components",
"apiVersion": "2015-05-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"kind": "web",
"properties": {
"Application_Type": "web",
"ApplicationId": "[variables('FunctionName')]"
}
},
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"kind": "StorageV2",
"properties": {
"networkAcls": {
"bypass": "AzureServices",
"virtualNetworkRules": [
],
"ipRules": [
],
"defaultAction": "Allow"
},
"supportsHttpsTrafficOnly": true,
"encryption": {
"services": {
"file": {
"keyType": "Account",
"enabled": true
},
"blob": {
"keyType": "Account",
"enabled": true
}
},
"keySource": "Microsoft.Storage"
}
}
},
{
"type": "Microsoft.Web/serverfarms",
"apiVersion": "2018-02-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Y1",
"tier": "Dynamic"
},
"kind": "functionapp",
"properties": {
"name": "[variables('FunctionName')]",
"workerSize": "0",
"workerSizeId": "0",
"numberOfWorkers": "1"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": [
]
},
"deleteRetentionPolicy": {
"enabled": false
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": [
]
}
}
},
{
"type": "Microsoft.Web/sites",
"apiVersion": "2018-11-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
"kind": "functionapp",
"identity": {
"type": "SystemAssigned"
},
"properties": {
"name": "[variables('FunctionName')]",
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true
},
"resources": [
{
"apiVersion": "2018-11-01",
"type": "config",
"name": "appsettings",
"dependsOn": [
"[concat('Microsoft.Web/sites/', variables('FunctionName'))]",
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('GitAPIToken'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('LogAnalyticsWorkspaceKey'))]"
],
"properties": {
"FUNCTIONS_EXTENSION_VERSION": "~3",
"FUNCTIONS_WORKER_RUNTIME": "powershell",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.usgovcloudapi.net')]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.usgovcloudapi.net')]",
"WEBSITE_CONTENTSHARE": "[toLower(variables('FunctionName'))]",
"PersonalAccessToken": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('GitAPIToken')).secretUriWithVersion, ')')]",
"TMPDIR": "D:\\local\\Temp",
"WorkspaceId": "[parameters('WorkspaceId')]",
"WorkspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('LogAnalyticsWorkspaceKey')).secretUriWithVersion, ')')]",
"Schedule": "[parameters('FunctionSchedule')]",
"AZURE_TENANT": ".us",
"GitHubAuditLogsTableName": "GitHubAuditLogs",
"GitHubRepoLogsTableName": "GitHubRepoLogs",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/githubazurefunctionzip"
}
}
]
},
{
"type": "Microsoft.KeyVault/vaults",
"apiVersion": "2016-10-01",
"name": "[variables('KeyVaultName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"sku": {
"family": "A",
"name": "Standard"
},
"tenantId": "[subscription().tenantId]",
"accessPolicies": [
{
"tenantId": "[subscription().tenantId]",
"objectId": "[reference(resourceId('Microsoft.Web/sites', variables('FunctionName')),'2019-08-01', 'full').identity.principalId]",
"permissions": {
"secrets": [ "get",
"list"
]
}
}
],
"enabledForDeployment": false,
"enabledForDiskEncryption": false,
"enabledForTemplateDeployment": true,
"enableSoftDelete": true
},
"resources": [
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('GitAPIToken')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('PersonalAccessToken')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
},
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('LogAnalyticsWorkspaceKey')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('WorkspaceKey')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.us')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), concat('/default/', variables('StorageContainerName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"shareQuota": 5120
}
}
]
}

Просмотреть файл

@ -12,16 +12,30 @@ Following are the configuration steps to deploy Function App.
A GitHub API Token is required. See the documentation to learn more about the [GitHub Personal Access Token](https://github.com/settings/tokens/).
## Configuration Steps
1. Deploy the ARM template and fill in the parameters.
## Configuration Steps to Deploy Function App
1. Click on Deploy to Azure (For both Commercial & Azure GOV)
<a href="https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazuredeploy.json" target="_blank">
<img src="https://aka.ms/deploytoazurebutton"/>
</a>
2. Select the preferred **Subscription**, **Resource Group** and **Location**
**Note**
Best practice: Create a new Resource Group while deploying - all the resources of your custom Data connector will reside in the newly created Resource Group
3. Enter the following value in the ARM template deployment
```
"PersonalAccessToken": This is the GITHUB PAT
"Workspace Id": The Sentinel Log Analytics Workspace Id
"Workspace Key": The Sentinel Log Analytics Workspace Key
"Function Schedule": The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule
"PersonalAccessToken": This is the GITHUB PAT
"Workspace Id": The Sentinel Log Analytics Workspace Id
"Workspace Key": The Sentinel Log Analytics Workspace Key
"Function Schedule": The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule
```
2. There are two json files (ORGS.json and lastrun-Audit.json) in Function Dependencies folder
3. Edit the ORGS.json file and update "org": "sampleorg" and replace sample org with your org name.
## Post Deployment Steps
1. There are two json files (ORGS.json and lastrun-Audit.json) in Function Dependencies folder
2. Edit the ORGS.json file and update "org": "sampleorg" and replace sample org with your org name.
```
If you have single org
[
@ -44,7 +58,7 @@ A GitHub API Token is required. See the documentation to learn more about the [G
]
```
4. Edit lastrun-Audit.json and update "org": "sampleorg" and replace sample org with your org name
3. Edit lastrun-Audit.json and update "org": "sampleorg" and replace sample org with your org name
```
If you have single org
@ -73,16 +87,16 @@ A GitHub API Token is required. See the documentation to learn more about the [G
]
```
5. Upload the following files to the storage account "github-repo-logs" container from
4. Upload the following files to the storage account "github-repo-logs" container from
```
ORGS.json
lastrun-Audit.json
```
6. PersonalAccessToken and Workspace Key will be placed as "Secrets" in the Azure KeyVault `githubkv<<uniqueid>>` with only Azure Function access policy. If you want to see/update these secrets,
5. PersonalAccessToken and Workspace Key will be placed as "Secrets" in the Azure KeyVault `<<Function App Name>><<uniqueid>>` with only Azure Function access policy. If you want to see/update these secrets,
```
a. Go to Azure KeyVault "githubkv<<uniqueid>>"
a. Go to Azure KeyVault `<<Function App Name>><<uniqueid>>`
b. Click on "Access Policies" under Settings
c. Click on "Add Access Policy"
i. Configure from template : Secret Management
@ -93,7 +107,7 @@ A GitHub API Token is required. See the documentation to learn more about the [G
```
7. The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule. This sample demonstrates a simple use case of calling your function based on your schedule provided while deploying. If you want to change
6. The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule. This sample demonstrates a simple use case of calling your function based on your schedule provided while deploying. If you want to change
the schedule
```
a. Click on Function App "Configuration" under Settings
@ -102,50 +116,21 @@ A GitHub API Token is required. See the documentation to learn more about the [G
```
**Note: For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)(See the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 5 minutes is `0 */5 * * * *`. This, in plain text, means: "When seconds is equal to 0, minutes is divisible by 5, for any hour, day of the month, month, day of the week, or year".**
8. Once Azure Function App is deployed
7. Once Azure Function App is deployed
```
a. Go to `githublogs<<uniqueid>>`
a. Go to `<<Function App Name>><<uniqueid>>`
b. Click on "Advanced Tools" under Development Tools
c. Click on Go --> You will be redirected to Web App --> Check Temp folder path.
d. It can be either C:\local\Temp\ or D:\local\Temp\.
```
9. After finding Temp folder path
8. After finding Temp folder path
```
a. Go to `githublogs<<uniqueid>>`
a. Go to `<<Function App Name>><<uniqueid>>`
b. Click on "Configuration" under Settings
c. Click on "TMPDIR" under "Application Settings"
        d. Update Drive (C/D) based on your findings from Step 7.
```
	**Note: Make sure the value in "TMPDIR" doesn't have "\\" at the end.**
10. **[Previous Version (prior to 2/9/2021) deployed users only ]**. If you want to ingest GitHub Audit & Repo logs into New custom logs, follow the steps
```
a. Go to `githublogs<<uniqueid>>`
b. Click on "Configuration" under Settings
c. Click on "New Application Setting"
d. Name --> GitHubAuditLogsTableName.
e. Value --> <<Your preferred table name for GitHub Audit Logs, for example GitHubAuditLogs>>
f. Click on "Ok"
g. Click on "New Application Setting"
h. Name --> GitHubRepoLogsTableName.
i. Value --> <<Your preferred table name for GitHub Repo Logs, for example GitHubRepoLogs>>
j. Click on "Ok"
```
**Note**
If you don't create these new environment variable, then it will be ingested to default
Audit Logs --> GitHub_CL
Repo Logs --> GitHubRepoLogs_CL
11. **For Azure Gov customers only**, You will see additional environment variable "Azure Tenant" under "Configuration" --> "Application Settings" and its default value is ".us"
Currently this Function App supports "Azure Gov(.US)" tenants
Ex: https://portal.azure.us
Note: there are two parsers [here](https://github.com/Azure/Azure-Sentinel/blob/master/Parsers/GitHub) to make the logs useful
## Deploy the Function App template
<a href="https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazurecomdeploy_dotcomtenants.json" target="_blank">
<img src="https://aka.ms/deploytoazurebutton"/>
</a>
<a href="https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazuregovdeploy_dotustenants.json" target="_blank">
<img src="https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/1-CONTRIBUTION-GUIDE/images/deploytoazuregov.png"/>
</a>

Просмотреть файл

@ -129,5 +129,21 @@
"title": "5. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "f8699c9c-536c-4d28-9049-d0c555dd8c8c",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Imperva"
},
"support": {
"name": "Imperva",
"link": "https://www.imperva.com/support/technical-support/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -53,7 +53,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -87,5 +87,21 @@
}
]
}
]
],
"metadata": {
"id": "4eb027bc-5a8e-4e7e-8dac-3aaba3e487b1",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "NXLog"
},
"support": {
"name": "NXLog",
"link": "https://nxlog.co/community-forum",
"tier": "developer"
}
}
}

Просмотреть файл

@ -57,7 +57,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -91,5 +91,21 @@
}
]
}
]
],
"metadata": {
"id": "3969d734-ab64-44fe-ac9b-73d758e0e814",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "NXLog"
},
"support": {
"name": "NXLog",
"link": "https://nxlog.co/community-forum",
"tier": "developer"
}
}
}

Просмотреть файл

@ -131,7 +131,7 @@
},
{
"title": "",
"description": "**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): \n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n - Use the following schema for the `uri` value: `https://<OktaDomain>/api/v1/logs?since=` Replace `<OktaDomain>` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for delegated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://<CustomerId>.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**."
"description": "**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): \n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n - Use the following schema for the `uri` value: `https://<OktaDomain>/api/v1/logs?since=` Replace `<OktaDomain>` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://<CustomerId>.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**."
}
]
}

Просмотреть файл

@ -236,6 +236,21 @@
}
]
],
"metadata": {
"id": "81ae314e-2c7c-40d0-87fe-812ffda0b60c",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Onapsis"
},
"support": {
"name": "Onapsis",
"link": "https://onapsis.force.com/s/login/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -53,7 +53,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -87,5 +87,21 @@
}
]
}
]
],
"metadata": {
"id": "f664e101-f4af-4d74-809c-8fad6ee3c381",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Orca Security"
},
"support": {
"name": "Orca Security",
"link": "http://support.orca.security/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -35,7 +35,8 @@
}
],
"availability": {
"status": 1
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
@ -113,5 +114,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "ef80260c-3aec-43bc-a1e5-c2f2372c9adc",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Palo Alto Networks"
},
"support": {
"name": "Palo Alto Networks",
"link": "https://www.paloaltonetworks.com/company/contact-support",
"tier": "developer"
}
}
}

Просмотреть файл

@ -99,5 +99,21 @@
}
]
}
]
],
"metadata": {
"id": "1d855d54-0f17-43b3-ad33-93a0ab7b6ce8",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Perimeter 81"
},
"support": {
"name": "Perimeter 81",
"link": "https://support.perimeter81.com/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -118,5 +118,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "bbe6d9ef-2581-41b8-95b0-9d50c919d377",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "SonicWall"
},
"support": {
"name": "SonicWall",
"link": "https://www.sonicwall.com/support/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -53,7 +53,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -99,5 +99,21 @@
"title": "4. Turn on the integration",
"description": "To turn on the integration, select Enable, and then click Save.\n"
}
]
],
"metadata": {
"id": "a3646b81-9e6a-4f4b-beb1-9d2eba8ab669",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Sophos"
},
"support": {
"name": "Sophos",
"link": "https://secure2.sophos.com/en-us/support.aspx",
"tier": "developer"
}
}
}

Просмотреть файл

@ -125,5 +125,21 @@
}
]
}
]
],
"metadata": {
"id": "5040166e-9344-4b4a-b260-8f2e3539ae45",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Squadra Technologies"
},
"support": {
"name": "Squadra Technologies",
"link": "https://www.squadratechnologies.com/Contact.aspx",
"tier": "developer"
}
}
}

Просмотреть файл

@ -110,5 +110,23 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "Unique Identifier (GUID) used to identify dependencies and content from solutions or community.",
"version": "This is an optional field. Default and recommended format for kind value as **community or solutions** is string eg. \"1.0.0\" aligning with solutions which makes it easier to manage the content. Whereas, for kind value as **sourceRepository** the recommended format is numeric (eg. 1, 1.0,1.0.0, etc) aligning to ARM template best practices.",
"kind": "dataConnector",
"source": {
"kind": "source type of the content. Value must be one of these : localWorkspace | community | solution | sourceRepository",
"name": "Name of the content source. The repo name, solution name, LA workspace name etc."
},
"author": {
"name": "Name of the author. For localWorkspace it is automatically the workspace user"
},
"support": {
"tier": "Type of support for content item: microsoft | developer | community",
"name": "Name of support contact or company",
"email": "Optional: Email of support contact",
"link":"Optional: Link for support help, like to support page to open a ticket etc"
}
}
}

Просмотреть файл

@ -125,5 +125,23 @@
"title": "3. Configure the Function App",
"description": "1. In the Function App screen, click the Function App name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: \n\t\tapiUsername\n\t\tapipassword\n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `uri` value to: `<add uri value>` \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://<CustomerId>.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**."
}
]
],
"metadata": {
"id": "Unique Identifier (GUID) used to identify dependencies and content from solutions or community.",
"version": "This is an optional field. Default and recommended format for kind value as **community or solutions** is string eg. \"1.0.0\" aligning with solutions which makes it easier to manage the content. Whereas, for kind value as **sourceRepository** the recommended format is numeric (eg. 1, 1.0,1.0.0, etc) aligning to ARM template best practices.",
"kind": "dataConnector",
"source": {
"kind": "source type of the content. Value must be one of these : localWorkspace | community | solution | sourceRepository",
"name": "Name of the content source. The repo name, solution name, LA workspace name etc."
},
"author": {
"name": "Name of the author. For localWorkspace it is automatically the workspace user"
},
"support": {
"tier": "Type of support for content item: microsoft | developer | community",
"name": "Name of support contact or company",
"email": "Optional: Email of support contact",
"link":"Optional: Link for support help, like to support page to open a ticket etc"
}
}
}

Просмотреть файл

@ -89,5 +89,23 @@
}
]
}
]
],
"metadata": {
"id": "Unique Identifier (GUID) used to identify dependencies and content from solutions or community.",
"version": "This is an optional field. Default and recommended format for kind value as **community or solutions** is string eg. \"1.0.0\" aligning with solutions which makes it easier to manage the content. Whereas, for kind value as **sourceRepository** the recommended format is numeric (eg. 1, 1.0,1.0.0, etc) aligning to ARM template best practices.",
"kind": "dataConnector",
"source": {
"kind": "source type of the content. Value must be one of these : localWorkspace | community | solution | sourceRepository",
"name": "Name of the content source. The repo name, solution name, LA workspace name etc."
},
"author": {
"name": "Name of the author. For localWorkspace it is automatically the workspace user"
},
"support": {
"tier": "Type of support for content item: microsoft | developer | community",
"name": "Name of support contact or company",
"email": "Optional: Email of support contact",
"link":"Optional: Link for support help, like to support page to open a ticket etc"
}
}
}

Просмотреть файл

@ -106,5 +106,23 @@
}
]
}
]
],
"metadata": {
"id": "Unique Identifier (GUID) used to identify dependencies and content from solutions or community.",
"version": "This is an optional field. Default and recommended format for kind value as **community or solutions** is string eg. \"1.0.0\" aligning with solutions which makes it easier to manage the content. Whereas, for kind value as **sourceRepository** the recommended format is numeric (eg. 1, 1.0,1.0.0, etc) aligning to ARM template best practices.",
"kind": "dataConnector",
"source": {
"kind": "source type of the content. Value must be one of these : localWorkspace | community | solution | sourceRepository",
"name": "Name of the content source. The repo name, solution name, LA workspace name etc."
},
"author": {
"name": "Name of the author. For localWorkspace it is automatically the workspace user"
},
"support": {
"tier": "Type of support for content item: microsoft | developer | community",
"name": "Name of support contact or company",
"email": "Optional: Email of support contact",
"link":"Optional: Link for support help, like to support page to open a ticket etc"
}
}
}

Просмотреть файл

@ -89,7 +89,8 @@ A data connector can have multiple data types and these can be represented by co
3. **permissions** – Represents the required permissions needed for the data connector to be enabled or connected. For e.g. write permissions to the workspace is needed for connector to be enabled, etc. These appear in the connector UX in the prerequisites section. This property value need **not** be updated and can remain as-is.
4. **instructionSteps** – These are the specific instructions to connect to the data connector.
* For CEF and Syslog, leverage the existing text as-is and add anything custom as needed.
* For REST API, either provide a link to your website/documentation that outlines the onboarding guidance to send data to Azure Sentinel **or** provide detailed guidance for customers to send data to Azure Sentinel.<p>
* For REST API, either provide a link to your website/documentation that outlines the onboarding guidance to send data to Azure Sentinel **or** provide detailed guidance for customers to send data to Azure Sentinel.
* If Connector is dependent on Kusto Function (Parser), **additionalRequirementBanner** and **instruction step** about Parser need to be added in Connector. <p>
# What is the format for redirection/Short links?
1. Redirection link for **Parser** - https://aka.ms/sentinel-[connectorid]-parser

Просмотреть файл

@ -120,5 +120,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "c4c9c58b-d659-49af-a11e-2d5d7bd8ccc8",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Thycotic"
},
"support": {
"name": "Thycotic",
"link": "https://thycotic.com/support/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -130,5 +130,21 @@
"title": "Azure Resource Manager (ARM) Template Deployment",
"description": "This method provides an automated deployment of the Trend Micro XDR connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Micro XDR instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
}
]
],
"metadata": {
"id": "61d3a450-20c0-4f0e-9209-b8cf41d9a774",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Trend Micro"
},
"support": {
"name": "Trend Micro",
"link": "https://success.trendmicro.com/technical-support",
"tier": "developer"
}
}
}

Просмотреть файл

@ -52,7 +52,8 @@
}
],
"availability": {
"status": 1
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
@ -130,5 +131,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "abf0937a-e5be-4587-a805-fd5dbcffd6cd",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Trend Micro"
},
"support": {
"name": "Trend Micro",
"link": "https://success.trendmicro.com/technical-support",
"tier": "developer"
}
}
}

Просмотреть файл

@ -126,5 +126,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "78cd5319-f6b0-4428-be45-5dea94c8ec83",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Trend Micro"
},
"support": {
"name": "Trend Micro",
"link": "https://success.trendmicro.com/technical-support",
"tier": "developer"
}
}
}

Просмотреть файл

@ -126,5 +126,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "4c0776c2-a5dc-419d-8cf7-81c2484448d2",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "WireX Systems"
},
"support": {
"name": "WireX Systems",
"email": "support@wirexsystems.com",
"tier": "developer"
}
}
}

Просмотреть файл

@ -61,15 +61,14 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
],
"customs":""
]
},
"instructionSteps": [
{
@ -96,5 +95,21 @@
}
]
}
]
],
"metadata": {
"id": "26bcf619-26b2-44aa-a7ad-212da52deeb8",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Zimperium"
},
"support": {
"name": "Zimperium",
"link": "https://www.zimperium.com/support",
"tier": "developer"
}
}
}

Просмотреть файл

@ -103,5 +103,21 @@
}
]
}
]
],
"metadata" : {
"id": "ffaeb3c2-6c9a-4d55-8852-e13da1162ec6",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Alcide"
},
"support": {
"name": "Alcide",
"link": "https://www.alcide.io/company/contact-us/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -114,5 +114,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "aa770f1e-4d05-477a-8dc1-b893772f3a46",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Illusive Networks"
},
"support": {
"name": "Illusive Networks",
"link": "https://www.illusivenetworks.com/technical-support/",
"tier": "developer"
}
}
}

Просмотреть файл

@ -0,0 +1,112 @@
# Parse pfSense "filterlog" firewall messages into rsyslog message properties
# and re-emit them as CEF to a local collector listening on 127.0.0.1:25226 (TCP).
# field($msg, 44, N) extracts the N-th field of $msg using the delimiter given
# as a decimal ASCII code: 44 = ','. The filterlog payload is a comma-separated
# record whose layout varies by IP version and transport protocol.
# NOTE(review): field positions presumably follow the pfSense raw filter log
# format — confirm against the pfSense documentation for the deployed version.
if $programname == "filterlog" then
{
# Common header fields (present for both IPv4 and IPv6 records).
set $!rule = field($msg,44,1);
set $!subrule = field($msg,44,2);
set $!anchor = field($msg,44,3);
set $!trackerid = field($msg,44,4);
set $!destint = field($msg,44,5);
set $!reason = field($msg,44,6);
set $!vendoraction = field($msg,44,7);
set $!vendordirection = field($msg,44,8);
set $!ipversion = field($msg,44,9);
# IPv4 records: IP header fields occupy positions 10-20, transport-specific
# fields start at position 21. Protocol names are lowercase for IPv4.
if $!ipversion == "4" then
{
set $!tos = field($msg,44,10);
set $!ecn = field($msg,44,11);
set $!ttl = field($msg,44,12);
set $!id = field($msg,44,13);
set $!offset = field($msg,44,14);
set $!flags = field($msg,44,15);
set $!transportid = field($msg,44,16);
set $!vendortransport = field($msg,44,17);
set $!bytes = field($msg,44,18);
set $!srcip = field($msg,44,19);
set $!dstip = field($msg,44,20);
# TCP: ports plus TCP header details (flags, seq/ack, window, urg, options).
if $!vendortransport == "tcp" then
{
set $!srcport = field($msg,44,21);
set $!dstport = field($msg,44,22);
set $!payloadbytes = field($msg,44,23);
set $!tcpflags = field($msg,44,24);
set $!seqnumber = field($msg,44,25);
set $!ack = field($msg,44,26);
set $!window = field($msg,44,27);
set $!urg = field($msg,44,28);
set $!options = field($msg,44,29);
}
# UDP: ports and payload length only.
if $!vendortransport == "udp" then
{
set $!srcport = field($msg,44,21);
set $!dstport = field($msg,44,22);
set $!payloadbytes = field($msg,44,23);
}
# ICMP: the field after the code depends on the ICMP message type.
if $!vendortransport == "icmp" then
{
set $!icmpcode = field($msg,44,21);
# Echo request: carries an echo identifier and sequence number.
if $!icmpcode == "request" then
{
set $!echoid = field($msg,44,22);
set $!echoseq = field($msg,44,23);
}
# Port unreachable: carries the original destination and protocol info.
if $!icmpcode == "unreachport" then
{
set $!icmpdstip = field($msg,44,22);
set $!unreachableprotocolid = field($msg,44,23);
set $!unreachableprotocoldata = field($msg,44,24);
}
# Redirect / unreachable / time-exceeded: free-text detail field.
if $!icmpcode == "redirect" or $!icmpcode == "unreach" or $!icmpcode == "timexceed" then
{
set $!icmptext = field($msg,44,22);
}
}
# CARP (redundancy protocol) advertisement fields.
if $!vendortransport == "carp" then
{
set $!carptype = field($msg,44,21);
set $!carpttl = field($msg,44,22);
set $!vhid = field($msg,44,23);
set $!version = field($msg,44,24);
set $!advbase = field($msg,44,25);
set $!advskew = field($msg,44,26);
}
# IGMP and PIM: only a data-length field is recorded.
if $!vendortransport == "igmp" then
{
set $!datalength = field($msg,44,21);
}
if $!vendortransport == "pim" then
{
set $!datalength = field($msg,44,21);
}
}
# IPv6 records: header fields occupy positions 10-17, transport-specific
# fields start at position 18. Note the protocol names are uppercase here
# ("TCP"/"UDP"), unlike the lowercase IPv4 branch above.
if $!ipversion == "6" then
{
set $!class = field($msg,44,10);
set $!flowlabel = field($msg,44,11);
set $!hoplimit = field($msg,44,12);
set $!vendortransport = field($msg,44,13);
set $!transportid = field($msg,44,14);
set $!bytes = field($msg,44,15);
set $!srcip = field($msg,44,16);
set $!dstip = field($msg,44,17);
if $!vendortransport == "TCP" then {
set $!srcport = field($msg,44,18);
set $!dstport = field($msg,44,19);
set $!payloadbytes = field($msg,44,20);
set $!tcpflags = field($msg,44,21);
set $!seqnumber = field($msg,44,22);
set $!ack = field($msg,44,23);
set $!window = field($msg,44,24);
set $!urg = field($msg,44,25);
set $!options = field($msg,44,26);
}
if $!vendortransport == "UDP" then {
set $!srcport = field($msg,44,18);
set $!dstport = field($msg,44,19);
set $!payloadbytes = field($msg,44,20);
}
}
# Re-format the parsed properties as a CEF message. Properties not set for a
# given record (e.g. TCP fields on a UDP packet) render as empty values.
$template pfsense_log, "<%pri%>%timereported:::date-rfc3339% %hostname% CEF: 0|NETGATE|pfsense||filterlog|%$!vendortransport%-%$!vendoraction%||cs1Label=rule cs1=%$!rule% cs2Label=subrule cs2=%$!subrule% anchor=%$!anchor% trackerid=%$!trackerid% deviceInboundInterface=%$!destint% reason=%$!reason% act=%$!vendoraction% deviceDirection=%$!vendordirection% ipversion=%$!ipversion% tos=%$!tos% ecn=%$!ecn% ttl=%$!ttl% id=%$!id% offset=%$!offset% flags=%$!flags% trasportid=%$!transportid% proto=%$!vendortransport% in=%$!bytes% src=%$!srcip% dst=%$!dstip% spt=%$!srcport% dpt=%$!dstport% payloadbytes=%$!payloadbytes% tcpflags=%$!tcpflags% seqnumber=%$!seqnumber% ack=%$!ack% window=%$!window% urg=%$!urg% options=%$!options% icmpcode=%$!icmpcode% echoid=%$!echoid% echoseq=%$!echoseq% icmpdstip=%$!icmpdstip% unreachableprotocolid=%$!unreachableprotocolid% unreachableprotocoldata=%$!unreachableprotocoldata% icmptext=%$!icmptext% carptype=%$!carptype% carpttl=%$!carpttl% vhid=%$!vhid% version=%$!version% advbase=%$!advbase% advskew=%$!advskew% datalength=%$!datalength% class=%$!class% flowlabel=%$!flowlabel% hoplimit=%$!hoplimit%\n"
# Optional file output (disabled); forward to the local CEF agent instead.
#/var/log/pfsense.log;pfsense_log
@@127.0.0.1:25226;pfsense_log
# Stop processing so filterlog messages are not handled by later rules.
stop
}

Просмотреть файл

@ -0,0 +1,22 @@
# Parse pfSense nginx access-log messages into rsyslog message properties and
# re-emit them as CEF to a local collector on 127.0.0.1:25226 (TCP).
# field(str, D, N) extracts the N-th field using delimiter ASCII code D:
# 32 = ' ' (space), 34 = '"', 45 = '-', 91 = '[', 93 = ']'.
# NOTE(review): the field positions assume the standard nginx "combined"
# access-log layout — confirm against the pfSense nginx configuration.
if $programname == "nginx" then
{
# Client address: first space-separated token.
set $!dst = field($msg,32,1);
# Remote user: second '-'-separated field.
set $!user = field($msg,45,2);
# Request timestamp: text between '[' and ']'.
set $!requesttime = field($msg,91,2);
set $!requesttime = field($!requesttime,93,1);
# Request line (second quoted field), then split it into method, URL, protocol.
set $!url = field($msg,34,2);
set $!httpmethod = field($!url,32,1);
set $!httpprotocol = field($!url,32,3);
set $!url = field($!url,32,2);
# Status code and response bytes from the text after the closing quote.
set $!status = field($msg,34,3);
set $!status = field($!status,32,2);
set $!bytesout = field($msg,34,3);
set $!bytesout = field($!bytesout,32,3);
# Referrer (4th quoted field) and user agent (6th quoted field).
set $!httpreferrer = field($msg,34,4);
set $!httpuseragent = field($msg,34,6);
# Re-format the parsed properties as a CEF access event.
$template pfsensenginx_log, "<%pri%>%timereported:::date-rfc3339% %hostname% CEF: 0|NETGATE|pfsense||nginx|access||dst=%$!dst% user=%$!user% start=%$!requesttime% requestMethod=%$!httpmethod% request=%$!url% proto=%$!httpprotocol% status=%$!status% out=%$!bytesout% requestContext=%$!httpreferrer% requestClientApplication=%$!httpuseragent%"
# Optional file output (disabled); forward to the local CEF agent instead.
#/var/log/pfsensenginx.log;pfsensenginx_log
@@127.0.0.1:25226;pfsensenginx_log
# Stop processing so nginx messages are not handled by later rules.
stop
}

Просмотреть файл

@ -0,0 +1,137 @@
{
"id": "pfsense",
"title": "pfsense",
"publisher": "Microsoft",
"logo": "pfsense_logo.svg",
"descriptionMarkdown": "The pfsense firewall connector allows you to easily connect your pfsense logs with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.",
"graphQueries": [
{
"metricName": "Total data received",
"legend": "pfsense",
"baseQuery": "\nCommonSecurityLog\n| where DeviceProduct == \"pfsense\"\n"
}
],
"sampleQueries": [
{
"description": "All logs",
"query": "\nCommonSecurityLog\n| where DeviceProduct == \"pfsense\"\n| sort by TimeGenerated"
},
{
"description": "Filterlog",
"query": "\nCommonSecurityLog\n| where DeviceProduct == \"pfsense\" and DeviceEventClassID == \"filterlog\"\n| sort by TimeGenerated"
},
{
"description": "nginx",
"query": "\nCommonSecurityLog\n| where DeviceProduct == \"pfsense\" and DeviceEventClassID == \"nginx\"\n| sort by TimeGenerated"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"\nCommonSecurityLog\n| where DeviceProduct == \"pfsense\"\n| summarize LastLogReceived = max(TimeGenerated)\n| project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"dataTypes": [
{
"name": "CommonSecurityLog (pfsense)",
"lastDataReceivedQuery": "\nCommonSecurityLog\n| where DeviceProduct == \"pfsense\"\n| summarize Time = max(TimeGenerated)\n| where isnotempty(Time)"
}
],
"availability": {
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"read": true,
"write": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
]
},
"instructionSteps": [
{
"title": "1. Linux Syslog agent configuration",
"description": "Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace",
"innerSteps": [
{
"title": "1.1 Select or create a Linux machine",
"description": "Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds."
},
{
"title": "1.2 Install the CEF collector on the Linux machine",
"description": "Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine.",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId",
"PrimaryKey"
],
"label": "Run the following command to install and apply the CEF collector:",
"value": "sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}"
},
"type": "CopyableLabel"
}
]
}
]
},
{
"title": "2. Download RSYSLOG Config files to CEF Collector",
"description": "Set rsyslog to parse and send CEF format message.",
"instructions": [
{
"parameters": {
"label": "Use the CLI to run the following commands:",
"value": "sudo wget -O /etc/rsyslog.d/51-pfsense-filterlog.conf https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/pfsense/51-pfsense-filterlog.conf\nsudo wget -O /etc/rsyslog.d/52-pfsense-nginx.conf https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/pfsense/52-pfsense-nginx.conf\nsystemctl restart rsyslog",
"rows": 8
},
"type": "CopyableLabel"
}
]
},
{
"title": "3. Forward pfsense logs to Syslog agent",
"description": "Set your pfsense to send Syslog messages to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.\nGo to the [pfsense Document Library](https://docs.netgate.com/pfsense/en/latest/monitoring/logs/remote.html) for instructions."
},
{
"title": "4. Validate connection",
"description": "Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Run the following command to validate your connectivity:",
"value": "sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}"
},
"type": "CopyableLabel"
}
]
},
{
"title": "5. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
}

Просмотреть файл

@ -0,0 +1,14 @@
# pfSense Data Connector
Author: Nicholas DiCola
This connector collects filterlog and nginx logs via RSYSLOG and parses them to CEF format so that they are ingested into Azure Sentinel in CommonEventFormat.
## Instructions
1. Install the CEF collection agent from the Azure Sentinel Data connectors blade.
2. Download the .conf files to /etc/rsyslog.d/ using the following commands:
sudo wget -O /etc/rsyslog.d/51-pfsense-filterlog.conf https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/pfsense/51-pfsense-filterlog.conf
sudo wget -O /etc/rsyslog.d/52-pfsense-nginx.conf https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/pfsense/52-pfsense-nginx.conf
3. Restart rsyslog using the following command
systemctl restart rsyslog
There are parsers located [here](https://github.com/Azure/Azure-Sentinel/tree/master/Parsers/pfsense)

Просмотреть файл

@ -125,5 +125,21 @@
}
]
}
]
],
"metadata": {
"id": "afbf6c4a-7190-442a-a649-5c18a907ceb3",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Barracuda Networks"
},
"support": {
"name": "Barracuda Networks",
"link": "https://www.barracuda.com/support",
"tier": "developer"
}
}
}

Просмотреть файл

@ -14,7 +14,6 @@ triggerThreshold: 0
tactics:
- CommandAndControl
- Exfiltration
- ResourceDevelopment
relevantTechniques:
- T1584
- T1071
@ -89,4 +88,4 @@ entityMappings:
- identifier: Algorithm
columnName: MD5
- identifier: Value
columnName: FileHashCustomEntity
columnName: FileHashCustomEntity

Просмотреть файл

@ -0,0 +1,142 @@
id: d804b39c-03a4-417c-a949-bdbf21fa3305
name: Exchange Server Vulnerabilities Disclosed March 2021 IoC Match
description: |
  'This detection looks for IoCs shared by Microsoft relating to attacks exploiting the Exchange Server vulnerabilities disclosed in March 2021. It looks for SHA256 file hashes, IP addresses and file paths in a number of data sources. This query can also be customized with additional data sources that may include these elements.
Ref: https://msrc-blog.microsoft.com/2021/03/02/multiple-security-updates-released-for-exchange-server/'
severity: Medium
requiredDataConnectors:
- connectorId: AzureMonitor(IIS)
dataTypes:
- W3CIISLog
- connectorId: AzureMonitor(WireData)
dataTypes:
- WireData
- connectorId: CheckPoint
dataTypes:
- CommonSecurityLog (CheckPoint)
- connectorId: CiscoASA
dataTypes:
- CommonSecurityLog (Cisco)
- connectorId: CEF
dataTypes:
- CommonSecurityLog
- connectorId: F5
dataTypes:
- CommonSecurityLog (F5)
- connectorId: Fortinet
dataTypes:
- CommonSecurityLog (Fortinet)
- connectorId: PaloAltoNetworks
dataTypes:
- CommonSecurityLog (PaloAlto)
- connectorId: SecurityEvents
dataTypes:
- SecurityEvents
- connectorId: WindowsFirewall
dataTypes:
- WindowsFirewall
queryFrequency: 1h
queryPeriod: 1h
triggerOperator: gt
triggerThreshold: 0
tactics:
- InitialAccess
relevantTechniques:
- T1190
query: |
let iocs = externaldata(DateAdded:string,FirstSeen:string,IoC:string,Type:string,TLP:string)
[@"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Sample%20Data/Feeds/MSTICIoCs-ExchangeServerVulnerabilitiesDisclosedMarch2021.csv"] with (format="csv", ignoreFirstRecord=True);
let file_paths = (iocs | where Type =~ "filepath");
let sha256s = (iocs | where Type =~ "sha256");
let ips = (iocs | where Type =~ "ip");
union isfuzzy=true
(SecurityEvent
| where EventID == 4663
| where ObjectName in (file_paths)
| extend timestamp = TimeGenerated, AccountCustomEntity = Account, HostCustomEntity = Computer
),
(DeviceFileEvents
| where FolderPath in (file_paths)
| extend timestamp = TimeGenerated, AccountCustomEntity = InitiatingProcessAccountName, HostCustomEntity = DeviceName
),
(DeviceEvents
| where InitiatingProcessSHA256 in (sha256s)
| extend timestamp = TimeGenerated, AccountCustomEntity = InitiatingProcessAccountName, HostCustomEntity = DeviceName
),
(CommonSecurityLog
| where FileHash in (sha256s)
| extend timestamp = TimeGenerated
),
(Event
//This query uses sysmon data depending on table name used this may need updating
| where Source == "Microsoft-Windows-Sysmon"
| extend EvData = parse_xml(EventData)
| extend EventDetail = EvData.DataItem.EventData.Data
| extend Hashes = EventDetail.[16].["#text"]
| where isnotempty(Hashes)
| parse Hashes with * 'SHA256=' SHA256 ',' *
| where SHA256 in~ (sha256s)
| extend Type = strcat(Type, ": ", Source), Account = UserName, FileHash = Hashes
| extend timestamp = TimeGenerated, AccountCustomEntity = Account, HostCustomEntity = Computer
),
(CommonSecurityLog
| where isnotempty(SourceIP) or isnotempty(DestinationIP)
| where SourceIP in (ips) or DestinationIP in (ips) or Message has_any (ips)
| extend IPMatch = case(SourceIP in (ips), "SourceIP", DestinationIP in (ips), "DestinationIP", "Message")
| summarize StartTimeUtc = min(TimeGenerated), EndTimeUtc = max(TimeGenerated) by SourceIP, DestinationIP, DeviceProduct, DeviceAction, Message, Protocol, SourcePort, DestinationPort, DeviceAddress, DeviceName, IPMatch
| extend timestamp = StartTimeUtc, IPCustomEntity = case(IPMatch == "SourceIP", SourceIP, IPMatch == "DestinationIP", DestinationIP, "IP in Message Field")
),
(VMConnection
| where isnotempty(SourceIp) or isnotempty(DestinationIp)
| where SourceIp in (ips) or DestinationIp in (ips)
| extend IPMatch = case( SourceIp in (ips), "SourceIP", DestinationIp in (ips), "DestinationIP", "None")
| extend timestamp = TimeGenerated , IPCustomEntity = case(IPMatch == "SourceIP", SourceIp, IPMatch == "DestinationIP", DestinationIp, "None"), Host = Computer
),
(Event
| where Source == "Microsoft-Windows-Sysmon"
| where EventID == 3
| extend EvData = parse_xml(EventData)
| extend EventDetail = EvData.DataItem.EventData.Data
| extend SourceIP = EventDetail.[9].["#text"], DestinationIP = EventDetail.[14].["#text"]
| where SourceIP in (ips) or DestinationIP in (ips)
| extend IPMatch = case( SourceIP in (ips), "SourceIP", DestinationIP in (ips), "DestinationIP", "None")
| extend timestamp = TimeGenerated, AccountCustomEntity = UserName, HostCustomEntity = Computer , IPCustomEntity = case(IPMatch == "SourceIP", SourceIP, IPMatch == "DestinationIP", DestinationIP, "None")
),
(WireData
| where isnotempty(RemoteIP)
| where RemoteIP in (ips)
| extend timestamp = TimeGenerated, IPCustomEntity = RemoteIP, HostCustomEntity = Computer
),
(W3CIISLog
| where isnotempty(cIP)
| where cIP in (ips)
| extend timestamp = TimeGenerated, IPCustomEntity = cIP, HostCustomEntity = Computer, AccountCustomEntity = csUserName
),
(
DeviceNetworkEvents
| where isnotempty(RemoteIP)
| where RemoteIP in (ips)
| extend timestamp = TimeGenerated, IPCustomEntity = RemoteIP, HostCustomEntity = DeviceName
),
(
WindowsFirewall
| where SourceIP in (ips) or DestinationIP in (ips)
| extend IPMatch = case( SourceIP in (ips), "SourceIP", DestinationIP in (ips), "DestinationIP", "None")
)
entityMappings:
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

Просмотреть файл

@ -0,0 +1,97 @@
id: 0bd65651-1404-438b-8f63-eecddcec87b4
name: Gain Code Execution on ADFS Server via Remote WMI Execution
description: |
'This query detects instances where an attacker has gained the ability to execute code on an ADFS Server through remote WMI Execution.
In order to use this query you need to be collecting Sysmon EventIDs 19, 20, and 21.
If you do not have Sysmon data in your workspace this query will raise an error stating:
Failed to resolve scalar expression named "[@Name]"
For more on how WMI was used in Solorigate see https://www.microsoft.com/security/blog/2021/01/20/deep-dive-into-the-solorigate-second-stage-activation-from-sunburst-to-teardrop-and-raindrop/.
The query contains some features from the following detections to look for potentially malicious ADFS activity. See them for more details.
- ADFS Key Export (Sysmon): https://github.com/Azure/Azure-Sentinel/blob/master/Detections/SecurityEvent/ADFSKeyExportSysmon.yaml
- ADFS DKM Master Key Export: https://github.com/Azure/Azure-Sentinel/blob/master/Detections/MultipleDataSources/ADFS-DKM-MasterKey-Export.yaml'
severity: Medium
requiredDataConnectors:
- connectorId: SecurityEvents
dataTypes:
- SecurityEvent
queryFrequency: 1d
queryPeriod: 7d
triggerOperator: gt
triggerThreshold: 0
tactics:
- LateralMovement
relevantTechniques:
- T1210
tags:
- Solorigate
- NOBELIUM
query: |
let timeframe = 1d;
// Adjust for a longer timeframe for identifying ADFS Servers
let lookback = 6d;
// Identify ADFS Servers
let ADFS_Servers = (
Event
| where TimeGenerated > ago(timeframe+lookback)
| where Source == "Microsoft-Windows-Sysmon"
| where EventID == 1
| extend EventData = parse_xml(EventData).DataItem.EventData.Data
| mv-expand bagexpansion=array EventData
| evaluate bag_unpack(EventData)
| extend Key=tostring(['@Name']), Value=['#text']
| evaluate pivot(Key, any(Value), TimeGenerated, Source, EventLog, Computer, EventLevel, EventLevelName, UserName, RenderedDescription, MG, ManagementGroupName, Type, _ResourceId)
| extend process = split(Image, '\\', -1)[-1]
| where process =~ "Microsoft.IdentityServer.ServiceHost.exe"
| distinct Computer
| union isfuzzy=true (
SecurityEvent
| where TimeGenerated > ago(timeframe+lookback)
| where EventID == 4688 and SubjectLogonId != "0x3e4"
| where ProcessName has "Microsoft.IdentityServer.ServiceHost.exe"
| distinct Computer
)
| distinct Computer);
(union isfuzzy=true
(
SecurityEvent
| where TimeGenerated > ago(timeframe)
| where Computer in~ (ADFS_Servers)
| where ParentProcessName has 'wmiprvse.exe'
// Looking for rundll32.exe is based on intel from the blog linked in the description
// This can be commented out or altered to filter out known internal uses
| where CommandLine has_any ('rundll32')
| project TimeGenerated, TargetAccount, CommandLine, Computer, Account, TargetLogonId
| extend timestamp = TimeGenerated, HostCustomEntity = Computer, AccountCustomEntity = Account
// Search for recent logons to identify lateral movement
| join kind= inner
(SecurityEvent
| where TimeGenerated > ago(timeframe)
| where EventID == 4624 and LogonType == 3
| where Account !endswith "$"
| project TargetLogonId
) on TargetLogonId
),
(
Event
| where TimeGenerated > ago(timeframe)
| where Source == "Microsoft-Windows-Sysmon"
// Check for WMI Events
| where Computer in~ (ADFS_Servers) and EventID in (19, 20, 21)
| extend EventData = parse_xml(EventData).DataItem.EventData.Data
| mv-expand bagexpansion=array EventData
| evaluate bag_unpack(EventData)
| extend Key=tostring(['@Name']), Value=['#text']
| evaluate pivot(Key, any(Value), TimeGenerated, Source, EventLog, Computer, EventLevel, EventLevelName, UserName, RenderedDescription, MG, ManagementGroupName, Type, _ResourceId)
| project TimeGenerated, EventType, Image, Computer, UserName
| extend timestamp = TimeGenerated, HostCustomEntity = Computer, AccountCustomEntity = UserName
)
)
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity

Просмотреть файл

@ -1,5 +1,5 @@
id: 7d6d8a8e-b08a-4082-8dbb-d7fd2cbbc35e
name: HAFNIUM UM Service writing suspicious file.
name: HAFNIUM UM Service writing suspicious file
description: |
'This query looks for the Exchange server UM process writing suspicious files that may be indicative of webshells.
Reference: https://www.microsoft.com/security/blog/2021/03/02/hafnium-targeting-exchange-servers/'
@ -52,4 +52,4 @@ entityMappings:
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity
columnName: IPCustomEntity

Просмотреть файл

@ -17,6 +17,9 @@ requiredDataConnectors:
- connectorId: PaloAltoNetworks
dataTypes:
- CommonSecurityLog
- connectorId: AzureFirewall
dataTypes:
- AzureDiagnostics
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt

Просмотреть файл

@ -2,7 +2,7 @@ id: bff093b2-500e-4ae5-bb49-a5b1423cbd5b
name: External user added and removed in short timeframe
description: |
'This detection flags the occurances of external user accounts that are added to a Team and then removed within
one hour.This data is a part of Office 365 Connector in Azure Sentinel.'
one hour.'
severity: Low
requiredDataConnectors:
- connectorId: Office365

Просмотреть файл

@ -1,28 +0,0 @@
id: 0625fcce-6d52-491e-8c68-1d9b801d25b9
name: HAFNIUM Suspicious UM Service Error.
description: |
'This query looks for errors that may indicate that an attacker is attempting to exploit a vulnerability in the service.
Reference: https://www.microsoft.com/security/blog/2021/03/02/hafnium-targeting-exchange-servers/'
severity: Low
requiredDataConnectors: []
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- InitialAccess
relevantTechniques:
- T1190
query: |
Event
| where EventLog =~ "Application"
| where Source startswith "MSExchange"
| where EventLevelName =~ "error"
| where (RenderedDescription startswith "Watson report" and RenderedDescription contains "umworkerprocess" and RenderedDescription contains "TextFormattingRunProperties") or RenderedDescription startswith "An unhandled exception occurred in a UM worker process" or RenderedDescription startswith "The Microsoft Exchange Unified Messaging service"
| where RenderedDescription !contains "System.OutOfMemoryException"
| extend timestamp = TimeGenerated, HostCustomEntity = Computer
entityMappings:
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity

Просмотреть файл

@ -1,5 +1,5 @@
id: 0625fcce-6d52-491e-8c68-1d9b801d25b9
name: HAFNIUM Suspicious UM Service Error.
name: HAFNIUM Suspicious UM Service Error
description: |
'This query looks for errors that may indicate that an attacker is attempting to exploit a vulnerability in the service.
Reference: https://www.microsoft.com/security/blog/2021/03/02/hafnium-targeting-exchange-servers/'
@ -18,11 +18,11 @@ query: |
| where EventLog =~ "Application"
| where Source startswith "MSExchange"
| where EventLevelName =~ "error"
| where (RenderedDescription startswith "Watson report" and RenderedDescription contains "umworkerprocess" and RenderedDescription contains "TextFormattingRunProperties") or RenderedDescription startswith "An unhandled exception occurred in a UM worker process" or RenderedDescription startswith "The Microsoft Exchange Unified Messaging service"
| where (RenderedDescription startswith "Watson report" and RenderedDescription contains "umworkerprocess" and RenderedDescription contains "TextFormattingRunProperties") or RenderedDescription startswith "An unhandled exception occurred in a UM worker process" or RenderedDescription startswith "The Microsoft Exchange Unified Messaging service" or RenderedDescription contains "MSExchange Unified Messaging"
| where RenderedDescription !contains "System.OutOfMemoryException"
| extend timestamp = TimeGenerated, HostCustomEntity = Computer
entityMappings:
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
columnName: HostCustomEntity

Просмотреть файл

@ -1,44 +0,0 @@
id: 23005e87-2d3a-482b-b03d-edbebd1ae151
name: HAFNIUM Suspicious Exchange Request
description: |
'This query looks for suspicious request patterns to Exchange servers that fit a pattern observed by HAFNIUM actors.
The same query can be run on HTTPProxy logs from on-premise hosted Exchange servers.
Reference: https://www.microsoft.com/security/blog/2021/03/02/hafnium-targeting-exchange-servers/'
severity: Medium
requiredDataConnectors:
- connectorId: AzureMonitor(IIS)
dataTypes:
- W3CIISLog
queryFrequency: 1d
queryPeriod: 14d
triggerOperator: gt
triggerThreshold: 0
tactics:
- InitialAccess
relevantTechniques:
- T1190
query: |
let exchange_servers = (
W3CIISLog
| where TimeGenerated > ago(14d)
| where sSiteName =~ "Exchange Back End"
| summarize by Computer);
W3CIISLog
| where TimeGenerated > ago(1d)
| where Computer in (exchange_servers)
| where csUriQuery startswith "t="
| project-reorder TimeGenerated, Computer, csUriStem, csUriQuery, csUserName, csUserAgent, cIP
| extend timestamp = TimeGenerated, AccountCustomEntity = csUserName, HostCustomEntity = Computer, IPCustomEntity = cIP
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

Просмотреть файл

@ -17,8 +17,8 @@ relevantTechniques:
query: |
let scriptExtensions = dynamic([".php", ".jsp", ".js", ".aspx", ".asmx", ".asax", ".cfm", ".shtml"]);
http_proxy_oab_CL
| where Message contains "Download failed and temporary file"
| extend File = extract("([^\\\\]*)(\\\\[^']*)",2,Message)
| where RawData contains "Download failed and temporary file"
| extend File = extract("([^\\\\]*)(\\\\[^']*)",2,RawData)
| extend Extension = strcat(".",split(File, ".")[-1])
| extend InteractiveFile = iif(Extension in (scriptExtensions), "Yes", "No")
// Uncomment the following line to alert only on interactive file download type

Просмотреть файл

@ -39,10 +39,16 @@ query: |
| mv-expand parse_json(Folders)
| extend folders = tostring(Folders.Path)
| extend ClientIP = iif(Client_IPAddress startswith "[", extract("\\[([^\\]]*)", 1, Client_IPAddress), Client_IPAddress)
| summarize make_set(folders), make_set(ClientInfoString), make_set(ClientIP), make_set(MailboxGuid), make_set(MailboxOwnerUPN) by UserId
| summarize StartTime=max(TimeGenerated), EndTime=min(TimeGenerated), make_set(folders), make_set(ClientInfoString), make_set(ClientIP), make_set(MailboxGuid), make_set(MailboxOwnerUPN) by UserId
| extend folder_count = array_length(set_folders)
| extend user_count = array_length(set_MailboxGuid)
| where user_count > user_threshold or folder_count > folder_threshold
| extend Reason = case(user_count > user_threshold and folder_count > folder_threshold, "Both User and Folder Threshold Exceeded", folder_count > folder_threshold and user_count < user_threshold, "Folder Count Threshold Exceeded","User Threshold Exceeded")
| sort by user_count desc
| project-reorder UserId, user_count, folder_count, set_MailboxOwnerUPN, set_ClientIP, set_ClientInfoString, set_folders
| project-reorder UserId, user_count, folder_count, set_MailboxOwnerUPN, set_ClientIP, set_ClientInfoString, set_folders
| extend timestamp = StartTime, AccountCustomEntity = UserId
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity

Просмотреть файл

@ -1,5 +1,5 @@
id: e9cef368-6257-4dab-9a48-4793729c46a2
name: External user added and removed in short timeframe
id: 119d9e1c-afcc-4d23-b239-cdb4e7bf851c
name: External user added and removed in a short timeframe - Hunt Version
description: |
'This hunting query identifies external user accounts that are added to a Team and then removed within one hour.'
requiredDataConnectors:

Просмотреть файл

@ -1,5 +1,5 @@
id: 6fce5baf-bfc2-4c56-a6b7-9c4733fc5a45
name: External user from a new organisation added
name: External user from a new organisation added to Teams
description: |
'This query identifies external users added to Teams where the user's domain is not one previously seen in Teams data.'
requiredDataConnectors:
@ -15,19 +15,19 @@ query: |
// If you have more than 14 days worth of Teams data change this value
let data_date = 14d;
// If you want to look at users further back than the last day change this value
let lookback_data = 1d;
let lookback_date = 1d;
let known_orgs = (
OfficeActivity
OfficeActivity
| where TimeGenerated > ago(data_date)
| where OfficeWorkload =~ "MicrosoftTeams"
| where Operation =~ "MemberAdded" or Operation =~ "TeamsSessionStarted"
// Extract the correct UPN and parse our external organization domain
| extend UPN = iif(Operation == "MemberAdded", tostring(parse_json(Members)[0].UPN), UserId)
| extend UPN = iif(Operation == "MemberAdded", tostring(Members[0].UPN), UserId)
| extend Organization = tostring(split(split(UPN, "_")[1], "#")[0])
| where isnotempty(Organization)
| summarize by Organization);
OfficeActivity
| where TimeGenerated > ago(lookback_data)
OfficeActivity
| where TimeGenerated > ago(lookback_date)
| where OfficeWorkload =~ "MicrosoftTeams"
| where Operation =~ "MemberAdded"
| extend UPN = tostring(parse_json(Members)[0].UPN)

Просмотреть файл

@ -6,7 +6,7 @@ description: |
requiredDataConnectors:
- connectorId: Office365
dataTypes:
- OfficeActivity
- OfficeActivity (Exchange)
tactics:
- Collection
- Exfiltration
@ -44,4 +44,13 @@ query: |
)
| extend ClientIP = ClientIPOnly
| project TimeGenerated, RedirectTo, ClientIP, Port, UserId, Operation, RuleName
| extend timestamp = TimeGenerated, AccountCustomEntity = UserId, IPCustomEntity = ClientIP
| extend timestamp = TimeGenerated, AccountCustomEntity = UserId, IPCustomEntity = ClientIP
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

Просмотреть файл

@ -1,12 +1,11 @@
id: 9eb64924-ec8d-44d0-b1f2-10665150fb74
name: Bots added to multiple teams
description: |
'This hunting query helps identify bots added to multiple Teams in a short space of time.
This query is works with the built-in Teams data connector only.'
'This hunting query helps identify bots added to multiple Teams in a short space of time.'
requiredDataConnectors:
- connectorId: Office365
dataTypes:
- OfficeActivity
- OfficeActivity (Teams)
tactics:
- Persistence
- Collection

Просмотреть файл

@ -1,12 +1,11 @@
id: 64990414-b015-4edf-bef0-343b741e68c5
name: Multiple Teams deleted by a single user
description: |
'This hunting query identifies where multiple Teams have been deleted by a single user in a short timeframe.
This query is works with the built-in Teams data connector only.'
'This hunting query identifies where multiple Teams have been deleted by a single user in a short timeframe.'
requiredDataConnectors:
- connectorId: Office365
dataTypes:
- OfficeActivity
- OfficeActivity (Teams)
tactics:
- Impact
relevantTechniques:
@ -26,7 +25,7 @@ query: |
| summarize count() by UserId
| where count_ > max_delete
| project UserId);
OfficeActivity
OfficeActivity
| where TimeGenerated > ago(time_window)
| where OfficeWorkload =~ "MicrosoftTeams"
| where Operation =~ "TeamDeleted"

Просмотреть файл

@ -45,8 +45,17 @@ query: |
| extend UserIdUserFolderFormat = tolower(replace('@|\\.', '_',UserId))
// identify when UserId is not a match to the specific site url personal folder reference
| extend UserIdDiffThanUserFolder = iff(Site_Url has '/personal/' and SiteUrlUserFolder != UserIdUserFolderFormat, true , false )
| summarize TimeGenerated = make_list(TimeGenerated), StartTimeUtc = min(TimeGenerated), EndTimeUtc = max(TimeGenerated), Operations = make_list(Operation), UserAgents = make_list(UserAgent),
| summarize TimeGenerated = make_list(TimeGenerated), StartTime = min(TimeGenerated), EndTime = max(TimeGenerated), Operations = make_list(Operation), UserAgents = make_list(UserAgent),
OfficeIds = make_list(OfficeId), SourceRelativeUrls = make_list(SourceRelativeUrl), FileNames = make_list(SourceFileName)
by OfficeWorkload, RecordType, UserType, UserKey, UserId, ClientIP, Site_Url, SourceFileExtension, SiteUrlUserFolder, UserIdUserFolderFormat, UserIdDiffThanUserFolder
// Use mvexpand on any list items and you can expand out the exact time and other metadata about the hit
| extend timestamp = StartTime, AccountCustomEntity = UserId, IPCustomEntity = ClientIP
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

Просмотреть файл

@ -7,7 +7,7 @@ description: |
requiredDataConnectors:
- connectorId: Office365
dataTypes:
- OfficeActivity
- OfficeActivity (Exchange)
tactics:
- Collection
- Exfiltration
@ -37,4 +37,17 @@ query: |
ClientIP
)
| project TimeGenerated, UserId, UserDomain, subDomain, Operation, ForwardedtoDomain, ClientIPAddress, Result, Port, OriginatingServer, OfficeObjectId, fwdingDestination
| extend timestamp = TimeGenerated, AccountCustomEntity = UserId, IPCustomEntity = ClientIPAddress, HostCustomEntity = OriginatingServer
| extend timestamp = TimeGenerated, AccountCustomEntity = UserId, IPCustomEntity = ClientIPAddress, HostCustomEntity = OriginatingServer
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity

Просмотреть файл

@ -1,39 +0,0 @@
id: 92f4cf90-85a3-4901-b2f1-13bd19e03f45
name: External user added and removed in short timeframe
description: |
'This hunting query identifies external user accounts that are added to a Team and then removed within
one hour.
This query is works with the built-in Teams data connector only.'
requiredDataConnectors:
- connectorId: Office365
dataTypes:
- OfficeActivity
tactics:
- Persistence
relevantTechniques:
- T1136
query: |
// If you want to look at user added further than 7 days ago adjust this value
let time_ago = 7d;
// If you want to change the timeframe of how quickly accounts need to be added and removed change this value
let time_delta = 1h;
OfficeActivity
| where TimeGenerated > ago(time_ago)
| where OfficeWorkload =~ "MicrosoftTeams"
| where Operation =~ "MemberAdded"
| extend UPN = tostring(Members[0].UPN)
| where UPN contains ("#EXT#")
| project TimeAdded=TimeGenerated, Operation, UPN, UserWhoAdded = UserId, TeamName, TeamGuid
| join (
OfficeActivity
| where TimeGenerated > ago(time_ago)
| where OfficeWorkload =~ "MicrosoftTeams"
| where Operation =~ "MemberRemoved"
| extend UPN = tostring(Members[0].UPN)
| where UPN contains ("#EXT#")
| project TimeDeleted=TimeGenerated, Operation, UPN, UserWhoDeleted = UserId, TeamName, TeamGuid) on UPN, TeamGuid
| where TimeDeleted < (TimeAdded + time_delta)
| project TimeAdded, TimeDeleted, UPN, UserWhoAdded, UserWhoDeleted, TeamName, TeamGuid
// Uncomment the following line to map query entities is you plan to use this as a detection query
//| extend timestamp = TimeAdded, AccountCustomEntity = UPN

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше