Merge branch 'master' into cyberpion-sl-connector

This commit is contained in:
Yotam Rosenmann 2020-12-24 12:34:23 +02:00
Parents 8d0a4e8ad1 2446105e54
Commit 29922b4c16
196 changed files with 73376 additions and 824 deletions

14
.github/workflows/IssueComment.yml vendored Normal file
View file

@ -0,0 +1,14 @@
name: IssueComment
on: [issues]
jobs:
commenting:
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@0.3.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const { issue: { number: issue_number }, repo: { owner, repo } } = context;
github.issues.createComment({ issue_number, owner, repo, body: 'Thank you for submitting an Issue to the Azure Sentinel GitHub repo! You should expect an initial response to your Issue from the team within 5 business days. Note that this response may be delayed during holiday periods. For urgent, production-affecting issues please raise a support ticket via the Azure Portal.' });

19
.github/workflows/ValidationsOnPrivateBranches.yaml vendored Normal file
View file

@ -0,0 +1,19 @@
name: Execute validations on private branches
# Controls when the action will run.
on:
# Triggers the workflow on pull request events, but only for pull requests targeting feature branches
pull_request:
branches: [feature/* ]
jobs:
build:
name: Call Azure Pipeline
runs-on: ubuntu-latest
steps:
- name: Azure Pipelines Action
uses: Azure/pipelines@v1
with:
azure-devops-project-url: https://dev.azure.com/azure/Azure-Sentinel
azure-pipeline-name: 'Azure.Azure-Sentinel'
azure-devops-token: ${{ secrets.AZURE_DEVOPS_TOKEN }}

View file

@ -0,0 +1,87 @@
{
"Name": "DeviceInfo",
"Properties": [
{
"Name": "TenantId",
"Type": "string"
},
{
"Name": "AdditionalFields",
"Type": "dynamic"
},
{
"Name": "ClientVersion",
"Type": "string"
},
{
"Name": "DeviceId",
"Type": "string"
},
{
"Name": "DeviceName",
"Type": "string"
},
{
"Name": "DeviceObjectId",
"Type": "string"
},
{
"Name": "IsAzureADJoined",
"Type": "bool"
},
{
"Name": "LoggedOnUsers",
"Type": "dynamic"
},
{
"Name": "MachineGroup",
"Type": "string"
},
{
"Name": "OSArchitecture",
"Type": "string"
},
{
"Name": "OSBuild",
"Type": "long"
},
{
"Name": "OSPlatform",
"Type": "string"
},
{
"Name": "OSVersion",
"Type": "string"
},
{
"Name": "PublicIP",
"Type": "string"
},
{
"Name": "RegistryDeviceTag",
"Type": "string"
},
{
"Name": "ReportId",
"Type": "long"
},
{
"Name": "TimeGenerated",
"Type": "datetime"
},
{
"Name": "Timestamp",
"Type": "datetime"
},
{
"Name": "SourceSystem",
"Type": "string"
},
{
"Name": "Type",
"Type": "string"
}
]
}

View file

@ -1,157 +0,0 @@
{
"Name": "DeviceNetworkEvents",
"Properties": [
{
"Name": "TenantId",
"Type": "String"
},
{
"Name": "ActionType",
"Type": "String"
},
{
"Name": "AdditionalFields",
"Type": "Dynamic"
},
{
"Name": "AppGuardContainerId",
"Type": "String"
},
{
"Name": "DeviceId",
"Type": "String"
},
{
"Name": "DeviceName",
"Type": "String"
},
{
"Name": "InitiatingProcessAccountDomain",
"Type": "String"
},
{
"Name": "InitiatingProcessAccountName",
"Type": "String"
},
{
"Name": "InitiatingProcessAccountObjectId",
"Type": "String"
},
{
"Name": "InitiatingProcessAccountSid",
"Type": "String"
},
{
"Name": "InitiatingProcessAccountUpn",
"Type": "String"
},
{
"Name": "InitiatingProcessCommandLine",
"Type": "String"
},
{
"Name": "InitiatingProcessCreationTime",
"Type": "Datetime"
},
{
"Name": "InitiatingProcessFileName",
"Type": "String"
},
{
"Name": "InitiatingProcessFolderPath",
"Type": "String"
},
{
"Name": "InitiatingProcessId",
"Type": "Long"
},
{
"Name": "InitiatingProcessIntegrityLevel",
"Type": "String"
},
{
"Name": "InitiatingProcessMD5",
"Type": "String"
},
{
"Name": "InitiatingProcessParentCreationTime",
"Type": "Datetime"
},
{
"Name": "InitiatingProcessParentFileName",
"Type": "String"
},
{
"Name": "InitiatingProcessParentId",
"Type": "Long"
},
{
"Name": "InitiatingProcessSHA1",
"Type": "String"
},
{
"Name": "InitiatingProcessSHA256",
"Type": "String"
},
{
"Name": "InitiatingProcessTokenElevation",
"Type": "String"
},
{
"Name": "LocalIP",
"Type": "String"
},
{
"Name": "LocalIPType",
"Type": "String"
},
{
"Name": "LocalPort",
"Type": "Int"
},
{
"Name": "MachineGroup",
"Type": "String"
},
{
"Name": "Protocol",
"Type": "String"
},
{
"Name": "RemoteIP",
"Type": "String"
},
{
"Name": "RemoteIPType",
"Type": "String"
},
{
"Name": "RemotePort",
"Type": "Int"
},
{
"Name": "RemoteUrl",
"Type": "String"
},
{
"Name": "ReportId",
"Type": "Long"
},
{
"Name": "TimeGenerated",
"Type": "Datetime"
},
{
"Name": "Timestamp",
"Type": "Datetime"
},
{
"Name": "SourceSystem",
"Type": "String"
},
{
"Name": "Type",
"Type": "String"
}
]
}

View file

@ -0,0 +1,89 @@
{
"Name": "QualysKB_CL",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "QID_s",
"Type": "String"
},
{
"Name": "Title_s",
"Type": "String"
},
{
"Name": "Category_s",
"Type": "String"
},
{
"Name": "Consequence_s",
"Type": "String"
},
{
"Name": "Diagnosis_s",
"Type": "String"
},
{
"Name": "Last_Service_Modification_DateTime_s",
"Type": "String"
},
{
"Name": "CVE_ID_s",
"Type": "String"
},
{
"Name": "CVE_URL_s",
"Type": "String"
},
{
"Name": "Vendor_Reference_ID_s",
"Type": "String"
},
{
"Name": "Vendor_Reference_URL_s",
"Type": "String"
},
{
"Name": "PCI_Flag_s",
"Type": "String"
},
{
"Name": "Published_DateTime_s",
"Type": "String"
},
{
"Name": "Severity_Level_s",
"Type": "String"
},
{
"Name": "Software_Product_s",
"Type": "String"
},
{
"Name": "Software_Vendor_s",
"Type": "String"
},
{
"Name": "Solution_s",
"Type": "String"
},
{
"Name": "Vuln_Type_s",
"Type": "String"
},
{
"Name": "Discovery_Additional_Info_s",
"Type": "String"
},
{
"Name": "Discovery_Auth_Type_s",
"Type": "String"
},
{
"Name": "Discovery_Remote_s",
"Type": "String"
}
]
}
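The QualysKB_CL columns above follow the Log Analytics custom-log naming convention: the ingestion pipeline appends _CL to the table name passed in the Log-Type header and a type suffix to each field (_s string, _b bool, _d double, _t datetime, _g guid). As an illustration only (the field names below are assumptions, not the connector's actual payload), a record like the following Python dict would surface with the string columns listed in this schema:

# Hypothetical record posted via the HTTP Data Collector API with Log-Type "QualysKB".
sample_kb_entry = {
    "QID": "105943",                              # stored as QID_s
    "Title": "Example vulnerability title",       # stored as Title_s
    "Category": "Security Policy",                # stored as Category_s
    "CVE_ID": "CVE-2020-0000",                    # placeholder CVE, stored as CVE_ID_s
    "Severity_Level": "3",                        # stored as Severity_Level_s
    "Published_DateTime": "2020-11-01T00:00:00Z"  # stored as Published_DateTime_s
}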

View file

@ -0,0 +1,81 @@
{
"Name": "afad_parser",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "Time",
"Type": "DateTime"
},
{
"Name": "ADObject",
"Type": "String"
},
{
"Name": "Host",
"Type": "String"
},
{
"Name": "Product",
"Type": "String"
},
{
"Name": "PID",
"Type": "Int"
},
{
"Name": "MessageType",
"Type": "Int"
},
{
"Name": "AlertID",
"Type": "Int"
},
{
"Name": "Forest",
"Type": "String"
},
{
"Name": "Domain",
"Type": "String"
},
{
"Name": "Codename",
"Type": "String"
},
{
"Name": "Severity",
"Type": "String"
},
{
"Name": "DevianceID",
"Type": "String"
},
{
"Name": "ProfileID",
"Type": "String"
},
{
"Name": "ReasonCodename",
"Type": "String"
},
{
"Name": "EventID",
"Type": "String"
},
{
"Name": "Attributes",
"Type": "String"
},
{
"Name": "EventType",
"Type": "String"
},
{
"Name": "Explanation",
"Type": "String"
}
]
}

View file

@ -12,7 +12,7 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="YamlDotNet" Version="6.0.0" />
<PackageReference Include="Microsoft.Azure.Sentinel.KustoServices" Version="1.0.3" />
<PackageReference Include="Microsoft.Azure.Sentinel.KustoServices" Version="1.0.6" />
</ItemGroup>
</Project>

Binary file not shown.

View file

@ -14,5 +14,7 @@
"f2dd4a3a-ebac-4994-9499-1a859938c947",
"97ad74c4-fdd9-4a3f-b6bf-5e28f4f71e06",
"f041e01d-840d-43da-95c8-4188f6cef546",
"a4025a76-6490-4e6b-bb69-d02be4b03f07"
"a4025a76-6490-4e6b-bb69-d02be4b03f07",
"e70fa6e0-796a-4e85-9420-98b17b0bb749",
"6d7214d9-4a28-44df-aafb-0910b9e6ae3e"
]

Binary file not shown.

View file

@ -0,0 +1,145 @@
{
"id": "AlsidForAD",
"title": "Alsid for Active Directory",
"publisher": "Alsid",
"descriptionMarkdown": "Alsid for Active Directory connector allows to export Alsid Indicators of Exposures, trailflow and Indicators of Attacks logs to Azure Sentinel in real time.\nIt provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.",
"additionalRequirementBanner": "This data connector depends on a parser based on Kusto Function to work as expected. Follow the steps to use this Kusto Function alias **afad_parser** in queries and workbooks. [Follow steps to get this Kusto Function>](https://github.com/Azure/Azure-Sentinel/blob/master/Parsers/Alsid/afad_parser.kql) ",
"graphQueries": [
{
"metricName": "Total data received",
"legend": "AlsidForADLog",
"baseQuery": "AlsidForADLog_CL"
}
],
"sampleQueries": [
{
"description" : "Get the number of alerts triggered by each IoE",
"query": "afad_parser\n | where MessageType == 0\n | summarize AlertCount = count() by Codename"
},
{
"description" : "Get all IoE alerts with severity superior to the threshold",
"query" : "let threshold = 2;\n let SeverityTable=datatable(Severity:string,Level:int) [\n \"low\", 1,\n \"medium\", 2,\n \"high\", 3,\n \"critical\", 4\n ];\n afad_parser\n | where MessageType == 0\n | lookup kind=leftouter SeverityTable on Severity\n | where Level >= ['threshold']"
},
{
"description" : "Get all IoE alerts for the last 24 hours",
"query" : "afad_parser\r\n| where MessageType == 0 and TimeGenerated > ago(1d)"
},
{
"description" : "Get all IoE alerts for the last 7 days",
"query" : "afad_parser\r\n| where MessageType == 0 and TimeGenerated > ago(7d)"
},
{
"description" : "Get all IoE alerts for the last 30 days",
"query" : "afad_parser\r\n| where MessageType == 0 and TimeGenerated > ago(30d)"
},
{
"description" : "Get all trailflow changes for the last 24 hours",
"query" : "afad_parser\r\n| where MessageType == 1 and TimeGenerated > ago(1d)"
},
{
"description" : "Get all trailflow changes for the last 7 days",
"query" : "afad_parser\r\n| where MessageType == 1 and TimeGenerated > ago(7d)"
}
],
"dataTypes": [
{
"name": "AlsidForADLog_CL",
"lastDataReceivedQuery": "AlsidForADLog_CL\n | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"AlsidForADLog_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"availability": {
"status": 1
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "write permission is required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"write": true,
"delete": true
}
}
]
},
"instructionSteps": [
{
"title": "",
"description": ">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://github.com/Azure/Azure-Sentinel/blob/master/Parsers/Alsid/afad_parser.kql) to create the Kusto Functions alias, **afad_parser**",
"instructions": [
]
},
{
"title": "1. Configure the Syslog server",
"description": "You will first need a **linux Syslog** server that Alsid for AD will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you whish but it is recommended to be able to output AFAD logs in a separate file."
},
{
"title": "2. Configure Alsid to send logs to your Syslog server",
"description": "On your **Alsid for AD** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a seperate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in AFAD)."
},
{
"title": "3. Install and onboard the Microsoft agent for Linux",
"description": "",
"instructions": [
{
"parameters": {
"title": "Choose where to install the agent:",
"instructionSteps": [
{
"title": "Install agent on Azure Linux Virtual Machine",
"description": "Select the machine to install the agent on and then click **Connect**.",
"instructions": [
{
"parameters": {
"linkType": "InstallAgentOnLinuxVirtualMachine"
},
"type": "InstallAgent"
}
]
},
{
"title": "Install agent on a non-Azure Linux Machine",
"description": "Download the agent on the relevant machine and follow the instructions.",
"instructions": [
{
"parameters": {
"linkType": "InstallAgentOnLinuxNonAzure"
},
"type": "InstallAgent"
}
]
}
]
},
"type": "InstructionStepsGroup"
}
]
},
{
"title": "4. Configure the logs to be collected by the agents",
"description": "Configure the agent to collect the logs.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Custom Logs**.\n2. Select **Apply below configuration to my machines** and click **Add**.\n4. Upload a sample AFAD Syslog file from the **Linux** machine running the **Syslog** server and click **Next**.\n5. Set the record delimiter to **New Line** if not already the case and click **Next**.\n6. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**.\n7. Set the **Name** to *AlsidForADLog_CL* then click **Done** (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *AlsidForADLog_CL_CL*).\n\nAll of theses steps are showcased [here](https://www.youtube.com/watch?v=JwV1uZSyXM4&feature=youtu.be) as an example",
"instructions": [
{
"parameters": {
"linkType": "OpenAdvancedWorkspaceSettings"
},
"type": "InstallAgent"
}
]
},
{
"title": "",
"description": "> You should now be able to receive logs in the *AlsidForADLog_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates."
}
]
}

View file

@ -75,7 +75,7 @@
}]
}, {
"title": "2. Forward Aruba ClearPass logs to a Syslog agent",
"description": "Configure Aruba ClearPass to forward Syslog messages in CEF format to your Azure Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."
"description": "Configure Aruba ClearPass to forward Syslog messages in CEF format to your Azure Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog.\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."
}, {
"title": "3. Validate connection",
"description": "Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python –version\n\n>2. You must have elevated permissions (sudo) on your machine",
@ -91,4 +91,4 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}]
}
}

View file

@ -58,6 +58,7 @@ syslog_ng_documantation_path = "https://www.syslog-ng.com/technical-documents/do
rsyslog_documantation_path = "https://www.rsyslog.com/doc/master/configuration/actions.html"
log_forwarder_deployment_documentation = "https://docs.microsoft.com/azure/sentinel/connect-cef-agent?tabs=rsyslog"
tcpdump_time_restriction = 60
file_read_permissions_octal_representation = 4
mock_message_max = 5
portal_auto_sync_disable_file = "omshelper_disable"
@ -455,6 +456,23 @@ def file_contains_string(file_tokens, file_path):
return all(check_token(token, content) for token in file_tokens)
def check_file_read_permissions(file_path, workspace_id):
# get the octal representation of the file permissions
get_permissions = subprocess.Popen(["stat", "-c", "'%a'", file_path], stdout=subprocess.PIPE)
o, e = get_permissions.communicate()
if e is not None:
print_warning("Unable to verify file permissions for path:" + file_path)
return False
octal_permissions = o.decode('UTF-8').strip("\'\n")
other_permissions = octal_permissions[-1]
if int(other_permissions) < file_read_permissions_octal_representation:
# prompt the user to change the file permissions to default file permissions in consts
print_error("Wrong permissions for the file: {} \nTo fix this please run the following command:"
" \"chmod o+r {} && sudo /opt/microsoft/omsagent/bin/service_control restart {}\"".format(file_path, file_path, workspace_id))
return False
print_ok("File permissions valid")
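# Editor's illustration (not part of the script): for a file whose mode is 640, the
# "stat -c '%a'" call above reports 640; the last octal digit is the "others" class,
# and 0 < 4 means world read access is missing, so the chmod o+r remediation is printed.
# A mode such as 644 (others = 4, i.e. read) passes the check and reaches print_ok().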
def sudo_read_file_contains_string(file_tokens, file_path):
restart = subprocess.Popen(["sudo", "cat", file_path], stdout=subprocess.PIPE)
o, e = restart.communicate()
@ -526,6 +544,7 @@ def omsagent_security_event_conf_validation(workspace_id):
print_error("Could not locate necessary port and ip in the agent's configuration.\npath:" + path)
else:
print_ok("Omsagent event configuration content is valid")
check_file_read_permissions(path, workspace_id)
def check_daemon(daemon_name):

View file

@ -0,0 +1,123 @@
{
"id": "CiscoMeraki",
"title": "Cisco Meraki",
"publisher": "Cisco",
"descriptionMarkdown": "The [Cisco Meraki](https://meraki.cisco.com/) connector allows you to easily connect your Cisco Meraki (MX/MR/MS) logs with Azure Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.",
"additionalRequirementBanner":"These queries and workbooks are dependent on a parser based on a Kusto Function to work as expected. Follow the steps to use this Kusto functions alias **CiscoMeraki** in queries and workbooks. [Follow these steps to get this Kusto functions.](https://aka.ms/sentinel-ciscomeraki-parser)",
"graphQueries": [
{
"metricName": "Total data received",
"legend": "CiscoMeraki",
"baseQuery": "CiscoMeraki"
}
],
"sampleQueries": [
{
"description" : "Total Events by Log Type",
"query": "CiscoMeraki \n | summarize count() by LogType"
},
{
"description" : "Top 10 Blocked Connections",
"query": "CiscoMeraki \n | where LogType == \"security_event\" \n | where Action == \"block\" \n | summarize count() by SrcIpAddr, DstIpAddr, Action, Disposition \n | top 10 by count_"
}
],
"dataTypes": [
{
"name": "Syslog (CiscoMeraki)",
"lastDataReceivedQuery": "CiscoMeraki \n | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"CiscoMeraki \n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"availability": {
"status": 1
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "write permission is required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"write": true,
"delete": true
}
}
],
"customs": [
{
"name": "Cisco Meraki",
"description": "must be configured to export logs via Syslog"
}
]
},
"instructionSteps": [
{
"title": "",
"description": ">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow the steps](https://aka.ms/sentinel-ciscomeraki-parser) to use the Kusto function alias, **CiscoMeraki**",
"instructions": [
]
},
{
"title": "1. Install and onboard the agent for Linux",
"description": "Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents.",
"instructions": [
{
"parameters": {
"title": "Choose where to install the agent:",
"instructionSteps": [
{
"title": "Install agent on Azure Linux Virtual Machine",
"description": "Select the machine to install the agent on and then click **Connect**.",
"instructions": [
{
"parameters": {
"linkType": "InstallAgentOnLinuxVirtualMachine"
},
"type": "InstallAgent"
}
]
},
{
"title": "Install agent on a non-Azure Linux Machine",
"description": "Download the agent on the relevant machine and follow the instructions.",
"instructions": [
{
"parameters": {
"linkType": "InstallAgentOnLinuxNonAzure"
},
"type": "InstallAgent"
}
]
}
]
},
"type": "InstructionStepsGroup"
}
]
},
{
"title": "2. Configure the logs to be collected",
"description": "Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**.",
"instructions": [
{
"parameters": {
"linkType": "OpenAdvancedWorkspaceSettings"
},
"type": "InstallAgent"
}
]
},
{
"title": "3. Configure and connect the Cisco Meraki device(s)",
"description":"[Follow these instructions](https://documentation.meraki.com/General_Administration/Monitoring_and_Reporting/Meraki_Device_Reporting_-_Syslog%2C_SNMP_and_API) to configure the Cisco Meraki device(s) to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."
}
]
}

View file

@ -0,0 +1,114 @@
{
"id": "DarktraceDarktrace",
"title": "Darktrace",
"publisher": "Darktrace",
"descriptionMarkdown": "The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Azure Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Azure Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.",
"graphQueries": [
{
"metricName": "Total data received",
"legend": "Darktrace",
"baseQuery": "CommonSecurityLog\n| where DeviceVendor == \"Darktrace\"\n| where DeviceProduct == \"Darktrace\""
}
],
"sampleQueries": [
{
"description": "first 10 most recent data breaches",
"query": "CommonSecurityLog\n| where DeviceVendor == \"Darktrace\"\n| where DeviceProduct == \"Darktrace\"\n| order by TimeGenerated desc \n| limit 10"
}
],
"dataTypes": [
{
"name": "CommonSecurityLog (Darktrace)",
"lastDataReceivedQuery": "CommonSecurityLog\n| where DeviceVendor == \"Darktrace\"\n| where DeviceProduct == \"Darktrace\"\n| summarize Time = max(TimeGenerated)\n| where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"CommonSecurityLog\n| where DeviceVendor == \"Darktrace\"\n| where DeviceProduct == \"Darktrace\"\n| summarize LastLogReceived = max(TimeGenerated)\n| project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"availability": {
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"read": true,
"write": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
]
},
"instructionSteps": [
{
"title": "1. Linux Syslog agent configuration",
"description": "Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace",
"innerSteps": [
{
"title": "1.1 Select or create a Linux machine",
"description": "Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds."
},
{
"title": "1.2 Install the CEF collector on the Linux machine",
"description": "Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine.",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId",
"PrimaryKey"
],
"label": "Run the following command to install and apply the CEF collector:",
"value": "sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}"
},
"type": "CopyableLabel"
}
]
}
]
},
{
"title": "2. Forward Common Event Format (CEF) logs to Syslog agent",
"description": "Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address."
},
{
"title": "3. Validate connection",
"description": "Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Run the following command to validate your connectivity:",
"value": "sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}"
},
"type": "CopyableLabel"
}
]
},
{
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
}

Binary file not shown.

View file

@ -0,0 +1,137 @@
{
"id": "ESETEnterpriseInspector",
"title": "ESET Enterprise Inspector (preview)",
"publisher": "ESET Netherlands",
"descriptionMarkdown": "This connector will ingest detections from [ESET Enterprise Inspector](https://www.eset.com/int/business/enterprise-inspector/) using the provided [REST API](https://help.eset.com/eei/1.5/en-US/api.html). This API is present in ESET Enterprise Inspector version 1.4 and later.",
"graphQueries": [
{
"metricName": "Total data received",
"legend": "ESETEnterpriseInspector_CL",
"baseQuery": "ESETEnterpriseInspector_CL"
}
],
"sampleQueries": [
{
"description" : "Top 10 rules triggered",
"query": "ESETEnterpriseInspector_CL\n | where type_s == \"RuleActivated\"\n | summarize count() by ruleName_s\n | top 10 by count_"
},
{
"description" : "Detection totals by type",
"query": "ESETEnterpriseInspector_CL\n | summarize count() by type_s"
},
{
"description" : "Top 10 users with detections",
"query": "ESETEnterpriseInspector_CL\n | extend Username = tolower(processUser_s)\n | where Username !contains \"nt authority\"\n | summarize count() by Username\n | top 10 by count_"
},
{
"description" : "Top 10 systems with detections",
"query": "ESETEnterpriseInspector_CL\n | extend System = tolower(computerName_s)\n | summarize count() by System\n | top 10 by count_"
},
{
"description" : "High severity detections",
"query": "ESETEnterpriseInspector_CL\n | where severityScore_d >= 80\n | order by id_d desc"
},
{
"description" : "Top 10 threats",
"query": "ESETEnterpriseInspector_CL\n | where isnotempty(threatName_s)\n | summarize count() by threatName_s, type_s\n | top 10 by count_"
},
{
"description" : "Detections on new executables",
"query": "ESETEnterpriseInspector_CL\n | where moduleLgAge_d <= 7"
},
{
"description" : "User login outside regular office hours",
"query": "let startOfDay = datetime_part(\"Hour\", datetime(\"09:00\"));\n let endOfDay = datetime_part(\"Hour\", datetime(\"17:00\"));\n let saturday = time(6);\n let sunday = time(0);\n ESETEnterpriseInspector_CL\n | where tolower(ruleName_s) contains \"remote user login\" or tolower(ruleName_s) contains \"detected rdp communication\"\n | where datetime_part(\"Hour\", creationTime_t) !between(startOfDay .. endOfDay)\n or dayofweek(creationTime_t) == saturday or dayofweek(creationTime_t) == sunday"
}
],
"dataTypes": [
{
"name": "ESETEnterpriseInspector_CL",
"lastDataReceivedQuery": "ESETEnterpriseInspector_CL\n | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"ESETEnterpriseInspector_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"availability": {
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"write": true,
"read": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
],
"customs": [
{
"name": "Access to ESET Security Management Center / ESET PROTECT console",
"description": "Permissions to add users"
}
]
},
"instructionSteps": [
{
"title": "",
"description": ">**NOTE:** This connector uses Azure Functions to connect to ESET Enterprise Inspector to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details."
},
{
"title": "",
"description": ">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App."
},
{
"title": "Step 1 - Create an API user",
"description": "1. Log into the ESET Security Management Center / ESET PROTECT console with an administrator account, select the **More** tab and the **Users** subtab. \n2. Click on the **ADD NEW** button and add a **native user**.\n3. Create a new user for the API account. **Optional:** Select a **Home group** other than **All** to limit what detections are ingested. \n4. Under the **Permission Sets** tab, assign the **Enterprise Inspector reviewer permission set**.\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5."
},
{
"title": "Step 2 - Copy Workspace ID and Key",
"description": ">**IMPORTANT:** Before deploying the ESET Enterprise Inspector connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.",
"instructions":[
{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Workspace ID"
},
"type": "CopyableLabel"
},
{
"parameters": {
"fillWith": [
"PrimaryKey"
],
"label": "Primary Key"
},
"type": "CopyableLabel"
}
]
},
{
"title": "Step 3 - Deploy the Azure Resource Manager (ARM) Template",
"description": "Use this method for automated deployment of the ESET Enterprise Inspector connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelesetenterpriseinspectorazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , enter the **Enterprise Inspector base URL** and the **first ID** to start ingesting detections from.\n - The defailt starting ID is **0**. This means that all detections will be ingested. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
}
]
}

View file

@ -0,0 +1,80 @@
# Title: ESET Enterprise Inspector Data Connector
# Language: Python
# Version: 1.0
# Author(s): ESET Netherlands - Donny Maasland
# Last Modified: 11/25/2020
# Comment: Initial release
#
# DESCRIPTION
# This Function App calls the ESET Enterprise Inspector API (https://help.eset.com/eei/1.5/en-US/api.html)
# and gathers all new detections that have been triggered.
#
# The response from the ESET Enterprise Inspector API is received in JSON format. This function will build
# the signature and authorization header needed to post the data to the Log Analytics workspace via
# the HTTP Data Connector API. The Function App will post all detections to the ESETEnterpriseInspector_CL
# table in Log Analytics.
import datetime
import logging
import json
import os
import azure.functions as func
from distutils.util import strtobool
from enterpriseinspector import EnterpriseInspector
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def main(eeitimer: func.TimerRequest, inputblob: func.InputStream, outputblob: func.Out[func.InputStream], outputqueue: func.Out[str]):
utc_timestamp = datetime.datetime.utcnow().replace(
tzinfo=datetime.timezone.utc).isoformat()
if eeitimer.past_due:
logging.info('The timer is past due!')
# Set variables
base_url = os.environ['baseUrl']
username = os.environ['eeiUsername']
password = os.environ['eeiPassword']
domain = bool(strtobool(os.environ['domainLogin']))
verify = bool(strtobool(os.environ['verifySsl']))
start_from_id = int(os.environ['startFromID'])
# Connect to ESET Enterprise Inspector server
ei = EnterpriseInspector(
base_url=base_url,
username=username,
password=password,
domain=domain,
verify=verify
)
# Get last processed detection id
if inputblob:
last_id = json.loads(inputblob.read())['id']
else:
last_id = start_from_id
# Get new detections
detections = ei.detections(last_id)
# Get detection details and send to queue
if detections:
logging.info('Processing detections..')
outputqueue.set(
json.dumps(detections)
)
# Write the last successfully processed detection to blob storage
latest_detection = detections[-1]
outputblob.set(
json.dumps({
'id': latest_detection['id']
})
)
logging.info('Done processing detections.')
logging.info('Python timer trigger function ran at %s', utc_timestamp)

View file

@ -0,0 +1,32 @@
{
"scriptFile": "__init__.py",
"bindings": [
{
"name": "eeitimer",
"type": "timerTrigger",
"direction": "in",
"schedule": "0 */5 * * * *"
},
{
"name": "inputblob",
"type": "blob",
"path": "enterprise-inspector-detections/last.json",
"connection": "AzureWebJobsStorage",
"direction": "in"
},
{
"name": "outputblob",
"type": "blob",
"path": "enterprise-inspector-detections/last.json",
"connection": "AzureWebJobsStorage",
"direction": "out"
},
{
"type": "queue",
"direction": "out",
"name": "outputqueue",
"queueName": "enterprise-inspector-detections",
"connection": "AzureWebJobsStorage"
}
]
}

View file

@ -0,0 +1,62 @@
# Title: ESET Enterprise Inspector Data Connector
# Language: Python
# Version: 1.0
# Author(s): ESET Netherlands - Donny Maasland
# Last Modified: 11/25/2020
# Comment: Initial release
#
# DESCRIPTION
# This Function App calls the ESET Enterprise Inspector API (https://help.eset.com/eei/1.5/en-US/api.html)
# and gathers all new detections that have been triggered.
#
# The response from the ESET Enterprise Inspector API is received in JSON format. This function will build
# the signature and authorization header needed to post the data to the Log Analytics workspace via
# the HTTP Data Connector API. The Function App will post all detections to the ESETEnterpriseInspector_CL
# table in Log Analytics.
import logging
import json
import os
import azure.functions as func
from datacollector import post_data
from distutils.util import strtobool
from enterpriseinspector import EnterpriseInspector
def main(eeimsg: func.QueueMessage) -> None:
detection = json.loads(eeimsg.get_body().decode('utf-8'))
logging.info(f"Queue trigger function processed item: {detection['id']}")
# Set variables
base_url = os.environ['baseUrl']
username = os.environ['eeiUsername']
password = os.environ['eeiPassword']
domain = bool(strtobool(os.environ['domainLogin']))
verify = bool(strtobool(os.environ['verifySsl']))
workspace_id = os.environ['workspaceId']
workspace_key = os.environ['workspaceKey']
log_type = 'ESETEnterpriseInspector'
# Connect to ESET Enterprise Inspector server
ei = EnterpriseInspector(
base_url=base_url,
username=username,
password=password,
domain=domain,
verify=verify
)
# Get detection details
detection_details = ei.detection_details(detection)
# Send data via data collector API
body = json.dumps(detection_details)
post_data(
customer_id=workspace_id,
shared_key=workspace_key,
body=body,
log_type=log_type
)

View file

@ -0,0 +1,12 @@
{
"scriptFile": "__init__.py",
"bindings": [
{
"name": "eeimsg",
"type": "queueTrigger",
"direction": "in",
"queueName": "enterprise-inspector-detections",
"connection": "AzureWebJobsStorage"
}
]
}

View file

@ -0,0 +1,237 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"FunctionName": {
"defaultValue": "EsetEI",
"type": "string",
"maxLength": 24
},
"WorkspaceID": {
"type": "string",
"defaultValue": "<workspaceID>"
},
"WorkspaceKey": {
"type": "securestring"
},
"baseUrl": {
"type": "string",
"defaultValue": "https://<Enterprise Inspector URL>"
},
"eeiUsername": {
"type": "string",
"defaultValue": "<eeiUsername>"
},
"eeiPassword": {
"type": "securestring"
},
"domainLogin": {
"type": "string",
"defaultValue": "false",
"allowedValues": [
"true",
"false"
]
},
"verifySsl": {
"type": "string",
"defaultValue": "true",
"allowedValues": [
"true",
"false"
]
},
"startFromID": {
"type": "int",
"defaultValue": 0
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]"
},
"resources": [
{
"type": "Microsoft.Insights/components",
"apiVersion": "2015-05-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"kind": "web",
"properties": {
"Application_Type": "web",
"ApplicationId": "[variables('FunctionName')]"
}
},
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"kind": "StorageV2",
"properties": {
"networkAcls": {
"bypass": "AzureServices",
"virtualNetworkRules": [
],
"ipRules": [
],
"defaultAction": "Allow"
},
"supportsHttpsTrafficOnly": true,
"encryption": {
"services": {
"file": {
"keyType": "Account",
"enabled": true
},
"blob": {
"keyType": "Account",
"enabled": true
}
},
"keySource": "Microsoft.Storage"
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": [
]
},
"deleteRetentionPolicy": {
"enabled": false
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": [
]
}
}
},
{
"type": "Microsoft.Web/sites",
"apiVersion": "2018-11-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
"kind": "functionapp,linux",
"identity": {
"type": "SystemAssigned"
},
"properties": {
"name": "[variables('FunctionName')]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true,
"reserved": true
},
"resources": [
{
"apiVersion": "2018-11-01",
"type": "config",
"name": "appsettings",
"dependsOn": [
"[concat('Microsoft.Web/sites/', variables('FunctionName'))]"
],
"properties": {
"FUNCTIONS_EXTENSION_VERSION": "~3",
"FUNCTIONS_WORKER_RUNTIME": "python",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"workspaceId": "[parameters('WorkspaceId')]",
"workspaceKey": "[parameters('WorkspaceKey')]",
"baseUrl": "[parameters('baseUrl')]",
"eeiUsername": "[parameters('eeiUsername')]",
"eeiPassword": "[parameters('eeiPassword')]",
"domainLogin": "[parameters('domainLogin')]",
"verifySsl": "[parameters('verifySsl')]",
"startFromID": "[parameters('startFromID')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/sentinel-esetenterpriseinspector-functionapp"
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.net')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"shareQuota": 5120
}
}
]
}

View file

@ -0,0 +1,48 @@
# Taken from https://docs.microsoft.com/azure/azure-monitor/platform/data-collector-api
import json
import requests
import datetime
import hashlib
import hmac
import base64
from enterpriseinspector.eifunctions import exit_error
#####################
######Functions######
#####################
# Build the API signature
def build_signature(customer_id, shared_key, date, content_length, method, content_type, resource):
x_headers = 'x-ms-date:' + date
string_to_hash = method + "\n" + str(content_length) + "\n" + content_type + "\n" + x_headers + "\n" + resource
bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
decoded_key = base64.b64decode(shared_key)
encoded_hash = base64.b64encode(hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
authorization = "SharedKey {}:{}".format(customer_id,encoded_hash)
return authorization
# Build and send a request to the POST API
def post_data(customer_id, shared_key, body, log_type):
method = 'POST'
content_type = 'application/json'
resource = '/api/logs'
rfc1123date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
content_length = len(body)
signature = build_signature(customer_id, shared_key, rfc1123date, content_length, method, content_type, resource)
uri = 'https://' + customer_id + '.ods.opinsights.azure.com' + resource + '?api-version=2016-04-01'
headers = {
'content-type': content_type,
'Authorization': signature,
'Log-Type': log_type,
'x-ms-date': rfc1123date
}
response = requests.post(uri,data=body, headers=headers)
if (response.status_code >= 200 and response.status_code <= 299):
print('Accepted')
else:
exit_error(f'Response code "{response.status_code}" while sending data through data-collector API.')
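A minimal usage sketch of the helper above (not shipped with the Function App): the workspace ID, shared key and record below are placeholders, and in the connector itself post_data() is invoked from the queue-triggered function with the enriched detection details.

import json
from datacollector import post_data

# Placeholder values; a real call needs the Log Analytics workspace ID and its base64 primary key.
sample_records = [{"id": 1001, "type": "RuleActivated", "severityScore": 80}]
post_data(
    customer_id="00000000-0000-0000-0000-000000000000",  # workspace ID (placeholder)
    shared_key="cGxhY2Vob2xkZXIta2V5",                    # base64-encoded workspace key (placeholder)
    body=json.dumps(sample_records),
    log_type="ESETEnterpriseInspector"                    # records land in ESETEnterpriseInspector_CL
)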

View file

@ -0,0 +1,7 @@
import requests
from enterpriseinspector.enterpriseinspector import EnterpriseInspector
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Disable default SSL warning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

View file

@ -0,0 +1,7 @@
import logging
def exit_error(err, exception=True):
logging.error(err)
if exception:
raise Exception(err)

View file

@ -0,0 +1,180 @@
import requests
import math
import logging
from urllib.parse import urljoin
from enterpriseinspector.eifunctions import exit_error
class EnterpriseInspector:
"""A small class used for communicating with the ESET Enterprise Inspector server"""
def __init__(self, base_url, username, password, domain=False, verify=True):
self.base_url = base_url
self.username = username
self.password = password
self.domain = domain
self.verify = verify
self.page_size = 100
self.token = None
if not self.verify:
logging.warning(
'Verification of SSL certificate has been disabled!'
)
self.login()
def login(self):
json = {
'username': self.username,
'password': self.password,
'domain' : self.domain
}
self.api_call(
endpoint='authenticate',
method='PUT',
json=json
)
if not self.token:
exit_error('Authentication failure')
def api_call(self, endpoint, method='GET', json={}, headers={}, params={}):
# Only need 'GET' and 'PUT' for now
if method.upper() == 'GET':
req = requests.get
elif method.upper() == 'PUT':
req = requests.put
else:
req = requests.get
# Add authorization token to request if present
if self.token:
headers.update({
'Authorization': f'Bearer {self.token}'
})
# Remove any extra '/' characters that would cause a 400
url = urljoin(
self.base_url,
f'/api/v1/{endpoint}',
)
resp = req(
url=url,
json=json,
headers=headers,
params=params,
verify=self.verify
)
if resp.status_code != 200:
exit_error(
f'API call failed: [{resp.status_code}] {resp.content}'
)
# Token might get updated between requests
if 'X-Security-Token' in resp.headers:
self.token = resp.headers['X-Security-Token']
return resp
def detections(self, last_id):
params = {
'$orderBy': 'id asc',
'$filter': f'id gt {last_id}',
'$count': 1,
}
# Get the first batch of detections
logging.info('Getting list of detections..')
resp = self.api_call('detections', params=params).json()
count = resp['count']
detections = resp['value']
pages = math.ceil(count / self.page_size)
logging.info(f'Found {count} detection(s).')
# Check if there are more pages
if pages > 1:
logging.info(f'Detections spread over {pages} pages.')
for skip in range(self.page_size, count, self.page_size):
current_page = int(skip / self.page_size + 1)
logging.info(f'Getting page {current_page}.')
params.update({
'$skip': skip,
'$count': 0
})
resp = self.api_call('detections', params=params).json()
detections += resp['value']
return detections
def enrich(self, detection_details):
# Resolve "moduleSignatureType"
signature_types = {
90: 'Trusted',
80: 'Valid',
75: 'AdHoc',
70: 'None',
60: 'Invalid',
0: 'Unknown'
}
try:
signature_type = signature_types[detection_details['moduleSignatureType']]
except KeyError:
signature_type = signature_types[0]
# Resolve "type"
types = {
0: 'UnknownAlarm',
1: 'RuleActivated',
2: 'MalwareFoundOnDisk',
3: 'MalwareFoundInMemory',
4: 'ExploitDetected',
5: 'FirewallDetection',
7: 'BlockedAddress',
8: 'CryptoBlockerDetection',
}
try:
detection_type = types[detection_details['type']]
except KeyError:
detection_type = types[0]
# Create deeplink
deep_link = urljoin(
self.base_url,
f"/console/detection/{detection_details['id']}",
)
detection_details.update({
'type': detection_type,
'moduleSignatureType': signature_type,
'deepLink': deep_link
})
return detection_details
def detection_details(self, detection):
# Get detection details
resp = self.api_call(f"detections/{detection['id']}").json()
# Enrich detection details
detection_details = self.enrich(resp['DETECTION'])
return detection_details
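For orientation, a minimal local sketch of how this class is driven (the URL and credentials are placeholders; the call pattern mirrors the timer- and queue-triggered functions above):

from enterpriseinspector import EnterpriseInspector

ei = EnterpriseInspector(
    base_url='https://eei.example.local',  # hypothetical Enterprise Inspector server
    username='api-user',                   # hypothetical API account
    password='********',
    domain=False,
    verify=True
)
# detections() pages through everything newer than the given ID (100 records per page).
new_detections = ei.detections(last_id=0)
for detection in new_detections[:5]:
    details = ei.detection_details(detection)  # enriched with type, moduleSignatureType and deepLink
    print(details['id'], details['type'], details['deepLink'])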

View file

@ -0,0 +1,6 @@
# DO NOT include azure-functions-worker in this file
# The Python Worker is managed by Azure Functions platform
# Manually managing azure-functions-worker may cause unexpected issues
azure-functions
requests

View file

@ -2264,7 +2264,7 @@
"type": "string"
},
"permalink": {
"type": "string"
"type": ["string", "null"]
},
"publishedAt": {
"type": "string"

View file

@ -1,4 +0,0 @@
.git*
.vscode
local.settings.json
test

6
DataConnectors/GithubFunction/.gitignore vendored
View file

@ -1,6 +0,0 @@
# Azure Functions artifacts
bin
obj
appsettings.json
local.settings.json

Binary file not shown.

View file

@ -4,7 +4,7 @@
"name": "Timer",
"type": "timerTrigger",
"direction": "in",
"schedule": "0 */5 * * * *"
"schedule": "%Schedule%"
}
]
}

File diff suppressed because one or more lines are too long

View file

@ -11,7 +11,6 @@
# Authenticate with Azure PowerShell using MSI.
# Remove this if you are not planning on using MSI or Azure PowerShell.
Import-Module Az.Accounts -RequiredVersion '1.9.5'
if ($env:MSI_SECRET) {
Disable-AzContextAutosave -Scope Process
Connect-AzAccount -Identity

View file

@ -2,6 +2,5 @@
# See https://aka.ms/functionsmanageddependency for additional information.
#
@{
'Az' = '4.8.0'
'Az.Accounts' = '1.9.5'
'Az' = '5.*'
}

View file

@ -0,0 +1,13 @@
## 1.1
- Added CHANGELOG.MD to track future code changes
- Updated "lastrun-Audit.json" to support multiple org's
- Implemented logic to support multiple org's in Function App code
- Implemented logic to support Azure Gov (.US) Tenants in Function App code
- Created new ARM Template to Support Azure Gov (.US) Tenants
- Updated "function.json" inorder to accept TimeTrigger(CRON Expression) from Function App environment variable. Providing more flexibility to the user to change schedule to trigger Function App
- Updated README.MD
- Updated File structure
## 1.0
- Converted GitHub Data connector from Logic Apps to Azure Function
- Splitting the data if it is more than 25MB

View file

@ -118,7 +118,7 @@
},
{
"title": "Option 1 - Azure Resource Manager (ARM) Template",
"description": "This method provides an automated deployment of the GitHub Data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fandedevsecops%2FAzure-Sentinel%2Faz-func-github-dataconnector%2FDataConnectors%2FGithubFunction%2Fazuredeploy_GitHubData.json)\n\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Personal Access Token** \n> - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
"description": "This method provides an automated deployment of the GitHub Data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazurecomdeploy_dotcomtenants.json)\t[![Deploy To Azure Gov](https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/1-CONTRIBUTION-GUIDE/images/deploytoazuregov.png)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazuregovdeploy_dotustenants.json)\t(**.us Tenant**)\n\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Personal Access Token** \n> - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
},
{
"title": "Option 2 - Manual Deployment of Azure Functions",

View file

@ -0,0 +1,3 @@
[
{"org": ""}
]

Binary file not shown.

View file

@ -1,3 +0,0 @@
[
{"org": "ndicolademo"}
]

View file

@ -29,6 +29,13 @@
"metadata": {
"description": "Specifies the Log Analytics Workspace Key."
}
},
"FunctionSchedule": {
"type": "string",
"defaultValue": "0 */10 * * * *",
"metadata": {
"description": "For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)(See the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 1 hour is `0 0 * * * *`. This, in plain text, means: When seconds is equal to 0, minutes is divisible by 5, for any hour, day of the month, month, day of the week, or year"
}
}
},
"variables": {
@ -185,6 +192,7 @@
"TMPDIR": "D:\\local\\Temp",
"WorkspaceId": "[parameters('WorkspaceId')]",
"WorkspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('LogAnalyticsWorkspaceKey')).secretUriWithVersion, ')')]",
"Schedule": "[parameters('FunctionSchedule')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/githubazurefunctionzip"
}
}

View file

@ -0,0 +1,327 @@
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"FunctionName": {
"defaultValue": "GitHubLogs",
"type": "string",
"metadata": {
"description": "Specifies the name of the Function App."
}
},
"PersonalAccessToken": {
"defaultValue": "Enter the GitHub Personal Access Token (PAT)",
"type": "string",
"metadata": {
"description": "Specifies GitHub Enterprise Personal Access Token."
}
},
"WorkspaceId": {
"type": "string",
"defaultValue": "<WorkspaceId>",
"metadata": {
"description": "Specifies the Log Analytics Workspace Id."
}
},
"WorkspaceKey": {
"type": "string",
"defaultValue": "<WorkspaceKey>",
"metadata": {
"description": "Specifies the Log Analytics Workspace Key."
}
},
"FunctionSchedule": {
"type": "string",
"defaultValue": "0 */10 * * * *",
"metadata": {
"description": "For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)(See the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 1 hour is `0 0 * * * *`. This, in plain text, means: When seconds is equal to 0, minutes is divisible by 5, for any hour, day of the month, month, day of the week, or year"
}
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"KeyVaultName": "[tolower(concat('githubkv', uniqueString(resourceGroup().id, subscription().id)))]",
"GitAPIToken": "GitAPIToken",
"LogAnalyticsWorkspaceKey": "LogAnalyticsWorkspaceKey",
"StorageContainerName": "github-repo-logs"
},
"resources": [
{
"type": "Microsoft.Insights/components",
"apiVersion": "2015-05-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"kind": "web",
"properties": {
"Application_Type": "web",
"ApplicationId": "[variables('FunctionName')]"
}
},
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"kind": "StorageV2",
"properties": {
"networkAcls": {
"bypass": "AzureServices",
"virtualNetworkRules": [
],
"ipRules": [
],
"defaultAction": "Allow"
},
"supportsHttpsTrafficOnly": true,
"encryption": {
"services": {
"file": {
"keyType": "Account",
"enabled": true
},
"blob": {
"keyType": "Account",
"enabled": true
}
},
"keySource": "Microsoft.Storage"
}
}
},
{
"type": "Microsoft.Web/serverfarms",
"apiVersion": "2018-02-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Y1",
"tier": "Dynamic"
},
"kind": "functionapp",
"properties": {
"name": "[variables('FunctionName')]",
"workerSize": "0",
"workerSizeId": "0",
"numberOfWorkers": "1"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": [
]
},
"deleteRetentionPolicy": {
"enabled": false
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": [
]
}
}
},
{
"type": "Microsoft.Web/sites",
"apiVersion": "2018-11-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
"kind": "functionapp",
"identity": {
"type": "SystemAssigned"
},
"properties": {
"name": "[variables('FunctionName')]",
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true
},
"resources": [
{
"apiVersion": "2018-11-01",
"type": "config",
"name": "appsettings",
"dependsOn": [
"[concat('Microsoft.Web/sites/', variables('FunctionName'))]",
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('GitAPIToken'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('LogAnalyticsWorkspaceKey'))]"
],
"properties": {
"FUNCTIONS_EXTENSION_VERSION": "~3",
"FUNCTIONS_WORKER_RUNTIME": "powershell",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.usgovcloudapi.net')]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.usgovcloudapi.net')]",
"WEBSITE_CONTENTSHARE": "[toLower(variables('FunctionName'))]",
"PersonalAccessToken": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('GitAPIToken')).secretUriWithVersion, ')')]",
"TMPDIR": "D:\\local\\Temp",
"WorkspaceId": "[parameters('WorkspaceId')]",
"WorkspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('LogAnalyticsWorkspaceKey')).secretUriWithVersion, ')')]",
"Schedule": "[parameters('FunctionSchedule')]",
"AZURE_TENANT": ".us",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/githubazurefunctionzip"
}
}
]
},
{
"type": "Microsoft.KeyVault/vaults",
"apiVersion": "2016-10-01",
"name": "[variables('KeyVaultName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"sku": {
"family": "A",
"name": "Standard"
},
"tenantId": "[subscription().tenantId]",
"accessPolicies": [
{
"tenantId": "[subscription().tenantId]",
"objectId": "[reference(resourceId('Microsoft.Web/sites', variables('FunctionName')),'2019-08-01', 'full').identity.principalId]",
"permissions": {
"secrets": [ "get",
"list"
]
}
}
],
"enabledForDeployment": false,
"enabledForDiskEncryption": false,
"enabledForTemplateDeployment": true,
"enableSoftDelete": true
},
"resources": [
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('GitAPIToken')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('PersonalAccessToken')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
},
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('LogAnalyticsWorkspaceKey')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('WorkspaceKey')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.us')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), concat('/default/', variables('StorageContainerName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"shareQuota": 5120
}
}
]
}
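For reference, the Azure Government template above can also be deployed from the command line instead of the portal **Deploy to Azure Gov** button referenced elsewhere in this commit. This is only an illustrative sketch: it assumes the Az PowerShell modules, a sign-in against the Azure US Government cloud, and an existing resource group; all parameter values are placeholders, and the template URI is simply the raw path of the file added above.

```powershell
# Sketch only: sign in to the Azure Government cloud first.
Connect-AzAccount -Environment AzureUSGovernment

# Deploy the template with placeholder parameter values.
New-AzResourceGroupDeployment `
    -ResourceGroupName 'MyResourceGroup' `
    -TemplateUri 'https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/GithubFunction/azuregovdeploy_dotustenants.json' `
    -TemplateParameterObject @{
        FunctionName        = 'GitHubLogs'
        PersonalAccessToken = '<GitHub PAT>'
        WorkspaceId         = '<WorkspaceId>'
        WorkspaceKey        = '<WorkspaceKey>'
        FunctionSchedule    = '0 */10 * * * *'
    }
```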

View file

@ -1,4 +0,0 @@
{
"$schema": "http://json.schemastore.org/proxies",
"proxies": {}
}

View file

@ -8,61 +8,126 @@ Author: Nicholas DiCola, Sreedhar Ande
Following are the configuration steps to deploy Function App.
## **Pre-requisites**
A GitHub API Token is required. See the documentation to learn more about the [GitHub Personal Access Token](https://github.com/settings/tokens/).
## Configuration Steps
1. Generate a GitHub [Personal Access Token](https://github.com/settings/tokens). GitHub user settings -> Developer settings -> Personal access tokens.
2. Deploy the ARM template and fill in the parameters.
```
"PersonalAccessToken": This is the GITHUB PAT
"Workspace Id": The Sentinel Log Analytics Workspace Id
"Workspace Key": The Sentinel Log Analytics Workspace Key
```
4. There are two json files (ORGS.json and lastrun-Audit.json).
5. Edit the ORGS.json file and update "org": "sampleorg" and replace sample org with your org name. If you have additional orgs, add another line
```
{"org": "sampleorg1"}
{"org": "sampleorg2"}
.
.
.
```
for each org.
1. Deploy the ARM template and fill in the parameters.
```
"PersonalAccessToken": This is the GITHUB PAT
"Workspace Id": The Sentinel Log Analytics Workspace Id
"Workspace Key": The Sentinel Log Analytics Workspace Key
"Function Schedule": The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule
```
2. There are two JSON files (ORGS.json and lastrun-Audit.json) in the Function Dependencies folder
3. Edit the ORGS.json file and update "org": "sampleorg" and replace sample org with your org name.
```
If you have a single org
[
{
"org": "sampleorg1"
}
]
6. Upload the following files to the storage account "github-repo-logs" container.
```
ORGS.json
lastrun-Audit.json
```
If you have multiple orgs
[
{
"org": "sampleorg1"
},
{
"org": "sampleorg2"
},
{
"org": "sampleorg3"
}
]
```
7. PersonalAccessToken and Workspace Key will be placed as "Secrets" in the Azure KeyVault `githubkv<<uniqueid>>` with only Azure Function access policy. If you want to see/update these secrets,
4. Edit lastrun-Audit.json and update "org": "sampleorg" and replace sample org with your org name
```
a. Go to Azure KeyVault "githubkv<<uniqueid>>"
b. Click on "Access Policies" under Settings
c. Click on "Add Access Policy"
i. Configure from template : Secret Management
ii. Key Permissions : GET, LIST, SET
iii. Select Principal : <<Your Account>>
iv. Add
d. Click "Save"
```
If you have a single org
```
[
{
"org": "sampleorg1",
"lastContext": "",
"lastRun": ""
}
]
8. The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule. This sample demonstrates a simple use case of calling your function every 5 minutes.
If you have multiple orgs
9. For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)(See the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 5 minutes is `0 */5 * * * *`. This, in plain text, means: "When seconds is equal to 0, minutes is divisible by 5, for any hour, day of the month, month, day of the week, or year".
[
{
"org": "sampleorg1",
"lastContext": "",
"lastRun": ""
},
{
"org": "sampleorg2",
"lastContext": "",
"lastRun": ""
}
]
```
10. Once Azure Function App is deployed, go to `githublogs<<uniqueid>>` --> Click on "Advanced Tools" under Development Tools --> Click on Go --> You will be redirected to Web App --> Check Temp folder path. Sometimes it
will be C:\local\Temp\ or D:\local\Temp\.
5. Upload the following files to the storage account "github-repo-logs" container from the Function Dependencies folder
```
ORGS.json
lastrun-Audit.json
```
11. After finding Temp folder path, go to `githublogs<<uniqueid>>` --> Click on "Configuration" under Settings --> Click on "TMPDIR" under "Application Settings" --> update Drive (C//D) based on your deployment.
Note: Make sure the value in "TMPDIR" doesn't have "\\" at the end.
6. PersonalAccessToken and Workspace Key will be placed as "Secrets" in the Azure KeyVault `githubkv<<uniqueid>>` with only Azure Function access policy. If you want to see/update these secrets,
Note: there are two parsers [here](https://github.com/Azure/Azure-Sentinel/tree/master/Parsers/GitHub) to make the logs useful
```
a. Go to Azure KeyVault "githubkv<<uniqueid>>"
b. Click on "Access Policies" under Settings
c. Click on "Add Access Policy"
i. Configure from template : Secret Management
ii. Key Permissions : GET, LIST, SET
iii. Select Principal : <<Your Account>>
iv. Add
d. Click "Save"
```
7. The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule. This sample demonstrates a simple use case of calling your function based on the schedule provided during deployment. If you want to change
the schedule
```
a. Click on Function App "Configuration" under Settings
b. Click on "Schedule" under "Application Settings"
c. Update your own schedule using cron expression.
```
**Note: For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)(See the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 5 minutes is `0 */5 * * * *`. This, in plain text, means: "When seconds is equal to 0, minutes is divisible by 5, for any hour, day of the month, month, day of the week, or year".**
8. Once Azure Function App is deployed
```
a. Go to `githublogs<<uniqueid>>`
b. Click on "Advanced Tools" under Development Tools
c. Click on Go --> You will be redirected to Web App --> Check Temp folder path.
d. It can be either C:\local\Temp\ or D:\local\Temp\.
```
9. After finding Temp folder path
```
a. Go to `githublogs<<uniqueid>>`
b. Click on "Configuration" under Settings
c. Click on "TMPDIR" under "Application Settings"
d. Update the drive (C or D) based on your findings from Step 8.
```
**Note: Make sure the value in "TMPDIR" doesn't have "\\" at the end.**
10. **For Azure Gov customers only**, you will see an additional environment variable "Azure Tenant" under "Configuration" --> "Application Settings"; its default value is ".us"
Currently this Function App supports "Azure Gov(.US)" tenants
Ex: https://portal.azure.us
Note: there are two parsers [here](https://github.com/Azure/Azure-Sentinel/tree/master/Parsers/GitHubFunction) to make the logs useful
## Deploy the Function App template
<a href="https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazuredeploy_GitHubFunctionApp.json" target="_blank">
<a href="https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazurecomdeploy_dotcomtenants.json" target="_blank">
<img src="https://aka.ms/deploytoazurebutton"/>
</a>
<a href="https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazuredeploy_GitHubFunctionApp.json" target="_blank">
<a href="https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FGithubFunction%2Fazuregovdeploy_dotustenants.json" target="_blank">
<img src="https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/1-CONTRIBUTION-GUIDE/images/deploytoazuregov.png"/>
</a>
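The post-deployment steps above (uploading the two dependency files and granting yourself access to the Key Vault secrets) can also be scripted. The following is a minimal sketch using the Az.Storage and Az.KeyVault modules; the resource group, storage account (`githublogs<<uniqueid>>`), vault (`githubkv<<uniqueid>>`) and user names are placeholders for the resources the template creates.

```powershell
# Sketch only: replace the placeholder names with the resources created by the deployment.
$storage = Get-AzStorageAccount -ResourceGroupName 'MyResourceGroup' -Name 'githublogsxxxxxxxxx'
$ctx     = $storage.Context

# Upload the two dependency files to the "github-repo-logs" container.
Set-AzStorageBlobContent -Container 'github-repo-logs' -File '.\ORGS.json'          -Context $ctx -Force
Set-AzStorageBlobContent -Container 'github-repo-logs' -File '.\lastrun-Audit.json' -Context $ctx -Force

# Grant your own account GET/LIST/SET on secrets so the PAT and workspace key can be viewed or updated.
Set-AzKeyVaultAccessPolicy -VaultName 'githubkvxxxxxxxxx' `
    -UserPrincipalName 'you@yourtenant.onmicrosoft.com' `
    -PermissionsToSecrets get, list, set
```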

View file

@ -141,7 +141,7 @@
"WorkspaceID": "[parameters('WorkspaceID')]",
"WorkspaceKey": "[parameters('WorkspaceKey')]",
"GooglePickleString": "[parameters('GooglePickleString')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/GworkspaceReportsConnector/DataConnectors/GoogleWorkspaceReports/GWorkspaceReportsAPISentinelConn.zip?raw=true"
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GoogleWorkspaceReports/GWorkspaceReportsAPISentinelConn.zip?raw=true"
}
}
]

View file

@ -198,7 +198,7 @@
"customLogName": "[parameters('customLogName')]",
"workspaceID": "[parameters('workspaceID')]",
"workspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(resourceId('Microsoft.KeyVault/vaults/secrets', parameters('FunctionName'), 'workspaceKey')).SecretUriWithVersion, ')')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/ZoomLogs/DataConnectors/OneLogin/OneLogin_logs_template.zip?raw=true"
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/OneLogin/OneLogin_logs_template.zip?raw=true"
}
}
@ -254,4 +254,4 @@
}
}
]
}
}

View file

@ -146,7 +146,7 @@
"WorkspaceKey": "[parameters('WorkspaceKey')]",
"ProofpointClusterID": "[parameters('ProofpointClusterID')]",
"ProofpointToken": "[parameters('ProofpointToken')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/ProofpointPODConnector/DataConnectors/ProofpointPOD/ProofpointSentinelConn.zip?raw=true"
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/ProofpointPOD/ProofpointSentinelConn.zip?raw=true"
}
}
@ -202,4 +202,4 @@
}
}
]
}
}

View file

@ -76,7 +76,7 @@
"instructionSteps": [
{
"title": "",
"description": ">**NOTE:** This connector uses Azure Functions to connect to Netskope to pull logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details."
"description": ">**NOTE:** This connector uses Azure Functions to connect to Qualys KB connector to pull logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details."
}, {
"title": "",
"description": ">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow the steps](https://aka.ms/sentinel-qualyskb-parser) to use the Kusto function alias, **QualysKB**"
@ -119,7 +119,7 @@
},
{
"title": "Option 2 - Manual Deployment of Azure Functions",
"description": "This method provides the step-by-step instructions to deploy the Netskope connector manually with Azure Function."
"description": "This method provides the step-by-step instructions to deploy the Qualys KB connector manually with Azure Function."
},
{
"title": "",
@ -131,7 +131,7 @@
},
{
"title": "",
"description": "**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https://<API Server>/api/2.0` \n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. The `filterParameter` value should include a \"&\" symbol between each parameter and should not include any spaces.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n4. Once all application settings have been entered, click **Save**."
"description": "**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https://<API Server>/api/2.0` \n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. The `filterParameter` value should include a \"&\" symbol between each parameter and should not include any spaces.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n4. Once all application settings have been entered, click **Save**."
}
]
}
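The application settings listed in the manual-deployment step above can also be applied from PowerShell instead of the portal. The sketch below is illustrative only: it assumes the Az.Functions module, the app and vault names are placeholders, and the two `@Microsoft.KeyVault(SecretUri=...)` values are there purely to show the Key Vault reference format the instructions mention.

```powershell
# Sketch only: app name, resource group, vault name and secret URIs are placeholders.
Update-AzFunctionAppSetting -Name 'MyQualysKBFunctionApp' -ResourceGroupName 'MyResourceGroup' -AppSetting @{
    apiUsername      = '<API username>'
    apiPassword      = '@Microsoft.KeyVault(SecretUri=https://<vault>.vault.azure.net/secrets/apiPassword/<version>)'
    workspaceID      = '<Workspace ID>'
    workspaceKey     = '@Microsoft.KeyVault(SecretUri=https://<vault>.vault.azure.net/secrets/workspaceKey/<version>)'
    uri              = 'https://<API Server>/api/2.0'
    filterParameters = '<optional filters, joined with &>'
}
```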

View file

@ -193,7 +193,7 @@
"apiPassword": "[parameters('APIPassword')]",
"uri": "[parameters('Uri')]",
"filterParameters": "[parameters('FilterParameters(Optional)')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/chicduong/ccd-dev/blob/master/QualysKB/AzureFunctionQualysKB.zip?raw=true"
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Qualys%20KB/AzureFunctionQualysKB.zip?raw=true"
}
}
]

View file

@ -154,6 +154,7 @@ Once you have a working POC, you are ready to build, validate the data connector
* [Connector_CEF_Template.json](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Templates/Connector_CEF_template.json)
* [Connector_REST_API_template.json](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Templates/Connector_REST_API_template.json)
* [Connector_Syslog_template.json](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Templates/Connector_Syslog_template.json)
* [DataConnector_API_AzureFunctionApp_template.json](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Templates/Connector_REST_API_AzureFunctionApp_template/DataConnector_API_AzureFunctionApp_template.json)
3. **Validate the Connector UX** – Follow these steps to render and validate the connector UX you just built
1. The test utility can be accessed by this URL - https://portal.azure.com/?feature.BringYourOwnConnector=true
2. Go to Azure Sentinel -> Data Connectors

View file

@ -161,7 +161,7 @@
"SalesforceSecurityToken": "[parameters('SalesforceSecurityToken')]",
"SalesforceConsumerKey": "[parameters('SalesforceConsumerKey')]",
"SalesforceConsumerSecret": "[parameters('SalesforceConsumerSecret')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/SalesforceServiceCloudConnector/DataConnectors/SalesforceServiceCloud/SalesforceSentinelConn.zip?raw=true"
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/SalesforceServiceCloud/SalesforceSentinelConn.zip?raw=true"
}
}
]
@ -216,4 +216,4 @@
}
}
]
}
}

View file

@ -181,7 +181,7 @@
"workspaceKey": "[parameters('WorkspaceKey')]",
"api_key": "[parameters('APIKey')]",
"regioncode": "[parameters('RegionCode')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://trendmicroxdr.blob.core.windows.net/functionapp/AzureFunctionTrendMicroXDR.zip"
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Trend%20Micro/AzureFunctionTrendMicroXDR.zip?raw=true"
}
}
]
@ -237,4 +237,4 @@
}
]
}

View file

@ -202,7 +202,7 @@
"customLogName": "[parameters('customLogName')]",
"workspaceID": "[parameters('workspaceID')]",
"workspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(resourceId('Microsoft.KeyVault/vaults/secrets', parameters('FunctionName'), 'workspaceKey')).SecretUriWithVersion, ')')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/ZoomLogs/DataConnectors/Zoom/zoom_logs_template.zip?raw=true",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/Zoom/zoom_logs_template.zip?raw=true",
"ZoomVerification": "[parameters('ZoomVerification')]"
}
}
@ -258,4 +258,4 @@
}
}
]
}
}

View file

@ -0,0 +1,29 @@
id: 9649e203-3cb7-47ff-89a9-42f2a5eefe31
name: Alsid Active Directory attacks pathways
description: |
'Searches for triggered Indicators of Exposures related to Active Directory attacks pathways'
severity: Low
requiredDataConnectors:
- connectorId: AlsidForAD
dataTypes:
- AlsidForADLog_CL
queryFrequency: 2h
queryPeriod: 2h
triggerOperator: gt
triggerThreshold: 0
tactics:
- CredentialAccess
relevantTechniques:
- T1110
query: |
let SeverityTable=datatable(Severity:string,Level:int) [
"low", 1,
"medium", 2,
"high", 3,
"critical", 4
];
let codeNameList = datatable(Codename:string)["C-PRIV-ACCOUNTS-SPN", "C-SDPROP-CONSISTENCY", "C-DANG-PRIMGROUPID", "C-GPO-HARDENING", "C-DC-ACCESS-CONSISTENCY", "C-DANGEROUS-TRUST-RELATIONSHIP", "C-UNCONST-DELEG", "C-ABNORMAL-ENTRIES-IN-SCHEMA"];
afad_parser
| where MessageType == 0 and Codename in~ (codeNameList)
| lookup kind=leftouter SeverityTable on Severity
| order by Level

View file

@ -0,0 +1,28 @@
id: 154fde9f-ae00-4422-a8da-ef00b11da3fc
name: Alsid Indicators of Exposures
description: |
'Searches for triggered Indicators of Exposures'
severity: Low
requiredDataConnectors:
- connectorId: AlsidForAD
dataTypes:
- AlsidForADLog_CL
queryFrequency: 2h
queryPeriod: 2h
triggerOperator: gt
triggerThreshold: 0
tactics:
- CredentialAccess
relevantTechniques:
- T1110
query: |
let SeverityTable=datatable(Severity:string,Level:int) [
"low", 1,
"medium", 2,
"high", 3,
"critical", 4
];
afad_parser
| where MessageType == 0
| lookup kind=leftouter SeverityTable on Severity
| order by Level

View file

@ -0,0 +1,29 @@
id: 472b7cf4-bf1a-4061-b9ab-9fe4894e3c17
name: Alsid Password issues
description: |
'Searches for triggered Indicators of Exposures related to password issues'
severity: Low
requiredDataConnectors:
- connectorId: AlsidForAD
dataTypes:
- AlsidForADLog_CL
queryFrequency: 2h
queryPeriod: 2h
triggerOperator: gt
triggerThreshold: 0
tactics:
- CredentialAccess
relevantTechniques:
- T1110
query: |
let SeverityTable=datatable(Severity:string,Level:int) [
"low", 1,
"medium", 2,
"high", 3,
"critical", 4
];
let codeNameList = datatable(Codename:string)["C-CLEARTEXT-PASSWORD", "C-PASSWORD-DONT-EXPIRE", "C-USER-REVER-PWDS", "C-PASSWORD-POLICY", "C-USER-PASSWORD", "C-KRBTGT-PASSWORD", "C-AAD-SSO-PASSWORD", "C-REVER-PWD-GPO"];
afad_parser
| where MessageType == 0 and Codename in~ (codeNameList)
| lookup kind=leftouter SeverityTable on Severity
| order by Level

View file

@ -0,0 +1,29 @@
id: a5fe9489-cf8b-47ae-a87e-8f3a13e4203e
name: Alsid privileged accounts issues
description: |
'Searches for triggered Indicators of Exposures related to privileged accounts issues'
severity: Low
requiredDataConnectors:
- connectorId: AlsidForAD
dataTypes:
- AlsidForADLog_CL
queryFrequency: 2h
queryPeriod: 2h
triggerOperator: gt
triggerThreshold: 0
tactics:
- CredentialAccess
relevantTechniques:
- T1110
query: |
let SeverityTable=datatable(Severity:string,Level:int) [
"low", 1,
"medium", 2,
"high", 3,
"critical", 4
];
let codeNameList = datatable(Codename:string)["C-PRIV-ACCOUNTS-SPN", "C-NATIVE-ADM-GROUP-MEMBERS", "C-KRBTGT-PASSWORD", "C-PROTECTED-USERS-GROUP-UNUSED", "C-ADMINCOUNT-ACCOUNT-PROPS", "C-ADM-ACC-USAGE", "C-LAPS-UNSECURE-CONFIG", "C-DISABLED-ACCOUNTS-PRIV-GROUPS"];
afad_parser
| where MessageType == 0 and Codename in~ (codeNameList)
| lookup kind=leftouter SeverityTable on Severity
| order by Level

View file

@ -0,0 +1,29 @@
id: fb9e0b51-8867-48d7-86f4-6e76f2176bf8
name: Alsid user accounts issues
description: |
'Searches for triggered Indicators of Exposures related to user accounts issues'
severity: Low
requiredDataConnectors:
- connectorId: AlsidForAD
dataTypes:
- AlsidForADLog_CL
queryFrequency: 2h
queryPeriod: 2h
triggerOperator: gt
triggerThreshold: 0
tactics:
- CredentialAccess
relevantTechniques:
- T1110
query: |
let SeverityTable=datatable(Severity:string,Level:int) [
"low", 1,
"medium", 2,
"high", 3,
"critical", 4
];
let codeNameList = datatable(Codename:string)["C-ACCOUNTS-DANG-SID-HISTORY", "C-PRE-WIN2000-ACCESS-MEMBERS", "C-PASSWORD-DONT-EXPIRE", "C-SLEEPING-ACCOUNTS", "C-DANG-PRIMGROUPID", "C-PASSWORD-NOT-REQUIRED", "C-USER-PASSWORD"];
afad_parser
| where MessageType == 0 and Codename in~ (codeNameList)
| lookup kind=leftouter SeverityTable on Severity
| order by Level

View file

@ -0,0 +1,42 @@
id: 31337365-4b1d-adf5-00da-0000000FF1CE
name: Modified domain federation trust settings
description: |
'This will alert when a user or application modifies the federation settings on the domain. For example, this alert will trigger when a new Active Directory Federated Service (ADFS) TrustedRealm object, such as a signing certificate, is added to the domain.
Modification to domain federation settings should be rare. Confirm the added or modified target domain/URL is legitimate administrator behavior.
To understand why an authorized user may update settings for a federated domain in Office 365, Azure, or Intune, see: https://docs.microsoft.com/office365/troubleshoot/active-directory/update-federated-domain-office-365.
For details on security realms that accept security tokens, see the ADFS Proxy Protocol (MS-ADFSPP) specification: https://docs.microsoft.com/openspecs/windows_protocols/ms-adfspp/e7b9ea73-1980-4318-96a6-da559486664b.
For further information on AuditLogs please see https://docs.microsoft.com/azure/active-directory/reports-monitoring/reference-audit-activities.'
severity: High
requiredDataConnectors:
- connectorId: AzureActiveDirectory
dataTypes:
- AuditLogs
queryFrequency: 1h
queryPeriod: 1h
triggerOperator: gt
triggerThreshold: 0
tactics:
- CredentialAccess
relevantTechniques:
- T1134
query: |
let auditLookback = 1h;
AuditLogs
| where TimeGenerated > ago(auditLookback)
| where OperationName =~ "Set federation settings on domain"
//| where Result =~ "success" // commenting out, as it may be interesting to capture failed attempts
| extend targetDisplayName = tostring(TargetResources[0].displayName)
| extend UserAgent = iff(AdditionalDetails[0].key == "User-Agent",tostring(AdditionalDetails[0].value),"")
| extend InitiatingUserOrApp = iff(isnotempty(InitiatedBy.user.userPrincipalName),tostring(InitiatedBy.user.userPrincipalName), tostring(InitiatedBy.app.displayName))
| extend InitiatingIpAddress = iff(isnotempty(InitiatedBy.user.ipAddress), tostring(InitiatedBy.user.ipAddress), tostring(InitiatedBy.app.ipAddress))
| project-reorder TimeGenerated, OperationName, InitiatingUserOrApp, AADOperationType, targetDisplayName, Result, InitiatingIpAddress, UserAgent, CorrelationId, TenantId, AADTenantId
| extend timestamp = TimeGenerated, AccountCustomEntity = InitiatingUserOrApp, IPCustomEntity = InitiatingIpAddress
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

View file

@ -0,0 +1,56 @@
id: 2cfc3c6e-f424-4b88-9cc9-c89f482d016a
name: First access credential added to Application or Service Principal where no credential was present
description: |
'This will alert when an admin or app owner account adds a new credential to an Application or Service Principal where there was no previous verify KeyCredential associated.
If a threat actor obtains access to an account with sufficient privileges and adds the alternate authentication material triggering this event, the threat actor can now authenticate as the Application or Service Principal using this credential.
Additional information on OAuth Credential Grants can be found in RFC 6749 Section 4.4 or https://docs.microsoft.com/azure/active-directory/develop/v2-oauth2-client-creds-grant-flow
For further information on AuditLogs please see https://docs.microsoft.com/azure/active-directory/reports-monitoring/reference-audit-activities.'
severity: High
requiredDataConnectors:
- connectorId: AzureActiveDirectory
dataTypes:
- AuditLogs
queryFrequency: 1h
queryPeriod: 1h
triggerOperator: gt
triggerThreshold: 0
tactics:
- CredentialAccess
relevantTechniques:
- T1550.001
query: |
let auditLookback = 1h;
AuditLogs
| where TimeGenerated > ago(auditLookback)
| where OperationName has_any ("Add service principal", "Certificates and secrets management") // captures "Add service principal", "Add service principal credentials", and "Update application – Certificates and secrets management" events
| where Result =~ "success"
| mv-expand target = TargetResources
| where tostring(InitiatedBy.user.userPrincipalName) has "@" or tostring(InitiatedBy.app.displayName) has "@"
| extend targetDisplayName = tostring(TargetResources[0].displayName)
| extend targetId = tostring(TargetResources[0].id)
| extend targetType = tostring(TargetResources[0].type)
| extend keyEvents = TargetResources[0].modifiedProperties
| mv-expand keyEvents
| where keyEvents.displayName =~ "KeyDescription"
| extend new_value_set = parse_json(tostring(keyEvents.newValue))
| extend old_value_set = parse_json(tostring(keyEvents.oldValue))
| where old_value_set == "[]"
| parse new_value_set with * "KeyIdentifier=" keyIdentifier:string ",KeyType=" keyType:string ",KeyUsage=" keyUsage:string ",DisplayName=" keyDisplayName:string "]" *
| where keyUsage == "Verify" or keyUsage == ""
| extend UserAgent = iff(AdditionalDetails[0].key == "User-Agent",tostring(AdditionalDetails[0].value),"")
| extend InitiatingUserOrApp = iff(isnotempty(InitiatedBy.user.userPrincipalName),tostring(InitiatedBy.user.userPrincipalName), tostring(InitiatedBy.app.displayName))
| extend InitiatingIpAddress = iff(isnotempty(InitiatedBy.user.ipAddress), tostring(InitiatedBy.user.ipAddress), tostring(InitiatedBy.app.ipAddress))
// The below line is currently commented out but Azure Sentinel users can modify this query to show only Application or only Service Principal events in their environment
//| where targetType =~ "Application" // or targetType =~ "ServicePrincipal"
| project-away new_value_set, old_value_set
| project-reorder TimeGenerated, OperationName, InitiatingUserOrApp, InitiatingIpAddress, UserAgent, targetDisplayName, targetId, targetType, keyDisplayName, keyType, keyUsage, keyIdentifier, CorrelationId, TenantId
| extend timestamp = TimeGenerated, AccountCustomEntity = InitiatingUserOrApp, IPCustomEntity = InitiatingIpAddress
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

View file

@ -0,0 +1,53 @@
id: 2560515c-07d1-434e-87fb-ebe3af267760
name: Mail.Read Permissions Granted to Application
description: |
'This query looks for applications that have been granted permissions to Read Mail (the Permissions field has Mail.Read) and that have subsequently been consented to. This can help identify applications that have been abused to gain access to mailboxes.'
severity: Medium
requiredDataConnectors:
- connectorId: AzureActiveDirectory
dataTypes:
- AuditLogs
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Persistence
relevantTechniques:
- T1098
query: |
AuditLogs
| where Category =~ "ApplicationManagement"
| where ActivityDisplayName =~ "Add delegated permission grant"
| where Result =~ "success"
| where tostring(InitiatedBy.user.userPrincipalName) has "@" or tostring(InitiatedBy.app.displayName) has "@"
| extend props = parse_json(tostring(TargetResources[0].modifiedProperties))
| mv-expand props
| extend UserAgent = tostring(AdditionalDetails[0].value)
| extend InitiatingUser = tostring(parse_json(tostring(InitiatedBy.user)).userPrincipalName)
| extend UserIPAddress = tostring(parse_json(tostring(InitiatedBy.user)).ipAddress)
| extend DisplayName = tostring(props.displayName)
| extend Permissions = tostring(parse_json(tostring(props.newValue)))
| where Permissions has_any ("Mail.Read", "Mail.ReadWrite")
| extend PermissionsAddedTo = tostring(TargetResources[0].displayName)
| extend Type = tostring(TargetResources[0].type)
| project-away props
| join kind=leftouter(
AuditLogs
| where ActivityDisplayName has "Consent to application"
| extend AppName = tostring(TargetResources[0].displayName)
| extend AppId = tostring(TargetResources[0].id)
| project AppName, AppId, CorrelationId) on CorrelationId
| project-reorder TimeGenerated, OperationName, InitiatingUser, UserIPAddress, UserAgent, PermissionsAddedTo, Permissions, AppName, AppId, CorrelationId
| extend timestamp = TimeGenerated, AccountCustomEntity = InitiatingUser, IPCustomEntity = UserIPAddress
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

View file

@ -1,7 +1,7 @@
id: 79566f41-df67-4e10-a703-c38a6213afd8
name: New access credential added to Application or Service Principal
description: |
'This will alert when an admin or app owner account adds a new credential to an Application or Service Principal.
'This will alert when an admin or app owner account adds a new credential to an Application or Service Principal where a verify KeyCredential was already present for the app.
If a threat actor obtains access to an account with sufficient privileges and adds the alternate authentication material triggering this event, the threat actor can now authenticate as the Application or Service Principal using this credential.
Additional information on OAuth Credential Grants can be found in RFC 6749 Section 4.4 or https://docs.microsoft.com/azure/active-directory/develop/v2-oauth2-client-creds-grant-flow
For further information on AuditLogs please see https://docs.microsoft.com/azure/active-directory/reports-monitoring/reference-audit-activities.'
@ -24,25 +24,27 @@ query: |
| where TimeGenerated > ago(auditLookback)
| where OperationName has_any ("Add service principal", "Certificates and secrets management") // captures "Add service principal", "Add service principal credentials", and "Update application – Certificates and secrets management" events
| where Result =~ "success"
| mv-expand target = TargetResources
| where tostring(InitiatedBy.user.userPrincipalName) has "@" or tostring(InitiatedBy.app.displayName) has "@"
| extend targetDisplayName = tostring(TargetResources[0].displayName)
| extend targetId = tostring(TargetResources[0].id)
| extend targetType = tostring(TargetResources[0].type)
| extend keyEvents = TargetResources[0].modifiedProperties
| where keyEvents has "KeyIdentifier=" and keyEvents has "KeyUsage=Verify"
| mv-expand keyEvents
| where keyEvents.displayName =~ "KeyDescription"
| parse keyEvents.newValue with * "KeyIdentifier=" keyIdentifier:string ",KeyType=" keyType:string ",KeyUsage=" keyUsage:string ",DisplayName=" keyDisplayName:string "]" *
| parse keyEvents.oldValue with * "KeyIdentifier=" keyIdentifierOld:string ",KeyType" *
| where keyEvents.oldValue == "[]" or keyIdentifier != keyIdentifierOld
| where keyUsage == "Verify"
| extend new_value_set = parse_json(tostring(keyEvents.newValue))
| extend old_value_set = parse_json(tostring(keyEvents.oldValue))
| where old_value_set != "[]"
| extend diff = set_difference(new_value_set, old_value_set)
| where isnotempty(diff)
| parse diff with * "KeyIdentifier=" keyIdentifier:string ",KeyType=" keyType:string ",KeyUsage=" keyUsage:string ",DisplayName=" keyDisplayName:string "]" *
| where keyUsage == "Verify" or keyUsage == ""
| extend UserAgent = iff(AdditionalDetails[0].key == "User-Agent",tostring(AdditionalDetails[0].value),"")
| extend InitiatingUserOrApp = iff(isnotempty(InitiatedBy.user.userPrincipalName),tostring(InitiatedBy.user.userPrincipalName), tostring(InitiatedBy.app.displayName))
| extend InitiatingIpAddress = iff(isnotempty(InitiatedBy.user.ipAddress), tostring(InitiatedBy.user.ipAddress), tostring(InitiatedBy.app.ipAddress))
//
// The below line is currently commented out but Azure Sentinel users can modify this query to show only Application or only Service Principal events in their environment
//| where targetType =~ "Application" // or targetType =~ "ServicePrincipal"
| project-away keyEvents
| project-away diff, new_value_set, old_value_set
| project-reorder TimeGenerated, OperationName, InitiatingUserOrApp, InitiatingIpAddress, UserAgent, targetDisplayName, targetId, targetType, keyDisplayName, keyType, keyUsage, keyIdentifier, CorrelationId, TenantId
| extend timestamp = TimeGenerated, AccountCustomEntity = InitiatingUserOrApp, IPCustomEntity = InitiatingIpAddress
entityMappings:

View file

@ -32,7 +32,7 @@ query: |
let AuditTrail = AuditLogs | where TimeGenerated >= ago(auditLookback) and TimeGenerated < ago(current)
// 2 other operations that can be part of malicious activity in this situation are
// "Add OAuth2PermissionGrant" and "Add service principal", extend the filter below to capture these too
| where OperationName == "Consent to application"
| where OperationName has "Consent to application"
| extend InitiatedBy = iff(isnotempty(tostring(parse_json(tostring(InitiatedBy.user)).userPrincipalName)),
tostring(parse_json(tostring(InitiatedBy.user)).userPrincipalName), tostring(parse_json(tostring(InitiatedBy.app)).displayName))
| extend TargetResourceName = tolower(tostring(TargetResources.[0].displayName))
@ -42,7 +42,7 @@ query: |
;
// Gather current period of audit data
let RecentConsent = AuditLogs | where TimeGenerated >= ago(current)
| where OperationName == "Consent to application"
| where OperationName has "Consent to application"
| extend IpAddress = iff(isnotempty(tostring(parse_json(tostring(InitiatedBy.user)).ipAddress)),
tostring(parse_json(tostring(InitiatedBy.user)).ipAddress), tostring(parse_json(tostring(InitiatedBy.app)).ipAddress))
| extend InitiatedBy = iff(isnotempty(tostring(parse_json(tostring(InitiatedBy.user)).userPrincipalName)),

View file

@ -0,0 +1,36 @@
id: 6d7214d9-4a28-44df-aafb-0910b9e6ae3e
name: New CloudShell User
description: |
'Identifies when a user creates an Azure CloudShell for the first time.
Monitor this activity to ensure only expected users are using CloudShell'
severity: Low
requiredDataConnectors:
- connectorId: AzureActivity
dataTypes:
- AzureActivity
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Execution
relevantTechniques:
- T1059
query: |
AzureActivity
| extend message = tostring(parse_json(Properties).message)
| extend AppId = tostring(parse_json(Claims).appid)
| where AppId contains "c44b4083-3bb0-49c1-b47d-974e53cbdf3c"
| where OperationName =~ "Microsoft.Portal/consoles/write"
| extend timestamp = TimeGenerated, AccountCustomEntity = Caller, IPCustomEntity = CallerIpAddress
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

View file

@ -1,8 +1,8 @@
id: 0914adab-90b5-47a3-a79f-7cdcac843aa7
name: Azure Key Vault access TimeSeries anomaly
description: |
'Identifies a sudden increase in count of Azure Key Vault secret or vault access operations by CallerIPAddress. The query leverages a built-in KQL anomaly detection algorithm
to find large deviations from baseline Azure Key Vault access patterns. Any sudden increase in the count of Azure Key Vault accesses can be an
indication of adversary dumping credentials via automated methods. If you are seeing any noise, try filtering known source(IP/Account) and user-agent combinations.
TimeSeries Reference Blog: https://techcommunity.microsoft.com/t5/azure-sentinel/looking-for-unknown-anomalies-what-is-normal-time-series/ba-p/555052'
severity: Low
@ -32,8 +32,8 @@ query: |
| where ResultType !~ "None" and isnotempty(ResultType)
| where CallerIPAddress !~ "None" and isnotempty(CallerIPAddress)
| where ResourceType =~ "VAULTS" and ResultType =~ "Success"
| where OperationName in (OperationList)
| project TimeGenerated, OperationName, Resource, CallerIPAddress
| make-series HourlyCount=count() on TimeGenerated from startofday(ago(starttime)) to startofday(now()) step timeframe by Resource;
//Filter anomalies against TimeSeriesData
let TimeSeriesAlerts = TimeSeriesData
@ -42,7 +42,7 @@ query: |
| where anomalies > 0 | extend AnomalyHour = TimeGenerated
| where baseline > baselinethreshold // Filtering low count events per baselinethreshold
| project Resource, AnomalyHour, TimeGenerated, HourlyCount, baseline, anomalies, score;
let AnomalyHours = TimeSeriesAlerts | where TimeGenerated > ago(2d) | project TimeGenerated;
// Filter the alerts since specified timeframe
TimeSeriesAlerts
| where TimeGenerated > ago(2d)
@ -51,7 +51,7 @@ query: |
AzureDiagnostics
| where TimeGenerated > ago(timeframe)
| extend DateHour = bin(TimeGenerated, 1h) // create a new column and round to hour
| where DateHour in ((AnomalyHours)) //filter the dataset to only selected anomaly hours
| extend ResultType = columnifexists("ResultType", "NoResultType")
| extend requestUri_s = columnifexists("requestUri_s", "None"), identity_claim_http_schemas_microsoft_com_identity_claims_objectidentifier_g = columnifexists("identity_claim_http_schemas_microsoft_com_identity_claims_objectidentifier_g", "None")
| extend id_s = columnifexists("id_s", "None"), CallerIPAddress = columnifexists("CallerIPAddress", "None"), clientInfo_s = columnifexists("clientInfo_s", "None")
@ -62,9 +62,9 @@ query: |
| where clientInfo_s !~ "None" and isnotempty(clientInfo_s)
| where requestUri_s !~ "None" and isnotempty(requestUri_s)
| where ResourceType =~ "VAULTS" and ResultType =~ "Success"
| where OperationName in (OperationList)
| summarize PerOperationCount=count(), LatestAnomalyTime = arg_max(TimeGenerated,*) by bin(TimeGenerated,1h), Resource, OperationName, id_s, CallerIPAddress, identity_claim_http_schemas_microsoft_com_identity_claims_objectidentifier_g, requestUri_s, clientInfo_s
) on Resource, TimeGenerated
| summarize EventCount=count(), OperationNameList = make_set(OperationName), RequestURLList = make_set(requestUri_s, 100), AccountList = make_set(identity_claim_http_schemas_microsoft_com_identity_claims_objectidentifier_g, 100), AccountMax = arg_max(identity_claim_http_schemas_microsoft_com_identity_claims_objectidentifier_g,*) by Resource, id_s, clientInfo_s, LatestAnomalyTime
| extend timestamp = LatestAnomalyTime, IPCustomEntity = CallerIPAddress, AccountCustomEntity = AccountMax
entityMappings:

View file

@ -1,11 +1,11 @@
id: 06a9b845-6a95-4432-a78b-83919b28c375
name: Time series anomaly detection for total volume of traffic
description: |
'Identifies anomalous spikes in network traffic logs as compared to baseline or normal historical patterns.
The query leverages a KQL built-in anomaly detection algorithm to find large deviations from baseline patterns.
Sudden increases in network traffic volume may be an indication of data exfiltration attempts and should be investigated.
The higher the score, the further it is from the baseline value.
The output is aggregated to provide a summary view of unique source IP to destination IP address and port traffic observed in the flagged anomaly hour.
The source IP addresses which were sending less than percentotalthreshold of the total traffic have been excluded; this value can be adjusted as needed.
You may have to run queries for individual source IP addresses from SourceIPlist to determine if anything looks suspicious'
severity: Medium
@ -46,7 +46,7 @@ query: |
let timeframe = 1h;
let scorethreshold = 5;
let percentotalthreshold = 50;
let TimeSeriesData = CommonSecurityLog
| where isnotempty(DestinationIP) and isnotempty(SourceIP)
| where TimeGenerated between (startofday(ago(starttime))..startofday(ago(endtime)))
| project TimeGenerated,SourceIP, DestinationIP, DeviceVendor
@ -57,19 +57,19 @@ query: |
| mv-expand Total to typeof(double), TimeGenerated to typeof(datetime), anomalies to typeof(double),score to typeof(double), baseline to typeof(long)
| where anomalies > 0 | extend score = round(score,2), AnomalyHour = TimeGenerated
| project DeviceVendor,AnomalyHour, TimeGenerated, Total, baseline, anomalies, score;
let AnomalyHours = TimeSeriesAlerts | where TimeGenerated > ago(2d) | project TimeGenerated;
// Join anomalies with Base Data to populate associated records for investigation - Results sorted by score in descending order
TimeSeriesAlerts
| where TimeGenerated > ago(2d)
| join (
CommonSecurityLog
| where isnotempty(DestinationIP) and isnotempty(SourceIP)
| where TimeGenerated between (startofday(ago(starttime))..startofday(ago(endtime)))
| extend DateHour = bin(TimeGenerated, 1h) // create a new column and round to hour
| where DateHour in ((AnomalyHours)) //filter the dataset to only selected anomaly hours
| summarize HourlyCount = count(), TimeGeneratedMax = arg_max(TimeGenerated, *), DestinationIPlist = make_set(DestinationIP, 100), DestinationPortlist = make_set(DestinationPort, 100) by DeviceVendor, SourceIP, TimeGeneratedHour= bin(TimeGenerated, 1h)
| extend AnomalyHour = TimeGeneratedHour
) on AnomalyHour, DeviceVendor
| extend PercentTotal = round((HourlyCount / Total) * 100, 3)
| where PercentTotal > percentotalthreshold
| project DeviceVendor , AnomalyHour, TimeGeneratedMax, SourceIP, DestinationIPlist, DestinationPortlist, HourlyCount, PercentTotal, Total, baseline, score, anomalies

View file

@ -0,0 +1,47 @@
id: 738702fd-0a66-42c7-8586-e30f0583f8fe
name: TEARDROP memory-only dropper
description: |
Identifies SolarWinds TEARDROP memory-only dropper IOCs in Windows Defender Exploit Guard activity
References:
- https://www.fireeye.com/blog/threat-research/2020/12/evasive-attacker-leverages-solarwinds-supply-chain-compromises-with-sunburst-backdoor.html
- https://gist.github.com/olafhartong/71ffdd4cab4b6acd5cbcd1a0691ff82f
severity: High
requiredDataConnectors:
- connectorId: MicrosoftThreatProtection
dataTypes:
- DeviceEvents
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Execution
- Persistence
relevantTechniques:
- T1012
- T1027
- T1543.003
- T1195.002
query: |
DeviceEvents
| where ActionType has "ExploitGuardNonMicrosoftSignedBlocked"
| where InitiatingProcessFileName contains "svchost.exe" and FileName contains "NetSetupSvc.dll"
| extend
timestamp = TimeGenerated,
AccountCustomEntity = iff(isnotempty(InitiatingProcessAccountUpn), InitiatingProcessAccountUpn, InitiatingProcessAccountName),
HostCustomEntity = DeviceName,
FileHashCustomEntity = MD5
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: FileHash
fieldMappings:
- identifier: FileHash
columnName: FileHashCustomEntity

View file

@ -0,0 +1,45 @@
id: a3c144f9-8051-47d4-ac29-ffb0c312c910
name: SUNBURST and SUPERNOVA backdoor hashes
description: |
Identifies SolarWinds SUNBURST and SUPERNOVA backdoor file hash IOCs in DeviceFileEvents
References:
- https://www.fireeye.com/blog/threat-research/2020/12/evasive-attacker-leverages-solarwinds-supply-chain-compromises-with-sunburst-backdoor.html
- https://gist.github.com/olafhartong/71ffdd4cab4b6acd5cbcd1a0691ff82f
severity: High
requiredDataConnectors:
- connectorId: MicrosoftThreatProtection
dataTypes:
- DeviceFileEvents
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Execution
- Persistence
relevantTechniques:
- T1195.002
query: |
let SunburstMD5=dynamic(["b91ce2fa41029f6955bff20079468448","02af7cec58b9a5da1c542b5a32151ba1","2c4a910a1299cdae2a4e55988a2f102e","846e27a652a5e1bfbd0ddd38a16dc865","4f2eb62fa529c0283b28d05ddd311fae"]);
let SupernovaMD5="56ceb6d0011d87b6e4d7023d7ef85676";
DeviceFileEvents
| where MD5 in(SunburstMD5) or MD5 in(SupernovaMD5)
| extend
timestamp = TimeGenerated,
AccountCustomEntity = iff(isnotempty(InitiatingProcessAccountUpn), InitiatingProcessAccountUpn, InitiatingProcessAccountName),
HostCustomEntity = DeviceName,
FileHashCustomEntity = MD5
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: FileHash
fieldMappings:
- identifier: FileHash
columnName: FileHashCustomEntity

View file

@ -0,0 +1,55 @@
id: ce1e7025-866c-41f3-9b08-ec170e05e73e
name: SUNBURST network beacons
description: |
Identifies SolarWinds SUNBURST domain beacon IOCs in DeviceNetworkEvents
References:
- https://www.fireeye.com/blog/threat-research/2020/12/evasive-attacker-leverages-solarwinds-supply-chain-compromises-with-sunburst-backdoor.html
- https://gist.github.com/olafhartong/71ffdd4cab4b6acd5cbcd1a0691ff82f
severity: Medium
requiredDataConnectors:
- connectorId: MicrosoftThreatProtection
dataTypes:
- DeviceNetworkEvents
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Execution
- Persistence
relevantTechniques:
- T1195.002
query: |
let SunburstURL=dynamic(["panhardware.com","databasegalore.com","avsvmcloud.com","freescanonline.com","thedoccloud.com","deftsecurity.com"]);
DeviceNetworkEvents
| where ActionType == "ConnectionSuccess"
| where RemoteUrl in(SunburstURL)
| extend
timestamp = TimeGenerated,
AccountCustomEntity = iff(isnotempty(InitiatingProcessAccountUpn), InitiatingProcessAccountUpn, InitiatingProcessAccountName),
HostCustomEntity = DeviceName,
FileHashCustomEntity = InitiatingProcessMD5,
URLCustomEntity = RemoteUrl,
IPCustomEntity = RemoteIP
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity
- entityType: URL
fieldMappings:
- identifier: Url
columnName: URLCustomEntity
- entityType: FileHash
fieldMappings:
- identifier: FileHash
columnName: FileHashCustomEntity

View file

@ -0,0 +1,45 @@
id: 4a3073ac-7383-48a9-90a8-eb6716183a54
name: SUNBURST suspicious SolarWinds child processes
description: |
Identifies suspicious child processes of SolarWinds.Orion.Core.BusinessLayer.dll that may be evidence of the SUNBURST backdoor
References:
- https://www.fireeye.com/blog/threat-research/2020/12/evasive-attacker-leverages-solarwinds-supply-chain-compromises-with-sunburst-backdoor.html
- https://gist.github.com/olafhartong/71ffdd4cab4b6acd5cbcd1a0691ff82f
severity: Medium
requiredDataConnectors:
- connectorId: MicrosoftThreatProtection
dataTypes:
- DeviceProcessEvents
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Execution
- Persistence
relevantTechniques:
- T1195.002
query: |
let excludeProcs = dynamic([@"\SolarWinds\Orion\APM\APMServiceControl.exe", @"\SolarWinds\Orion\ExportToPDFCmd.Exe", @"\SolarWinds.Credentials\SolarWinds.Credentials.Orion.WebApi.exe", @"\SolarWinds\Orion\Topology\SolarWinds.Orion.Topology.Calculator.exe", @"\SolarWinds\Orion\Database-Maint.exe", @"\SolarWinds.Orion.ApiPoller.Service\SolarWinds.Orion.ApiPoller.Service.exe", @"\Windows\SysWOW64\WerFault.exe"]);
DeviceProcessEvents
| where InitiatingProcessFileName =~ "solarwinds.businesslayerhost.exe"
| where not(FolderPath has_any (excludeProcs))
| extend
timestamp = TimeGenerated,
AccountCustomEntity = iff(isnotempty(InitiatingProcessAccountUpn), InitiatingProcessAccountUpn, InitiatingProcessAccountName),
HostCustomEntity = DeviceName,
FileHashCustomEntity = MD5
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: FileHash
fieldMappings:
- identifier: FileHash
columnName: FileHashCustomEntity

View file

@ -0,0 +1,45 @@
id: 18e6a87e-9d06-4a4e-8b59-3469cd49552d
name: ADFS DKM Master Key Export
description: |
'Identifies an export of the ADFS DKM Master Key from Active Directory.
References: https://blogs.microsoft.com/on-the-issues/2020/12/13/customers-protect-nation-state-cyberattacks/,
https://www.fireeye.com/blog/threat-research/2020/12/evasive-attacker-leverages-solarwinds-supply-chain-compromises-with-sunburst-backdoor.html?1'
severity: Medium
requiredDataConnectors:
- connectorId: SecurityEvents
dataTypes:
- SecurityEvents
- connectorId: MicrosoftThreatProtection
dataTypes:
- DeviceEvents
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Collection
relevantTechniques:
- T1005
query: |
(union isfuzzy=true (SecurityEvent
| where EventID == 4662
| where ObjectServer == 'DS'
| where OperationType == 'Object Access'
//| where ObjectName contains '<GUID of ADFS DKM Container>' This is unique to the domain.
| where ObjectType contains '5cb41ed0-0e4c-11d0-a286-00aa003049e2' // Contact Class
| where Properties contains '8d3bca50-1d7e-11d0-a081-00aa006c33ed' // Picture Attribute - Ldap-Display-Name: thumbnailPhoto
| extend timestamp = TimeGenerated, HostCustomEntity = Computer, AccountCustomEntity = SubjectAccount),
(DeviceEvents
| where ActionType =~ "LdapSearch"
| where AdditionalFields.AttributeList contains "thumbnailPhoto"
| extend timestamp = TimeGenerated, HostCustomEntity = DeviceName, AccountCustomEntity = InitiatingProcessAccountName)
)
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity

View file

@ -95,7 +95,7 @@ query: |
| project UserPrincipalName, SuspiciousIP, UserIPDelta = delta, SuspiciousLoginCountry = country, SuspiciousCountryPrevalence = prevalence, EventTimes
//Teams join to collect operations the user account has performed within the given time range
| join kind=inner(
TeamsData
OfficeActivity
| where TimeGenerated >= ago(timeRange)
| where Operation in~ ("TeamsAdminAction", "MemberAdded", "MemberRemoved", "MemberRoleChanged", "AppInstalled", "BotAddedToTeam")
| project Operation, UserId=tolower(UserId), OperationTime=TimeGenerated

View file

@ -17,7 +17,7 @@ requiredDataConnectors:
- connectorId: PaloAltoNetworks
dataTypes:
- CommonSecurityLog
- connectorId: Microsoft 365 Defender
- connectorId: MicrosoftThreatProtection
dataTypes:
- DeviceNetworkEvents
queryFrequency: 1d

View file

@ -38,7 +38,7 @@ requiredDataConnectors:
- connectorId: AWS
dataTypes:
- AWSCloudTrail
- connectorId: Microsoft 365 Defender
- connectorId: MicrosoftThreatProtection
dataTypes:
- DeviceNetworkEvents
queryFrequency: 1d

View file

@ -1,8 +1,8 @@
id: b725d62c-eb77-42ff-96f6-bdc6745fc6e0
name: New UserAgent observed in last 24 hours
description: |
'Identifies new UserAgents observed in the last 24 hours versus the previous 14 days. This detection
extracts words from user agents to build the baseline and determine rareity rather than perform a
'Identifies new UserAgents observed in the last 24 hours versus the previous 14 days. This detection
extracts words from user agents to build the baseline and determine rarity rather than perform a
direct comparison. This avoids FPs caused by version numbers and other high entropy user agent components.
These new UserAgents could be benign. However, in normally stable environments,
these new UserAgents could provide a starting point for investigating malicious activity.
@ -35,7 +35,7 @@ query: |
let starttime = 14d;
let endtime = 1d;
let UserAgentAll =
let UserAgentAll =
(union isfuzzy=true
(OfficeActivity
| where TimeGenerated >= ago(starttime)
@ -55,12 +55,12 @@ query: |
| summarize StartTime = min(TimeGenerated), EndTime = max(TimeGenerated) by UserAgent, SourceIP = SourceIpAddress, Account = UserIdentityUserName, Type, EventSource, EventName
))
// remove wordSize blocks of non-numeric hex characters prior to word extraction
| extend UserAgentNoHexAlphas = replace("([A-Fa-f]{4,})", "x", UserAgent)
| extend UserAgentNoHexAlphas = replace("([A-Fa-f]{4,})", "x", UserAgent)
// once blocks of hex chars are removed, extract wordSize blocks of a-z
| extend Tokens = extract_all("([A-Za-z]{4,})", UserAgentNoHexAlphas)
| extend Tokens = extract_all("([A-Za-z]{4,})", UserAgentNoHexAlphas)
// concatenate extracted words to create a summarized user agent for baseline and comparison
| extend NormalizedUserAgent = strcat_array(Tokens, "|")
| project-away UserAgentNoHexAlphas, Tokens;
| extend NormalizedUserAgent = strcat_array(Tokens, "|")
| project-away UserAgentNoHexAlphas, Tokens;
UserAgentAll
| where StartTime >= ago(endtime)
| summarize StartTime = min(StartTime), EndTime = max(EndTime), count() by UserAgent, NormalizedUserAgent, SourceIP, Account, Type, RecordType, Operation, EventSource, EventName, sSiteName, csMethod, csUriStem

View file

@ -0,0 +1,83 @@
id: cecdbd4c-4902-403c-8d4b-32eb1efe460b
name: Solorigate Network Beacon
description: |
'Identifies a match across various data feeds for domain IOCs related to the Solorigate incident.
References: https://blogs.microsoft.com/on-the-issues/2020/12/13/customers-protect-nation-state-cyberattacks/,
https://www.fireeye.com/blog/threat-research/2020/12/evasive-attacker-leverages-solarwinds-supply-chain-compromises-with-sunburst-backdoor.html?1'
severity: High
requiredDataConnectors:
- connectorId: DNS
dataTypes:
- DnsEvents
- connectorId: AzureMonitor(VMInsights)
dataTypes:
- VMConnection
- connectorId: CiscoASA
dataTypes:
- CommonSecurityLog
- connectorId: PaloAltoNetworks
dataTypes:
- CommonSecurityLog
- connectorId: MicrosoftThreatProtection
dataTypes:
- DeviceNetworkEvents
queryFrequency: 6h
queryPeriod: 6h
triggerOperator: gt
triggerThreshold: 0
tactics:
- CommandAndControl
relevantTechniques:
- T1102
query: |
let domains = dynamic(["incomeupdate.com","zupertech.com","databasegalore.com","panhardware.com","avsvmcloud.com","digitalcollege.org","freescanonline.com","deftsecurity.com","thedoccloud.com","virtualdataserver.com","lcomputers.com","webcodez.com","globalnetworkissues.com","kubecloud.com","seobundlekit.com","solartrackingsystem.net","virtualwebdata.com"]);
let timeframe = 6h;
(union isfuzzy=true
(CommonSecurityLog
| where TimeGenerated >= ago(timeframe)
| parse Message with * '(' DNSName ')' *
| where DNSName in~ (domains) or DestinationHostName has_any (domains) or RequestURL has_any(domains)
| extend AccountCustomEntity = SourceUserID, HostCustomEntity = DeviceName, IPCustomEntity = SourceIP
),
(DnsEvents
| where TimeGenerated >= ago(timeframe)
| extend DNSName = Name
| where isnotempty(DNSName)
| where DNSName in~ (domains)
| extend IPCustomEntity = ClientIP
),
(VMConnection
| where TimeGenerated >= ago(timeframe)
| parse RemoteDnsCanonicalNames with * '["' DNSName '"]' *
| where isnotempty(DNSName)
| where DNSName in~ (domains)
| extend IPCustomEntity = RemoteIp
),
(DeviceNetworkEvents
| where TimeGenerated >= ago(timeframe)
| where isnotempty(RemoteUrl)
| where RemoteUrl has_any (domains)
| extend DNSName = RemoteUrl
| extend IPCustomEntity = RemoteIP
| extend HostCustomEntity = DeviceName
)
)
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity
- entityType: DNS
fieldMappings:
- identifier: DomainName
columnName: DNSName
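For reference, a minimal standalone sketch of what the parse pattern in the CommonSecurityLog branch above extracts; the Message text here is hypothetical and only illustrates the '(...)' capture:

// Sketch: the parse pattern captures whatever sits between the first '(' and the following ')'
print Message = "DNS request logged (avsvmcloud.com) from 10.1.2.3"  // hypothetical sample message
| parse Message with * '(' DNSName ')' *
| project Message, DNSName  // DNSName should come out as "avsvmcloud.com"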

View file

@ -1,10 +1,10 @@
id: f2dd4a3a-ebac-4994-9499-1a859938c947
name: Time series anomaly for data size transferred to public internet
description: |
'Identifies anomalous data transfer to public networks. The query leverages built-in KQL anomaly detection algorithms that detects large deviations from a baseline pattern.
A sudden increase in data transferred to unknown public networks is an indication of data exfiltration attempts and should be investigated.
'Identifies anomalous data transfer to public networks. The query leverages built-in KQL anomaly detection algorithms that detect large deviations from a baseline pattern.
A sudden increase in data transferred to unknown public networks is an indication of data exfiltration attempts and should be investigated.
The higher the score, the further it is from the baseline value.
The output is aggregated to provide summary view of unique source IP to destination IP address and port bytes sent traffic observed in the flagged anomaly hour.
The output is aggregated to provide summary view of unique source IP to destination IP address and port bytes sent traffic observed in the flagged anomaly hour.
The source IP addresses which were sending less than bytessentperhourthreshold have been excluded; this threshold value can be adjusted as needed.
You may have to run queries for individual source IP addresses from SourceIPlist to determine if anything looks suspicious'
severity: Medium
@ -45,7 +45,7 @@ query: |
| make-series TotalBytesSent=sum(BytesSent) on TimeGenerated from startofday(ago(starttime)) to startofday(ago(endtime)) step timeframe by DeviceVendor
),
(
CommonSecurityLog
CommonSecurityLog
| where TimeGenerated between (startofday(ago(starttime))..startofday(ago(endtime)))
| where isnotempty(DestinationIP) and isnotempty(SourceIP)
| extend DestinationIpType = iff(DestinationIP matches regex PrivateIPregex,"private" ,"public" )
@ -58,21 +58,21 @@ query: |
let TimeSeriesAlerts = TimeSeriesData
| extend (anomalies, score, baseline) = series_decompose_anomalies(TotalBytesSent, scorethreshold, -1, 'linefit')
| mv-expand TotalBytesSent to typeof(double), TimeGenerated to typeof(datetime), anomalies to typeof(double),score to typeof(double), baseline to typeof(long)
| where anomalies > 0 | extend AnomalyHour = TimeGenerated
| where anomalies > 0 | extend AnomalyHour = TimeGenerated
| extend TotalBytesSentinMBperHour = round(((TotalBytesSent / 1024)/1024),2), baselinebytessentperHour = round(((baseline / 1024)/1024),2), score = round(score,2)
| project DeviceVendor, AnomalyHour, TimeGenerated, TotalBytesSentinMBperHour, baselinebytessentperHour, anomalies, score;
let AnomalyHours = TimeSeriesAlerts  | where TimeGenerated > ago(2d) | project TimeGenerated;
let AnomalyHours = TimeSeriesAlerts | where TimeGenerated > ago(2d) | project TimeGenerated;
//Union of all BaseLogs aggregated per hour
let BaseLogs = (union isfuzzy=true
(
CommonSecurityLog
CommonSecurityLog
| where isnotempty(DestinationIP) and isnotempty(SourceIP)
| where TimeGenerated between (startofday(ago(starttime))..startofday(ago(endtime)))
| extend DateHour = bin(TimeGenerated, 1h) // create a new column and round to hour
| where DateHour in ((AnomalyHours)) //filter the dataset to only selected anomaly hours
| where DateHour in ((AnomalyHours)) //filter the dataset to only selected anomaly hours
| extend DestinationIpType = iff(DestinationIP matches regex PrivateIPregex,"private" ,"public" )
| where DestinationIpType == "public"
| extend SentBytesinMB = ((SentBytes / 1024)/1024), ReceivedBytesinMB = ((ReceivedBytes / 1024)/1024)
| extend SentBytesinMB = ((SentBytes / 1024)/1024), ReceivedBytesinMB = ((ReceivedBytes / 1024)/1024)
| summarize HourlyCount = count(), TimeGeneratedMax=arg_max(TimeGenerated, *), DestinationIPList=make_set(DestinationIP, 100), DestinationPortList = make_set(DestinationPort,100), TotalSentBytesinMB = sum(SentBytesinMB), TotalReceivedBytesinMB = sum(ReceivedBytesinMB) by SourceIP, DeviceVendor, TimeGeneratedHour=bin(TimeGenerated,1h)
| where TotalSentBytesinMB > bytessentperhourthreshold
| sort by TimeGeneratedHour asc, TotalSentBytesinMB desc
@ -81,11 +81,11 @@ query: |
| project DeviceVendor, TimeGeneratedHour, TimeGeneratedMax, SourceIP, DestinationIPList, DestinationPortList, TotalSentBytesinMB, TotalReceivedBytesinMB, Rank
),
(
VMConnection
VMConnection
| where isnotempty(DestinationIp) and isnotempty(SourceIp)
| where TimeGenerated between (startofday(ago(starttime))..startofday(ago(endtime)))
| extend DateHour = bin(TimeGenerated, 1h) // create a new column and round to hour
| where DateHour in ((AnomalyHours)) //filter the dataset to only selected anomaly hours
| where DateHour in ((AnomalyHours)) //filter the dataset to only selected anomaly hours
| extend SourceIP = SourceIp, DestinationIP = DestinationIp
| extend DestinationIpType = iff(DestinationIp matches regex PrivateIPregex,"private" ,"public" )
| where DestinationIpType == "public" | extend DeviceVendor = "VMConnection"
@ -103,11 +103,11 @@ query: |
| where TimeGenerated > ago(2d)
| join (
BaseLogs | extend AnomalyHour = TimeGeneratedHour
) on DeviceVendor, AnomalyHour | sort by score desc
| project DeviceVendor, AnomalyHour,TimeGeneratedMax, SourceIP, DestinationIPList, DestinationPortList, TotalSentBytesinMB, TotalReceivedBytesinMB, TotalBytesSentinMBperHour, baselinebytessentperHour, score, anomalies
) on DeviceVendor, AnomalyHour | sort by score desc
| project DeviceVendor, AnomalyHour,TimeGeneratedMax, SourceIP, DestinationIPList, DestinationPortList, TotalSentBytesinMB, TotalReceivedBytesinMB, TotalBytesSentinMBperHour, baselinebytessentperHour, score, anomalies
| summarize EventCount = count(), StartTimeUtc= min(TimeGeneratedMax), EndTimeUtc= max(TimeGeneratedMax), SourceIPMax= arg_max(SourceIP,*), TotalBytesSentinMB = sum(TotalSentBytesinMB), TotalBytesReceivedinMB = sum(TotalReceivedBytesinMB), SourceIPList = make_set(SourceIP, 100), DestinationIPList = make_set(DestinationIPList, 100) by AnomalyHour,TotalBytesSentinMBperHour, baselinebytessentperHour, score, anomalies
| project DeviceVendor, AnomalyHour, StartTimeUtc, EndTimeUtc, SourceIPMax, SourceIPList, DestinationIPList, DestinationPortList, TotalBytesSentinMB, TotalBytesReceivedinMB, TotalBytesSentinMBperHour, baselinebytessentperHour, score, anomalies, EventCount
| extend timestamp =EndTimeUtc, IPCustomEntity = SourceIPMax
| project DeviceVendor, AnomalyHour, StartTimeUtc, EndTimeUtc, SourceIPMax, SourceIPList, DestinationIPList, DestinationPortList, TotalBytesSentinMB, TotalBytesReceivedinMB, TotalBytesSentinMBperHour, baselinebytessentperHour, score, anomalies, EventCount
| extend timestamp =EndTimeUtc, IPCustomEntity = SourceIPMax
entityMappings:
- entityType: IP
fieldMappings:

View file

@ -1,11 +1,11 @@
id: b4ceb583-4c44-4555-8ecf-39f572e827ba
name: Exchange workflow MailItemsAccessed operation anomaly
name: Exchange workflow MailItemsAccessed operation anomaly
description: |
'Identifies anomalous increases in Exchange mail items accessed operations.
The query leverages KQL built-in anomaly detection algorithms to find large deviations from baseline patterns. 
Sudden increases in execution frequency of sensitive actions should be further investigated for malicious activity.
Manually change scorethreshold from 1.5 to 3 or higher to reduce the noise based on outliers flagged from the query criteria.
Read more about MailItemsAccessed- https://docs.microsoft.com/microsoft-365/compliance/advanced-audit?view=o365-worldwide#mailitemsaccessed'
'Identifies anomalous increases in Exchange mail items accessed operations.
The query leverages KQL built-in anomaly detection algorithms to find large deviations from baseline patterns.
Sudden increases in execution frequency of sensitive actions should be further investigated for malicious activity.
Manually change scorethreshold from 1.5 to 3 or higher to reduce the noise based on outliers flagged from the query criteria.
Read more about MailItemsAccessed- https://docs.microsoft.com/microsoft-365/compliance/advanced-audit?view=o365-worldwide#mailitemsaccessed'
severity: Medium
requiredDataConnectors:
- connectorId: Office365
@ -26,27 +26,27 @@ query: |
let timeframe = 1h;
let scorethreshold = 1.5;
let percentthreshold = 50;
// Preparing the time series data aggregated hourly count of MailItemsAccessd Operation in the form of multi-value array to use with time series anomaly function.
let TimeSeriesData =
OfficeActivity 
| where TimeGenerated  between (startofday(ago(starttime))..startofday(ago(endtime)))
| where OfficeWorkload=~ "Exchange" and Operation =~ "MailItemsAccessed" and ResultStatus =~ "Succeeded"
| project TimeGenerated, Operation, MailboxOwnerUPN 
| make-series Total=count() on TimeGenerated from startofday(ago(starttime)) to startofday(ago(endtime)) step timeframe;
let TimeSeriesAlerts = TimeSeriesData
| extend (anomalies, score, baseline) = series_decompose_anomalies(Total, scorethreshold, -1, 'linefit')
| mv-expand Total to typeof(double), TimeGenerated to typeof(datetime), anomalies to typeof(double), score to typeof(double), baseline to typeof(long)
| where anomalies > 0
| project TimeGenerated, Total, baseline, anomalies, score;
// Joining the flagged outlier from the previous step with the original dataset to present contextual information
// during the anomalyhour to analysts to conduct investigation or informed decisions.
TimeSeriesAlerts | where TimeGenerated > ago(2d)
// Join against base logs since specified timeframe to retrive records associated with the hour of anomoly
| join ( 
  OfficeActivity 
| where TimeGenerated > ago(2d)
| where OfficeWorkload=~ "Exchange" and Operation =~ "MailItemsAccessed" and ResultStatus =~ "Succeeded"
) on TimeGenerated
// Preparing the time series data: hourly aggregated count of the MailItemsAccessed operation in the form of a multi-value array to use with the time series anomaly function.
let TimeSeriesData =
OfficeActivity
| where TimeGenerated between (startofday(ago(starttime))..startofday(ago(endtime)))
| where OfficeWorkload=~ "Exchange" and Operation =~ "MailItemsAccessed" and ResultStatus =~ "Succeeded"
| project TimeGenerated, Operation, MailboxOwnerUPN
| make-series Total=count() on TimeGenerated from startofday(ago(starttime)) to startofday(ago(endtime)) step timeframe;
let TimeSeriesAlerts = TimeSeriesData
| extend (anomalies, score, baseline) = series_decompose_anomalies(Total, scorethreshold, -1, 'linefit')
| mv-expand Total to typeof(double), TimeGenerated to typeof(datetime), anomalies to typeof(double), score to typeof(double), baseline to typeof(long)
| where anomalies > 0
| project TimeGenerated, Total, baseline, anomalies, score;
// Joining the flagged outlier from the previous step with the original dataset to present contextual information
// during the anomaly hour, so analysts can conduct an investigation or make informed decisions.
TimeSeriesAlerts | where TimeGenerated > ago(2d)
// Join against base logs for the specified timeframe to retrieve records associated with the anomaly hour
| join (
OfficeActivity
| where TimeGenerated > ago(2d)
| where OfficeWorkload=~ "Exchange" and Operation =~ "MailItemsAccessed" and ResultStatus =~ "Succeeded"
) on TimeGenerated
entityMappings:
- entityType: IP
fieldMappings:

View file

@ -20,15 +20,21 @@ relevantTechniques:
- T1098
- T1089
query: |
let timeframe = 1d;
let opList = OfficeActivity
| where TimeGenerated >= ago(timeframe)
| summarize by Operation
//| where Operation startswith "Remove-" or Operation startswith "Disable-"
| where Operation has_any ("Remove", "Disable")
| where Operation contains "AntiPhish" or Operation contains "SafeAttachment" or Operation contains "SafeLinks" or Operation contains "Dlp" or Operation contains "Audit"
| summarize make_set(Operation);
OfficeActivity
| where TimeGenerated >= ago(timeframe)
| where RecordType =~ "ExchangeAdmin"
| where UserType in~ ("Admin","DcAdmin")
// Only admin or global-admin can disable/remove policy
| where Operation startswith "Remove-" or Operation startswith "Disable-"
| where Operation has_any ("AntiPhish", "SafeAttachment", "SafeLinks", "Dlp", "Audit")
| where RecordType =~ "ExchangeAdmin"
| where UserType in~ ("Admin","DcAdmin")
// Pass in interesting Operation list
| where Operation in~ (opList)
| extend ClientIPOnly = case(
ClientIP has ".", tostring(split(ClientIP,":")[0]),
ClientIP has "[", tostring(trim_start(@'[[]',tostring(split(ClientIP,"]")[0]))),

View file

@ -0,0 +1,41 @@
id: e70fa6e0-796a-4e85-9420-98b17b0bb749
name: Solorigate Defender Detections
description: |
'Surfaces any Defender alert for Solorigate events. In Azure Sentinel the SecurityAlert table includes only the device name of the affected device, so this query joins the DeviceInfo table to connect other information such as
device group, IP, logged-on users, etc. This way, the Sentinel user has all the pertinent device info in one view for all the Solorigate Defender alerts.'
severity: High
requiredDataConnectors:
- connectorId: MicrosoftDefenderAdvancedThreatProtection
dataTypes:
- SecurityAlert (MDATP)
- DeviceInfo
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- InitialAccess
relevantTechniques:
- T1195
query: |
DeviceInfo
| extend DeviceName = tolower(DeviceName)
| join (SecurityAlert
| where ProviderName =~ "MDATP"
| extend ThreatName = tostring(parse_json(ExtendedProperties).ThreatName)
| where ThreatName has "Solorigate"
| extend HostCustomEntity = tolower(CompromisedEntity)
| take 10) on $left.DeviceName == $right.HostCustomEntity
| project TimeGenerated, DisplayName, ThreatName, CompromisedEntity, PublicIP, MachineGroup, AlertSeverity, Description, LoggedOnUsers, DeviceId, TenantId
| extend timestamp = TimeGenerated, IPCustomEntity = PublicIP
entityMappings:
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity

View file

@ -0,0 +1,71 @@
id: dcdf9bfc-c239-4764-a9f9-3612e6dff49c
name: ADFS Key Export (Sysmon)
description: |
'This detection uses Sysmon telemetry to detect potential ADFS certificate material export.
To use this query you need to be collecting Sysmon Event IDs 17 and 18.
If you do not have Sysmon data in your workspace this query will raise an error stating:
Failed to resolve scalar expression named "[@Name]"'
severity: Medium
requiredDataConnectors:
- connectorId: SecurityEvents
dataTypes:
- SecurityEvent
queryFrequency: 1d
queryPeriod: 1d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Collection
relevantTechniques:
- T1005
query: |
// Adjust this to use a longer timeframe to identify ADFS servers
let lookback = 6d;
// Adjust this to adjust the key export detection timeframe
let timeframe = 1d;
// Start by identifying ADFS servers to reduce the chance of false positives
let ADFS_Servers = (
Event
| where TimeGenerated > ago(timeframe+lookback)
| where Source == "Microsoft-Windows-Sysmon"
| extend EventData = parse_xml(EventData).DataItem.EventData.Data
| mv-expand bagexpansion=array EventData
| evaluate bag_unpack(EventData)
| extend Key=tostring(['@Name']), Value=['#text']
| evaluate pivot(Key, any(Value), TimeGenerated, Source, EventLog, Computer, EventLevel, EventLevelName, EventID, UserName, RenderedDescription, MG, ManagementGroupName, Type, _ResourceId)
| extend process = split(Image, '\\', -1)[-1]
| where process =~ "Microsoft.IdentityServer.ServiceHost.exe"
| summarize by Computer);
// Look for ADFS servers where Named Pipes event are present
Event
| where TimeGenerated > ago(timeframe)
| where Source == "Microsoft-Windows-Sysmon"
| where Computer in~ (ADFS_Servers)
| extend RenderedDescription = tostring(split(RenderedDescription, ":")[0])
| extend EventData = parse_xml(EventData).DataItem.EventData.Data
| mv-expand bagexpansion=array EventData
| evaluate bag_unpack(EventData)
| extend Key=tostring(['@Name']), Value=['#text']
| evaluate pivot(Key, any(Value), TimeGenerated, Source, EventLog, Computer, EventLevel, EventLevelName, EventID, UserName, RenderedDescription, MG, ManagementGroupName, Type, _ResourceId)
| extend RuleName = column_ifexists("RuleName", ""), TechniqueId = column_ifexists("TechniqueId", ""), TechniqueName = column_ifexists("TechniqueName", "")
| parse RuleName with * 'technique_id=' TechniqueId ',' * 'technique_name=' TechniqueName
| where EventID in (17,18)
// Look for Pipe related to querying the WID
| where PipeName == "\\MICROSOFT##WID\\tsql\\query"
| extend process = split(Image, '\\', -1)[-1]
// Exclude expected processes
| where process !in ("Microsoft.IdentityServer.ServiceHost.exe", "Microsoft.Identity.Health.Adfs.PshSurrogate.exe", "AzureADConnect.exe", "Microsoft.Tri.Sensor.exe", "wsmprovhost.exe","mmc.exe", "sqlservr.exe")
| extend Operation = RenderedDescription
| project-reorder TimeGenerated, EventType, Operation, process, Image, Computer, UserName
| extend HostCustomEntity = Computer, AccountCustomEntity = UserName
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
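Because the description notes this rule errors out when Sysmon data is absent, a quick pre-flight check (a sketch using the same Event table and fields as the rule above) can confirm that Sysmon named-pipe events are being collected:

// Sketch: confirm Sysmon EventID 17/18 (pipe created / pipe connected) exist before enabling the rule
Event
| where TimeGenerated > ago(1d)
| where Source == "Microsoft-Windows-Sysmon"
| where EventID in (17, 18)
| summarize NamedPipeEvents = count() by Computer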

View file

@ -1,8 +1,8 @@
id: 2c55fe7a-b06f-4029-a5b9-c54a2320d7b8
name: Process execution frequency anomaly
description: |
'Identifies anomalous spike in frequency of executions of sensitive processes which are often leveraged as attack vectors.
The query leverages KQL built-in anomaly detection algorithms to find large deviations from baseline patterns.
'Identifies an anomalous spike in the frequency of executions of sensitive processes, which are often leveraged as attack vectors.
The query leverages KQL built-in anomaly detection algorithms to find large deviations from baseline patterns.
Sudden increases in execution frequency of sensitive processes should be further investigated for malicious activity.
Tune the values from 1.5 to 3 in series_decompose_anomalies for further outliers or based on custom threshold values for score.'
severity: Medium
@ -25,7 +25,7 @@ query: |
let timeframe = 1h;
let TotalEventsThreshold = 5;
let ExeList = dynamic(["powershell.exe","cmd.exe","wmic.exe","psexec.exe","cacls.exe","rundll.exe"]);
let TimeSeriesData =
let TimeSeriesData =
SecurityEvent
| where EventID == 4688 | extend Process = tolower(Process)
| where TimeGenerated between (startofday(ago(starttime))..startofday(ago(endtime)))
@ -38,18 +38,18 @@ query: |
| where anomalies > 0
| project Process, TimeGenerated, Total, baseline, anomalies, score
| where Total > TotalEventsThreshold;
let AnomalyHours = TimeSeriesAlerts  | where TimeGenerated > ago(2d) | project TimeGenerated;
let AnomalyHours = TimeSeriesAlerts | where TimeGenerated > ago(2d) | project TimeGenerated;
TimeSeriesAlerts
| where TimeGenerated > ago(2d)
| join (
SecurityEvent
| where TimeGenerated between (startofday(ago(starttime))..startofday(ago(endtime)))
| extend DateHour = bin(TimeGenerated, 1h) // create a new column and round to hour
| where DateHour in ((AnomalyHours)) //filter the dataset to only selected anomaly hours
| where DateHour in ((AnomalyHours)) //filter the dataset to only selected anomaly hours
| where EventID == 4688 | extend Process = tolower(Process)
| summarize CommandlineCount = count() by bin(TimeGenerated, 1h), Process, CommandLine, Computer, Account
) on Process, TimeGenerated
| project AnomalyHour = TimeGenerated, Computer, Account, Process, CommandLine, CommandlineCount, Total, baseline, anomalies, score
) on Process, TimeGenerated
| project AnomalyHour = TimeGenerated, Computer, Account, Process, CommandLine, CommandlineCount, Total, baseline, anomalies, score
| extend timestamp = AnomalyHour, AccountCustomEntity = Account, HostCustomEntity = Computer
entityMappings:
- entityType: Account

View file

@ -0,0 +1,40 @@
id: 50574fac-f8d1-4395-81c7-78a463ff0c52
name: Azure Active Directory PowerShell accessing non-AAD resources
description: |
'This will alert when a user or application signs in using Azure Active Directory PowerShell to access non-Active Directory resources, such as the Azure Key Vault, which may be undesired or unauthorized behavior.
For capabilities and expected behavior of the Azure Active Directory PowerShell module, see: https://docs.microsoft.com/powershell/module/azuread/?view=azureadps-2.0.
For further information on Azure Active Directory Signin activity reports, see: https://docs.microsoft.com/azure/active-directory/reports-monitoring/concept-sign-ins.'
severity: Low
requiredDataConnectors:
- connectorId: AzureActiveDirectory
dataTypes:
- SigninLogs
queryFrequency: 1h
queryPeriod: 1h
triggerOperator: gt
triggerThreshold: 0
tactics:
- InitialAccess
relevantTechniques:
- T1078.004
query: |
let auditLookback = 1h;
SigninLogs
| where TimeGenerated > ago(auditLookback)
| where AppId =~ "1b730954-1685-4b74-9bfd-dac224a7b894" // AppDisplayName IS Azure Active Directory PowerShell
| where TokenIssuerType =~ "AzureAD"
| where ResourceIdentity !in ("00000002-0000-0000-c000-000000000000", "00000003-0000-0000-c000-000000000000") // ResourceDisplayName IS NOT Windows Azure Active Directory OR Microsoft Graph
| where Status.errorCode == 0 // Success
| project-reorder IPAddress, UserAgent, ResourceDisplayName, UserDisplayName, UserId, UserPrincipalName
| order by TimeGenerated desc
// New entity mapping
| extend timestamp = TimeGenerated, AccountCustomEntity = UserPrincipalName, IPCustomEntity = IPAddress
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity
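Before enabling this Low-severity rule, it can help to see which resources Azure AD PowerShell sign-ins actually hit in your tenant; a minimal sketch over the same SigninLogs fields used above (the 7-day window is an arbitrary choice):

// Sketch: summarize the resources accessed via the Azure AD PowerShell AppId to decide what else to exclude
SigninLogs
| where TimeGenerated > ago(7d)
| where AppId =~ "1b730954-1685-4b74-9bfd-dac224a7b894"
| summarize SigninCount = count() by ResourceDisplayName, ResourceIdentity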

View file

@ -1,12 +1,12 @@
// Usage Instruction :
// Usage Instruction :
// Either run this query as a stand alone query within Azure Sentinel or save it as a KQL function for later use. Further details on
// functions can be found here: https://techcommunity.microsoft.com/t5/Azure-Sentinel/Using-KQL-functions-to-speed-up-analysis-in-Azure-Sentinel/ba-p/712381
// For US Daylight Savings
//Below function will take timestamp as input and provides status if the timestamp is in US DaylightSavings window or not.
//Below function will take timestamp as input and provides status if the timestamp is in US DaylightSavings window or not.
// Check sample usage on test data at the end
let CheckifDaylightSavingUS = (UtcDateTime:datetime) {
// Reference formula: http://www.webexhibits.org/daylightsaving/i.html
// US daylight savings time begins (clocks go forward) on the second Sunday in March,
// US daylight savings time begins (clocks go forward) on the second Sunday in March,
// and ends (clocks go back) on the first Sunday in November:
let Year = getyear(UtcDateTime);
// Calculate day portion of last sunday in March and in November
@ -21,7 +21,7 @@ iff(UtcDateTime between (DaylightStart .. DaylightEnd), "True", "False")
// For EU Daylight Savings
let CheckifDaylightSavingEU = (UtcDateTime:datetime) {
// Reference formula: http://www.webexhibits.org/daylightsaving/i.html
// European Summer Time begins (clocks go forward) at 01:00 UTC on the last Sunday in March,
// European Summer Time begins (clocks go forward) at 01:00 UTC on the last Sunday in March,
// and ends (clocks go back) at 01:00 UTC on the last Sunday in October:
let Year = getyear(UtcDateTime);
// Calculate day portion of last sunday in March and in October
@ -36,14 +36,14 @@ iff(UtcDateTime between (DaylightStart .. DaylightEnd), "True", "False")
// Sample Usage in the Query - Generate Status based on InputDatetime in UTC
// let T = materialize(datatable(InputDateTime:datetime)
// [
//          datetime(2020-03-07 16:59),
//          datetime(2020-03-07 17:01),
//          datetime(2020-10-31 15:59),
//          datetime(2020-10-31 16:01),
//          datetime(2020-03-29 00:59),
//          datetime(2020-03-29 01:00),
//          datetime(2020-10-25 00:59),
//          datetime(2020-10-25 01:01),
// datetime(2020-03-07 16:59),
// datetime(2020-03-07 17:01),
// datetime(2020-10-31 15:59),
// datetime(2020-10-31 16:01),
// datetime(2020-03-29 00:59),
// datetime(2020-03-29 01:00),
// datetime(2020-10-25 00:59),
// datetime(2020-10-25 01:01),
// ])
// ;
// T

View file

@ -0,0 +1,34 @@
id: 42831fb3-f61d-41e9-95d9-f08797479a0e
name: Azure CloudShell Usage
description: |
'This query looks for users starting an Azure CloudShell session and summarizes the Azure Activity from that
user account during that timeframe (by default 1 hour). This can be used to help identify abuse of the CloudShell
to modify Azure resources.'
requiredDataConnectors:
- connectorId: AzureActiveDirectory
dataTypes:
- AuditLogs
tactics:
- Execution
relevantTechniques:
- T1059
query: |
AzureActivity
| where ActivityStatusValue == "Succeeded"
| where ResourceGroup contains "cloud-shell-storage"
| where OperationNameValue == "Microsoft.Storage/storageAccounts/listKeys/action"
// Change the timekey scope below to get activity for a longer window
| summarize by Caller, timekey= bin(TimeGenerated, 1h)
| join (AzureActivity
| where OperationNameValue != "Microsoft.Storage/storageAccounts/listKeys/action"
| where isnotempty(OperationName)
// Change the timekey scope below to get activity for a longer window
| summarize make_set(OperationName) by Caller, timekey=bin(TimeGenerated, 1h)) on Caller, timekey
| extend timestamp = timekey, AccountCustomEntity = Caller
entityMappings:
- entityType: Account
fieldMappings:
- identifier: FullName
columnName: AccountCustomEntity

View file

@ -0,0 +1,35 @@
id: 0fb54a5c-5599-4ff9-80a2-f788c3ed285e
name: Solorigate DNS Pattern
description: |
'Looks for the DGA pattern of the domain associated with Solorigate in order to find other domains with the same activity pattern.'
requiredDataConnectors:
- connectorId: DNS
dataTypes:
- DnsEvents
tactics:
- CommandAndControl
relevantTechniques:
- T1568
query: |
let cloudApiTerms = dynamic(["api", "east", "west"]);
DnsEvents
| where IPAddresses != "" and IPAddresses != "127.0.0.1"
| where Name endswith ".com" or Name endswith ".org" or Name endswith ".net"
| extend domain_split = split(Name, ".")
| where tostring(domain_split[-5]) != "" and tostring(domain_split[-6]) == ""
| extend sub_domain = tostring(domain_split[0])
| where sub_domain !contains "-"
| extend sub_directories = strcat(domain_split[-3], " ", domain_split[-4])
| where sub_directories has_any(cloudApiTerms)
//Based on sample communications the subdomain is always between 20 and 30 bytes
| where strlen(sub_domain) < 32 and strlen(sub_domain) > 20
| extend domain = strcat(tostring(domain_split[-2]), ".", tostring(domain_split[-1]))
| extend subdomain_no = countof(sub_domain, @"(\d)", "regex")
| extend subdomain_ch = countof(sub_domain, @"([a-z])", "regex")
| where subdomain_no > 1
| extend percentage_numerical = toreal(subdomain_no) / toreal(strlen(sub_domain)) * 100
| where percentage_numerical < 50 and percentage_numerical > 5
| summarize count(), make_set(Name), FirstSeen=min(TimeGenerated), LastSeen=max(TimeGenerated) by Name
| order by count_ asc
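To see how the digit/letter ratio filter above behaves, here is a minimal sketch applied to an invented DGA-style label (not a real IOC); a label mixing letters and digits like this one lands inside the 5-50% numeric band the rule keeps:

// Sketch: reproduce the ratio test on a single hypothetical subdomain label (~35% digits)
print sub_domain = "x9k2q7r1abcde4f8tuv0"  // invented label
| extend subdomain_no = countof(sub_domain, @"(\d)", "regex")     // digit count
| extend subdomain_ch = countof(sub_domain, @"([a-z])", "regex")  // letter count
| extend percentage_numerical = toreal(subdomain_no) / toreal(strlen(sub_domain)) * 100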

View file

@ -0,0 +1,62 @@
id: 29a1815a-3ada-4182-a178-e52c483d2f95
name: Solorigate Encoded Domain in URL
description: |
'Looks for a logon domain seen in Azure AD logs appearing in a DNS query encoded with the DGA encoding used in the Solorigate incident.
Reference: https://blogs.microsoft.com/on-the-issues/2020/12/13/customers-protect-nation-state-cyberattacks/'
requiredDataConnectors:
- connectorId: DNS
dataTypes:
- DnsEvents
- connectorId: AzureActiveDirectory
dataTypes:
- SigninLogs
tactics:
- CommandAndControl
relevantTechniques:
- T1568
query: |
let dictionary = dynamic(["r","q","3","g","s","a","l","t","6","u","1","i","y","f","z","o","p","5","7","2","d","4","9","b","n","x","8","c","v","m","k","e","w","h","j"]);
let regex_bad_domains = SigninLogs
//Collect domains from tenant from signin logs
| where TimeGenerated > ago(1d)
| extend domain = tostring(split(UserPrincipalName, "@", 1)[0])
| where domain != ""
| summarize by domain
| extend split_domain = split(domain, ".")
//This cuts back on domains such as na.contoso.com by electing not to match on the "na" portion
| extend target_string = iff(strlen(split_domain[0]) <= 2, split_domain[1], split_domain[0])
| extend target_string = split(target_string, "-")
| mv-expand target_string
//Rip all of the alphanumeric out of the domain name
| extend string_chars = extract_all(@"([a-z0-9])", tostring(target_string))
//Guid for tracking our data
| extend guid = new_guid()
//Expand to get all of the individual chars from the domain
| mv-expand string_chars
| extend chars = tostring(string_chars)
//Conduct computation to encode the domain as per actor spec
| extend computed_char = array_index_of(dictionary, chars)
| extend computed_char = dictionary[(computed_char + 4) % array_length(dictionary)]
| summarize make_list(computed_char) by guid, domain
| extend target_encoded = tostring(strcat_array(list_computed_char, ""))
//These are probably too small, but can be edited (expect FP's when going too small)
| where strlen(target_encoded) > 5
| distinct target_encoded
| summarize make_set(target_encoded)
//Key to join to DNS
| extend key = 1;
DnsEvents
| where TimeGenerated > ago(1d)
| summarize by Name
| extend key = 1
//For each DNS query join the malicious domain list
| join kind=inner (
regex_bad_domains
) on key
| project-away key
//Expand each malicious key for each DNS query observed
| mv-expand set_target_encoded
//IndexOf allows us to fuzzy match on the substring
| extend match = indexof(Name, set_target_encoded)
| where match > -1
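A minimal standalone sketch of the dictionary shift used above, applied to a single hypothetical domain label ("contoso") so the substitution is easier to follow; with this table and the +4 shift the label should come out as "e2vi262":

// Sketch: encode one hypothetical label with the same +4 dictionary shift as the query above
let dictionary = dynamic(["r","q","3","g","s","a","l","t","6","u","1","i","y","f","z","o","p","5","7","2","d","4","9","b","n","x","8","c","v","m","k","e","w","h","j"]);
print domain = "contoso"
| extend string_chars = extract_all(@"([a-z0-9])", domain)
| mv-expand string_chars to typeof(string)
| extend computed_char = array_index_of(dictionary, string_chars)
| extend computed_char = dictionary[(computed_char + 4) % array_length(dictionary)]
| summarize make_list(computed_char) by domain
| extend target_encoded = tostring(strcat_array(list_computed_char, ""))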

View file

@ -14,7 +14,7 @@ query: |
let EndRunTime = StartTime - RunTime;
let EndLearningTime = StartTime + LearningPeriod;
let GitHubOrgMemberLogs = (GitHubAudit
where Action == "org.invite_member" or Action == "org.update_member" or Action == "org.add_member Action == "repo.add_member" Action == "team.add_member");
| where Action == "org.invite_member" or Action == "org.update_member" or Action == "org.add_member" or Action == "repo.add_member" or Action == "team.add_member");
GitHubOrgMemberLogs
| where TimeGenerated between (ago(EndLearningTime) .. ago(StartTime))
| distinct Actor
@ -23,4 +23,3 @@ query: |
| where TimeGenerated between (ago(StartTime) .. ago(EndRunTime))
| distinct Actor
) on Actor

View file

@ -15,27 +15,27 @@ relevantTechniques:
- T1020
query: |
let lookback = 30d;
let timeframe = 1d;
let threshold = 0;
let lookback = 30d;
let timeframe = 1d;
let threshold = 0;
LAQueryLogs
| where TimeGenerated between (ago(lookback)..ago(timeframe))
| mv-expand(RequestContext)
| extend RequestContextExtended = split(RequestTarget, "/") 
| extend Subscription = tostring(RequestContextExtended[2]), ResourceGroups = tostring(RequestContextExtended[4]), Workspace = tostring(RequestContextExtended[8])
| summarize count(), HistWorkspaceCount=dcount(Workspace) by AADEmail
| join (
| where TimeGenerated between (ago(lookback)..ago(timeframe))
| mv-expand(RequestContext)
| extend RequestContextExtended = split(RequestTarget, "/")
| extend Subscription = tostring(RequestContextExtended[2]), ResourceGroups = tostring(RequestContextExtended[4]), Workspace = tostring(RequestContextExtended[8])
| summarize count(), HistWorkspaceCount=dcount(Workspace) by AADEmail
| join (
LAQueryLogs
| where TimeGenerated > ago(timeframe)
| mv-expand(RequestContext)
| extend RequestContextExtended = split(RequestTarget, "/") 
| extend Subscription = tostring(RequestContextExtended[2]), ResourceGroups = tostring(RequestContextExtended[4]), Workspace = tostring(RequestContextExtended[8])
| summarize make_set(Workspace), count(), CurrWorkspaceCount=dcount(Workspace) by AADEmail
) on AADEmail
| where CurrWorkspaceCount > HistWorkspaceCount
| where TimeGenerated > ago(timeframe)
| mv-expand(RequestContext)
| extend RequestContextExtended = split(RequestTarget, "/")
| extend Subscription = tostring(RequestContextExtended[2]), ResourceGroups = tostring(RequestContextExtended[4]), Workspace = tostring(RequestContextExtended[8])
| summarize make_set(Workspace), count(), CurrWorkspaceCount=dcount(Workspace) by AADEmail
) on AADEmail
| where CurrWorkspaceCount > HistWorkspaceCount
// Uncomment follow rows to see queries made by these users
//| join (
//LAQueryLogs
//| where TimeGenerated > ago(timeframe))
//| where TimeGenerated > ago(timeframe))
//on AADEmail
//| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
//| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail

View file

@ -1,7 +1,7 @@
id: cd11d6a1-e2ad-47fa-9a9f-4c70b143d4fd
name: Multiple large queries made by user
description: |
'This hunting query looks for users who are running multiple queries that return either a very large
'This hunting query looks for users who are running multiple queries that return either a very large
amount of data or the maximum amount allowed by the query method.'
requiredDataConnectors:
- connectorId: AzureMonitor(Query Audit)
@ -26,5 +26,5 @@ query: |
| where TimeGenerated > ago(timeframe)
| where (ResponseRowCount == 10001 and RequestClientApp in(UI_apps)) or (ResponseRowCount > 10001 and RequestClientApp !in(UI_apps)))
on AADEmail
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail

View file

@ -15,14 +15,14 @@ relevantTechniques:
- T1020
query: |
let lookback = 7d;
let timeframe = 1d;
let lookback = 7d;
let timeframe = 1d;
LAQueryLogs
| where TimeGenerated between (ago(lookback)..ago(timeframe))
| where ResponseCode == 200
| join kind= rightanti( 
| where TimeGenerated between (ago(lookback)..ago(timeframe))
| where ResponseCode == 200
| join kind= rightanti(
LAQueryLogs
| where TimeGenerated > ago(timeframe)
| where TimeGenerated > ago(timeframe)
)
on RequestClientApp
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
on RequestClientApp
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail

View file

@ -15,16 +15,16 @@ relevantTechniques:
- T1020
query: |
let lookback = 7d;
let timeframe = 1d;
let lookback = 7d;
let timeframe = 1d;
LAQueryLogs
| where TimeGenerated between (ago(lookback)..ago(timeframe))
| where ResponseCode == 200 and RequestClientApp != "AppAnalytics" and AADEmail !contains "@"
| distinct AADClientId 
| join kind=rightanti( 
| where TimeGenerated between (ago(lookback)..ago(timeframe))
| where ResponseCode == 200 and RequestClientApp != "AppAnalytics" and AADEmail !contains "@"
| distinct AADClientId
| join kind=rightanti(
LAQueryLogs
| where TimeGenerated > ago(timeframe)
| where ResponseCode == 200 and RequestClientApp != "AppAnalytics" and AADEmail !contains "@"
| where TimeGenerated > ago(timeframe)
| where ResponseCode == 200 and RequestClientApp != "AppAnalytics" and AADEmail !contains "@"
)
on AADClientId
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
on AADClientId
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail

View file

@ -22,5 +22,5 @@ query: |
| where TimeGenerated > ago(timeframe))
on AADEmail
| project TimeGenerated, AADEmail, QueryText, RequestClientApp, RequestTarget
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail

View file

@ -18,12 +18,12 @@ query: |
| make-series rows = sum(ResponseRowCount) on TimeGenerated in range(startofday(ago(lookback)), now(), 1h)
| extend (anomalies, score, baseline) = series_decompose_anomalies(rows,3, -1, 'linefit')
| mv-expand anomalies to typeof(int), score to typeof(double), TimeGenerated to typeof(datetime)
| where anomalies > threshold
| where anomalies > threshold
| sort by score desc
| join kind=rightsemi (
LAQueryLogs
| summarize make_set(QueryText) by AADEmail, RequestTarget, TimeGenerated = bin(TimeGenerated, 1h))
on TimeGenerated
| project TimeGenerated, AADEmail, RequestTarget, set_QueryText
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail

View file

@ -25,7 +25,7 @@ query: |
| extend querytext_lower = tolower(QueryText)
| where querytext_lower has_any(keywords)
| project TimeGenerated, AADEmail, QueryText, RequestClientApp, RequestTarget, ResponseCode, ResponseRowCount, ResponseDurationMs, CorrelationId
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
| join kind=leftanti ( LAQueryLogs
| where TimeGenerated > ago(timeframe)
| where RequestClientApp != 'Sentinel-General'

View file

@ -32,6 +32,6 @@ query: |
LAQueryLogs
| where TimeGenerated > ago(timeframe)
| where AADEmail in(anomolous_users)
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
// Comment out the line below to see the queries run by users.
| summarize total_rows = sum(ResponseRowCount), NoQueries = count(), AvgQuerySize = sum(ResponseRowCount)/count() by AADEmail

View file

@ -20,10 +20,10 @@ query: |
| where ResponseCode != 200
| summarize count() by AADEmail, bin(TimeGenerated, timeframe)
| where count_ > threshold
| join kind=rightsemi (
| join kind=rightsemi (
LAQueryLogs
| where TimeGenerated > ago(lookback)
| summarize make_set(QueryText) by AADEmail, bin(TimeGenerated, timeframe))
on AADEmail, TimeGenerated
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail
| extend timestamp = TimeGenerated, AccountCustomEntity = AADEmail

View file

@ -0,0 +1,112 @@
id: e3b8ca4a-2bab-4246-860c-fc3bb8e7ac50
name: FireEye stolen red teaming tools communications
description: |
'This composite hunting query highlights any HTTP traffic in CommonSecurityLog web proxy data (such as Zscaler) that matches known patterns used by the red teaming tools potentially stolen from FireEye. Because most FireEye red teaming tools are designed to mimic
legitimate API activity, false positives are common. This query includes a basic check to determine how common a hostname is in your environment, and allows you to modify this threshold to remove legitimate traffic from the query results.
This query contains only a subset of potential FireEye red team tool communications and therefore should not be relied upon alone.'
requiredDataConnectors:
- connectorId: Zscaler
dataTypes:
- CommonSecurityLog
tactics:
- CommandAndControl
relevantTechniques:
- T1071.001
query: |
let lookback = 7d;
let domainLookback = 7d;
let domainCountThreshold = 100; //Maximum number of times a domain has been visited
//Backdoor.HTTP.BEACON.[Yelp GET]
let FEQuery1 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "GET"
| where RequestURL contains "&parent_request_id="
| where RequestURL matches regex @"&parent_request_id=(?:[A-Za-z0-9_\/\+\-\%]{128,1000})={0,2}[^\r\n]{0,256}"
| extend Quality = "high"
| extend RuleName = "Backdoor.HTTP.BEACON.[Yelp GET]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
//Backdoor.HTTP.BEACON.[CSBundle CDN GET]
let FEQuery2 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "GET"
| where FileType =~ "GZIP"
| where RequestURL matches regex @"(?:\/v1\/queue|\/v1\/profile|\/v1\/docs\/wsdl|\/v1\/pull)"
| extend Quality = "low"
| extend RuleName = "Backdoor.HTTP.BEACON.[CSBundle CDN GET]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
//Backdoor.HTTP.BEACON.[CSBundle USAToday GET]
let FEQuery3 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "GET"
| where isempty(RequestContext)
| where RequestURL matches regex @"(?:\/USAT-GUP\/user\/|\/entertainment\/|\/entertainment\/navdd-q1a2z3Z6TET4gv2PNfXpaJAniOzOajK7M\.min\.json|\/global-q1a2z3C4M2nNlQYzWhCC0oMSEFjQbW1KA\.min\.json|\/life\/|\/news\/weather\/|\/opinion\/|\/sports\/|\/sports\/navdd-q1a2z3JHa8KzCRLOQAnDoVywVWF7UwxJs\.min\.json|\/tangstatic\/js\/main-q1a2z3b37df2b1\.min\.js|\/tangstatic\/js\/pbjsandwich-q1a2z300ab4198\.min\.js|\/tangstatic\/js\/pg-q1a2z3bbc110a4\.min\.js|\/tangsvc\/pg\/3221104001\/|\/tangsvc\/pg\/5059005002\/|\/tangsvc\/pg\/5066496002\/|\/tech\/|\/travel\/)"
| where DestinationHostName !endswith "usatoday.com"
| extend Quality = "medium"
| extend RuleName = "Backdoor.HTTP.BEACON.[CSBundle USAToday GET]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
//Backdoor.HTTP.BEACON.[CSBundle Original POST]
let FEQuery4 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "POST"
| where isempty(RequestContext)
| where RequestURL matches regex @"(?:\/v4\/links\/check-activity\/check|\/v1\/stats|\/gql|\/api2\/json\/check\/ticket|\/1.5\/95648064\/storage\/history|\/1.5\/95648064\/storage\/tabs|\/u\/0\/_\/og\/botguard\/get|\/ev\/prd001001|\/ev\/ext001001|\/gp\/aw\/ybh\/handlers|\/v3\/links\/ping-beat\/check)"
| extend Quality = "low"
| extend RuleName = "Backdoor.HTTP.BEACON.[CSBundle Original POST]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
//Backdoor.HTTP.BEACON.[CSBundle MSOffice POST
let FEQuery5 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "POST"
| where isempty(RequestContext)
| where RequestURL contains "/v1/push"
| extend Quality = "low"
| extend RuleName = "Backdoor.HTTP.BEACON.[CSBundle MSOffice POST]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
//Backdoor.HTTP.BEACON.[CSBundle NYTIMES POST]
let FEQuery6 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "POST"
| where isempty(RequestContext)
| where RequestURL matches regex @"(?:\/track|\/api\/v1\/survey\/embed|\/svc\/weather\/v2)"
| extend Quality = "low"
| extend RuleName = "Backdoor.HTTP.BEACON.[CSBundle NYTIMES POST]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
//Backdoor.HTTP.BEACON.[CSBundle MSOffice GET]
let FEQuery7 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "GET"
| where isempty(RequestContext)
| where RequestURL matches regex @"(?:\/updates|\/license\/eula|\/docs\/office|\/software-activation)"
| extend Quality = "low"
| extend RuleName = "Backdoor.HTTP.BEACON.[CSBundle MSOffice GET]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
//Backdoor.HTTP.BEACON.[CSBundle MSOffice POST]
let FEQuery8 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "POST"
| where isempty(RequestContext)
| where RequestURL contains "/notification"
| extend Quality = "low"
| extend RuleName = "Backdoor.HTTP.BEACON.[CSBundle MSOffice POST]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
//Backdoor.HTTP.BEACON.[CSBundle Original GET]
let FEQuery9 = CommonSecurityLog
| where TimeGenerated > ago(lookback)
| where RequestMethod == "GET"
| where isempty(RequestContext)
| where RequestURL matches regex @"(?:\/api2\/json\/access\/ticket|\/api2\/json\/cluster\/resources|\/api2\/json\/cluster\/tasks|\/en-us\/p\/onerf\/MeSilentPassport|\/en-us\/p\/book-2\/8MCPZJJCC98C|\/en-us\/store\/api\/checkproductinwishlist|\/gp\/cerberus\/gv|\/gp\/aj\/private\/reviewsGallery\/get-application-resources|\/gp\/aj\/private\/reviewsGallery\/get-image-gallery-assets|\/v1\/buckets\/default\/ext-5dkJ19tFufpMZjVJbsWCiqDcclDw\/records|\/v3\/links\/ping-centre|\/v4\/links\/activity-stream|\/wp-content\/themes\/am43-6\/dist\/records|\/wp-content\/themes\/am43-6\/dist\/records|\/wp-includes\/js\/script\/indigo-migrate)"
| extend Quality = "medium"
| extend RuleName = "Backdoor.HTTP.BEACON.[CSBundle Original GET]"
| project TimeGenerated, Quality, RuleName, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL;
let Results = union FEQuery1, FEQuery3, FEQuery4, FEQuery5, FEQuery6, FEQuery7, FEQuery8, FEQuery9;
//Check to see if the destination host name is low hitting in data, defeats a lot of legit API traffic
Results
| join (
CommonSecurityLog
| where TimeGenerated > ago(domainLookback)
| where DestinationHostName != ""
| summarize DomainCount=count() by DestinationHostName)
on $left.DestinationHostName == $right.DestinationHostName
| project TimeGenerated, Quality, DeviceVendor, DeviceProduct, TenantId, SourceIP, DestinationIP, DestinationHostName, RequestMethod, RequestURL, DomainCount
| where DomainCount <= domainCountThreshold
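To help pick a sensible domainCountThreshold for your environment, a quick sketch over the same CommonSecurityLog proxy data (the 7-day window mirrors the domainLookback above) shows the distribution of hostname prevalence:

// Sketch: inspect how often destination hostnames repeat, then tune domainCountThreshold accordingly
CommonSecurityLog
| where TimeGenerated > ago(7d)
| where DestinationHostName != ""
| summarize DomainCount = count() by DestinationHostName
| summarize percentiles(DomainCount, 50, 90, 99)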

Some files were not shown because too many files changed.