Merge branch 'master' into SlackAuditConnector

This commit is contained in:
v-rucdu 2021-03-16 10:27:24 +05:30 committed by GitHub
Parents 25dc3f9166 78eee60551
Commit ee02cae67b
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
696 changed files: 54306 additions and 4546 deletions

View file

@ -9,24 +9,30 @@ import * as logger from "./utils/logger";
import { ConnectorCategory } from "./utils/dataConnector";
export async function IsValidDataConnectorSchema(filePath: string): Promise<ExitCode> {
    if(!filePath.includes('Templates'))
    {
        let jsonFile = JSON.parse(fs.readFileSync(filePath, "utf8"));
        if(isPotentialConnectorJson(jsonFile))
        {
            let connectorCategory = getConnectorCategory(jsonFile.dataTypes, jsonFile.instructionSteps);
            let schema = JSON.parse(fs.readFileSync(".script/utils/schemas/"+ connectorCategory +"_ConnectorSchema.json", "utf8"));
            isValidSchema(jsonFile, schema);
            isValidId(jsonFile.id);
            isValidDataType(jsonFile.dataTypes);
            /* Disabling temporarily till we get confirmation from PM*/
            // isValidFileName(filePath
            isValidPermissions(jsonFile.permissions, connectorCategory);
        }
        else{
            console.warn(`Could not identify json file as a connector. Skipping File path: ${filePath}`)
        }
    }
    else{
        console.warn(`Skipping Files under Templates folder : ${filePath}`)
    }
    return ExitCode.SUCCESS;
}
function isPotentialConnectorJson(jsonFile: any) {
@ -61,7 +67,7 @@ function getConnectorCategory(dataTypes : any, instructionSteps:[])
}
let fileTypeSuffixes = ["json"];
let filePathFolderPrefixes = ["DataConnectors"];
let filePathFolderPrefixes = ["DataConnectors","Solutions"];
let fileKinds = ["Added", "Modified"];
let CheckOptions = {
onCheckFile: (filePath: string) => {

View file

@ -30,7 +30,7 @@ export async function IsIdHasChanged(filePath: string): Promise<ExitCode> {
let fileKinds = ["Modified"];
let fileTypeSuffixes = ["yaml", "yml", "json"];
let filePathFolderPrefixes = ["Detections"];
let filePathFolderPrefixes = ["Detections","Solutions"];
let CheckOptions = {
onCheckFile: (filePath: string) => {
return IsIdHasChanged(filePath);

View file

@ -1,69 +0,0 @@
{
"Name": "AzureDevOpsAuditing",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "Data",
"Type": "Dynamic"
},
{
"Name": "Area",
"Type": "String"
},
{
"Name": "OperationName",
"Type": "String"
},
{
"Name": "Details",
"Type": "String"
},
{
"Name": "GroupName",
"Type": "String"
},
{
"Name": "ActorUPN",
"Type": "String"
},
{
"Name": "ActorDisplayName",
"Type": "String"
},
{
"Name": "EntityName",
"Type": "String"
},
{
"Name": "AuthenticationMechanism",
"Type": "String"
},
{
"Name": "IpAddress",
"Type": "String"
},
{
"Name": "UserAgent",
"Type": "String"
},
{
"Name": "AuthenticationMechanism",
"Type": "String"
},
{
"Name": "ScopeDisplayName",
"Type": "String"
},
{
"Name": "ProjectName",
"Type": "String"
},
{
"Name": "ProjectId",
"Type": "String"
}
]
}

View file

@ -0,0 +1,293 @@
{
"Name": "BoxEvents_CL",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "additional_details_advancedFolderSettings_oldOwnerOnlyInvite_b",
"Type": "Boolean"
},
{
"Name": "additional_details_advancedFolderSettings_newOwnerOnlyInvite_b",
"Type": "Boolean"
},
{
"Name": "source_item_name_g",
"Type": "String"
},
{
"Name": "additional_details_metadata_type_s",
"Type": "String"
},
{
"Name": "additional_details_metadata_operationParams_s",
"Type": "String"
},
{
"Name": "additional_details_task_due_at_t",
"Type": "DateTime"
},
{
"Name": "action_by_type_s",
"Type": "String"
},
{
"Name": "action_by_id_s",
"Type": "String"
},
{
"Name": "action_by_name_s",
"Type": "String"
},
{
"Name": "action_by_login_s",
"Type": "String"
},
{
"Name": "additional_details_annotation_id_d",
"Type": "Double"
},
{
"Name": "additional_details_group_id_s",
"Type": "String"
},
{
"Name": "additional_details_group_name_s",
"Type": "String"
},
{
"Name": "source_user_email_s",
"Type": "String"
},
{
"Name": "additional_details_comment_id_d",
"Type": "Double"
},
{
"Name": "additional_details_message_s",
"Type": "String"
},
{
"Name": "additional_details_task_id_d",
"Type": "Double"
},
{
"Name": "additional_details_task_message_s",
"Type": "String"
},
{
"Name": "additional_details_task_created_by_id_d",
"Type": "Double"
},
{
"Name": "additional_details_task_created_by_login_s",
"Type": "String"
},
{
"Name": "additional_details_task_assignment_assigned_to_id_d",
"Type": "Double"
},
{
"Name": "additional_details_task_assignment_assigned_to_login_s",
"Type": "String"
},
{
"Name": "additional_details_task_assignment_status_s",
"Type": "String"
},
{
"Name": "additional_details_task_assignment_message_s",
"Type": "String"
},
{
"Name": "source_file_id_s",
"Type": "String"
},
{
"Name": "source_file_name_s",
"Type": "String"
},
{
"Name": "source_parent_name_g",
"Type": "String"
},
{
"Name": "source_item_type_s",
"Type": "String"
},
{
"Name": "source_item_id_s",
"Type": "String"
},
{
"Name": "source_item_name_s",
"Type": "String"
},
{
"Name": "source_parent_type_s",
"Type": "String"
},
{
"Name": "source_parent_name_s",
"Type": "String"
},
{
"Name": "source_parent_id_s",
"Type": "String"
},
{
"Name": "source_owned_by_type_s",
"Type": "String"
},
{
"Name": "source_owned_by_id_s",
"Type": "String"
},
{
"Name": "source_owned_by_name_s",
"Type": "String"
},
{
"Name": "source_owned_by_login_s",
"Type": "String"
},
{
"Name": "created_by_type_s",
"Type": "String"
},
{
"Name": "created_by_id_s",
"Type": "String"
},
{
"Name": "created_by_name_s",
"Type": "String"
},
{
"Name": "created_by_login_s",
"Type": "String"
},
{
"Name": "created_at_t",
"Type": "DateTime"
},
{
"Name": "event_id_g",
"Type": "String"
},
{
"Name": "event_type_s",
"Type": "String"
},
{
"Name": "ip_address_s",
"Type": "String"
},
{
"Name": "type_s",
"Type": "String"
},
{
"Name": "additional_details_size_d",
"Type": "Double"
},
{
"Name": "additional_details_ekm_id_g",
"Type": "String"
},
{
"Name": "additional_details_version_id_s",
"Type": "String"
},
{
"Name": "additional_details_service_id_s",
"Type": "String"
},
{
"Name": "additional_details_service_name_s",
"Type": "String"
},
{
"Name": "source_type_s",
"Type": "String"
},
{
"Name": "source_id_s",
"Type": "String"
},
{
"Name": "source_name_s",
"Type": "String"
},
{
"Name": "source_login_s",
"Type": "String"
},
{
"Name": "additional_details_access_token_identifier_s",
"Type": "String"
},
{
"Name": "additional_details_shared_link_id_s",
"Type": "String"
},
{
"Name": "source_folder_id_s",
"Type": "String"
},
{
"Name": "source_folder_name_s",
"Type": "String"
},
{
"Name": "source_user_id_s",
"Type": "String"
},
{
"Name": "source_user_name_s",
"Type": "String"
},
{
"Name": "accessible_by_type_s",
"Type": "String"
},
{
"Name": "accessible_by_id_s",
"Type": "String"
},
{
"Name": "accessible_by_name_s",
"Type": "String"
},
{
"Name": "accessible_by_login_s",
"Type": "String"
},
{
"Name": "additional_details_type_s",
"Type": "String"
},
{
"Name": "additional_details_collab_id_s",
"Type": "String"
},
{
"Name": "additional_details_role_s",
"Type": "String"
},
{
"Name": "additional_details_is_performed_by_admin_b",
"Type": "Boolean"
},
{
"Name": "Type",
"Type": "String"
},
{
"Name": "_ResourceId",
"Type": "String"
}
]
}

View file

@ -0,0 +1,229 @@
{
"Name": "CiscoISEEvent",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "EventVendor",
"Type": "String"
},
{
"Name": "EventProduct",
"Type": "String"
},
{
"Name": "EventId",
"Type": "String"
},
{
"Name": "EventSeverity",
"Type": "String"
},
{
"Name": "EventCategory",
"Type": "String"
},
{
"Name": "EventMessage",
"Type": "String"
},
{
"Name": "ConfigVersionId",
"Type": "String"
},
{
"Name": "DvcIpAddr",
"Type": "String"
},
{
"Name": "DvcHostname",
"Type": "String"
},
{
"Name": "DstIpAddr",
"Type": "String"
},
{
"Name": "DstPortNumber",
"Type": "String"
},
{
"Name": "DstUserName",
"Type": "String"
},
{
"Name": "NetworkProtocol",
"Type": "String"
},
{
"Name": "RequestLatency",
"Type": "String"
},
{
"Name": "NasIpAddress",
"Type": "String"
},
{
"Name": "NasPort",
"Type": "String"
},
{
"Name": "NasPortType",
"Type": "String"
},
{
"Name": "NasIdentifier",
"Type": "String"
},
{
"Name": "ServiceType",
"Type": "String"
},
{
"Name": "FramedMtu",
"Type": "String"
},
{
"Name": "CalledStationId",
"Type": "String"
},
{
"Name": "CallingStationId",
"Type": "String"
},
{
"Name": "EventType",
"Type": "String"
},
{
"Name": "DvcAction",
"Type": "DateTime"
},
{
"Name": "PrivilegeLevel",
"Type": "String"
},
{
"Name": "SrcIpAddr",
"Type": "String"
},
{
"Name": "NetworkDeviceProfileId",
"Type": "String"
},
{
"Name": "AcsSessionId",
"Type": "String"
},
{
"Name": "AcctSessionId",
"Type": "String"
},
{
"Name": "AuthenType",
"Type": "String"
},
{
"Name": "AuthenticationIdentityStore",
"Type": "String"
},
{
"Name": "AuthenticationMethod",
"Type": "String"
},
{
"Name": "SelectedAccessService",
"Type": "String"
},
{
"Name": "SelectedShellProfile",
"Type": "String"
},
{
"Name": "IdentityGroup",
"Type": "String"
},
{
"Name": "Service",
"Type": "String"
},
{
"Name": "ServiceArgument",
"Type": "String"
},
{
"Name": "CmdSet",
"Type": "String"
},
{
"Name": "MatchedCommandSet",
"Type": "String"
},
{
"Name": "AuthenMethod",
"Type": "String"
},
{
"Name": "SelectedCommandSet",
"Type": "String"
},
{
"Name": "NetworkDeviceProfileName",
"Type": "String"
},
{
"Name": "PostureStatus",
"Type": "String"
},
{
"Name": "SelectedAuthorizationProfiles",
"Type": "String"
},
{
"Name": "AuthorizationPolicyMatchedRule",
"Type": "String"
},
{
"Name": "DvcMacAddr",
"Type": "String"
},
{
"Name": "DevicePublicMac",
"Type": "String"
},
{
"Name": "DevicePlatform",
"Type": "String"
},
{
"Name": "DevicePlatformVersion",
"Type": "String"
},
{
"Name": "DeviceType",
"Type": "String"
},
{
"Name": "HttpUserAgentOriginal",
"Type": "String"
},
{
"Name": "EventResult",
"Type": "String"
},
{
"Name": "RadiusPacketType",
"Type": "String"
},
{
"Name": "EventTypeDetailed",
"Type": "String"
},
{
"Name": "EventResultDetails",
"Type": "String"
}
]
}

View file

@ -0,0 +1,165 @@
{
"Name": "Cisco_Umbrella",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "EventType",
"Type": "String"
},
{
"Name": "NetworkSessionId",
"Type": "String"
},
{
"Name": "Identities",
"Type": "String"
},
{
"Name": "NetworkRuleName",
"Type": "String"
},
{
"Name": "IdentityType",
"Type": "String"
},
{
"Name": "NetworkDirection",
"Type": "String"
},
{
"Name": "NetworkProtocol",
"Type": "String"
},
{
"Name": "NetworkPackets",
"Type": "String"
},
{
"Name": "SourceIP",
"Type": "String"
},
{
"Name": "SrcPortNumber",
"Type": "String"
},
{
"Name": "DstIpAddr",
"Type": "String"
},
{
"Name": "DvcHostname",
"Type": "String"
},
{
"Name": "NetworkRuleNumber",
"Type": "String"
},
{
"Name": "DvcAction",
"Type": "String"
},
{
"Name": "DnsQueryName",
"Type": "String"
},
{
"Name": "ThreatCategory",
"Type": "String"
},
{
"Name": "DnsQueryTypeName",
"Type": "String"
},
{
"Name": "DnsResponseCodeName",
"Type": "String"
},
{
"Name": "IdentityTypes",
"Type": "String"
},
{
"Name": "PolicyIdentity",
"Type": "String"
},
{
"Name": "PolicyIdentityType",
"Type": "String"
},
{
"Name": "SrcIpAddr",
"Type": "String"
},
{
"Name": "DstPortNumber",
"Type": "String"
},
{
"Name": "EventEndTime",
"Type": "DateTime"
},
{
"Name": "SrcNatIpAddr",
"Type": "String"
},
{
"Name": "HttpContentType",
"Type": "String"
},
{
"Name": "HttpReferrerOriginal",
"Type": "String"
},
{
"Name": "HttpUserAgentOriginal",
"Type": "String"
},
{
"Name": "HttpStatusCode",
"Type": "String"
},
{
"Name": "SrcBytes",
"Type": "Double"
},
{
"Name": "DstBytes",
"Type": "Double"
},
{
"Name": "HttpResponseBodyBytes",
"Type": "Double"
},
{
"Name": "HashSha256",
"Type": "String"
},
{
"Name": "AvDetections",
"Type": "String"
},
{
"Name": "AmpDisposition",
"Type": "String"
},
{
"Name": "ThreatName",
"Type": "String"
},
{
"Name": "AmpScore",
"Type": "String"
},
{
"Name": "UrlOriginal",
"Type": "String"
},
{
"Name": "UrlCategory",
"Type": "String"
}
]
}

View file

@ -0,0 +1,638 @@
{
"Name": "CrowdStrikeFalconEventStream",
"Properties":
[
{
"Name": "TenantId",
"Type": "String"
},
{
"Name": "SourceSystem",
"Type": "String"
},
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "ReceiptTime",
"Type": "DateTime"
},
{
"Name": "DeviceVendor",
"Type": "String"
},
{
"Name": "DeviceProduct",
"Type": "String"
},
{
"Name": "DeviceEventClassID",
"Type": "String"
},
{
"Name": "LogSeverity",
"Type": "String"
},
{
"Name": "OriginalLogSeverity",
"Type": "String"
},
{
"Name": "DeviceAction",
"Type": "String"
},
{
"Name": "SimplifiedDeviceAction",
"Type": "String"
},
{
"Name": "Computer",
"Type": "String"
},
{
"Name": "CommunicationDirection",
"Type": "String"
},
{
"Name": "DeviceFacility",
"Type": "String"
},
{
"Name": "DestinationPort",
"Type": "Int"
},
{
"Name": "DestinationIP",
"Type": "String"
},
{
"Name": "DeviceAddress",
"Type": "String"
},
{
"Name": "DeviceName",
"Type": "String"
},
{
"Name": "Message",
"Type": "String"
},
{
"Name": "Protocol",
"Type": "String"
},
{
"Name": "SourcePort",
"Type": "Int"
},
{
"Name": "SrcIpAddr",
"Type": "String"
},
{
"Name": "RemoteIP",
"Type": "String"
},
{
"Name": "RemotePort",
"Type": "String"
},
{
"Name": "MaliciousIP",
"Type": "String"
},
{
"Name": "ThreatSeverity",
"Type": "Int"
},
{
"Name": "IndicatorThreatType",
"Type": "String"
},
{
"Name": "ThreatDescription",
"Type": "String"
},
{
"Name": "ThreatConfidence",
"Type": "String"
},
{
"Name": "ReportReferenceLink",
"Type": "String"
},
{
"Name": "MaliciousIPLongitude",
"Type": "real"
},
{
"Name": "MaliciousIPLatitude",
"Type": "real"
},
{
"Name": "MaliciousIPCountry",
"Type": "String"
},
{
"Name": "DeviceVersion",
"Type": "String"
},
{
"Name": "Activity",
"Type": "String"
},
{
"Name": "ApplicationProtocol",
"Type": "String"
},
{
"Name": "EventCount",
"Type": "Int"
},
{
"Name": "DestinationDnsDomain",
"Type": "String"
},
{
"Name": "DestinationServiceName",
"Type": "String"
},
{
"Name": "DstIpAddr",
"Type": "String"
},
{
"Name": "DestinationTranslatedPort",
"Type": "Int"
},
{
"Name": "DeviceDnsDomain",
"Type": "String"
},
{
"Name": "DeviceExternalID",
"Type": "String"
},
{
"Name": "DeviceInboundInterface",
"Type": "String"
},
{
"Name": "DeviceNtDomain",
"Type": "String"
},
{
"Name": "DeviceOutboundInterface",
"Type": "String"
},
{
"Name": "DevicePayloadId",
"Type": "String"
},
{
"Name": "ProcessName",
"Type": "String"
},
{
"Name": "DeviceTranslatedAddress",
"Type": "String"
},
{
"Name": "DstHostName",
"Type": "String"
},
{
"Name": "DestinationMACAddress",
"Type": "String"
},
{
"Name": "DstNtDomain",
"Type": "String"
},
{
"Name": "DestinationProcessId",
"Type": "Int"
},
{
"Name": "DestinationUserPrivileges",
"Type": "String"
},
{
"Name": "DestinationProcessName",
"Type": "String"
},
{
"Name": "DeviceTimeZone",
"Type": "String"
},
{
"Name": "DestinationUserID",
"Type": "String"
},
{
"Name": "DstUserName",
"Type": "String"
},
{
"Name": "DeviceMacAddress",
"Type": "String"
},
{
"Name": "ProcessID",
"Type": "Int"
},
{
"Name": "ExternalID",
"Type": "Int"
},
{
"Name": "FileCreateTime",
"Type": "String"
},
{
"Name": "FileHash",
"Type": "String"
},
{
"Name": "FileID",
"Type": "String"
},
{
"Name": "FileModificationTime",
"Type": "String"
},
{
"Name": "FilePath",
"Type": "String"
},
{
"Name": "FilePermission",
"Type": "String"
},
{
"Name": "FileType",
"Type": "String"
},
{
"Name": "FileName",
"Type": "String"
},
{
"Name": "FileSize",
"Type": "Int"
},
{
"Name": "ReceivedBytes",
"Type": "long"
},
{
"Name": "OldFileCreateTime",
"Type": "String"
},
{
"Name": "OldFileHash",
"Type": "String"
},
{
"Name": "OldFileID",
"Type": "String"
},
{
"Name": "OldFileModificationTime",
"Type": "String"
},
{
"Name": "OldFileName",
"Type": "String"
},
{
"Name": "OldFilePath",
"Type": "String"
},
{
"Name": "OldFilePermission",
"Type": "String"
},
{
"Name": "OldFileSize",
"Type": "Int"
},
{
"Name": "OldFileType",
"Type": "String"
},
{
"Name": "SentBytes",
"Type": "long"
},
{
"Name": "RequestURL",
"Type": "String"
},
{
"Name": "RequestClientApplication",
"Type": "String"
},
{
"Name": "RequestContext",
"Type": "String"
},
{
"Name": "RequestCookies",
"Type": "String"
},
{
"Name": "RequestMethod",
"Type": "String"
},
{
"Name": "SourceHostName",
"Type": "String"
},
{
"Name": "SrcMacAddr",
"Type": "String"
},
{
"Name": "SourceNTDomain",
"Type": "String"
},
{
"Name": "SourceDnsDomain",
"Type": "String"
},
{
"Name": "SourceServiceName",
"Type": "String"
},
{
"Name": "SourceTranslatedAddress",
"Type": "String"
},
{
"Name": "SourceTranslatedPort",
"Type": "Int"
},
{
"Name": "SourceProcessId",
"Type": "Int"
},
{
"Name": "SourceUserPrivileges",
"Type": "String"
},
{
"Name": "SourceProcessName",
"Type": "String"
},
{
"Name": "SourceUserID",
"Type": "String"
},
{
"Name": "SourceUserName",
"Type": "String"
},
{
"Name": "EventType",
"Type": "String"
},
{
"Name": "DeviceCustomIPv6Address1",
"Type": "String"
},
{
"Name": "DeviceCustomIPv6Address1Label",
"Type": "String"
},
{
"Name": "DeviceCustomIPv6Address2",
"Type": "String"
},
{
"Name": "DeviceCustomIPv6Address2Label",
"Type": "String"
},
{
"Name": "DeviceCustomIPv6Address3",
"Type": "String"
},
{
"Name": "DeviceCustomIPv6Address3Label",
"Type": "String"
},
{
"Name": "DeviceCustomIPv6Address4",
"Type": "String"
},
{
"Name": "DeviceCustomIPv6Address4Label",
"Type": "String"
},
{
"Name": "DeviceCustomFloatingPoint1",
"Type": "real"
},
{
"Name": "DeviceCustomFloatingPoint1Label",
"Type": "String"
},
{
"Name": "DeviceCustomFloatingPoint2",
"Type": "real"
},
{
"Name": "DeviceCustomFloatingPoint2Label",
"Type": "String"
},
{
"Name": "DeviceCustomFloatingPoint3",
"Type": "real"
},
{
"Name": "DeviceCustomFloatingPoint3Label",
"Type": "String"
},
{
"Name": "DeviceCustomFloatingPoint4",
"Type": "real"
},
{
"Name": "DeviceCustomFloatingPoint4Label",
"Type": "String"
},
{
"Name": "FlexDate1",
"Type": "String"
},
{
"Name": "FlexDate1Label",
"Type": "String"
},
{
"Name": "FlexNumber1",
"Type": "Int"
},
{
"Name": "FlexNumber1Label",
"Type": "String"
},
{
"Name": "FlexNumber2",
"Type": "Int"
},
{
"Name": "FlexNumber2Label",
"Type": "String"
},
{
"Name": "FlexString1",
"Type": "String"
},
{
"Name": "FlexString1Label",
"Type": "String"
},
{
"Name": "FlexString2",
"Type": "String"
},
{
"Name": "FlexString2Label",
"Type": "String"
},
{
"Name": "AdditionalExtensions",
"Type": "String"
},
{
"Name": "StartTime",
"Type": "DateTime"
},
{
"Name": "EndTime",
"Type": "DateTime"
},
{
"Name": "Type",
"Type": "String"
},
{
"Name": "_ResourceId",
"Type": "String"
},
{
"Name": "Outcome",
"Type": "String"
},
{
"Name": "Technique",
"Type": "String"
},
{
"Name": "PatternDisposition",
"Type": "String"
},
{
"Name": "SessionStartTime",
"Type": "DateTime"
},
{
"Name": "SessionEndTime",
"Type": "DateTime"
},
{
"Name": "ParentProcessId",
"Type": "Int"
},
{
"Name": "ChildProcessId",
"Type": "Int"
},
{
"Name": "Offset",
"Type": "Int"
},
{
"Name": "EventTimestamp",
"Type": "DateTime"
},
{
"Name": "ExeWrittenTime",
"Type": "DateTime"
},
{
"Name": "DnsRequestTime",
"Type": "DateTime"
},
{
"Name": "NetworkAccessTime",
"Type": "DateTime"
},
{
"Name": "DocAccessTime",
"Type": "DateTime"
},
{
"Name": "HashSpreadingEventTime",
"Type": "DateTime"
},
{
"Name": "HashSpreadingSensorTime",
"Type": "DateTime"
},
{
"Name": "ScanResultName",
"Type": "String"
},
{
"Name": "WrittenExeFileName",
"Type": "String"
},
{
"Name": "QuarantineFileSHA256",
"Type": "String"
},
{
"Name": "ScanResultEngine",
"Type": "String"
},
{
"Name": "AccessedDocFileName",
"Type": "String"
},
{
"Name": "WrittenExeFilePath",
"Type": "String"
},
{
"Name": "AccessedDocFilePath",
"Type": "String"
},
{
"Name": "QuarantineFilePath",
"Type": "String"
},
{
"Name": "ScanResultVersion",
"Type": "String"
},
{
"Name": "CommandLine",
"Type": "String"
},
{
"Name": "FalconHostLink",
"Type": "String"
},
{
"Name": "SensorId",
"Type": "String"
},
{
"Name": "Severity",
"Type": "String"
}
]
}

View file

@ -0,0 +1,73 @@
{
"Name":"Jira_Audit_CL",
"Properties":[
{
"Name":"EventVendor",
"Type":"String"
},
{
"Name":"EventProduct",
"Type":"String"
},
{
"Name":"id_d",
"Type":"Double"
},
{
"Name":"summary_s",
"Type":"String"
},
{
"Name":"remoteAddress_s",
"Type":"String"
},
{
"Name":"authorKey_s",
"Type":"String"
},
{
"Name":"authorAccountId_s",
"Type":"String"
},
{
"Name":"created_t",
"Type":"DateTime"
},
{
"Name":"eventSource_s",
"Type":"String"
},
{
"Name":"objectItem_id_s",
"Type":"String"
},
{
"Name":"objectItem_name_s",
"Type":"String"
},
{
"Name":"objectItem_typeName_s",
"Type":"String"
},
{
"Name":"changedValues_s",
"Type":"String"
},
{
"Name":"associatedItems_s",
"Type":"String"
},
{
"Name":"objectItem_parentId_s",
"Type":"String"
},
{
"Name":"objectItem_parentName_s",
"Type":"String"
},
{
"Name":"Category",
"Type":"String"
}
]
}

View file

@ -0,0 +1,13 @@
{
"Name": "NGINX_CL",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "RawData",
"Type": "String"
}
]
}

View file

@ -0,0 +1,565 @@
{
"Name": "ProofpointPOD",
"Properties": [
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "EventVendor",
"Type": "String"
},
{
"Name": "EventProduct",
"Type": "String"
},
{
"Name": "FilterModulesUrldefenseCountsNoRewriteIsLargeMsgPartSize",
"Type": "Double"
},
{
"Name": "SrcDvcHostname",
"Type": "String"
},
{
"Name": "PpsVersion",
"Type": "String"
},
{
"Name": "PpsCid",
"Type": "String"
},
{
"Name": "MsgParts",
"Type": "String"
},
{
"Name": "MetadataOriginDataAgent",
"Type": "String"
},
{
"Name": "MetadataOriginDataVersion",
"Type": "String"
},
{
"Name": "MetadataOriginDataCid",
"Type": "String"
},
{
"Name": "EventOriginalTime",
"Type": "DateTime"
},
{
"Name": "TlsCipher",
"Type": "String"
},
{
"Name": "TlsCipherBits",
"Type": "Double"
},
{
"Name": "TlsVersion",
"Type": "String"
},
{
"Name": "SrcIpAddr",
"Type": "String"
},
{
"Name": "NetworkSessionId",
"Type": "String"
},
{
"Name": "SrcDvcHostname",
"Type": "String"
},
{
"Name": "SrcGeoCountry",
"Type": "String"
},
{
"Name": "ConnectionHelo",
"Type": "String"
},
{
"Name": "NetworkProtocol",
"Type": "String"
},
{
"Name": "NetworkConnectionState",
"Type": "String"
},
{
"Name": "NetworkBytes",
"Type": "Double"
},
{
"Name": "MsgLang",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderSubject",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderMessageId",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderTo",
"Type": "String"
},
{
"Name": "MsgNormalizedHeadertoHashed",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderFrom",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderFromHashed",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderFrom",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderFromHashed",
"Type": "String"
},
{
"Name": "MsgHeaderMessageId",
"Type": "String"
},
{
"Name": "MsgHeaderSubject",
"Type": "String"
},
{
"Name": "MsgHeaderTo",
"Type": "String"
},
{
"Name": "MsgHeaderToHashed",
"Type": "String"
},
{
"Name": "MsgParsedAddressesFromHashed",
"Type": "String"
},
{
"Name": "MsgParsedAddressesFrom",
"Type": "String"
},
{
"Name": "MsgParsedAddressesToHashed",
"Type": "String"
},
{
"Name": "MsgParsedAddressesTo",
"Type": "String"
},
{
"Name": "NetworkDuration",
"Type": "Double"
},
{
"Name": "FilterActions",
"Type": "String"
},
{
"Name": "FilterRoutes",
"Type": "String"
},
{
"Name": "FilterQid",
"Type": "String"
},
{
"Name": "EventStartTime",
"Type": "DateTime"
},
{
"Name": "NetworkDirection",
"Type": "String"
},
{
"Name": "FilterQuarantineRule",
"Type": "String"
},
{
"Name": "FilterQuarantineFolder",
"Type": "String"
},
{
"Name": "FilterModulesDmarcSrvid",
"Type": "String"
},
{
"Name": "FilterModulesDmarcRecords",
"Type": "String"
},
{
"Name": "FilterModulesDmarcFilterdResult",
"Type": "String"
},
{
"Name": "FilterModulesDmarcAuthResults",
"Type": "String"
},
{
"Name": "FilterModulesSpfDomain",
"Type": "String"
},
{
"Name": "FilterModulesSpfResult",
"Type": "String"
},
{
"Name": "FilterModulesSpamTriggeredClassifier",
"Type": "String"
},
{
"Name": "FilterModulesSpamSafeBlockedListMatches",
"Type": "String"
},
{
"Name": "FilterModulesSpamVersionEngine",
"Type": "String"
},
{
"Name": "FilterModulesSpamVersionDefinitions",
"Type": "String"
},
{
"Name": "FilterModulesSpamScoresOverall",
"Type": "Double"
},
{
"Name": "FilterModulesSpamScoresEngine",
"Type": "Double"
},
{
"Name": "FilterModulesSpamScoresClassifiers",
"Type": "String"
},
{
"Name": "FilterModulesSpamLangs",
"Type": "String"
},
{
"Name": "FilterModulesUrldefenseVersionEngine",
"Type": "String"
},
{
"Name": "FilterModulesUrldefenseCountsUnique",
"Type": "Double"
},
{
"Name": "FilterModulesUrldefenseCountsRewritten",
"Type": "Double"
},
{
"Name": "FilterModulesUrldefenseCountsTotal",
"Type": "Double"
},
{
"Name": "FilterModulesDkimv",
"Type": "String"
},
{
"Name": "FilterModulesPdrV2Response",
"Type": "String"
},
{
"Name": "FilterModulesZerohourScore",
"Type": "String"
},
{
"Name": "FilterDisposition",
"Type": "String"
},
{
"Name": "FilterSuborgsRcpts",
"Type": "String"
},
{
"Name": "FilterSuborgsSender",
"Type": "String"
},
{
"Name": "FilterMsgSizeBytes",
"Type": "Double"
},
{
"Name": "FilterVerifiedRcptsHashed",
"Type": "String"
},
{
"Name": "FilterVerifiedRcpts",
"Type": "String"
},
{
"Name": "Guid",
"Type": "String"
},
{
"Name": "EnvelopeRcptsHashed",
"Type": "String"
},
{
"Name": "DstUserUpn",
"Type": "String"
},
{
"Name": "EnvelopeFromHashed",
"Type": "String"
},
{
"Name": "SrcUserUpn",
"Type": "String"
},
{
"Name": "EventType",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderReturnPathHashed",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderXMailer",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderReturnPath",
"Type": "String"
},
{
"Name": "MsgHeaderXMailer",
"Type": "String"
},
{
"Name": "MsgHeaderReturnPathHashed",
"Type": "String"
},
{
"Name": "MsgHeaderReturnPath",
"Type": "String"
},
{
"Name": "FilterModulesSpamCharsets",
"Type": "String"
},
{
"Name": "FilterModulesDmarcAlignment",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderXOriginatingIp",
"Type": "String"
},
{
"Name": "MsgHeaderXOriginatingIp",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderReplyTo",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderReplyToHashed",
"Type": "String"
},
{
"Name": "MsgHeaderReplyToHashed",
"Type": "String"
},
{
"Name": "MsgHeaderReplyTo",
"Type": "String"
},
{
"Name": "FilterModulesUrldefenseCountsNoRewriteIsEmail",
"Type": "Double"
},
{
"Name": "FilterModulesPdrV2Rscore",
"Type": "Double"
},
{
"Name": "FilterOrigGuid",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderCc",
"Type": "String"
},
{
"Name": "MsgNormalizedHeaderCcHashed",
"Type": "String"
},
{
"Name": "MsgParsedAddressesCcHashed",
"Type": "String"
},
{
"Name": "MsgParsedAddressesCc",
"Type": "String"
},
{
"Name": "MsgHeaderCcHashed",
"Type": "String"
},
{
"Name": "MsgHeaderCc",
"Type": "String"
},
{
"Name": "FilterModulesAvVirusNames",
"Type": "String"
},
{
"Name": "FilterThrottleIp",
"Type": "String"
},
{
"Name": "FilterModulesUrldefenseCountsNoRewriteIsUnsupportedScheme",
"Type": "Double"
},
{
"Name": "FilterModulesUrldefenseCountsNoRewriteIsSchemeless",
"Type": "Double"
},
{
"Name": "FilterModulesUrldefenseCountsNoRewriteIsMaxLengthExceeded",
"Type": "Double"
},
{
"Name": "FilterModulesUrldefenseCountsNoRewriteIsExcludedDomain",
"Type": "Double"
},
{
"Name": "EventVendor",
"Type": "String"
},
{
"Name": "EventProduct",
"Type": "String"
},
{
"Name": "SmMsgid",
"Type": "String"
},
{
"Name": "PpsCid",
"Type": "String"
},
{
"Name": "PpsAgent",
"Type": "String"
},
{
"Name": "Id",
"Type": "String"
},
{
"Name": "EventUid",
"Type": "String"
},
{
"Name": "SmNrcpts",
"Type": "String"
},
{
"Name": "NetworkBytes",
"Type": "String"
},
{
"Name": "SmAuth",
"Type": "String"
},
{
"Name": "TlsEstablished",
"Type": "String"
},
{
"Name": "SrcNatIpAddr",
"Type": "String"
},
{
"Name": "ProcessName",
"Type": "String"
},
{
"Name": "NetworkProtocol",
"Type": "String"
},
{
"Name": "SrcUserUpn",
"Type": "String"
},
{
"Name": "SmClass",
"Type": "String"
},
{
"Name": "SmQid",
"Type": "String"
},
{
"Name": "EventOriginalTime",
"Type": "DateTime"
},
{
"Name": "EventOriginalMessage",
"Type": "String"
},
{
"Name": "MetadataOriginDataAgent",
"Type": "String"
},
{
"Name": "MetadataOriginDataCid",
"Type": "String"
},
{
"Name": "EventType",
"Type": "String"
},
{
"Name": "SmMailer",
"Type": "String"
},
{
"Name": "NetworkConnectionStateDetailed",
"Type": "String"
},
{
"Name": "SmDsn",
"Type": "String"
},
{
"Name": "DstUserUpn",
"Type": "String"
},
{
"Name": "NetworkDuration",
"Type": "String"
},
{
"Name": "SmPri",
"Type": "String"
},
{
"Name": "SmXdelay",
"Type": "String"
},
{
"Name": "SmCtladdr",
"Type": "String"
}
]
}

View file

@ -0,0 +1,181 @@
{
"Name": "StorageBlobLogs",
"Properties": [
{
"Name": "TenantId",
"Type": "string"
},
{
"Name": "SourceSystem",
"Type": "string"
},
{
"Name": "TimeGenerated",
"Type": "datetime"
},
{
"Name": "AccountName",
"Type": "string"
},
{
"Name": "Location",
"Type": "string"
},
{
"Name": "Protocol",
"Type": "string"
},
{
"Name": "OperationName",
"Type": "string"
},
{
"Name": "AuthenticationType",
"Type": "string"
},
{
"Name": "StatusCode",
"Type": "string"
},
{
"Name": "StatusText",
"Type": "string"
},
{
"Name": "DurationMs",
"Type": "real"
},
{
"Name": "ServerLatencyMs",
"Type": "real"
},
{
"Name": "Uri",
"Type": "string"
},
{
"Name": "CallerIpAddress",
"Type": "string"
},
{
"Name": "CorrelationId",
"Type": "string"
},
{
"Name": "SchemaVersion",
"Type": "string"
},
{
"Name": "OperationVersion",
"Type": "string"
},
{
"Name": "AuthenticationHash",
"Type": "string"
},
{
"Name": "RequesterObjectId",
"Type": "string"
},
{
"Name": "RequesterTenantId",
"Type": "string"
},
{
"Name": "RequesterAppId",
"Type": "string"
},
{
"Name": "RequesterAudience",
"Type": "string"
},
{
"Name": "RequesterTokenIssuer",
"Type": "string"
},
{
"Name": "RequesterUpn",
"Type": "string"
},
{
"Name": "AuthorizationDetails",
"Type": "dynamic"
},
{
"Name": "UserAgentHeader",
"Type": "string"
},
{
"Name": "ReferrerHeader",
"Type": "string"
},
{
"Name": "ClientRequestId",
"Type": "string"
},
{
"Name": "Etag",
"Type": "string"
},
{
"Name": "ServiceType",
"Type": "string"
},
{
"Name": "OperationCount",
"Type": "int"
},
{
"Name": "RequestHeaderSize",
"Type": "long"
},
{
"Name": "RequestBodySize",
"Type": "long"
},
{
"Name": "ResponseHeaderSize",
"Type": "long"
},
{
"Name": "ResponseBodySize",
"Type": "long"
},
{
"Name": "RequestMd5",
"Type": "string"
},
{
"Name": "ResponseMd5",
"Type": "string"
},
{
"Name": "LastModifiedTime",
"Type": "datetime"
},
{
"Name": "ConditionsUsed",
"Type": "string"
},
{
"Name": "ContentLengthHeader",
"Type": "long"
},
{
"Name": "Category",
"Type": "string"
},
{
"Name": "TlsVersion",
"Type": "string"
},
{
"Name": "Type",
"Type": "string"
},
{
"Name": "_ResourceId",
"Type": "string"
}
]
}

View file

@ -0,0 +1,229 @@
{
"Name": "StorageFileLogs",
"Properties": [
{
"Name": "TenantId",
"Type": "string"
},
{
"Name": "TimeGenerated",
"Type": "datetime"
},
{
"Name": "AccountName",
"Type": "string"
},
{
"Name": "Location",
"Type": "string"
},
{
"Name": "Protocol",
"Type": "string"
},
{
"Name": "OperationName",
"Type": "string"
},
{
"Name": "AuthenticationType",
"Type": "string"
},
{
"Name": "StatusCode",
"Type": "string"
},
{
"Name": "StatusText",
"Type": "string"
},
{
"Name": "DurationMs",
"Type": "real"
},
{
"Name": "ServerLatencyMs",
"Type": "real"
},
{
"Name": "Uri",
"Type": "string"
},
{
"Name": "CallerIpAddress",
"Type": "string"
},
{
"Name": "CorrelationId",
"Type": "string"
},
{
"Name": "SchemaVersion",
"Type": "string"
},
{
"Name": "OperationVersion",
"Type": "string"
},
{
"Name": "AuthenticationHash",
"Type": "string"
},
{
"Name": "RequesterObjectId",
"Type": "string"
},
{
"Name": "RequesterTenantId",
"Type": "string"
},
{
"Name": "RequesterAppId",
"Type": "string"
},
{
"Name": "RequesterAudience",
"Type": "string"
},
{
"Name": "RequesterTokenIssuer",
"Type": "string"
},
{
"Name": "RequesterUpn",
"Type": "string"
},
{
"Name": "RequesterUserName",
"Type": "string"
},
{
"Name": "AuthorizationDetails",
"Type": "dynamic"
},
{
"Name": "SmbPrimarySID",
"Type": "string"
},
{
"Name": "UserAgentHeader",
"Type": "string"
},
{
"Name": "ReferrerHeader",
"Type": "string"
},
{
"Name": "ClientRequestId",
"Type": "string"
},
{
"Name": "Etag",
"Type": "string"
},
{
"Name": "ServiceType",
"Type": "string"
},
{
"Name": "OperationCount",
"Type": "int"
},
{
"Name": "RequestHeaderSize",
"Type": "long"
},
{
"Name": "RequestBodySize",
"Type": "long"
},
{
"Name": "ResponseHeaderSize",
"Type": "long"
},
{
"Name": "ResponseBodySize",
"Type": "long"
},
{
"Name": "RequestMd5",
"Type": "string"
},
{
"Name": "ResponseMd5",
"Type": "string"
},
{
"Name": "LastModifiedTime",
"Type": "datetime"
},
{
"Name": "ConditionsUsed",
"Type": "string"
},
{
"Name": "ContentLengthHeader",
"Type": "long"
},
{
"Name": "Category",
"Type": "string"
},
{
"Name": "TlsVersion",
"Type": "string"
},
{
"Name": "SmbTreeConnectID",
"Type": "string"
},
{
"Name": "SmbPersistentHandleID",
"Type": "string"
},
{
"Name": "SmbVolatileHandleID",
"Type": "string"
},
{
"Name": "SmbMessageID",
"Type": "string"
},
{
"Name": "SmbCreditsConsumed",
"Type": "int"
},
{
"Name": "SmbCommandDetail",
"Type": "string"
},
{
"Name": "SmbFileId",
"Type": "string"
},
{
"Name": "SmbSessionID",
"Type": "string"
},
{
"Name": "SmbCommandMajor",
"Type": "int"
},
{
"Name": "SmbCommandMinor",
"Type": "string"
},
{
"Name": "SourceSystem",
"Type": "string"
},
{
"Name": "Type",
"Type": "string"
},
{
"Name": "_ResourceId",
"Type": "string"
}
]
}

View file

@ -0,0 +1,61 @@
{
"Name": "http_proxy_oab_CL",
"Properties": [
{
"Name": "TenantId",
"Type": "String"
},
{
"Name": "SourceSystem",
"Type": "String"
},
{
"Name": "MG",
"Type": "String"
},
{
"Name": "ManagementGroupName",
"Type": "String"
},
{
"Name": "TimeGenerated",
"Type": "DateTime"
},
{
"Name": "Computer",
"Type": "String"
},
{
"Name": "RawData",
"Type": "String"
},
{
"Name": "_Fields_Timestamp_t",
"Type": "DateTime"
},
{
"Name": "RecordType_s",
"Type": "String"
},
{
"Name": "ContextId_g",
"Type": "String"
},
{
"Name": "OabGuid_g",
"Type": "String"
},
{
"Name": "Message",
"Type": "String"
},
{
"Name": "Type",
"Type": "String"
},
{
"Name": "_ResourceId",
"Type": "String"
}
]
}

View file

@ -4,6 +4,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using Xunit;
using YamlDotNet.Serialization;
@ -37,8 +38,31 @@ namespace Kqlvalidations.Tests
{
return;
}
var lines = Regex.Split(queryStr, @"\n\r?");
var validationRes = _queryValidator.ValidateSyntax(queryStr);
Assert.True(validationRes.IsValid, validationRes.IsValid ? string.Empty : $"Template Id:{id} is not valid Errors:{validationRes.Diagnostics.Select(d => d.ToString()).ToList().Aggregate((s1, s2) => s1 + "," + s2)}");
var firstErrorLocation = (Line: 0, Col: 0);
if (!validationRes.IsValid)
{
firstErrorLocation = GetLocationInQuery(queryStr, validationRes.Diagnostics.First(d => d.Severity == "Error").Start);
}
Assert.True(validationRes.IsValid, validationRes.IsValid ? string.Empty : $"Template Id:{id} is not valid in Line:{firstErrorLocation.Line} col:{firstErrorLocation.Col} Errors:{validationRes.Diagnostics.Select(d => d.ToString()).ToList().Aggregate((s1, s2) => s1 + "," + s2)}");
}
private (int Line, int Col) GetLocationInQuery(string queryStr, int pos)
{
var lines = Regex.Split(queryStr, "\n");
var curlineIndex = 0;
var curPos = 0;
while (lines.Length > curlineIndex && pos > curPos + lines[curlineIndex].Length + 1)
{
curPos += lines[curlineIndex].Length + 1;
curlineIndex++;
}
var col = (pos - curPos + 1);
return (curlineIndex + 1, col);
}
}

View file

@ -12,7 +12,7 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="YamlDotNet" Version="6.0.0" />
<PackageReference Include="Microsoft.Azure.Sentinel.KustoServices" Version="1.0.6" />
<PackageReference Include="Microsoft.Azure.Sentinel.KustoServices" Version="1.0.12" />
</ItemGroup>
</Project>

Binary file not shown.

Binary file not shown.

View file

@ -27,6 +27,11 @@ describe("dataConnectorValidator", () => {
await checkValid(".script/tests/dataConnectorValidatorTest/testFiles/missingIdAndConnectivityCriteria.json");
});
// Skipping json files if they exist under the Templates folder.
it("should skip .json file if exist under Templates folder", async () => {
await checkValid(".script/tests/dataConnectorValidatorTest/testFiles/Templates/Connector_REST_API_template.json");
});
it("should throw an exception when dataConnectorSchema.json is missing a required property", async () => {
await checkInvalid(".script/tests/dataConnectorValidatorTest/testFiles/missingPublisherProperty.json", "SchemaError");
});

View file

@ -0,0 +1,93 @@
{
"id": "ProviderNameApplianceName",
"title": "PROVIDER NAME APPLIANCE NAME",
"publisher": "PROVIDER NAME",
"descriptionMarkdown": "Use this template if you have a REST API connection to push data into Azure Sentinel Log Analytics.",
"graphQueries": [
{
"metricName": "Total data received",
"legend": "DATATYPE_NAME",
"baseQuery": "DATATYPE_NAME"
}
],
"sampleQueries": [
{
"description" : "One-line title for your sample query 1",
"query": "Kusto Query 1"
}
],
"dataTypes": [
{
"name": "DATATYPE_NAME",
"lastDataReceivedQuery": "DATATYPE_NAME\n | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"DATATYPE_NAME\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"availability": {
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"write": true,
"read": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
],
"customs": [
{
"name": "Include custom pre-requisites if the connectivity requires - else delete customs",
"description": "Description for any custom pre-requisite"
}
]
},
"instructionSteps": [
{
"title": "",
"description": "1. How to get access to the data connector\n 2. If you have documentation to connect on your side link to that\n 3. Else, provide step by step instructions to discover the connection in your product\n",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Workspace ID"
},
"type": "CopyableLabel"
},
{
"parameters": {
"fillWith": [
"PrimaryKey"
],
"label": "Primary Key"
},
"type": "CopyableLabel"
}
]
}
]
}

View file

@ -67,6 +67,33 @@ namespace Kqlvalidations.Tests
var isValid = connectorIds.Count() == 0;
Assert.True(isValid, isValid ? string.Empty : $"Template Id:'{id}' doesn't have valid connectorIds:'{string.Join(",", connectorIds)}'. If a new connector is used and already configured in the Portal, please add it's Id to the list in 'ValidConnectorIds.json' file.");
}
[Fact]
public void Validate_DetectionTemplates_AllFilesAreYamls()
{
string detectionPath = DetectionsYamlFilesTestData.GetDetectionPath();
var yamlFiles = Directory.GetFiles(detectionPath, "*.yaml", SearchOption.AllDirectories).ToList();
var AllFiles = Directory.GetFiles(detectionPath,"*", SearchOption.AllDirectories).ToList();
var numberOfNotYamlFiles = 1; //This is the readme.md file in the directory
Assert.True(AllFiles.Count == yamlFiles.Count + numberOfNotYamlFiles, "All the files in detections folder are supposed to end with .yaml");
}
[Fact]
public void Validate_DetectionTemplates_NoSameTemplateIdTwice()
{
string detectionPath = DetectionsYamlFilesTestData.GetDetectionPath();
var yamlFiles = Directory.GetFiles(detectionPath, "*.yaml", SearchOption.AllDirectories);
var templatesAsStrings = yamlFiles.Select(yaml => GetYamlFileAsString(Path.GetFileName(yaml)));
var templatesAsObjects = templatesAsStrings.Select(yaml => JObject.Parse(ConvertYamlToJson(yaml)));
var duplicationsById = templatesAsObjects.GroupBy(a => a["id"]).Where(group => group.Count() > 1); //Finds duplications -> ids that there are more than 1 template from
var duplicatedId = "";
if (duplicationsById.Count() > 0){
duplicatedId = duplicationsById.Last().Select(x => x["id"]).First().ToString();
}
Assert.True(duplicationsById.Count() == 0, $"There should not be 2 templates with the same ID, but the id {duplicatedId} is duplicated.");
}
private string GetYamlFileAsString(string detectionsYamlFileName)
{

View file

@ -0,0 +1,37 @@
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Text.RegularExpressions;
namespace Microsoft.Azure.Sentinel.Analytics.Management.AnalyticsManagement.Contracts.Model.ARM.ModelValidation
{
public class DictionaryKeyMatchesRegexAttribute : ValidationAttribute
{
private readonly Regex _keyRegex;
public DictionaryKeyMatchesRegexAttribute(string regexToMatch)
{
_keyRegex = new Regex(regexToMatch);
}
protected override ValidationResult IsValid(object value, ValidationContext validationContext)
{
if (value == null)
{
return ValidationResult.Success;
}
var dictionaryValue = (Dictionary<string, string>)value;
var fieldName = validationContext.MemberName;
foreach (string key in dictionaryValue.Keys)
{
if (!_keyRegex.IsMatch(key))
{
return new ValidationResult($"The key '{key}' in {fieldName} is invalid. The key must start with a letter and contain only alphanumeric English characters");
}
}
return ValidationResult.Success;
}
}
}

View file

@ -0,0 +1,28 @@
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
namespace Microsoft.Azure.Sentinel.Analytics.Management.AnalyticsManagement.Contracts.Model.ARM.ModelValidation
{
public class DictionaryLengthAttribute : ValidationAttribute
{
private readonly int _maxLength;
public DictionaryLengthAttribute(int maxLength)
{
_maxLength = maxLength;
}
protected override ValidationResult IsValid(object value, ValidationContext validationContext)
{
if (value == null)
{
return ValidationResult.Success;
}
var dictionaryValue = (Dictionary<string, string>)value;
var fieldName = validationContext.MemberName;
return dictionaryValue.Count <= _maxLength ? ValidationResult.Success : new ValidationResult($"Maximum length of {fieldName} exceeded. {fieldName} length should be less than or equal to {_maxLength}");
}
}
}

View file

@ -0,0 +1,42 @@
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
namespace Microsoft.Azure.Sentinel.Analytics.Management.AnalyticsManagement.Contracts.Model.ARM.ModelValidation
{
public class DictionaryMaxKeyAndValueLengthsAttribute : ValidationAttribute
{
private readonly int _maxKeyLength;
private readonly int _maxValueLength;
public DictionaryMaxKeyAndValueLengthsAttribute(int maxKeyLength, int maxValueLength)
{
_maxKeyLength = maxKeyLength;
_maxValueLength = maxValueLength;
}
protected override ValidationResult IsValid(object value, ValidationContext validationContext)
{
if (value == null)
{
return ValidationResult.Success;
}
var dictionaryValue = (Dictionary<string, string>)value;
var fieldName = validationContext.MemberName;
foreach (KeyValuePair<string, string> keyValuePair in dictionaryValue)
{
if (keyValuePair.Key.Length > _maxKeyLength)
{
return new ValidationResult($"Maximum length of key '{keyValuePair.Key}' in {fieldName} exceeded. Max key length should be less than or equal to {_maxKeyLength}");
}
else if (keyValuePair.Value.Length > _maxValueLength)
{
return new ValidationResult($"Maximum length of value '{keyValuePair.Value}' in {fieldName} exceeded. Max value length should be less than or equal to {_maxValueLength}");
}
}
return ValidationResult.Success;
}
}
}

View file

@ -0,0 +1,37 @@
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Text.RegularExpressions;
namespace Microsoft.Azure.Sentinel.Analytics.Management.AnalyticsManagement.Contracts.Model.ARM.ModelValidation
{
public class DictionaryValueMatchesRegexAttribute : ValidationAttribute
{
private readonly Regex _valueRegex;
public DictionaryValueMatchesRegexAttribute(string regexToMatch)
{
_valueRegex = new Regex(regexToMatch);
}
protected override ValidationResult IsValid(object value, ValidationContext validationContext)
{
if (value == null)
{
return ValidationResult.Success;
}
var dictionaryValue = (Dictionary<string, string>)value;
var fieldName = validationContext.MemberName;
foreach (string entryValue in dictionaryValue.Values)
{
if (!_valueRegex.IsMatch(entryValue))
{
return new ValidationResult($"The value '{entryValue}' in {fieldName} is invalid. The value must start with a letter or underscore, and contain only alphanumeric English characters");
}
}
return ValidationResult.Success;
}
}
}

View file

@ -41,6 +41,13 @@ namespace Microsoft.Azure.Sentinel.Analytics.Management.AnalyticsTemplatesServic
[Range(0, 10000)]
public int TriggerThreshold { get; set; }
[JsonProperty("customDetails", Required = Required.Default, NullValueHandling = NullValueHandling.Ignore)]
[DictionaryLength(20)]
[DictionaryMaxKeyAndValueLengths(maxKeyLength: 20, maxValueLength: 500)] // 500 is the max length of a column name in LA
[DictionaryKeyMatchesRegex("^[a-zA-Z]+\\w*$")] // The custom field key must start with an English letter and contain only alphanumeric characters (i.e. [a-zA-Z0-9_])
[DictionaryValueMatchesRegex("^[a-zA-Z_]+\\w*$")] // The custom field value must start with an English letter or an underscore and contain only alphanumeric characters (i.e. [a-zA-Z0-9_])
public Dictionary<string, string> CustomDetails { get; set; }
[JsonProperty("entityMappings", Required = Required.Default, NullValueHandling = NullValueHandling.Ignore)]
[ValidEntityMappings(entityMappingsMinLength: 1, entityMappingsMaxLength: 5, fieldMappingsMinLength: 1, fieldMappingsMaxLength: 3)]
public List<EntityMapping> EntityMappings { get; set; }

View file

@ -1,16 +1,3 @@
[
"89e6adbd-612c-4fbe-bc3d-32f81baf3b6c",
"4d8de9e6-263e-4845-8618-cd23a4f58b70",
"5efb0cfd-063d-417a-803b-562eae5b0301",
"ac891683-53c3-4f86-86b4-c361708e2b2b",
"d564ff12-8f53-41b8-8649-44f76b37b99f",
"2790795b-7dba-483e-853f-44aa0bc9c985",
"f041e01d-840d-43da-95c8-4188f6cef546",
"5436f471-b03d-41cb-b333-65891f887c43",
"3ff0fffb-d963-40c0-b235-3404f915add7",
"bff093b2-500e-4ae5-bb49-a5b1423cbd5b",
"e4779bdc-397a-4b71-be28-59e6a1e1d16b",
"8e267e91-6bda-4b3c-bf68-9f5cbdd103a3",
"58fc0170-0877-4ea8-a9ff-d805e361cfae",
"1218175f-c534-421c-8070-5dcaabf28067"
]
]

View file

@ -35,6 +35,7 @@
"Citrix",
"CitrixWAF",
"CyberArk",
"CyberpionSecurityLogs",
"Darktrace",
"DDOS",
"DNS",

View file

@ -24,7 +24,7 @@ export async function IsValidWorkbookTemplate(filePath: string): Promise<ExitCod
}
let fileTypeSuffixes = [".json"];
let filePathFolderPrefixes = ["Workbooks"];
let filePathFolderPrefixes = ["Workbooks","Solutions"];
let fileKinds = ["Added", "Modified"];
let CheckOptions = {
onCheckFile: (filePath: string) => {

49
.vscode/launch.json vendored
View file

@ -1,49 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch Program",
"type": "node",
"request": "launch",
"args": ["${relativeFile}"],
"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
"sourceMaps": true,
"cwd": "${workspaceRoot}",
"protocol": "inspector",
},
{
"type": "node",
"request": "launch",
"name": "Mocha All",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"args": [
"--timeout",
"999999",
"--colors",
"${workspaceFolder}/.script/test"
],
"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen"
},
{
"type": "node",
"request": "launch",
"name": "Mocha Current File",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"args": [
"--timeout",
"999999",
"--colors",
"${file}"
],
"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen"
}
]
}

3
.vscode/settings.json vendored
View file

@ -1,3 +0,0 @@
{
"powershell.codeFormatting.addWhitespaceAroundPipe": true
}

View file

@ -3,7 +3,7 @@
# the last matching pattern has the most precedence.
# Core team members
* @liemilyg @mgladi @orco365 @shalinoid @KobyKoren @lizamash @shainw @ianhelle @timbMSFT @juliango2100 @dicolanl @Amitbergman @sagamzu @YaronFruchtmann @preetikr @Yaniv-Shasha @sarah-yo @nazang @ehudk-msft @oshvartz @Liatlishams @NoamLandress @laithhisham
* @liemilyg @mgladi @orco365 @shalinoid @KobyKoren @shainw @ianhelle @timbMSFT @juliango2100 @dicolanl @Amitbergman @sagamzu @YaronFruchtmann @preetikr @Yaniv-Shasha @sarah-yo @nazang @ehudk-msft @oshvartz @Liatlishams @NoamLandress @laithhisham @petebryan
# This is copied from here: https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners

View file

@ -0,0 +1,114 @@
{
"id": "Darktrace",
"title": "AI Analyst Darktrace",
"publisher": "Darktrace",
"descriptionMarkdown": "The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Azure Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Azure Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.",
"graphQueries": [
{
"metricName": "Total data received",
"legend": "Darktrace",
"baseQuery": "CommonSecurityLog\n| where DeviceVendor == \"Darktrace\" "
}
],
"sampleQueries": [
{
"description": "first 10 most recent data breaches",
"query": "CommonSecurityLog\n| where DeviceVendor == \"Darktrace\"\n| order by TimeGenerated desc \n| limit 10"
}
],
"dataTypes": [
{
"name": "CommonSecurityLog (Darktrace)",
"lastDataReceivedQuery": "CommonSecurityLog\n| where DeviceVendor == \"Darktrace\"\n| summarize Time = max(TimeGenerated)\n| where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"CommonSecurityLog\n| where DeviceVendor == \"Darktrace\"\n| summarize LastLogReceived = max(TimeGenerated)\n| project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"availability": {
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"read": true,
"write": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
]
},
"instructionSteps": [
{
"title": "1. Linux Syslog agent configuration",
"description": "Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace",
"innerSteps": [
{
"title": "1.1 Select or create a Linux machine",
"description": "Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds."
},
{
"title": "1.2 Install the CEF collector on the Linux machine",
"description": "Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine.",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId",
"PrimaryKey"
],
"label": "Run the following command to install and apply the CEF collector:",
"value": "sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}"
},
"type": "CopyableLabel"
}
]
}
]
},
{
"title": "2. Forward Common Event Format (CEF) logs to Syslog agent",
"description": "Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. \n\n 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. \n\n 2) From the left-hand menu, select Modules and choose Azure Sentinel from the available Workflow Integrations.\\n 3) A configuration window will open. Locate Azure Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. \n\n 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. \n\n 5) Configure any alert thresholds, time offsets or additional settings as required. \n\n 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax.\n\n 7) Enable Send Alerts and save your changes."
},
{
"title": "3. Validate connection",
"description": "Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Run the following command to validate your connectivity:",
"value": "sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}"
},
"type": "CopyableLabel"
}
]
},
{
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
}

View file

@ -122,5 +122,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "2de7b355-5f0b-4eb1-a264-629314ef86e5",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Vectra AI"
},
"support": {
"name": "Vectra AI",
"link": "https://www.vectra.ai/support",
"tier": "developer"
}
}
}

11 binary image files not shown (after sizes: 78 KiB, 34 KiB, 52 KiB, 56 KiB, 37 KiB, 70 KiB, 83 KiB, 139 KiB, 116 KiB, 134 KiB, 76 KiB).

View file

@ -0,0 +1,136 @@
# AWS Lambda Function to import CloudTrail Logs to Azure Sentinel
Author: Sreedhar Ande
This Lambda function is designed to ingest AWS CloudTrail Events/S3 Events and send them to Azure Log Analytics workspace using the Log Analytics API.
AWS CloudTrail logs are audit-type events from any AWS resource in a tenancy. Each AWS resource has a unique set of Request and Response Parameters. Azure Log Analytics has a limit of 500 columns per table (plus some system columns); the aggregate of AWS parameter fields will exceed this quickly, leading to potential loss of event records.
The code does the following with the logs it processes:
1. Takes the core fields of the record, i.e. all fields except the Request- and Response-associated fields, and puts them in LogAnalyticsTableName_ALL, providing a single table with the core event information for all records.
2. Looks at each event and puts it into a table with an extension <AWSResourceType>, e.g. LogAnalyticsTableName_S3.
3. The exception to point 2 above is EC2 events: the volume of fields for EC2 Request and Response parameters exceeds 500 columns, so EC2 data is split into 3 tables: Header, Request & Response.
Ex: LogAnalyticsTableName_EC2_Header
4. If other AWS data types exceed 500 columns in the future, a similar split may be required for them as well.
**Credits**
This data connector uses PowerShell logic authored by [Chris Abberley](https://github.com/cabberley).
**Note**
To avoid additional billing and duplication: **CloudTrail Logs only**
1. You can turn off LogAnalyticsTableName_ALL by setting the additional environment variable **CoreFieldsAllTable** to **true/false**
2. You can turn off the LogAnalyticsTableName_<AWSResourceType> tables by setting the additional environment variable **SplitAWSResourceTypeTables** to **true/false**
**Either CoreFieldsAllTable or SplitAWSResourceTypeTables must be true; both can be true**
## **Function Flow process**
# **SNS Lambda Trigger:**
**CloudTrail/CloudWatch/GuardDuty/SecurityHub Logs --> AWS S3 --> AWS SNS Topic --> AWS Lambda --> Azure Log Analytics**
![Picture9](./Graphics/Picture9.png)
# **SQS Lambda Trigger:**
**CloudTrail/CloudWatch/GuardDuty/SecurityHub Logs --> AWS S3 --> AWS SQS --> AWS Lambda --> Azure Log Analytics**
![Picture9](./Graphics/Picture11.png)
**Note**
Data parsing is applicable only to CloudTrail Logs. CloudWatch/GuardDuty/SecurityHub Logs will be ingested to **CoreFieldsAllTable**
## Installation / Setup Guide
## **Pre-requisites**
This function requires AWS Secrets Manager to store Azure Log Analytics WorkspaceId and WorkspaceKey
![Picture10](./Graphics/Picture10.png)
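If you want to create this secret from PowerShell instead of the AWS console, a minimal sketch using the AWS.Tools.SecretsManager module (the same module the function's `#Requires` statement pulls in) is shown below. The secret name is a placeholder you choose yourself; the `LAWID`/`LAWKEY` keys are the ones the function reads from the secret.
```powershell
# Sketch: store the Log Analytics workspace credentials in AWS Secrets Manager.
# Assumes AWS credentials are already configured for this shell session.
Install-Module AWS.Tools.SecretsManager -Scope CurrentUser    # skip if already installed

$secretBody = @{
    LAWID  = "<your Log Analytics Workspace ID>"     # read by the function as $secretValue.LAWID
    LAWKEY = "<your Log Analytics Workspace Key>"    # read by the function as $secretValue.LAWKEY
} | ConvertTo-Json

# "AzureSentinelWorkspaceCreds" is a placeholder; whatever name you use here must also be set
# in the function's SecretName environment variable.
New-SECSecret -Name "AzureSentinelWorkspaceCreds" -SecretString $secretBody
```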
### **Option 1**
### Machine Setup
To deploy this, you will need a machine prepared with the following:
- PowerShell Core – I recommend PowerShell 7 [found here](https://github.com/PowerShell/PowerShell/releases)
- .Net Core 3.1 SDK [found here](https://dotnet.microsoft.com/download)
- AWSLambdaPSCore module – You can install this either from the [PowerShell Gallery](https://www.powershellgallery.com/packages?q=AWSLambdaPSCore), or you can install it by using the following PowerShell Core shell command:
```powershell
Install-Module AWSLambdaPSCore -Scope CurrentUser
```
See the documentation at https://docs.aws.amazon.com/lambda/latest/dg/powershell-devenv.html
I recommend reviewing https://docs.aws.amazon.com/lambda/latest/dg/powershell-package.html for the cmdlets that are part of AWSLambdaPSCore.
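For a quick reference of what the module gives you, you can list its cmdlets directly from your shell:
```powershell
# List the cmdlets provided by AWSLambdaPSCore (e.g. New-AWSPowerShellLambda, Publish-AWSPowerShellLambda)
Get-Command -Module AWSLambdaPSCore
```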
Note: If the environment uses a proxy, you may need to add the following to your VS Code profile:
```powershell
# Added to the VS Code profile:
$webclient=New-Object System.Net.WebClient
$webclient.Proxy.Credentials = [System.Net.CredentialCache]::DefaultNetworkCredentials
```
### Create the Lambda Function
1. ```Get-AWSPowerShellLambdaTemplate```
2. ```New-AWSPowerShellLambda -ScriptName IngestCloudTrailEventsToSentinel -Template S3EventToSNS```
OR
```New-AWSPowerShellLambda -ScriptName IngestCloudTrailEventsToSentinel -Template S3EventToSQS```
3. Copy the PowerShell code from https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AWS-CloudTrail-Ingestion-Lambda/SNS-Lambda-Trigge/IngestCloudTrailEventsToSentinel.ps1
4. Paste it into the code file created in step 2.
5. Go to the script file folder.
6. ```Publish-AWSPowerShellLambda -Name YourLambdaNameHere -ScriptPath <path>/IngestCloudTrailEventsToSentinel.ps1 -Region <region> -IAMRoleArn <arn of role created earlier> -ProfileName <profile>```
Ex: ```Publish-AWSPowerShellLambda -ScriptPath .\IngestCloudTrailEventsToSentinel.ps1 -Name IngestCloudTrailEventsToSentinel -Region us-east-2```
You might need -ProfileName if your .aws/credentials file doesn't contain a default profile. See this [document](https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/setup-credentials.html) for information on setting up your AWS credentials.
### **Option 2**
1. Create a new AWS Lambda and select "Author from scratch"
2. Give Function Name and select Runtime ".NET Core 2.1 (C#/PowerShell)" and click Create function
3. After successful creation, now you can change its code and configuration
4. Under Function code, click on Actions --> Upload a .zip file (/aws-data-connector-az-sentinel/blob/main/IngestCloudTrailEventsToSentinel.zip)
5. Follow the steps in "### Lambda Configuration" from step 2
### **Note: Either you choose Option 1/Option 2, the following configuration steps are mandatory.**
### **Lambda Configuration**
1. Once created, log in to the AWS console. In Find services, search for Lambda. Click on Lambda.
![Picture1](./Graphics/Picture1.png)
2. Click on the lambda function name you used with the cmdlet. Click Environment Variables and add the following
```
SecretName
LogAnalyticsTableName
CoreFieldsAllTable --> Boolean
SplitAWSResourceTypeTables --> Boolean
```
![Picture4](./Graphics/Picture4.png)
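If you prefer to script this step, a rough sketch using the AWS.Tools.Lambda module is shown below; the function name and values are placeholders, and the exact `-Environment_Variable` parameter name is an assumption to verify against your installed module version.
```powershell
# Sketch: set the function's environment variables from PowerShell instead of the console.
# "IngestCloudTrailEventsToSentinel" and the values below are placeholders.
Update-LMFunctionConfiguration -FunctionName "IngestCloudTrailEventsToSentinel" -Environment_Variable @{
    SecretName                 = "AzureSentinelWorkspaceCreds"   # Secrets Manager secret holding LAWID/LAWKEY
    LogAnalyticsTableName      = "AWSCloudTrail"                 # prefix for the custom log tables
    CoreFieldsAllTable         = "true"
    SplitAWSResourceTypeTables = "true"
}
```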
3. Click on the lambda function name you used with the cmdlet. Click Add Trigger
![Picture2](./Graphics/Picture2.png)
4. Select SNS. Select the SNS Name. Click Add.
![Picture3](./Graphics/Picture3.png)
5. Create an AWS role: The Lambda function needs an execution role that grants access to the S3 bucket and CloudWatch Logs. To create an execution role:
1. Open the [roles](https://console.aws.amazon.com/iam/home#/roles) page in the IAM console.
2. Choose Create role.
3. Create a role with the following properties.
- Trusted entity – AWS Lambda.
- Role name – AWSSNStoAzureSentinel.
    - Permissions – AWSLambdaBasicExecutionRole & AmazonS3ReadOnlyAccess & secretsmanager:GetSecretValue & kms:Decrypt (required only if you use a customer-managed AWS KMS key to encrypt the secret; you do not need this permission to use the account's default AWS-managed CMK for Secrets Manager).
The AWSLambdaExecute policy has the permissions that the function needs to manage objects in Amazon S3 and write logs to CloudWatch Logs. Copy the ARN of the role created, as you will need it for the next step.
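As a scripted alternative, a hedged sketch of the same role setup with the AWS.Tools.IdentityManagement module could look like the following; the managed policy ARNs are the standard AWS ones, and the Secrets Manager/KMS permissions still need to be added separately as described above.
```powershell
# Sketch: create the execution role and attach the managed policies listed above.
$trustPolicy = @'
{
  "Version": "2012-10-17",
  "Statement": [
    { "Effect": "Allow", "Principal": { "Service": "lambda.amazonaws.com" }, "Action": "sts:AssumeRole" }
  ]
}
'@

New-IAMRole -RoleName "AWSSNStoAzureSentinel" -AssumeRolePolicyDocument $trustPolicy
Register-IAMRolePolicy -RoleName "AWSSNStoAzureSentinel" -PolicyArn "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
Register-IAMRolePolicy -RoleName "AWSSNStoAzureSentinel" -PolicyArn "arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess"
# secretsmanager:GetSecretValue (and kms:Decrypt for customer-managed keys) must still be granted,
# e.g. via an inline policy scoped to the secret created earlier.
```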
6. Your lambda function is ready to send data to Log Analytics.
### **Test the function**
1. To test your function, perform some actions such as starting an EC2 instance, stopping an EC2 instance, logging into an EC2 instance, etc.
2. To see the logs, go to the Lambda function. Click the Monitoring tab. Click View logs in CloudWatch.
![Picture5](./Graphics/Picture5.png)
3. In CloudWatch, you will see each log stream from the runs. Select the latest.
![Picture6](./Graphics/Picture6.png)
4. Here you can see any output written by the script's Write-Host cmdlet.
![Picture7](./Graphics/Picture7.png)
5. Go to portal.azure.com and verify your data is in the custom log.
![Picture8](./Graphics/Picture8.png)
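If you'd rather verify from PowerShell than the portal, a minimal sketch with the Az.OperationalInsights module is shown below. Custom log tables get a `_CL` suffix, so the table name here is a placeholder built from the LogAnalyticsTableName value you configured.
```powershell
# Sketch: query the workspace for recently ingested records (requires Connect-AzAccount first).
$workspaceId = "<your Log Analytics Workspace ID>"
$query = "AWSCloudTrail_All_CL | take 10"   # placeholder: <LogAnalyticsTableName>_All_CL
(Invoke-AzOperationalInsightsQuery -WorkspaceId $workspaceId -Query $query).Results
```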

View file

@ -0,0 +1,347 @@
# PowerShell script file to be executed as an AWS Lambda function.
#
# When executing in Lambda the following variables will be predefined.
# $LambdaInput - A PSObject that contains the Lambda function input data.
# $LambdaContext - An Amazon.Lambda.Core.ILambdaContext object that contains information about the currently running Lambda environment.
#
# The last item in the PowerShell pipeline will be returned as the result of the Lambda function.
#
# To include PowerShell modules with your Lambda function, like the AWS.Tools.S3 module, add a "#Requires" statement
# indicating the module and version. If using an AWS.Tools.* module the AWS.Tools.Common module is also required.
#
# The following link contains documentation describing the structure of the S3 event object.
# https://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
#
# This example demonstrates how to process an S3 Event that follows the process:
# S3 Event -> SNS Topic -> Lambda Function
#Requires -Modules @{ModuleName='AWS.Tools.Common';ModuleVersion='4.1.5.0'}
#Requires -Modules @{ModuleName='AWS.Tools.S3';ModuleVersion='4.1.5.0'}
#Requires -Modules @{ModuleName='AWS.Tools.SecretsManager';ModuleVersion='4.1.5.0'}
# Uncomment to send the input event to CloudWatch Logs
#Write-Host (ConvertTo-Json -InputObject $LambdaInput -Compress -Depth 5)
#$PSVersionTable
# Get the current universal time in the default string format.
$currentUTCtime = (Get-Date).ToUniversalTime()
# Code to retrieve credentials from AWS Secrets Manager
$secretName = $env:SecretName
$secretValue = ConvertFrom-Json (Get-SECSecretValue -SecretId $secretName -ErrorAction Stop -Verbose).SecretString -ErrorAction Stop
$workspaceId = $secretValue.LAWID
$workspaceKey = $secretValue.LAWKEY
$LATableName = $env:LogAnalyticsTableName
$IsCoreFieldsAllTable = $env:CoreFieldsAllTable
$IsSplitAWSResourceTypes = $env:SplitAWSResourceTypeTables
$ResourceID = ''
#The $eventobjectlist contains the JSON parameter field names that form the core of the JSON message that we want in the ALL table in Log Analytics
$eventobjectlist = @('eventTime', 'eventVersion', 'userIdentity', 'eventSource', 'eventName', 'awsRegion', 'sourceIPAddress', 'userAgent', 'errorCode', 'errorMessage', 'requestID', 'eventID', 'eventType', 'apiVersion', 'managementEvent', 'readOnly', 'resources', 'recipientAccountId', 'serviceEventDetails', 'sharedEventID', 'vpcEndpointId', 'eventCategory', 'additionalEventData')
Function Expand-GZipFile {
Param(
$infile,
$outfile
)
Write-Host "Processing Expand-GZipFile for: infile = $infile, outfile = $outfile"
$inputfile = New-Object System.IO.FileStream $infile, ([IO.FileMode]::Open), ([IO.FileAccess]::Read), ([IO.FileShare]::Read)
$output = New-Object System.IO.FileStream $outfile, ([IO.FileMode]::Create), ([IO.FileAccess]::Write), ([IO.FileShare]::None)
$gzipStream = New-Object System.IO.Compression.GzipStream $inputfile, ([IO.Compression.CompressionMode]::Decompress)
$buffer = New-Object byte[](1024)
while ($true) {
$read = $gzipstream.Read($buffer, 0, 1024)
if ($read -le 0) { break }
$output.Write($buffer, 0, $read)
}
$gzipStream.Close()
$output.Close()
$inputfile.Close()
}
#function to create HTTP Header signature required to authenticate post
Function New-BuildSignature {
param(
$customerId,
$sharedKey,
$date,
$contentLength,
$method,
$contentType,
$resource )
$xHeaders = "x-ms-date:" + $date
$stringToHash = $method + "`n" + $contentLength + "`n" + $contentType + "`n" + $xHeaders + "`n" + $resource
$bytesToHash = [Text.Encoding]::UTF8.GetBytes($stringToHash)
$keyBytes = [Convert]::FromBase64String($sharedKey)
$sha256 = New-Object System.Security.Cryptography.HMACSHA256
$sha256.Key = $keyBytes
$calculatedHash = $sha256.ComputeHash($bytesToHash)
$encodedHash = [Convert]::ToBase64String($calculatedHash)
$authorization = 'SharedKey {0}:{1}' -f $customerId, $encodedHash
return $authorization
}
# Function to create and post the request
Function Invoke-LogAnalyticsData {
Param(
$CustomerId,
$SharedKey,
$Body,
$LogTable,
$TimeStampField,
$resourceId)
$method = "POST"
$contentType = "application/json"
$resource = "/api/logs"
$rfc1123date = [DateTime]::UtcNow.ToString("r")
$contentLength = $Body.Length
$signature = New-BuildSignature `
-customerId $CustomerId `
-sharedKey $SharedKey `
-date $rfc1123date `
-contentLength $contentLength `
-method $method `
-contentType $contentType `
-resource $resource
$uri = "https://" + $CustomerId + ".ods.opinsights.azure.com" + $resource + "?api-version=2016-04-01"
$headers1 = @{
"Authorization" = $signature;
"Log-Type" = $LogTable;
"x-ms-date" = $rfc1123date;
"x-ms-AzureResourceId" = $resourceId;
"time-generated-field" = $TimeStampField;
}
$status = $false
do {
$response = Invoke-WebRequest -Uri $uri -Method $method -ContentType $contentType -Headers $headers1 -Body $Body
#If requests are being made at a rate higher than the service allows, they will receive HTTP status code 429 (Too Many Requests) along with a Retry-After:
#<delta-seconds> header which indicates the number of seconds until requests to this application are likely to be accepted.
If ($response.StatusCode -eq 429) {
$rand = get-random -minimum 10 -Maximum 80
start-sleep -seconds $rand
}
else { $status = $true }
}until($status)
Remove-variable -name Body
return $response.StatusCode
}
Function Ingest-Core-Fields-Single-Table {
Param(
$coreEvents)
$coreJson = convertto-json $coreEvents -depth 5 -Compress
$Table = "$LATableName" + "_All"
IF (($corejson.Length) -gt 28MB) {
Write-Host "Log length is greater than 28 MB, splitting and sending to Log Analytics"
$bits = [math]::Round(($corejson.length) / 20MB) + 1
$TotalRecords = $coreEvents.Count
$RecSetSize = [math]::Round($TotalRecords / $bits) + 1
$start = 0
For ($x = 0; $x -lt $bits; $x++) {
IF ( ($start + $recsetsize) -gt $TotalRecords) {
$finish = $totalRecords
}
ELSE {
$finish = $start + $RecSetSize
}
$body = Convertto-Json ($coreEvents[$start..$finish]) -Depth 5 -Compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $body -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
$start = $finish + 1
}
$null = Remove-variable -name body
}
Else {
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $coreJson -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
}
$null = remove-variable -name coreEvents
$null = remove-variable -name coreJson
}
Function Ingest-AWS-ResourceType-Multi-Tables {
Param(
$eventSources,
$groupEvents)
$RecCount = 0
foreach ($d in $eventSources) {
#$events = $groupevents[$d]
$eventsJson = ConvertTo-Json $groupEvents[$d] -depth 5 -Compress
$Table = $LATableName + '_' + $d
$TotalRecords = $groupEvents[$d].Count
$recCount += $TotalRecords
IF (($eventsjson.Length) -gt 28MB) {
#$events = Convertfrom-json $corejson
$bits = [math]::Round(($eventsjson.length) / 20MB) + 1
$TotalRecords = $groupEvents[$d].Count
$RecSetSize = [math]::Round($TotalRecords / $bits) + 1
$start = 0
For ($x = 0; $x -lt $bits; $x++) {
IF ( ($start + $recsetsize) -gt $TotalRecords) {
$finish = $totalRecords
}
ELSE {
$finish = $start + $RecSetSize
}
$body = Convertto-Json ($groupEvents[$d][$start..$finish]) -Depth 5 -Compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $body -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
$start = $finish + 1
}
$null = Remove-variable -name body
}
Else {
#$logEvents = Convertto-Json $events -depth 20 -compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $eventsJson -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
}
}
}
foreach ($snsRecord in $LambdaInput.Records)
{
$snsMessage = ConvertFrom-Json -InputObject $snsRecord.Sns.Message
foreach ($s3Event in $snsMessage.Records)
{
$s3BucketName = $s3Event.s3.bucket.name
$s3BucketKey = $s3Event.s3.object.key
Write-Host "Processing event for: bucket = $s3BucketName, key = $s3BucketKey"
IF ($Null -ne $s3BucketName -and $Null -ne $s3BucketKey) {
$s3KeyPath = $s3BucketKey -Replace ('%3A', ':')
$fileNameSplit = $s3KeyPath.split('/')
$fileSplits = $fileNameSplit.Length - 1
$fileName = $filenameSplit[$fileSplits].replace(':', '_')
$downloadedFile = Read-S3Object -BucketName $s3BucketName -Key $s3BucketKey -File "/tmp/$filename"
Write-Host "Object $s3BucketKey is $($downloadedFile.Size) bytes; Extension is $($downloadedFile.Extension)"
IF ($downloadedFile.Extension -eq '.gz' ) {
$infile = "/tmp/$filename"
$outfile = "/tmp/" + $filename -replace ($downloadedFile.Extension, '')
Expand-GZipFile $infile.Trim() $outfile.Trim()
$null = Remove-Item -Path $infile -Force -Recurse -ErrorAction Ignore
$filename = $filename -replace ($downloadedFile.Extension, '')
$filename = $filename.Trim()
$logEvents = Get-Content -Raw -LiteralPath ("/tmp/$filename" )
$logEvents = $LogEvents.Substring(0, ($LogEvents.length) - 1)
$LogEvents = $LogEvents -Replace ('{"Records":', '')
$loglength = $logEvents.Length
$logevents = Convertfrom-json $LogEvents -AsHashTable
$groupevents = @{}
$coreEvents = @()
$eventSources = @()
Foreach ($log in $logevents) {
$Logdetails = @{}
$Logdetails1 = @{}
$b = ((($log.eventSource).split('.'))[0]) -replace ('-', '')
IF ($b -eq 'ec2') {
foreach ($col in $eventobjectlist) {
$logdetails1 += @{$col = $log.$col }
}
$ec2Header = $b + '_Header'
IF ($null -eq $groupevents[$ec2Header]) {
Add-Member -inputobject $groupevents -Name $b -MemberType NoteProperty -value @() -Force
$groupevents[$ec2Header] = @()
$eventSources += $ec2Header
}
$groupevents[$ec2Header] += $Logdetails1
$Ec2Request = $b + '_Request'
IF ($null -eq $groupevents[$Ec2Request]) {
Add-Member -inputobject $groupevents -Name $Ec2Request -MemberType NoteProperty -value @() -Force
$groupevents[$Ec2Request] = @()
$eventSources += $Ec2Request
}
$ec2Events = @{}
$ec2Events += @{'eventID' = $log.eventID }
$ec2Events += @{'awsRegion' = $log.awsRegion }
$ec2Events += @{'requestID' = $log.requestID }
$ec2Events += @{'eventTime' = $log.eventTime }
$ec2Events += @{'requestParameters' = $log.requestParameters }
$groupevents[$Ec2Request] += $ec2Events
$Ec2Response = $b + '_Response'
IF ($null -eq $groupevents[$Ec2Response]) {
Add-Member -inputobject $groupevents -Name $Ec2Response -MemberType NoteProperty -value @() -Force
$groupevents[$Ec2Response] = @()
$eventSources += $Ec2Response
}
$ec2Events = @{}
$ec2Events += @{'eventID' = $log.eventID }
$ec2Events += @{'awsRegion' = $log.awsRegion }
$ec2Events += @{'requestID' = $log.requestID }
$ec2Events += @{'eventTime' = $log.eventTime }
$ec2Events += @{'responseElements' = $log.responseElements }
$groupevents[$Ec2Response] += $ec2Events
}
Else {
IF ($null -eq $groupevents[$b]) {
Add-Member -inputobject $groupevents -Name $b -MemberType NoteProperty -value @() -Force
$groupevents[$b] = @()
$eventSources += $b
}
$groupevents[$b] += $log
}
foreach ($col in $eventobjectlist) {
$logdetails += @{$col = $log.$col }
}
$coreEvents += $Logdetails
}
IF ($IsCoreFieldsAllTable -eq "true" -and $IsSplitAWSResourceTypes -eq "true") {
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
Ingest-AWS-ResourceType-Multi-Tables -EventSources $eventSources -GroupEvents $groupevents
}
ELSEIF ($IsCoreFieldsAllTable -eq "true" -and $IsSplitAWSResourceTypes -eq "false"){
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
ELSEIF ($IsCoreFieldsAllTable -eq "false" -and $IsSplitAWSResourceTypes -eq "true"){
Ingest-AWS-ResourceType-Multi-Tables -EventSources $eventSources -GroupEvents $groupevents
}
ELSE {
Write-Host "Make sure you have correct values supplied in Environment Variables for CoreFieldsAllTable and SplitAWSResourceTypeTables"
}
$null = Remove-Variable -Name groupevents
$null = Remove-Variable -Name LogEvents
}
ELSEIF ($downloadedFile.Extension -eq '.json'){
$coreEvents = Get-Content -Raw -LiteralPath ("/tmp/$filename") | ConvertFrom-Json
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
ELSEIF ($downloadedFile.Extension -eq '.csv'){
$coreEvents = import-csv "/tmp/$filename"
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
}
}
}

Binary file not shown.

View file

@ -0,0 +1,348 @@
# PowerShell script file to be executed as an AWS Lambda function.
#
# When executing in Lambda the following variables will be predefined.
# $LambdaInput - A PSObject that contains the Lambda function input data.
# $LambdaContext - An Amazon.Lambda.Core.ILambdaContext object that contains information about the currently running Lambda environment.
#
# The last item in the PowerShell pipeline will be returned as the result of the Lambda function.
#
# To include PowerShell modules with your Lambda function, like the AWS.Tools.S3 module, add a "#Requires" statement
# indicating the module and version. If using an AWS.Tools.* module the AWS.Tools.Common module is also required.
#
# The following link contains documentation describing the structure of the S3 event object.
# https://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
#
# This example demonstrates how to process an S3 Event that follows the process:
# S3 Event -> SQS Queue -> Lambda Function
#Requires -Modules @{ModuleName='AWS.Tools.Common';ModuleVersion='4.1.5.0'}
#Requires -Modules @{ModuleName='AWS.Tools.S3';ModuleVersion='4.1.5.0'}
#Requires -Modules @{ModuleName='AWS.Tools.SecretsManager';ModuleVersion='4.1.5.0'}
# Uncomment to send the input event to CloudWatch Logs
#Write-Host (ConvertTo-Json -InputObject $LambdaInput -Compress -Depth 5)
#$PSVersionTable
# Get the current universal time in the default string format.
$currentUTCtime = (Get-Date).ToUniversalTime()
# Code to retrieve credentials from AWS Secrets Manager
$secretName = $env:SecretName
$secretValue = ConvertFrom-Json (Get-SECSecretValue -SecretId $secretName -ErrorAction Stop -Verbose).SecretString -ErrorAction Stop
$workspaceId = $secretValue.LAWID
$workspaceKey = $secretValue.LAWKEY
$LATableName = $env:LogAnalyticsTableName
$IsCoreFieldsAllTable = $env:CoreFieldsAllTable
$IsSplitAWSResourceTypes = $env:SplitAWSResourceTypeTables
$ResourceID = ''
#The $eventobjectlist contains the JSON parameter field names that form the core of the JSON message that we want in the ALL table in Log Analytics
$eventobjectlist = @('eventTime', 'eventVersion', 'userIdentity', 'eventSource', 'eventName', 'awsRegion', 'sourceIPAddress', 'userAgent', 'errorCode', 'errorMessage', 'requestID', 'eventID', 'eventType', 'apiVersion', 'managementEvent', 'readOnly', 'resources', 'recipientAccountId', 'serviceEventDetails', 'sharedEventID', 'vpcEndpointId', 'eventCategory', 'additionalEventData')
Function Expand-GZipFile {
Param(
$infile,
$outfile
)
Write-Host "Processing Expand-GZipFile for: infile = $infile, outfile = $outfile"
$inputfile = New-Object System.IO.FileStream $infile, ([IO.FileMode]::Open), ([IO.FileAccess]::Read), ([IO.FileShare]::Read)
$output = New-Object System.IO.FileStream $outfile, ([IO.FileMode]::Create), ([IO.FileAccess]::Write), ([IO.FileShare]::None)
$gzipStream = New-Object System.IO.Compression.GzipStream $inputfile, ([IO.Compression.CompressionMode]::Decompress)
$buffer = New-Object byte[](1024)
while ($true) {
$read = $gzipstream.Read($buffer, 0, 1024)
if ($read -le 0) { break }
$output.Write($buffer, 0, $read)
}
$gzipStream.Close()
$output.Close()
$inputfile.Close()
}
#function to create HTTP Header signature required to authenticate post
Function New-BuildSignature {
param(
$customerId,
$sharedKey,
$date,
$contentLength,
$method,
$contentType,
$resource )
$xHeaders = "x-ms-date:" + $date
$stringToHash = $method + "`n" + $contentLength + "`n" + $contentType + "`n" + $xHeaders + "`n" + $resource
$bytesToHash = [Text.Encoding]::UTF8.GetBytes($stringToHash)
$keyBytes = [Convert]::FromBase64String($sharedKey)
$sha256 = New-Object System.Security.Cryptography.HMACSHA256
$sha256.Key = $keyBytes
$calculatedHash = $sha256.ComputeHash($bytesToHash)
$encodedHash = [Convert]::ToBase64String($calculatedHash)
$authorization = 'SharedKey {0}:{1}' -f $customerId, $encodedHash
return $authorization
}
# Function to create and post the request
Function Invoke-LogAnalyticsData {
Param(
$CustomerId,
$SharedKey,
$Body,
$LogTable,
$TimeStampField,
$resourceId)
$method = "POST"
$contentType = "application/json"
$resource = "/api/logs"
$rfc1123date = [DateTime]::UtcNow.ToString("r")
$contentLength = $Body.Length
$signature = New-BuildSignature `
-customerId $CustomerId `
-sharedKey $SharedKey `
-date $rfc1123date `
-contentLength $contentLength `
-method $method `
-contentType $contentType `
-resource $resource
$uri = "https://" + $CustomerId + ".ods.opinsights.azure.com" + $resource + "?api-version=2016-04-01"
$headers1 = @{
"Authorization" = $signature;
"Log-Type" = $LogTable;
"x-ms-date" = $rfc1123date;
"x-ms-AzureResourceId" = $resourceId;
"time-generated-field" = $TimeStampField;
}
$status = $false
do {
$response = Invoke-WebRequest -Uri $uri -Method $method -ContentType $contentType -Headers $headers1 -Body $Body
#If requests are being made at a rate higher than the service allows, they will receive HTTP status code 429 (Too Many Requests) along with a Retry-After:
#<delta-seconds> header which indicates the number of seconds until requests to this application are likely to be accepted.
If ($response.StatusCode -eq 429) {
$rand = get-random -minimum 10 -Maximum 80
start-sleep -seconds $rand
}
else { $status = $true }
}until($status)
Remove-variable -name Body
return $response.StatusCode
}
Function Ingest-Core-Fields-Single-Table {
Param(
$coreEvents)
$coreJson = convertto-json $coreEvents -depth 5 -Compress
$Table = "$LATableName" + "_All"
IF (($corejson.Length) -gt 28MB) {
Write-Host "Log length is greater than 28 MB, splitting and sending to Log Analytics"
$bits = [math]::Round(($corejson.length) / 20MB) + 1
$TotalRecords = $coreEvents.Count
$RecSetSize = [math]::Round($TotalRecords / $bits) + 1
$start = 0
For ($x = 0; $x -lt $bits; $x++) {
IF ( ($start + $recsetsize) -gt $TotalRecords) {
$finish = $totalRecords
}
ELSE {
$finish = $start + $RecSetSize
}
$body = Convertto-Json ($coreEvents[$start..$finish]) -Depth 5 -Compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $body -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
$start = $finish + 1
}
$null = Remove-variable -name body
}
Else {
#$logEvents = Convertto-Json $events -depth 20 -compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $coreJson -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
}
$null = remove-variable -name coreEvents
$null = remove-variable -name coreJson
}
Function Ingest-AWS-ResourceType-Multi-Tables {
Param(
$eventSources,
$groupEvents)
$RecCount = 0
foreach ($d in $eventSources) {
#$events = $groupevents[$d]
$eventsJson = ConvertTo-Json $groupEvents[$d] -depth 5 -Compress
$Table = $LATableName + '_' + $d
$TotalRecords = $groupEvents[$d].Count
$recCount += $TotalRecords
IF (($eventsjson.Length) -gt 28MB) {
#$events = Convertfrom-json $corejson
$bits = [math]::Round(($eventsjson.length) / 20MB) + 1
$TotalRecords = $groupEvents[$d].Count
$RecSetSize = [math]::Round($TotalRecords / $bits) + 1
$start = 0
For ($x = 0; $x -lt $bits; $x++) {
IF ( ($start + $recsetsize) -gt $TotalRecords) {
$finish = $totalRecords
}
ELSE {
$finish = $start + $RecSetSize
}
$body = Convertto-Json ($groupEvents[$d][$start..$finish]) -Depth 5 -Compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $body -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
$start = $finish + 1
}
$null = Remove-variable -name body
}
Else {
#$logEvents = Convertto-Json $events -depth 20 -compress
$result = Invoke-LogAnalyticsData -CustomerId $workspaceId -SharedKey $workspaceKey -Body $eventsJson -LogTable $Table -TimeStampField 'eventTime' -ResourceId $ResourceID
if ($result -eq 200)
{
Write-Host "CloudTrail Logs successfully ingested to LogAnalytics Workspace under Custom Logs --> Table: $Table"
}
}
}
}
foreach ($sqsRecord in $LambdaInput.Records)
{
$sqsRecordBody = ConvertFrom-Json -InputObject $sqsRecord.body
foreach ($s3Event in $sqsRecordBody.Records)
{
$s3BucketName = $s3Event.s3.bucket.name
$s3BucketKey = $s3Event.s3.object.key
Write-Host "Processing event for: bucket = $s3BucketName, key = $s3BucketKey"
IF ($Null -ne $s3BucketName -and $Null -ne $s3BucketKey) {
$s3KeyPath = $s3BucketKey -Replace ('%3A', ':')
$fileNameSplit = $s3KeyPath.split('/')
$fileSplits = $fileNameSplit.Length - 1
$fileName = $filenameSplit[$fileSplits].replace(':', '_')
$downloadedFile = Read-S3Object -BucketName $s3BucketName -Key $s3BucketKey -File "/tmp/$filename"
Write-Host "Object $s3BucketKey is $($downloadedFile.Size) bytes; Extension is $($downloadedFile.Extension)"
IF ($downloadedFile.Extension -eq '.gz' ) {
$infile = "/tmp/$filename"
$outfile = "/tmp/" + $filename -replace ($downloadedFile.Extension, '')
Expand-GZipFile $infile.Trim() $outfile.Trim()
$null = Remove-Item -Path $infile -Force -Recurse -ErrorAction Ignore
$filename = $filename -replace ($downloadedFile.Extension, '')
$filename = $filename.Trim()
$logEvents = Get-Content -Raw -LiteralPath ("/tmp/$filename" )
$logEvents = $LogEvents.Substring(0, ($LogEvents.length) - 1)
$LogEvents = $LogEvents -Replace ('{"Records":', '')
$loglength = $logEvents.Length
$logevents = Convertfrom-json $LogEvents -AsHashTable
$groupevents = @{}
$coreEvents = @()
$eventSources = @()
Foreach ($log in $logevents) {
$Logdetails = @{}
$Logdetails1 = @{}
$b = ((($log.eventSource).split('.'))[0]) -replace ('-', '')
IF ($b -eq 'ec2') {
foreach ($col in $eventobjectlist) {
$logdetails1 += @{$col = $log.$col }
}
$ec2Header = $b + '_Header'
IF ($null -eq $groupevents[$ec2Header]) {
Add-Member -inputobject $groupevents -Name $b -MemberType NoteProperty -value @() -Force
$groupevents[$ec2Header] = @()
$eventSources += $ec2Header
}
$groupevents[$ec2Header] += $Logdetails1
$Ec2Request = $b + '_Request'
IF ($null -eq $groupevents[$Ec2Request]) {
Add-Member -inputobject $groupevents -Name $Ec2Request -MemberType NoteProperty -value @() -Force
$groupevents[$Ec2Request] = @()
$eventSources += $Ec2Request
}
$ec2Events = @{}
$ec2Events += @{'eventID' = $log.eventID }
$ec2Events += @{'awsRegion' = $log.awsRegion }
$ec2Events += @{'requestID' = $log.requestID }
$ec2Events += @{'eventTime' = $log.eventTime }
$ec2Events += @{'requestParameters' = $log.requestParameters }
$groupevents[$Ec2Request] += $ec2Events
$Ec2Response = $b + '_Response'
IF ($null -eq $groupevents[$Ec2Response]) {
Add-Member -inputobject $groupevents -Name $Ec2Response -MemberType NoteProperty -value @() -Force
$groupevents[$Ec2Response] = @()
$eventSources += $Ec2Response
}
$ec2Events = @{}
$ec2Events += @{'eventID' = $log.eventID }
$ec2Events += @{'awsRegion' = $log.awsRegion }
$ec2Events += @{'requestID' = $log.requestID }
$ec2Events += @{'eventTime' = $log.eventTime }
$ec2Events += @{'responseElements' = $log.responseElements }
$groupevents[$Ec2Response] += $ec2Events
}
Else {
IF ($null -eq $groupevents[$b]) {
Add-Member -inputobject $groupevents -Name $b -MemberType NoteProperty -value @() -Force
$groupevents[$b] = @()
$eventSources += $b
}
$groupevents[$b] += $log
}
foreach ($col in $eventobjectlist) {
$logdetails += @{$col = $log.$col }
}
$coreEvents += $Logdetails
}
IF ($IsCoreFieldsAllTable -eq "true" -and $IsSplitAWSResourceTypes -eq "true") {
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
Ingest-AWS-ResourceType-Multi-Tables -EventSources $eventSources -GroupEvents $groupevents
}
ELSEIF ($IsCoreFieldsAllTable -eq "true" -and $IsSplitAWSResourceTypes -eq "false"){
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
ELSEIF ($IsCoreFieldsAllTable -eq "false" -and $IsSplitAWSResourceTypes -eq "true"){
Ingest-AWS-ResourceType-Multi-Tables -EventSources $eventSources -GroupEvents $groupevents
}
ELSE {
Write-Host "Make sure you have correct values supplied in Environment Variables for CoreFieldsAllTable and SplitAWSResourceTypeTables"
}
$null = Remove-Variable -Name groupevents
$null = Remove-Variable -Name LogEvents
}
ELSEIF ($downloadedFile.Extension -eq '.json'){
$coreEvents = Get-Content -Raw -LiteralPath ("/tmp/$filename") | ConvertFrom-Json
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
ELSEIF ($downloadedFile.Extension -eq '.csv'){
$coreEvents = import-csv "/tmp/$filename"
Ingest-Core-Fields-Single-Table -CoreEvents $coreEvents
}
}
}
}

Binary file not shown.

View file

@ -154,11 +154,11 @@
},
{
"title": "",
"description": "**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinelagariazurefunctioncode) and paste into the Function App `run.ps1` editor.\n3. Click **Save**."
"description": "**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**."
},
{
"title": "",
"description": "**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight to eleven (11-14) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n>\n"
"description": "**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`.\n"
},
{
"title": "",
@ -168,5 +168,21 @@
"title": "",
"description": "**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some inital failure messages."
}
]
}
],
"metadata" : {
"id": "152fa8d4-b84b-4370-8317-b63ed52f9fe3",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Agari"
},
"support": {
"name": "Agari",
"link": "https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support",
"tier": "developer"
}
}
}

View file

@ -9,6 +9,8 @@ if ($Timer.IsPastDue) {
Write-Host "PowerShell timer is running late!"
}
$logAnalyticsUri = $env:logAnalyticsUri
# Write an information log with the current time.
Write-Host "PowerShell timer trigger function ran! TIME: $currentUTCtime"
@ -41,6 +43,43 @@ $bplastLog = $env:BPlastLogTime
$apdlastLog = $env:APDlastLogTime
$aprlastLog = $env:APRlastLogTime
if ([string]::IsNullOrEmpty($logAnalyticsUri))
{
$logAnalyticsUri = "https://" + $CustomerId + ".ods.opinsights.azure.com"
}
# Throw if the Log Analytics Uri is in an incorrect format.
# Sample format supported: https://<CustomerId>.ods.opinsights.azure.com
if($logAnalyticsUri -notmatch 'https:\/\/([\w\-]+)\.ods\.opinsights\.azure.([a-zA-Z\.]+)$')
{
throw "Agari: Invalid Log Analytics Uri."
}
# Set boolean values based on environment variables
if ($bpEnabled -Match "True"){
$bpEnabled = $true
} else {
$bpEnabled = $false
}
if ($apdEnabled -Match "True"){
$apdEnabled = $true
} else {
$apdEnabled = $false
}
if ($aprEnabled -Match "True"){
$aprEnabled = $true
} else {
$aprEnabled = $false
}
if ($sgEnabled -Match "True"){
$sgEnabled = $true
} else {
$sgEnabled = $false
}
# Function to build the Authorization signature for the Log Analytics Data Connector API
Function Build-Signature ($customerId, $sharedKey, $date, $contentLength, $method, $contentType, $resource)
{
@ -78,7 +117,8 @@ Function Post-LogAnalyticsData($customerId, $sharedKey, $body, $logType)
-method $method `
-contentType $contentType `
-resource $resource
$uri = "https://" + $customerId + ".ods.opinsights.azure.com" + $resource + "?api-version=2016-04-01"
$logAnalyticsUri = $logAnalyticsUri + $resource + "?api-version=2016-04-01"
$headers = @{
"Authorization" = $signature;
@ -87,7 +127,7 @@ Function Post-LogAnalyticsData($customerId, $sharedKey, $body, $logType)
"time-generated-field" = $TimeStampField;
}
$response = Invoke-WebRequest -Uri $uri -Method $method -ContentType $contentType -Headers $headers -Body $body -UseBasicParsing
$response = Invoke-WebRequest -Uri $logAnalyticsUri -Method $method -ContentType $contentType -Headers $headers -Body $body -UseBasicParsing
return $response.StatusCode
}

Binary data
DataConnectors/Agari/agari.zip

Binary file not shown.

View file

@ -4,7 +4,9 @@
"parameters": {
"FunctionName": {
"defaultValue": "agari",
"type": "string"
"type": "string",
"minLength": 1,
"maxLength": 11
},
"clientID": {
"type": "string",
@ -49,7 +51,9 @@
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]"
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"StorageSuffix":"[environment().suffixes.storage]",
"LogAnaltyicsUri":"[replace(environment().portal, 'https://portal', concat('https://', toLower(parameters('WorkspaceID')), '.ods.opinsights'))]"
},
"resources": [
@ -223,8 +227,8 @@
"FUNCTIONS_WORKER_RUNTIME": "powershell",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', parameters('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', parameters('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(parameters('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(parameters('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(parameters('FunctionName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(parameters('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(parameters('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(parameters('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(parameters('FunctionName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(parameters('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"WEBSITE_CONTENTSHARE": "[toLower(parameters('FunctionName'))]",
"clientID": "[parameters('clientID')]",
"clientSecret": "[parameters('clientSecret')]",
@ -243,25 +247,13 @@
"APRlastLogTime": "",
"APDlastLogTime": "",
"BPlastLogTime": "",
"logAnalyticsUri": "[variables('LogAnaltyicsUri')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/sentinel-agari-functionapp"
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(parameters('FunctionName'), '/', parameters('FunctionName'), '.azurewebsites.net')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', parameters('FunctionName'))]"
],
"properties": {
"siteName": "[parameters('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",

View file

@ -74,7 +74,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -92,11 +92,11 @@
},
{
"title": "1. Configure the Syslog server",
"description": "You will first need a **linux Syslog** server that Alsid for AD will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you whish but it is recommended to be able to output AFAD logs in a separate file."
"description": "You will first need a **linux Syslog** server that Alsid for AD will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish but it is recommended to be able to output AFAD logs in a separate file."
},
{
"title": "2. Configure Alsid to send logs to your Syslog server",
"description": "On your **Alsid for AD** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a seperate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in AFAD)."
"description": "On your **Alsid for AD** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in AFAD)."
},
{
"title": "3. Install and onboard the Microsoft agent for Linux",
@ -138,7 +138,7 @@
},
{
"title": "4. Configure the logs to be collected by the agents",
"description": "Configure the agent to collect the logs.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Custom Logs**.\n2. Select **Apply below configuration to my machines** and click **Add**.\n4. Upload a sample AFAD Syslog file from the **Linux** machine running the **Syslog** server and click **Next**.\n5. Set the record delimiter to **New Line** if not already the case and click **Next**.\n6. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**.\n7. Set the **Name** to *AlsidForADLog_CL* then click **Done** (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *AlsidForADLog_CL_CL*).\n\nAll of theses steps are showcased [here](https://www.youtube.com/watch?v=JwV1uZSyXM4&feature=youtu.be) as an example",
"description": "Configure the agent to collect the logs.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Custom Logs**.\n2. Select **Apply below configuration to my machines** and click **Add**.\n4. Upload a sample AFAD Syslog file from the **Linux** machine running the **Syslog** server and click **Next**.\n5. Set the record delimiter to **New Line** if not already the case and click **Next**.\n6. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**.\n7. In the Name field type *AlsidForADLog* before the _CL suffix, then click **Done**.\n\nAll of theses steps are showcased [here](https://www.youtube.com/watch?v=JwV1uZSyXM4&feature=youtu.be) as an example",
"instructions": [
{
"parameters": {
@ -152,5 +152,21 @@
"title": "",
"description": "> You should now be able to receive logs in the *AlsidForADLog_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates."
}
]
],
"metadata": {
"id": "12ff1831-b733-4861-a3e7-6115d20106f4",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Alsid"
},
"support": {
"name": "Alsid",
"link": "https://www.alsid.com/contact-us/",
"tier": "developer"
}
}
}

Binary data
DataConnectors/AtlassianJiraAudit/JiraAuditAPISentinelConn.zip Normal file

Binary file not shown.

View file

@ -0,0 +1,119 @@
import requests
import json
import datetime
from requests.auth import HTTPBasicAuth
import azure.functions as func
import base64
import hmac
import hashlib
import os
import tempfile
import logging
from .state_manager import StateManager
customer_id = os.environ['WorkspaceID']
shared_key = os.environ['WorkspaceKey']
jira_token = os.environ['JiraAccessToken']
jira_username = os.environ['JiraUsername']
jira_homesite_name = os.environ['JiraHomeSiteName']
connection_string = os.environ['AzureWebJobsStorage']
log_type = 'Jira_Audit'
jira_uri_audit = "https://" + jira_homesite_name + ".atlassian.net/rest/api/3/auditing/record"
def generate_date():
current_time = datetime.datetime.utcnow().replace(second=0, microsecond=0) - datetime.timedelta(minutes=10)
state = StateManager(connection_string=connection_string)
past_time = state.get()
if past_time is not None:
logging.info("The last time point is: {}".format(past_time))
else:
logging.info("There is no last time point, trying to get events for last hour.")
past_time = (current_time - datetime.timedelta(minutes=60)).strftime("%Y-%m-%dT%H:%M:%SZ")
state.post(current_time.strftime("%Y-%m-%dT%H:%M:%SZ"))
return (past_time, current_time.strftime("%Y-%m-%dT%H:%M:%SZ"))
def get_result_request(offset,limit,from_time,to_time):
try:
r = requests.get(url=jira_uri_audit,
headers={'Accept': 'application/json'},
auth=HTTPBasicAuth(jira_username, jira_token),
params={
"offset": offset,
"limit": limit,
"from": from_time,
"to": to_time
})
if r.status_code == 200:
return r.json().get("records")
elif r.status_code == 401:
logging.error("The authentication credentials are incorrect or missing. Error code: {}".format(r.status_code))
elif r.status_code == 403:
logging.error("The user does not have the required permissions or Jira products are on free plans. Audit logs are available when at least one Jira product is on a paid plan. Error code: {}".format(r.status_code))
else:
logging.error("Something went wrong. Error code: {}".format(r.status_code))
except Exception as err:
logging.error("Something went wrong. Exception error text: {}".format(err))
def get_result(time_range):
from_time = time_range[0]
to_time = time_range[1]
offset = 0
limit = 1000
element_count = None
global_element_count = 0
while element_count != 0:
result = get_result_request(offset,limit,from_time,to_time)
# get_result_request returns None on request errors; treat that as zero records
element_count = len(result) if result else 0
if offset == 0 and element_count == 0:
logging.info("No logs found. Time period: from {} to {}.".format(from_time,to_time))
elif offset != 0 and element_count != 0:
logging.info("Processing {} events".format(element_count))
offset = offset + limit
if element_count > 0:
post_status_code = post_data(json.dumps(result))
if post_status_code is not None:
global_element_count = global_element_count + element_count
logging.info("Processed {} events to Azure Sentinel. Time period: from {} to {}.".format(global_element_count,from_time, to_time))
def build_signature(customer_id, shared_key, date, content_length, method, content_type, resource):
x_headers = 'x-ms-date:' + date
string_to_hash = method + "\n" + str(content_length) + "\n" + content_type + "\n" + x_headers + "\n" + resource
bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
decoded_key = base64.b64decode(shared_key)
encoded_hash = base64.b64encode(hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
authorization = "SharedKey {}:{}".format(customer_id,encoded_hash)
return authorization
def post_data(body):
method = 'POST'
content_type = 'application/json'
resource = '/api/logs'
rfc1123date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
content_length = len(body)
signature = build_signature(customer_id, shared_key, rfc1123date, content_length, method, content_type, resource)
uri = 'https://' + customer_id + '.ods.opinsights.azure.com' + resource + '?api-version=2016-04-01'
headers = {
'content-type': content_type,
'Authorization': signature,
'Log-Type': log_type,
'x-ms-date': rfc1123date
}
response = requests.post(uri,data=body, headers=headers)
if (response.status_code >= 200 and response.status_code <= 299):
return response.status_code
else:
logging.warning("Events were not processed into Azure. Response code: {}".format(response.status_code))
return None
def main(mytimer: func.TimerRequest) -> None:
if mytimer.past_due:
logging.info('The timer is past due!')
logging.info('Starting program')
get_result(generate_date())

View file

@ -0,0 +1,11 @@
{
"scriptFile": "__init__.py",
"bindings": [
{
"name": "mytimer",
"type": "timerTrigger",
"direction": "in",
"schedule": "0 */10 * * * *"
}
]
}

View file

@ -0,0 +1,22 @@
from azure.storage.fileshare import ShareClient
from azure.storage.fileshare import ShareFileClient
from azure.core.exceptions import ResourceNotFoundError
class StateManager:
def __init__(self, connection_string, share_name='funcstatemarkershare', file_path='funcstatemarkerfile'):
self.share_cli = ShareClient.from_connection_string(conn_str=connection_string, share_name=share_name)
self.file_cli = ShareFileClient.from_connection_string(conn_str=connection_string, share_name=share_name, file_path=file_path)
def post(self, marker_text: str):
try:
self.file_cli.upload_file(marker_text)
except ResourceNotFoundError:
self.share_cli.create_share()
self.file_cli.upload_file(marker_text)
def get(self):
try:
return self.file_cli.download_file().readall().decode()
except ResourceNotFoundError:
return None

View file

@ -0,0 +1,121 @@
{
"id": "JiraAuditAPI",
"title": "Atlassian Jira Audit",
"publisher": "Atlassian",
"descriptionMarkdown": "The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Azure Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.",
"additionalRequirementBanner": "These queries and workbooks are dependent on a parser based on Kusto to work as expected. Follow the steps to use this Kusto functions alias **JiraAudit** in queries and workbooks [Follow steps to get this Kusto functions>](https://aka.ms/sentinel-jiraauditapi-parser).",
"graphQueries": [{
"metricName": "Total data received",
"legend": "Jira_Audit_CL",
"baseQuery": "Jira_Audit_CL"
}
],
"sampleQueries": [{
"description": "Jira Audit Events - All Activities",
"query": "JiraAudit\n | sort by TimeGenerated desc"
}
],
"dataTypes": [{
"name": "Jira_Audit_CL",
"lastDataReceivedQuery": "Jira_Audit_CL\n | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)"
}
],
"connectivityCriterias": [{
"type": "IsConnectedQuery",
"value": [
"Jira_Audit_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"availability": {
"status": 2,
"isPreview": true
},
"permissions": {
"resourceProvider": [{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions on the workspace are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"write": true,
"read": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
],
"customs": [{
"name": "Microsoft.Web/sites permissions",
"description": "Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/)."
},
{
"name": "REST API Credentials/permissions",
"description": "**JiraAccessToken**, **JiraUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/). Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) for obtaining credentials."
}
]
},
"instructionSteps": [{
"title": "",
"description": ">**NOTE:** This connector uses Azure Functions to connect to the Jira REST API to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details."
},
{
"title": "",
"description": ">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App."
},
{
"description": ">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-jiraauditapi-parser) to create the Kusto functions alias, **JiraAudit**"
},
{
"title": "",
"description": "**STEP 1 - Configuration steps for the Jira API**\n\n [Follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) to obtain the credentials. \n"
},
{
"title": "",
"description": "**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following).",
"instructions": [{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Workspace ID"
},
"type": "CopyableLabel"
},
{
"parameters": {
"fillWith": [
"PrimaryKey"
],
"label": "Primary Key"
},
"type": "CopyableLabel"
}
]
},
{
"title": "Option 1 - Azure Resource Manager (ARM) Template",
"description": "Use this method for automated deployment of the Jira Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineljiraauditazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **JiraAccessToken**, **JiraUsername**, **JiraHomeSiteName** (short site name part, as example HOMESITENAME from https://HOMESITENAME.atlassian.net) and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
},
{
"title": "Option 2 - Manual Deployment of Azure Functions",
"description": "Use the following step-by-step instructions to deploy the Jira Audit data connector manually with Azure Functions (Deployment via Visual Studio Code)."
},
{
"title": "",
"description": "**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/blob/main/jira-audit-azure-sentinel-data-connector/JiraAuditAPISentinelConn.zip?raw=true) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. JiraAuditXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Azure Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration."
},
{
"title": "",
"description": "**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tJiraUsername\n\t\tJiraAccessToken\n\t\tJiraHomeSiteName\n\t\tWorkspaceID\n\t\tWorkspaceKey\n3. Once all application settings have been entered, click **Save**."
}
]
}
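
A minimal sketch (an illustration, not the shipped Function code) of how an Azure Function could poll the Jira Cloud audit-records endpoint referenced above. The endpoint path and the offset/limit paging parameters follow the linked Atlassian documentation, and the environment-variable names mirror the application settings listed in the deployment steps; treat them as assumptions.

import os
import requests

def fetch_jira_audit_records(offset=0, limit=1000):
    # Basic authentication with the Atlassian account e-mail and API token (JiraUsername / JiraAccessToken).
    site = os.environ['JiraHomeSiteName']   # e.g. HOMESITENAME from https://HOMESITENAME.atlassian.net
    url = 'https://{}.atlassian.net/rest/api/3/auditing/record'.format(site)
    resp = requests.get(
        url,
        params={'offset': offset, 'limit': limit},
        auth=(os.environ['JiraUsername'], os.environ['JiraAccessToken'])
    )
    resp.raise_for_status()
    return resp.json().get('records', [])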

View file

@ -0,0 +1,212 @@
{
"$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"FunctionName": {
"defaultValue": "JiraAudit",
"type": "string"
},
"WorkspaceID": {
"type": "string",
"defaultValue": "<workspaceID>"
},
"WorkspaceKey": {
"type": "securestring",
"defaultValue": "<workspaceKey>"
},
"JiraAccessToken": {
"type": "securestring",
"defaultValue": "<JiraAccessToken>"
},
"JiraUsername": {
"type": "string",
"defaultValue": "<JiraUsername>"
},
"JiraHomeSiteName": {
"type": "string",
"defaultValue": "<JiraHomeSiteName>"
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]"
},
"resources": [
{
"type": "Microsoft.Insights/components",
"apiVersion": "2015-05-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"kind": "web",
"properties": {
"Application_Type": "web",
"ApplicationId": "[variables('FunctionName')]"
}
},
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"kind": "StorageV2",
"properties": {
"networkAcls": {
"bypass": "AzureServices",
"virtualNetworkRules": [],
"ipRules": [],
"defaultAction": "Allow"
},
"supportsHttpsTrafficOnly": true,
"encryption": {
"services": {
"file": {
"keyType": "Account",
"enabled": true
},
"blob": {
"keyType": "Account",
"enabled": true
}
},
"keySource": "Microsoft.Storage"
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": []
},
"deleteRetentionPolicy": {
"enabled": false
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": []
}
}
},
{
"type": "Microsoft.Web/sites",
"apiVersion": "2018-11-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
"kind": "functionapp,linux",
"identity": {
"type": "SystemAssigned"
},
"properties": {
"name": "[variables('FunctionName')]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true,
"reserved": true,
"siteConfig": {
"linuxFxVersion": "python|3.8"
}
},
"resources": [
{
"apiVersion": "2018-11-01",
"type": "config",
"name": "appsettings",
"dependsOn": [
"[concat('Microsoft.Web/sites/', variables('FunctionName'))]"
],
"properties": {
"FUNCTIONS_EXTENSION_VERSION": "~3",
"FUNCTIONS_WORKER_RUNTIME": "python",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"WorkspaceID": "[parameters('WorkspaceID')]",
"WorkspaceKey": "[parameters('WorkspaceKey')]",
"JiraAccessToken": "[parameters('JiraAccessToken')]",
"JiraUsername": "[parameters('JiraUsername')]",
"JiraHomeSiteName": "[parameters('JiraHomeSiteName')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/averbn/azure_sentinel_data_connectors/blob/main/jira-audit-azure-sentinel-data-connector/JiraAuditAPISentinelConn.zip?raw=true"
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.net')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"shareQuota": 5120
}
}
]
}

View file

@ -0,0 +1,15 @@
{
"version": "2.0",
"logging": {
"applicationInsights": {
"samplingSettings": {
"isEnabled": true,
"excludedTypes": "Request"
}
}
},
"extensionBundle": {
"id": "Microsoft.Azure.Functions.ExtensionBundle",
"version": "[1.*, 2.0.0)"
}
}

View file

@ -0,0 +1,4 @@
{
"$schema": "http://json.schemastore.org/proxies",
"proxies": {}
}

View file

@ -0,0 +1,7 @@
# DO NOT include azure-functions-worker in this file
# The Python Worker is managed by Azure Functions platform
# Manually managing azure-functions-worker may cause unexpected issues
azure-functions
requests
azure-storage-file-share==12.3.0

View file

@ -91,7 +91,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -125,5 +125,21 @@
}
]
}
]
],
"metadata": {
"id": "31f0ea52-dcd4-443b-9d04-a3e709addebc",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Better Mobile"
},
"support": {
"name": "Better Mobile",
"email": "support@better.mobi",
"tier": "developer"
}
}
}

View file

@ -97,7 +97,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
@ -139,5 +139,21 @@
}
]
}
]
],
"metadata": {
"id": "3be993d4-3aa7-41de-8280-e62de7859eca",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Beyond Security"
},
"support": {
"name": "Beyond Security",
"link": "https://beyondsecurity.freshdesk.com/support/home",
"tier": "developer"
}
}
}

View file

@ -0,0 +1,11 @@
{
"scriptFile": "main.py",
"bindings": [
{
"name": "mytimer",
"type": "timerTrigger",
"direction": "in",
"schedule": "0 */2 * * * *"
}
]
}

View file

@ -0,0 +1,142 @@
import os
import json
import datetime
import time
from boxsdk.auth.jwt_auth import JWTAuth
from boxsdk import Client
from boxsdk.object.events import Events, EnterpriseEventsStreamType
from boxsdk.util.api_call_decorator import api_call
from .sentinel_connector import AzureSentinelConnector
from .state_manager import StateManager
from dateutil.parser import parse as parse_date
import azure.functions as func
import logging
WORKSPACE_ID = os.environ['AzureSentinelWorkspaceId']
SHARED_KEY = os.environ['AzureSentinelSharedKey']
LOG_TYPE = 'BoxEvents'
# interval of script execution
SCRIPT_EXECUTION_INTERVAL_MINUTES = 2
# if ts of last extracted event is older than now - MAX_PERIOD_MINUTES -> script will get events from now - SCRIPT_EXECUTION_INTERVAL_MINUTES
MAX_PERIOD_MINUTES = 1440
# max azure function lifetime
AZURE_FUNC_MAX_EXECUTION_TIME_MINUTES = 9
def main(mytimer: func.TimerRequest):
start_time = time.time()
config_json = os.environ['BOX_CONFIG_JSON']
config_dict = json.loads(config_json)
file_storage_connection_string = os.environ['AzureWebJobsStorage']
state_manager = StateManager(connection_string=file_storage_connection_string)
stream_position, created_after = get_stream_pos_and_date_from(
marker=state_manager.get(),
max_period_minutes=MAX_PERIOD_MINUTES,
script_execution_interval_minutes=SCRIPT_EXECUTION_INTERVAL_MINUTES
)
logging.info('Script started. Getting events from stream_position {}, created_after {}'.format(stream_position, created_after))
sentinel = AzureSentinelConnector(workspace_id=WORKSPACE_ID, shared_key=SHARED_KEY, log_type=LOG_TYPE, queue_size=10000)
    last_event_date = None
    with sentinel:
        for events, stream_position in get_events(config_dict, created_after, stream_position=stream_position):
            for event in events:
                sentinel.send(event)
            if events:
                # guard against empty batches so the checkpoint date is only taken from a real event
                last_event_date = events[-1]['created_at']
            if check_if_time_is_over(start_time, SCRIPT_EXECUTION_INTERVAL_MINUTES, AZURE_FUNC_MAX_EXECUTION_TIME_MINUTES):
                logging.info('Stopping script because time for execution is over.')
                break
    if last_event_date:
        save_marker(state_manager, stream_position, last_event_date)
if sentinel.failed_sent_events_number:
logging.error('Script finished unsuccessfully. {} events have been sent. {} events have not been sent'.format(sentinel.successfull_sent_events_number, sentinel.failed_sent_events_number))
exit(1)
else:
logging.info('Script finished successfully. {} events have been sent. {} events have not been sent'.format(sentinel.successfull_sent_events_number, sentinel.failed_sent_events_number))
def get_stream_pos_and_date_from(marker, max_period_minutes, script_execution_interval_minutes):
"""Returns last saved checkpoint. If last checkpoint is older than max_period_minutes - returns now - script_execution_interval_minutes."""
def get_default_date_from(script_execution_interval_minutes):
date_from = datetime.datetime.utcnow() - datetime.timedelta(minutes=script_execution_interval_minutes)
date_from = date_from.replace(tzinfo=datetime.timezone.utc, second=0, microsecond=0).isoformat()
return date_from
def get_token_from_marker(marker, max_period_minutes):
token = 0
try:
last_token, last_event_date = marker.split()
last_event_date = parse_date(last_event_date)
            # total_seconds() counts the whole gap, not just the sub-day remainder
            minutes_from_last_ingested_event = (datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) - last_event_date).total_seconds() / 60
if minutes_from_last_ingested_event < max_period_minutes:
token = last_token
except Exception:
pass
return token
token = get_token_from_marker(marker, max_period_minutes)
if token:
date_from = None
else:
date_from = get_default_date_from(script_execution_interval_minutes)
return int(token), date_from
def save_marker(state_manager, stream_position, last_event_date):
logging.info('Saving last stream_position {} and last_event_date {}'.format(stream_position, last_event_date))
state_manager.post(str(stream_position) + ' ' + last_event_date)
def check_if_time_is_over(start_time, interval_minutes, max_script_exec_time_minutes):
"""Returns True if function's execution time is less than interval between function executions and
less than max azure func lifetime. In other case returns False."""
max_minutes = min(interval_minutes, max_script_exec_time_minutes)
if max_minutes > 1:
max_time = max_minutes * 60 - 30
else:
max_time = 50
script_execution_time = time.time() - start_time
if script_execution_time > max_time:
return True
else:
return False
class ExtendedEvents(Events):
@api_call
def get_events(self, stream_position=0, stream_type=EnterpriseEventsStreamType.ADMIN_LOGS, created_after=None, created_before=None, limit=100):
url = self.get_url()
params = {
'limit': limit,
'stream_position': stream_position,
'stream_type': stream_type,
'created_after': created_after,
'created_before': created_before
}
box_response = self._session.get(url, params=params)
response = box_response.json().copy()
return self.translator.translate(self._session, response_object=response)
def get_events(config_dict, created_after=None, stream_position=0):
limit = 500
config = JWTAuth.from_settings_dictionary(config_dict)
client = Client(config)
events_client = ExtendedEvents(client._session)
while True:
res = events_client.get_events(stream_position=stream_position, created_after=created_after, limit=limit)
stream_position = res['next_stream_position']
events = [event.response_object for event in res['entries']]
yield events, stream_position
if len(events) < limit:
break
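# Local illustration of the checkpoint logic above (an assumption for testing, not executed inside the
# Function App): the marker stored by save_marker() is a single string "<stream_position> <last_event_date>".
if __name__ == '__main__':
    example_marker = '1152922976252290800 2021-03-01T10:00:00+00:00'
    position, created_after_example = get_stream_pos_and_date_from(
        marker=example_marker,
        max_period_minutes=MAX_PERIOD_MINUTES,
        script_execution_interval_minutes=SCRIPT_EXECUTION_INTERVAL_MINUTES
    )
    # A recent marker keeps the saved stream position (created_after is None); a stale or missing
    # marker falls back to position 0 and created_after = now - SCRIPT_EXECUTION_INTERVAL_MINUTES.
    print(position, created_after_example)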

View file

@ -0,0 +1,114 @@
import requests
import datetime
import logging
import json
import hashlib
import hmac
import base64
from threading import Thread
class AzureSentinelConnector:
def __init__(self, workspace_id, shared_key, log_type, queue_size=200, bulks_number=10, queue_size_bytes=25 * (2**20)):
self.workspace_id = workspace_id
self.shared_key = shared_key
self.log_type = log_type
self.queue_size = queue_size
self.bulks_number = bulks_number
self.queue_size_bytes = queue_size_bytes
self._queue = []
self._bulks_list = []
self.successfull_sent_events_number = 0
self.failed_sent_events_number = 0
def send(self, event):
self._queue.append(event)
if len(self._queue) >= self.queue_size:
self.flush(force=False)
def flush(self, force=True):
self._bulks_list.append(self._queue)
if force:
self._flush_bulks()
else:
if len(self._bulks_list) >= self.bulks_number:
self._flush_bulks()
self._queue = []
def _flush_bulks(self):
jobs = []
for queue in self._bulks_list:
if queue:
queue_list = self._split_big_request(queue)
if len(queue_list) == 1:
self._post_data(self.workspace_id, self.shared_key, queue_list[0], self.log_type)
else:
for q in queue_list:
jobs.append(Thread(target=self._post_data, args=(self.workspace_id, self.shared_key, q, self.log_type, )))
for job in jobs:
job.start()
for job in jobs:
job.join()
self._bulks_list = []
def __enter__(self):
pass
def __exit__(self, type, value, traceback):
self.flush()
def _build_signature(self, workspace_id, shared_key, date, content_length, method, content_type, resource):
x_headers = 'x-ms-date:' + date
string_to_hash = method + "\n" + str(content_length) + "\n" + content_type + "\n" + x_headers + "\n" + resource
bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
decoded_key = base64.b64decode(shared_key)
encoded_hash = base64.b64encode(hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
authorization = "SharedKey {}:{}".format(workspace_id, encoded_hash)
return authorization
def _post_data(self, workspace_id, shared_key, body, log_type):
events_number = len(body)
body = json.dumps(body)
method = 'POST'
content_type = 'application/json'
resource = '/api/logs'
rfc1123date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
content_length = len(body)
signature = self._build_signature(workspace_id, shared_key, rfc1123date, content_length, method, content_type, resource)
uri = 'https://' + workspace_id + '.ods.opinsights.azure.com' + resource + '?api-version=2016-04-01'
headers = {
'content-type': content_type,
'Authorization': signature,
'Log-Type': log_type,
'x-ms-date': rfc1123date
}
try:
response = requests.post(uri, data=body, headers=headers)
except Exception as err:
logging.error("Error during sending events to Azure Sentinel: {}".format(err))
self.failed_sent_events_number += events_number
else:
if (response.status_code >= 200 and response.status_code <= 299):
logging.info('{} events have been successfully sent to Azure Sentinel'.format(events_number))
self.successfull_sent_events_number += events_number
else:
logging.error("Error during sending events to Azure Sentinel. Response code: {}".format(response.status_code))
self.failed_sent_events_number += events_number
def _check_size(self, queue):
data_bytes_len = len(json.dumps(queue).encode())
return data_bytes_len < self.queue_size_bytes
def _split_big_request(self, queue):
if self._check_size(queue):
return [queue]
else:
middle = int(len(queue) / 2)
queues_list = [queue[:middle], queue[middle:]]
return self._split_big_request(queues_list[0]) + self._split_big_request(queues_list[1])
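# Usage sketch (placeholder workspace values, an assumption): AzureSentinelConnector buffers events,
# signs each POST to the Log Analytics Data Collector API with the workspace shared key, and flushes
# whatever remains in the buffer when the "with" block exits.
if __name__ == '__main__':
    connector = AzureSentinelConnector(
        workspace_id='00000000-0000-0000-0000-000000000000',
        shared_key='cGxhY2Vob2xkZXJrZXk=',  # base64 placeholder; replace with the workspace primary key
        log_type='BoxEvents',
        queue_size=100
    )
    with connector:
        for i in range(250):
            connector.send({'event_id': i, 'event_type': 'LOGIN'})
    print(connector.successfull_sent_events_number, connector.failed_sent_events_number)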

View file

@ -0,0 +1,22 @@
from azure.storage.fileshare import ShareClient
from azure.storage.fileshare import ShareFileClient
from azure.core.exceptions import ResourceNotFoundError
class StateManager:
def __init__(self, connection_string, share_name='funcstatemarkershare', file_path='funcstatemarkerfile'):
self.share_cli = ShareClient.from_connection_string(conn_str=connection_string, share_name=share_name)
self.file_cli = ShareFileClient.from_connection_string(conn_str=connection_string, share_name=share_name, file_path=file_path)
def post(self, marker_text: str):
try:
self.file_cli.upload_file(marker_text)
except ResourceNotFoundError:
self.share_cli.create_share()
self.file_cli.upload_file(marker_text)
def get(self):
try:
return self.file_cli.download_file().readall().decode()
except ResourceNotFoundError:
return None
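# Usage sketch (assumes a valid Azure Storage connection string, e.g. the Function App's
# AzureWebJobsStorage setting): the checkpoint lives in a small file on an Azure Files share
# that is created on first use.
if __name__ == '__main__':
    import os
    state_manager = StateManager(connection_string=os.environ['AzureWebJobsStorage'])
    state_manager.post('1152922976252290800 2021-03-01T10:00:00+00:00')  # save a checkpoint
    print(state_manager.get())  # returns the saved string, or None if the file does not exist yet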

Binary data
DataConnectors/Box/BoxConn.zip Normal file

Binary file not shown.

View file

@ -0,0 +1,130 @@
{
"id": "BoxDataConnector",
"title": "Box",
"publisher": "Box",
"descriptionMarkdown": "The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Azure Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/for-enterprise/) for more information.",
"additionalRequirementBanner": "These queries and workbooks are dependent on a parser based on Kusto to work as expected. Follow the steps to use this Kusto functions alias **BoxEvents** in queries and workbooks. [Follow steps to get this Kusto functions>](https://aka.ms/sentinel-BoxDataConnector-parser) ",
"graphQueries": [
{
"metricName": "Box events",
"legend": "BoxEvents_CL",
"baseQuery": "BoxEvents_CL"
}
],
"sampleQueries": [
{
"description" : "All Box events",
"query": "BoxEvents\n| sort by TimeGenerated desc"
}
],
"dataTypes": [
{
"name": "BoxEvents_CL",
"lastDataReceivedQuery": "BoxEvents_CL\n | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"BoxEvents_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(1d)"
]
}
],
"availability": {
"status": 1,
"isPreview": true
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions on the workspace are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"write": true,
"read": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
],
"customs": [
{
"name": "Microsoft.Web/sites permissions",
"description": "Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/)."
},
{
"name": "Box API Credentials",
"description": "Box config JSON file is required for Box REST API JWT authentication. [See the documentation to learn more about JWT authentication](https://developer.box.com/guides/authentication/jwt/)."
}
]
},
"instructionSteps": [
{
"title": "",
"description": ">**NOTE:** This connector uses Azure Functions to connect to the Box REST API to pull logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details."
},
{
"title": "",
"description": ">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App."
},
{
"title": "",
"description": ">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://aka.ms/sentinel-BoxDataConnector-parser) to create the Kusto function alias **BoxEvents**."
},
{
"title": "",
"description": "**STEP 1 - Configuration of the Box events collection**\n\nSee documentation to [setup JWT authentication](https://developer.box.com/guides/applications/custom-apps/jwt-setup/) and [obtain JSON file with credentials](https://developer.box.com/guides/authentication/jwt/with-sdk/#prerequisites)."
},
{
"title": "",
"description": "**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Box data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Box JSON configuration file, readily available.",
"instructions":[
{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Workspace ID"
},
"type": "CopyableLabel"
},
{
"parameters": {
"fillWith": [
"PrimaryKey"
],
"label": "Primary Key"
},
"type": "CopyableLabel"
}
]
},
{
"title": "Option 1 - Azure Resource Manager (ARM) Template",
"description": "Use this method for automated deployment of the Box data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BoxDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**, **BoxConfigJSON**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy."
},
{
"title": "Option 2 - Manual Deployment of Azure Functions",
"description": "Use the following step-by-step instructions to deploy the Box data connector manually with Azure Functions (Deployment via Visual Studio Code)."
},
{
"title": "",
"description": "**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-BoxDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BoxXYZ).\n\n\te. **Select a runtime:** Choose Python 3.6 (note that other versions of python are not supported for this function).\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Azure Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration."
},
{
"title": "",
"description": "**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tBOX_CONFIG_JSON\n3. Once all application settings have been entered, click **Save**."
}
]
}
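
For reference, a small sketch (an assumption about local testing, not part of the packaged Function) of how the **BOX_CONFIG_JSON** application setting mentioned above is consumed: the JWT configuration file downloaded from the Box developer console is stored as a string and passed to the Box SDK as a dictionary.

import os
import json
from boxsdk.auth.jwt_auth import JWTAuth
from boxsdk import Client

auth = JWTAuth.from_settings_dictionary(json.loads(os.environ['BOX_CONFIG_JSON']))
client = Client(auth)
# Sanity check: prints the login of the service account the JWT application acts as.
print(client.user().get().login)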

View file

@ -0,0 +1,203 @@
{
"$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"FunctionName": {
"defaultValue": "Box",
"type": "string"
},
"BoxConfigJSON": {
"type": "string",
"defaultValue": "Paste Box config file content here"
},
"AzureSentinelWorkspaceId": {
"type": "string",
"defaultValue": "<AzureSentinelWorkspaceId>"
},
"AzureSentinelSharedKey": {
"type": "securestring",
"defaultValue": ""
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]"
},
"resources": [
{
"type": "Microsoft.Insights/components",
"apiVersion": "2015-05-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"kind": "web",
"properties": {
"Application_Type": "web",
"ApplicationId": "[variables('FunctionName')]"
}
},
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"kind": "StorageV2",
"properties": {
"networkAcls": {
"bypass": "AzureServices",
"virtualNetworkRules": [],
"ipRules": [],
"defaultAction": "Allow"
},
"supportsHttpsTrafficOnly": true,
"encryption": {
"services": {
"file": {
"keyType": "Account",
"enabled": true
},
"blob": {
"keyType": "Account",
"enabled": true
}
},
"keySource": "Microsoft.Storage"
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": []
},
"deleteRetentionPolicy": {
"enabled": false
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": []
}
}
},
{
"type": "Microsoft.Web/sites",
"apiVersion": "2018-11-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
"kind": "functionapp,linux",
"identity": {
"type": "SystemAssigned"
},
"properties": {
"name": "[variables('FunctionName')]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true,
"reserved": true,
"siteConfig": {
"linuxFxVersion": "python|3.6"
}
},
"resources": [
{
"apiVersion": "2018-11-01",
"type": "config",
"name": "appsettings",
"dependsOn": [
"[concat('Microsoft.Web/sites/', variables('FunctionName'))]"
],
"properties": {
"FUNCTIONS_EXTENSION_VERSION": "~3",
"FUNCTIONS_WORKER_RUNTIME": "python",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"BOX_CONFIG_JSON": "[parameters('BoxConfigJSON')]",
"AzureSentinelWorkspaceId": "[parameters('AzureSentinelWorkspaceId')]",
"AzureSentinelSharedKey": "[parameters('AzureSentinelSharedKey')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/sentinel-BoxDataConnector-functionapp"
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.net')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"shareQuota": 5120
}
}
]
}

View file

@ -0,0 +1,15 @@
{
"version": "2.0",
"logging": {
"applicationInsights": {
"samplingSettings": {
"isEnabled": true,
"excludedTypes": "Request"
}
}
},
"extensionBundle": {
"id": "Microsoft.Azure.Functions.ExtensionBundle",
"version": "[1.*, 2.0.0)"
}
}

View file

@ -0,0 +1,4 @@
{
"$schema": "http://json.schemastore.org/proxies",
"proxies": {}
}

View file

@ -0,0 +1,11 @@
# DO NOT include azure-functions-worker in this file
# The Python Worker is managed by Azure Functions platform
# Manually managing azure-functions-worker may cause unexpected issues
azure-functions
pyjwt==2.0.0
cryptography==3.3.1
boxsdk==2.11.0
azure-storage-file-share==12.3.0
python-dateutil

Binary file not shown.

View file

@ -3,7 +3,7 @@
"title": "Cisco Umbrella",
"publisher": "Cisco",
"descriptionMarkdown": "The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Azure Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.",
"additionalRequirementBanner": "These queries and workbooks are dependent on a parser based on Kusto to work as expected. Follow the steps to use this Kusto functions alias **Cisco_Umbrella** in queries and workbooks. [Follow steps to get this Kusto functions>](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) ",
"additionalRequirementBanner": "These queries and workbooks are dependent on a parser based on Kusto to work as expected. Follow the steps to use this Kusto functions alias **Cisco_Umbrella** in queries and workbooks. [Follow steps to get this Kusto functions>](https://aka.ms/sentinel-ciscoumbrella-function) ",
"graphQueries": [
{
"metricName": "Cisco Umbrella DNS Logs",
@ -166,11 +166,11 @@
},
{
"title": "",
"description": "**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/CiscoUmbrella/CiscoUmbrellaConn.zip?raw=true) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. UmbrellaXYZ).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Azure Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration."
"description": "**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. UmbrellaXYZ).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Azure Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration."
},
{
"title": "",
"description": "**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n3. Once all application settings have been entered, click **Save**."
"description": "**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**."
}
]
}
}

View file

@ -4,6 +4,8 @@
"parameters": {
"FunctionName": {
"defaultValue": "Umbrella",
"minLength": 1,
"maxLength": 11,
"type": "string"
},
"WorkspaceID": {
@ -28,7 +30,9 @@
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]"
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"StorageSuffix": "[environment().suffixes.storage]",
"LogAnaltyicsUri": "[replace(environment().portal, 'https://portal', concat('https://', toLower(parameters('WorkspaceID')), '.ods.opinsights'))]"
},
"resources": [
{
@ -146,30 +150,18 @@
"FUNCTIONS_WORKER_RUNTIME": "python",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"WorkspaceID": "[parameters('WorkspaceID')]",
"WorkspaceKey": "[parameters('WorkspaceKey')]",
"S3Bucket": "[parameters('S3Bucket')]",
"AWSAccessKeyId": "[parameters('AWSAccessKeyId')]",
"AWSSecretAccessKey": "[parameters('AWSSecretAccessKey')]",
"logAnalyticsUri": "[variables('LogAnaltyicsUri')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/CiscoUmbrella/CiscoUmbrellaConn.zip?raw=true"
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.net')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",

View file

@ -16,6 +16,7 @@ from threading import Thread
from io import StringIO
import azure.functions as func
import re
TIME_INTERVAL_MINUTES = 10
@ -30,6 +31,15 @@ sentinel_log_type = 'Cisco_Umbrella'
aws_s3_bucket = os.environ.get('S3Bucket')
aws_access_key_id = os.environ.get('AWSAccessKeyId')
aws_secret_acces_key = os.environ.get('AWSSecretAccessKey')
logAnalyticsUri = os.environ.get('logAnalyticsUri')
if ((logAnalyticsUri in (None, '') or str(logAnalyticsUri).isspace())):
logAnalyticsUri = 'https://' + sentinel_customer_id + '.ods.opinsights.azure.com'
pattern = r'https:\/\/([\w\-]+)\.ods\.opinsights\.azure.([a-zA-Z\.]+)$'
match = re.match(pattern,str(logAnalyticsUri))
if(not match):
raise Exception("Cisco_Umbrella: Invalid Log Analytics Uri.")
def main(mytimer: func.TimerRequest) -> None:
@ -67,28 +77,28 @@ def main(mytimer: func.TimerRequest) -> None:
elif 'cloudfirewalllogs' in key.lower() or 'cdfwlogs' in key.lower():
cdfw_files.append(obj)
sentinel = AzureSentinelConnector(sentinel_customer_id, sentinel_shared_key, sentinel_log_type + '_dns', queue_size=10000, bulks_number=10)
sentinel = AzureSentinelConnector(logAnalyticsUri, sentinel_customer_id, sentinel_shared_key, sentinel_log_type + '_dns', queue_size=10000, bulks_number=10)
with sentinel:
for obj in dns_files:
cli.process_file(obj, dest=sentinel)
failed_sent_events_number += sentinel.failed_sent_events_number
successfull_sent_events_number += sentinel.successfull_sent_events_number
sentinel = AzureSentinelConnector(sentinel_customer_id, sentinel_shared_key, sentinel_log_type + '_proxy', queue_size=10000, bulks_number=10)
sentinel = AzureSentinelConnector(logAnalyticsUri, sentinel_customer_id, sentinel_shared_key, sentinel_log_type + '_proxy', queue_size=10000, bulks_number=10)
with sentinel:
for obj in proxy_files:
cli.process_file(obj, dest=sentinel)
failed_sent_events_number += sentinel.failed_sent_events_number
successfull_sent_events_number += sentinel.successfull_sent_events_number
sentinel = AzureSentinelConnector(sentinel_customer_id, sentinel_shared_key, sentinel_log_type + '_ip', queue_size=10000, bulks_number=10)
sentinel = AzureSentinelConnector(logAnalyticsUri, sentinel_customer_id, sentinel_shared_key, sentinel_log_type + '_ip', queue_size=10000, bulks_number=10)
with sentinel:
for obj in ip_files:
cli.process_file(obj, dest=sentinel)
failed_sent_events_number += sentinel.failed_sent_events_number
successfull_sent_events_number += sentinel.successfull_sent_events_number
sentinel = AzureSentinelConnector(sentinel_customer_id, sentinel_shared_key, sentinel_log_type + '_cloudfirewall', queue_size=10000, bulks_number=10)
sentinel = AzureSentinelConnector(logAnalyticsUri, sentinel_customer_id, sentinel_shared_key, sentinel_log_type + '_cloudfirewall', queue_size=10000, bulks_number=10)
with sentinel:
for obj in cdfw_files:
cli.process_file(obj, dest=sentinel)
@ -96,7 +106,7 @@ def main(mytimer: func.TimerRequest) -> None:
successfull_sent_events_number += sentinel.successfull_sent_events_number
else:
sentinel = AzureSentinelConnector(sentinel_customer_id, sentinel_shared_key, sentinel_log_type, queue_size=10000, bulks_number=10)
sentinel = AzureSentinelConnector(logAnalyticsUri, sentinel_customer_id, sentinel_shared_key, sentinel_log_type, queue_size=10000, bulks_number=10)
with sentinel:
for obj in obj_list:
cli.process_file(obj, dest=sentinel)
@ -409,7 +419,8 @@ class UmbrellaClient:
class AzureSentinelConnector:
def __init__(self, customer_id, shared_key, log_type, queue_size=200, bulks_number=10, queue_size_bytes=25 * (2**20)):
def __init__(self, log_analytics_uri, customer_id, shared_key, log_type, queue_size=200, bulks_number=10, queue_size_bytes=25 * (2**20)):
self.log_analytics_uri = log_analytics_uri
self.customer_id = customer_id
self.shared_key = shared_key
self.log_type = log_type
@ -476,7 +487,7 @@ class AzureSentinelConnector:
rfc1123date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
content_length = len(body)
signature = self._build_signature(customer_id, shared_key, rfc1123date, content_length, method, content_type, resource)
uri = 'https://' + customer_id + '.ods.opinsights.azure.com' + resource + '?api-version=2016-04-01'
uri = self.log_analytics_uri + resource + '?api-version=2016-04-01'
headers = {
'content-type': content_type,
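# Sketch of the logAnalyticsUri handling introduced above (illustration only, not part of the diff):
# the optional application setting overrides the public-cloud endpoint for dedicated clouds, and the
# same regex used in the connector validates both forms.
import re

def resolve_log_analytics_uri(workspace_id, override=None):
    uri = override if override and not str(override).isspace() else 'https://' + workspace_id + '.ods.opinsights.azure.com'
    if not re.match(r'https:\/\/([\w\-]+)\.ods\.opinsights\.azure.([a-zA-Z\.]+)$', uri):
        raise Exception('Cisco_Umbrella: Invalid Log Analytics Uri.')
    return uri

resolve_log_analytics_uri('00000000-0000-0000-0000-000000000000')  # public cloud default
resolve_log_analytics_uri('00000000-0000-0000-0000-000000000000',
                          'https://00000000-0000-0000-0000-000000000000.ods.opinsights.azure.us')  # Azure Government override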

View file

@ -122,5 +122,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "7504f78d-1928-4399-a1ae-ba826c47c42d",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Citrix Systems"
},
"support": {
"name": "Citrix Systems",
"link": "https://www.citrix.com/support/",
"tier": "developer"
}
}
}

View file

@ -0,0 +1,99 @@
{
"id": "CognniSentinelDataConnector",
"title": "Cognni",
"publisher": "Cognni",
"descriptionMarkdown": "The Cognni connector offers a quick and simple integration with Azure Sentinel. You can use Cognni to autonomously map your previously unclassified important information and detect related incidents. This allows you to recognize risks to your important information, understand the severity of the incidents, and investigate the details you need to remediate, fast enough to make a difference.",
"graphQueries": [
{
"metricName": "Total data received",
"legend": "CognniIncidents_CL",
"baseQuery": "CognniIncidents_CL"
}
],
"sampleQueries": [
{
"description" : "Get all incidents order by time",
"query": "CognniIncidents_CL | order by TimeGenerated desc "
},
{
"description" : "Get high risk incidents",
"query": "CognniIncidents_CL | where Severity == 3"
},
{
"description" : "Get medium risk incidents",
"query": "CognniIncidents_CL | where Severity == 2"
},
{
"description" : "Get low risk incidents",
"query": "CognniIncidents_CL | where Severity == 1"
}
],
"dataTypes": [
{
"name": "CognniIncidents_CL",
"lastDataReceivedQuery": "CognniIncidents_CL\n | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)"
}
],
"connectivityCriterias": [
{
"type": "IsConnectedQuery",
"value": [
"CognniIncidents_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(30d)"
]
}
],
"availability": {
"status": 2,
"isPreview": true
},
"permissions": {
"resourceProvider": [
{
"provider": "Microsoft.OperationalInsights/workspaces",
"permissionsDisplayText": "read and write permissions are required.",
"providerDisplayName": "Workspace",
"scope": "Workspace",
"requiredPermissions": {
"write": true,
"read": true,
"delete": true
}
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {
"action": true
}
}
]
},
"instructionSteps": [
{
"title": "Connect to Cognni",
"description": "1. Go to [Cognni integrations page](https://intelligence.cognni.ai/integrations)\n2. Click **'Connect'** on the 'Azure Sentinel' box\n3. Copy and paste **'workspaceId'** and **'sharedKey'** (from below) to the related fields on Cognni's integrations screen\n4. Click the **'Connect'** botton to complete the configuration. \n Soon, all your Cognni-detected incidents will be forwarded here (into Azure Sentinel)\n\nNot a Cognni user? [Join us](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/shieldox.appsource_freetrial)",
"instructions": [
{
"parameters": {
"fillWith": [
"WorkspaceId"
],
"label": "Workspace ID"
},
"type": "CopyableLabel"
},
{
"parameters": {
"fillWith": [
"PrimaryKey"
],
"label": "Shared Key"
},
"type": "CopyableLabel"
}
]
}
]
}

View file

@ -114,5 +114,21 @@
}
]
}
]
],
"metadata": {
"id": "47835227-715b-4000-892e-e1fff81023c0",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "WatchGuard"
},
"support": {
"name": "WatchGuard",
"link": "https://www.watchguard.com/wgrd-support/overview",
"tier": "developer"
}
}
}

View file

@ -96,5 +96,21 @@
"title": "4. Secure your machine ",
"description": "Make sure to configure the machines security according to your organizations security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)"
}
]
],
"metadata": {
"id": "1c45e738-21dd-4fcd-9449-e2c9478e9552",
"version": "1.0.0",
"kind": "dataConnector",
"source": {
"kind": "community"
},
"author": {
"name": "Cyberark"
},
"support": {
"name": "Cyberark",
"link": "https://www.cyberark.com/customer-support/",
"tier": "developer"
}
}
}

View file

@ -31,7 +31,7 @@
}
],
"availability": {
"status": 1,
"status": 2,
"isPreview": true
},
"permissions": {
@ -49,7 +49,7 @@
},
{
"provider": "Microsoft.OperationalInsights/workspaces/sharedKeys",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)",
"permissionsDisplayText": "read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).",
"providerDisplayName": "Keys",
"scope": "Workspace",
"requiredPermissions": {

View file

@ -0,0 +1,74 @@
param(
[Parameter(Mandatory=$true)]$DocuSignEnvironment,
[Parameter(Mandatory=$true)]$IntegrationKey
)
If ($DocuSignEnvironment.ToLower() -eq "developer") {
$jwtHost = "account-d"
}
Else {
$jwtHost = "account"
}
$scopes = "signature%20impersonation"
$PORT = '8080'
$IP = 'localhost'
$state = [Convert]::ToString($(Get-Random -Maximum 1000000000), 16)
$authorizationEndpoint = "https://$jwtHost.docusign.com/oauth"
$redirectUri = "http://${IP}:${PORT}/authorization-code/callback"
$redirectUriEscaped = [Uri]::EscapeDataString($redirectURI)
$authorizationURL = "$authorizationEndpoint/auth?scope=$scopes&redirect_uri=$redirectUriEscaped&client_id=$IntegrationKey&state=$state&response_type=code"
Write-Output "The authorization URL is: $authorizationURL"
Write-Output ""
# Request the authorization code
# Use Http Server
$http = New-Object System.Net.HttpListener
# Hostname and port to listen on
$http.Prefixes.Add($redirectURI + "/")
# Start the Http Server
$http.Start()
if ($http.IsListening) {
Write-Output "Open the following URL in a browser to continue:" $authorizationURL
Start-Process $authorizationURL
}
while ($http.IsListening) {
$context = $http.GetContext()
if ($context.Request.HttpMethod -eq 'GET' -and $context.Request.Url.LocalPath -match '/authorization-code/callback') {
# write-host "Check context"
# write-host "$($context.Request.UserHostAddress) => $($context.Request.Url)" -f 'mag'
[string]$html = '
<html lang="en">
<head>
<meta charset="utf-8">
<title></title>
</head>
<body>
Ok. You may close this tab and return to the shell. This window closes automatically in five seconds.
<script type="text/javascript">
setTimeout(
function ( )
{
self.close();
}, 5000 );
</script>
</body>
</html>
'
# Respond to the request
$buffer = [System.Text.Encoding]::UTF8.GetBytes($html) # Convert HTML to bytes
$context.Response.ContentLength64 = $buffer.Length
$context.Response.OutputStream.Write($buffer, 0, $buffer.Length) # Stream HTML to browser
$context.Response.OutputStream.Close() # Close the response
Start-Sleep 10
$http.Stop()
}
}

Binary file not shown.

View file

@ -1,13 +1,16 @@
<#
Title: DocuSign Security Events Data Connector
Language: PowerShell
Version: 1.0
Version: 2.0
Author: Sreedhar Ande
Last Modified: 1/13/2021
Comment: Inital Release
Last Modified: 2/16/2021
Comment: V2 re-designed;
Ingests Security Events for your DocuSign account into Azure Log Analytics Workspace using DocuSign Monitor REST API
Ingests DocuSign Account Users into Azure Log Analytics Workspace using DocuSign Users REST API
Note: The above APIs resume getting records from the spot where the previous call left off to avoid duplication of records in the DocuSignSecurityEvents_CL and DocuSignUsers_CL Log Analytics Workspace custom tables
DESCRIPTION
This Function App calls the DocuSign Monitor REST API (https://{ORG}.docusign.net/api/v2.0/datasets/monitor/stream/) to pull the security events for your DocuSign account.
This Function App calls the DocuSign Monitor REST API (https://lens.docusign.net/api/v2.0/datasets/monitor/stream/) to pull the security events for your DocuSign account.
The response from the DocuSign Monitor REST API is received in JSON format. This function will build the signature and authorization header
needed to post the data to the Log Analytics workspace via the HTTP Data Connector API.
#>
@ -18,13 +21,12 @@ param($Timer)
# Get the current universal time in the default string format.
$currentUTCtime = (Get-Date).ToUniversalTime()
# The 'IsPastDue' property is 'true' when the current function invocation is later than scheduled.
if ($Timer.IsPastDue) {
Write-Host "PowerShell timer is running late!"
Write-Host "DocuSign-SecurityEvents: Azure Function triggered at: $currentUTCtime - timer is running late!"
}
else{
Write-Host "DocuSign-SecurityEvents: Azure Function triggered at: $currentUTCtime - timer is ontime!"
}
# Write an information log with the current time.
Write-Host "PowerShell timer trigger function ran! TIME: $currentUTCtime"
# Main
if ($env:MSI_SECRET -and (Get-Module -ListAvailable Az.Accounts)){
@ -33,16 +35,31 @@ if ($env:MSI_SECRET -and (Get-Module -ListAvailable Az.Accounts)){
$AzureWebJobsStorage = $env:AzureWebJobsStorage
$DocuSignAccessToken = $env:DocuSignOAuthAccessToken
$DocuSignIntegrationKey = $env:DocuSignIntegrationKey
$DocuSignAdminUserGUID = $env:DocuSignAdminUserGUID
$DocuSignAccountAPIID = $env:DocuSignAccountAPIID
$DocuSignEnvironment = $env:DocuSignEnvironment
$workspaceId = $env:WorkspaceId
$workspaceKey = $env:WorkspaceKey
$storageAccountContainer = "docusign-monitor"
$CustomLogTable = $env:CustomLogTableName
$tempDir=$env:TMPDIR
#The AzureTenant variable is used to specify other cloud environments like Azure Gov(.us) etc.,
$AzureTenant = $env:AZURE_TENANT
$storageAccountTableName = "docusignexecutions"
$LATableDSMAPI = $env:LATableDSMAPI
$LATableDSUsers = $env:LATableDSUsers
$LAURI = $env:LAURI
$DocuSignUserInfoBaseURI = $env:DocuSignUserInfoBaseURI
# Flag to turn on/off DocuSign Users information into LA Workspace Table
$DocuSignUsersIngestion = $env:NeedDocuSignUsers
$currentStartTime = (get-date).ToUniversalTime() | get-date -Format yyyy-MM-ddTHH:mm:ss:ffffffZ
Write-Output "LAURI : $LAURI"
if($LAURI.Trim() -notmatch 'https:\/\/([\w\-]+)\.ods\.opinsights\.azure.([a-zA-Z\.]+)$')
{
Write-Error -Message "DocuSign-SecurityEvents: Invalid Log Analytics Uri." -ErrorAction Stop
Exit
}
Function Write-OMSLogfile {
<#
@ -117,14 +134,15 @@ Function Write-OMSLogfile {
-method $method `
-contentType $contentType `
-resource $resource
$uri = "https://" + $customerId + ".ods.opinsights.azure.com" + $resource + "?api-version=2016-04-01"
$LAURI = $LAURI.Trim() + $resource + "?api-version=2016-04-01"
Write-Output "LAURI : $LAURI"
$headers = @{
"Authorization" = $signature;
"Log-Type" = $type;
"x-ms-date" = $rfc1123date
"time-generated-field" = $dateTime
}
$response = Invoke-WebRequest -Uri $uri -Method $method -ContentType $contentType -Headers $headers -Body $Body -UseBasicParsing
$response = Invoke-WebRequest -Uri $LAURI.Trim() -Method $method -ContentType $contentType -Headers $headers -Body $Body -UseBasicParsing
Write-Verbose -message ('Post Function Return Code ' + $response.statuscode)
return $response.statuscode
}
@ -144,7 +162,7 @@ Function Write-OMSLogfile {
return $returnCode
}
Function SendToLogA ($eventsData) {
Function SendToLogA ($eventsData, $eventsTable) {
#Test Size; Log A limit is 30MB
$tempdata = @()
$tempDataSize = 0
@ -155,7 +173,7 @@ Function SendToLogA ($eventsData) {
$tempdata += $record
$tempDataSize += ($record | ConvertTo-Json -depth 20).Length
if ($tempDataSize -gt 25MB) {
$postLAStatus = Write-OMSLogfile -dateTime (Get-Date) -type $CustomLogTable -logdata $tempdata -CustomerID $workspaceId -SharedKey $workspaceKey
$postLAStatus = Write-OMSLogfile -dateTime (Get-Date) -type $eventsTable -logdata $tempdata -CustomerID $workspaceId -SharedKey $workspaceKey
write-Host "Sending data = $TempDataSize"
$tempdata = $null
$tempdata = @()
@ -163,127 +181,253 @@ Function SendToLogA ($eventsData) {
}
}
Write-Host "Sending left over data = $Tempdatasize"
$postLAStatus = Write-OMSLogfile -dateTime (Get-Date) -type $CustomLogTable -logdata $eventsData -CustomerID $workspaceId -SharedKey $workspaceKey
$postLAStatus = Write-OMSLogfile -dateTime (Get-Date) -type $eventsTable -logdata $eventsData -CustomerID $workspaceId -SharedKey $workspaceKey
}
Else {
$postLAStatus = Write-OMSLogfile -dateTime (Get-Date) -type $CustomLogTable -logdata $eventsData -CustomerID $workspaceId -SharedKey $workspaceKey
$postLAStatus = Write-OMSLogfile -dateTime (Get-Date) -type $eventsTable -logdata $eventsData -CustomerID $workspaceId -SharedKey $workspaceKey
}
return $postLAStatus
return $postLAStatus
}
$docuSignAPIHeaders = @{
Authorization = "bearer $DocuSignAccessToken"
'Content-Type' = "application/json"
If ($DocuSignEnvironment.ToLower() -eq "developer") {
$jwtHost = "account-d"
$dsmHost = "lens-d"
}
Else {
$jwtHost = "account"
$dsmHost = "lens"
}
#Get Orgs from ORGS.json in Az Storage
$apiVersion = "eSignature"
$timestamp = [int][double]::Parse((Get-Date (Get-Date).ToUniversalTime() -UFormat %s))
$storageAccountContext = New-AzStorageContext -ConnectionString $AzureWebJobsStorage
$checkBlob = Get-AzStorageBlob -Blob ORGS.json -Container $storageAccountContainer -Context $storageAccountContext
$checkBlob = Get-AzStorageBlob -Blob "DocuSignRSAPrivateKey.key" -Container $storageAccountContainer -Context $storageAccountContext
if($null -ne $checkBlob){
Get-AzStorageBlobContent -Blob ORGS.json -Container $storageAccountContainer -Context $storageAccountContext -Destination "$tempDir\orgs.json" -Force
$docuSignOrgs = Get-Content "$tempDir\orgs.json" | ConvertFrom-Json
Get-AzStorageBlobContent -Blob "DocuSignRSAPrivateKey.key" -Container $storageAccountContainer -Context $storageAccountContext -Destination "C:\local\Temp\DocuSignRSAPrivateKey.key" -Force
$privateKeyPath = "C:\local\Temp\DocuSignRSAPrivateKey.key"
}
else{
Write-Error "No ORGS.json file, exiting"
Write-Error "No DocuSignRSAPrivateKey.key file, exiting"
exit
}
foreach($org in $docuSignOrgs){
$orgName = $org.org
Write-Host "Starting to process ORG: $orgName"
#check for last run file
$checkBlob = Get-AzStorageBlob -Blob "lastrun-Monitor.json" -Container $storageAccountContainer -Context $storageAccountContext
if($null -ne $checkBlob){
#Blob found get data
Get-AzStorageBlobContent -Blob "lastrun-Monitor.json" -Container $storageAccountContainer -Context $storageAccountContext -Destination "$tempDir\lastrun-Monitor.json" -Force
$lastRunMonitorContext = Get-Content "$tempDir\lastrun-Monitor.json" | ConvertFrom-Json
}
else {
#no blob create the context
$lastRun = $currentStartTime
$lastRunMonitor = @"
{
"org":$orgName
"lastRun": "$lastRun",
"lastRunEndCursor": ""
if ($apiVersion -eq "rooms") {
$scopes = "signature%20impersonation%20dtr.rooms.read%20dtr.rooms.write%20dtr.documents.read%20dtr.documents.write%20dtr.profile.read%20dtr.profile.write%20dtr.company.read%20dtr.company.write%20room_forms"
} elseif ($apiVersion -eq "eSignature") {
$scopes = "signature%20impersonation"
} elseif ($apiVersion -eq "click") {
$scopes = "click.manage"
}
"@
$lastRunMonitor | Out-File "$tempDir\lastrun-Monitor.json"
$lastRunMonitorContext = $lastRunMonitor | ConvertFrom-Json
}
$lastRunEndCursorContext = $lastRunMonitorContext | Where-Object {$_.org -eq $orgName}
if([string]::IsNullOrEmpty($lastRunEndCursorContext.lastRunEndCursor)){
$lastRunEndCursorValue=""
}
else {
$lastRunEndCursorValue = $lastRunEndCursorContext.lastRunEndCursor
}
$complete=$false
$iterations=0
DO{
$iterations++
try{
$docuSignMonitorAPI=$null
$monitorApiResponse = $null
$docuSignMonitorAPI = "https://${orgName}.docusign.net/api/v2.0/datasets/monitor/stream?cursor=${lastRunEndCursorValue}&limit=2000"
$monitorApiResponse = Invoke-RestMethod -Uri $docuSignMonitorAPI -Method 'GET' -Headers $docuSignAPIHeaders
Write-Output "Iteration:$iterations"
# Get the endCursor value from the response.
# This lets you resume getting records from the spot where this call left off
$currentRunEndCursorValue = $monitorApiResponse.endCursor
Write-Output "currentRunEndCursorValue :$currentRunEndCursorValue"
Write-Output "Last run cursorValue : $lastRunEndCursorValue"
# Step 1. Create a JWT
$decJwtHeader = [ordered]@{
'typ' = 'JWT';
'alg' = 'RS256'
} | ConvertTo-Json -Compress
# Remove %20 from scope string
$scopes = $scopes -replace '%20',' '
$exp = $timestamp + 7200
$decJwtPayLoad = [ordered]@{
'iss' = $DocuSignIntegrationKey;
'sub' = $DocuSignAdminUserGUID;
'iat' = $timestamp;
'exp' = $exp;
'aud' = "$jwtHost.docusign.com";
'scope' = $scopes
} | ConvertTo-Json -Compress
$encJwtHeaderBytes = [System.Text.Encoding]::UTF8.GetBytes($decJwtHeader)
$encJwtHeader = [System.Convert]::ToBase64String($encJwtHeaderBytes) -replace '\+', '-' -replace '/', '_' -replace '='
$encJwtPayLoadBytes = [System.Text.Encoding]::UTF8.GetBytes($decJwtPayLoad)
$encJwtPayLoad = [System.Convert]::ToBase64String($encJwtPayLoadBytes) -replace '\+', '-' -replace '/', '_' -replace '='
$jwtToken = "$encJwtHeader.$encJwtPayLoad"
try{
Add-Type -Path "C:\home\site\wwwroot\Modules\DerConverter.dll"
Add-Type -Path "C:\home\site\wwwroot\Modules\PemUtils.dll"
$keyStream = [System.IO.File]::OpenRead($privateKeyPath)
$rsaParameters = [PemUtils.PemReader]::new($keyStream).ReadRsaKey()
$rsa = [System.Security.Cryptography.RSA]::Create($rsaParameters)
$tokenBytes = [System.Text.Encoding]::ASCII.GetBytes($jwtToken)
$signedToken = $rsa.SignData(
$tokenBytes,
[System.Security.Cryptography.HashAlgorithmName]::SHA256,
[System.Security.Cryptography.RSASignaturePadding]::Pkcs1)
$signedBase64Token = [System.Convert]::ToBase64String($signedToken) -replace '\+', '-' -replace '/', '_' -replace '='
$jwtToken = "$encJwtHeader.$encJwtPayLoad.$signedBase64Token"
$keyStream.Close()
$keyStream.Dispose()
}
catch {
Write-Output "Please check you have DerConverter.dll and PemUtils.dll under C:\home\site\wwwroot\Modules"
$keyStream.Close()
$keyStream.Dispose()
}
# Step 2. Obtain the access token
try {
Write-Output "Obtaining DocuSign access token"
$authorizationEndpoint = "https://$jwtHost.docusign.com/oauth/"
$tokenResponse = Invoke-WebRequest `
-Uri "$authorizationEndpoint/token" `
-Method "POST" `
-Body "grant_type=urn:ietf:params:oauth:grant-type:jwt-bearer&assertion=$jwtToken"
$docuSignAccessToken = ($tokenResponse | ConvertFrom-Json).access_token
#Setup uri Headers for requests to DSM API & User API
$docuSignAPIHeaders = New-Object "System.Collections.Generic.Dictionary[[String],[String]]"
$docuSignAPIHeaders.Add("Content-Type", "application/json")
$docuSignAPIHeaders.Add("Authorization", "Bearer $docuSignAccessToken")
$StorageTable = Get-AzStorageTable -Name $storageAccountTableName -Context $storageAccountContext -ErrorAction Ignore
if($null -eq $StorageTable.Name){
New-AzStorageTable -Name $storageAccountTableName -Context $storageAccountContext
$docuSignTimeStampTbl = (Get-AzStorageTable -Name $storageAccountTableName -Context $storageAccountContext.Context).cloudTable
Add-AzTableRow -table $docuSignTimeStampTbl -PartitionKey "docusignmonitor" -RowKey "lastRunEndCursor" -property @{"lastCursorValue"=""} -UpdateExisting
}
Else {
$docuSignTimeStampTbl = (Get-AzStorageTable -Name $storageAccountTableName -Context $storageAccountContext.Context).cloudTable
}
# retrieve the last execution values
$lastExeEndCursor = Get-azTableRow -table $docuSignTimeStampTbl -partitionKey "docusignmonitor" -RowKey "lastRunEndCursor" -ErrorAction Ignore
$lastRunEndCursorValue = $lastExeEndCursor.lastCursorValue
$complete=$false
$iterations=0
DO{
$iterations++
try{
$docuSignMonitorAPI=$null
$monitorApiResponse = $null
$docuSignMonitorAPI = "https://$dsmHost.docusign.net/api/v2.0/datasets/monitor/stream?cursor=${lastRunEndCursorValue}&limit=2000"
Write-Output "Calling DocuSign Monitor API"
$monitorApiResponse = Invoke-RestMethod -Uri $docuSignMonitorAPI -Method 'GET' -Headers $docuSignAPIHeaders
# Display the data
Write-Output "Iteration:$iterations"
# Get the endCursor value from the response. This lets you resume
# getting records from the spot where this call left off
#Response from Invoke-RestMethod
$currentRunEndCursorValue = $monitorApiResponse.endCursor
Write-Output "currentRunEndCursorValue :$currentRunEndCursorValue"
Write-Output "Last run cursorValue : $lastRunEndCursorValue"
if (![string]::IsNullOrEmpty($lastRunEndCursorValue))
{
# If the endCursor from the response is the same as the one that you already have,
# it means that you have reached the end of the records
if ($currentRunEndCursorValue -eq $lastRunEndCursorValue)
{
Write-Output 'Current run endCursor & last run endCursor values are the same. This indicates that you have reached the end of your available records.'
$complete=$true
}
}
if(!$complete){
Write-Output "Updating the cursor value of $lastRunEndCursorValue to the new value of $currentRunEndCursorValue"
$lastRunEndCursorValue=$currentRunEndCursorValue
$securityEvents = $monitorApiResponse.data
$securityEventsCount = $monitorApiResponse.data.length
if ($securityEventsCount -gt 0) {
$postReturnCode = SendToLogA -EventsData $securityEvents -EventsTable $LATableDSMAPI
$securityEventsCount = $monitorApiResponse.data.length
if($postReturnCode -eq 200)
{
Write-Output ("$securityEventsCount - DocuSign Security Events have been ingested into Azure Log Analytics Workspace Table --> $LATableDSMAPI")
}
}
Remove-Item $monitorApiResponse
Add-AzTableRow -table $docuSignTimeStampTbl -PartitionKey "docusignmonitor" -RowKey "lastRunEndCursor" -property @{"lastCursorValue"=$lastRunEndCursorValue} -UpdateExisting
Start-Sleep -Second 5
}
}
catch{
$int = 0
foreach($header in $_.Exception.Response.Headers){
if($header -eq "X-DocuSign-TraceToken"){ write-host "TraceToken : " $_.Exception.Response.Headers[$int]}
$int++
}
write-host "Error : $_.ErrorDetails.Message"
write-host "Command : $_.InvocationInfo.Line"
$complete = $true
}
} While ($complete -eq $false )
#users Export
if ($DocuSignUsersIngestion.ToLower() -eq "true"){
try{
$docuSignUsersAPI = $null
$userApiResponse = $null
$docuSignUsersAPI = "$DocuSignUserInfoBaseURI/restapi/v2.1/accounts/$DocuSignAccountAPIID/users?additional_info=true"
Write-Output "Calling DocuSign Users API"
$userApiResponse = Invoke-RestMethod -Uri $docuSignUsersAPI -Method 'GET' -Headers $docuSignAPIHeaders
$docuSignUsers = $userApiResponse.users
$accountUsers = @()
foreach($dsUser in $docuSignUsers)
{
# If the endCursor from the response is the same as the one that you already have,
# it means that you have reached the end of the records
if ($currentRunEndCursorValue.Substring(0, $currentRunEndCursorValue.LastIndexOf('_')) -eq $lastRunEndCursorValue.Substring(0, $lastRunEndCursorValue.LastIndexOf('_')))
{
Write-Output 'Current run endCursor & last run endCursor values are the same. This indicates that you have reached the end of your available records.'
$complete=$true
$isUserExisting = Get-azTableRow -table $docuSignTimeStampTbl -partitionKey $dsUser.userId.ToString() -ErrorAction Ignore
if ($null -eq $isUserExisting) {
Add-AzTableRow -table $docuSignTimeStampTbl -PartitionKey $dsUser.userId.ToString() -RowKey $dsUser.userName.ToString() -UpdateExisting
$accountUsers += $dsUser
}
}
if(!$complete){
Write-Output "Updating the cursor value of $lastRunEndCursorValue to the new value of $currentRunEndCursorValue"
$lastRunEndCursorValue=$currentRunEndCursorValue
$postReturnCode = SendToLogA -EventsData $monitorApiResponse.data
}
$totalUsers = $accountUsers.Count
Write-Output "New Users Count : $totalUsers"
if($totalUsers -gt 0) {
Write-Output "Ingesting DocuSign Users information to $LATableDSUsers"
$postReturnCode = SendToLogA -EventsData $accountUsers -EventsTable $LATableDSUsers
if($postReturnCode -eq 200)
{
Write-Host ("{$monitorApiResponse.data.length} DocuSign Security Events have been ingested into Azure Log Analytics Workspace Table {$CustomLogTable}")
Write-Output ("$totalUsers users have been ingested into Azure Log Analytics Workspace Table $LATableDSUsers")
}
Remove-Item $monitorApiResponse
$lastRunEndCursorContext.org = $orgName
$lastRunEndCursorContext.lastRunEndCursor = $lastRunEndCursorValue
$lastRunEndCursorContext.lastRun = $currentStartTime
$lastRunMonitorContext | ConvertTo-Json | Out-File "$tempDir\lastrun-Monitor.json"
Set-AzStorageBlobContent -Blob "lastrun-Monitor.json" -Container $storageAccountContainer -Context $storageAccountContext -File "$tempDir\lastrun-Monitor.json" -Force
Remove-Item "$tempDir\lastrun-Monitor.json" -Force
Remove-Item "$tempDir\orgs.json" -Force
Start-Sleep -Second 5
}
}
catch{
$int = 0
foreach($header in $_.Exception.Response.Headers){
if($header -eq "X-DocuSign-TraceToken"){ write-host "TraceToken : " $_.Exception.Response.Headers[$int]}
$int++
else {
Write-Output ("No New Users")
}
write-host "Error : $_.ErrorDetails.Message"
write-host "Command : $_.InvocationInfo.Line"
$complete = $true
}
} While ($complete -eq $false )
Remove-Item $userApiResponse
}
catch {
$int = 0
foreach($header in $_.Exception.Response.Headers){
if($header -eq "X-DocuSign-TraceToken"){ write-host "TraceToken : " $_.Exception.Response.Headers[$int]}
$int++
}
write-host "Error : $_.ErrorDetails.Message"
write-host "Command : $_.InvocationInfo.Line"
}
}
Remove-Item $privateKeyPath -Force
Write-Output "Done."
} # closing foreach
}
catch {
$int = 0
foreach($header in $_.Exception.Response.Headers){
if($header -eq "X-DocuSign-TraceToken")
{ write-host "TraceToken : " $_.Exception.Response.Headers[$int]}
$int++
}
write-host "Error : $_.ErrorDetails.Message"
write-host "Command : $_.InvocationInfo.Line"
}
Write-Output "Done."

View file

@ -3,4 +3,5 @@
#
@{
'Az' = '5.*'
'AzTable' = '2.*'
}

View file

@ -0,0 +1,19 @@
## 2.1
- Implemented new logic to ingest users without duplication
- Adding UserId and UserName to Storage Account Table to avoid duplication
## 2.0
- Added CHANGELOG.MD to track future code changes
- Re-designed architecture of the Data connector
- Implemented logic to generate JWT Token and then get Access Token to interact with DocuSign API
- Implemented logic to ingest DocuSign Users into Log Analytics Table
- Added logic to update base uri to retrieve users using Environment Variable
- Created single ARM Template to support both Azure Commercial & Azure Gov (.US) Tenants
- Updated README.MD
## 1.0
- Ingesting DocuSign Security Events into Azure Log Analytics Workspace
- Using DocuSign Access Token to interact with DocuSign Monitor API
- Access Token expires every 1 hr
- Updated "function.json" inorder to accept TimeTrigger(CRON Expression) from Function App environment variable. Providing more flexibility to the user to change schedule to trigger Function App

View file

@ -1,3 +0,0 @@
[
{"org": ""}
]

View file

@ -1,7 +0,0 @@
[
{
"org": "",
"lastRunEndCursor": "",
"lastRun": ""
}
]

View file

@ -1,111 +1,96 @@
# Ingest DocuSign Security Events
Author: Sreedhar Ande
DocuSign-SecurityEvents Data connector ingests security events for your DocuSign account into Azure Log Analytics Workspace using DocuSign Monitor REST API
DocuSign-SecurityEvents Data connector ingests
1. Security Events for your DocuSign account into Azure Log Analytics Workspace using DocuSign Monitor REST API
2. DocuSign Account Users into Azure Log Analytics Workspace using DocuSign Users REST API
Following are the configuration steps to deploy the Data connector.
Technical Blog
https://techcommunity.microsoft.com/t5/azure-sentinel/protecting-your-docusign-agreements-with-azure-sentinel/ba-p/2085502
**Note**
The above APIs resume getting records from the spot where the previous call left off to avoid duplication of records in the DocuSignSecurityEvents_CL and DocuSignUsers_CL Log Analytics Workspace custom tables
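For reference, the cursor-based resume described in the note boils down to a call of the following shape (a simplified sketch drawn from the function code in run.ps1; `$dsmHost`, `$docuSignAPIHeaders`, and `$lastRunEndCursorValue` are assumed to be initialized the same way the function app initializes them):
```
# An empty cursor value means no prior position has been saved yet
$uri = "https://$dsmHost.docusign.net/api/v2.0/datasets/monitor/stream?cursor=${lastRunEndCursorValue}&limit=2000"
$response = Invoke-RestMethod -Uri $uri -Method 'GET' -Headers $docuSignAPIHeaders
# Persist endCursor (the function app stores it in an Azure Storage table) so the next run resumes where this one stopped
$lastRunEndCursorValue = $response.endCursor
```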
## **Pre-requisites**
1. Obtain DocuSign OAuth Token
Option #1
DocuSign OAuth Token is required. See the documentation to obtain https://developers.docusign.com/platform/auth/jwt/jwt-get-token/
1. Log in to your DocuSign Account
2. To create a new "Integration Key", click on "Add & Integration Key"
![IntegrationKey](./images/2IntegrationKey.png)
3. You'll see a dialog box to enter your app name. Give your app a short but descriptive name, such as "Azure Sentinel Integration"
4. Select ADD to add your app. Your app is automatically assigned an integration key (GUID) value that cannot be changed, as shown here
![AppAzSentinelIntegration](./images/AppAzSentinelIntegration.png)
5. Under Authentication, Select "Authorization Code Grant" as Azure Sentinel Data Connector uses JWT Grant Flow to get "Access Token" to communicate with DocuSign Monitor API and Users API
6. Select ADD SECRET KEY, which creates a new, automatically generated GUID value that represents a secret key
**Note**
Copy the secret key to a safe location by selecting the copy icon shown in the image. After your integration settings are saved, secret keys are masked for security reasons and cannot be revealed again. If you don't copy them first, your only option will be to delete the secret key in question and add a new one.
7. Set a redirect URI for your app, to which DocuSign will redirect the browser after authentication when using the Authorization Code Grant.
Option #2
1. https://apiexplorer.docusign.com/
2. Authenticate using your credentials
3. Select any API end point and click on "Send Request"
4. If you receive "Success" response - copy the Authorization Bearer token (token only without Bearer prefix)
8. Under Additional Settings, Select "ADD URI", which displays a new text box for you to enter the URI. Add the following **redirect uri** `http://localhost:8080/authorization-code/callback`, where your authenticated users will be redirected.
2. Copy two json files (ORGS.json and lastrun-Monitor.json) from Function Dependencies folder to your local drive
3. Edit the ORGS.json file and update "org": "sampleorg" and replace sample org with your org name.
```
If you have single org
[
{
"org": "sampleorg1"
}
]
9. RSA key pair is required to use the JWT Grant authentication flow. To generate one, navigate to Service Integration and click on "ADD RSA KEYPAIR", which creates a new, automatically generated GUID value that represents the ID for the private and public key pair
**Note:**
You are only able to view your RSA key pair immediately after creating it, so be sure to save it or record it to a safe place. To ensure the key pair's security, there is no way to go back and view it again after you close the window.
If you have multiple org's
[
{
"org": "sampleorg1"
},
{
"org": "sampleorg2"
},
{
"org": "sampleorg3"
}
]
```
10. **Important** Copy the **private_key** into the file **"DocuSignRSAPrivateKey.key"**. If you don't copy it first, your only option will be to delete the RSA key pair and add a new one. Select OK, then select SAVE.
![RSAKeyPair](./images/RSAKeyPair.png)
4. Edit lastrun-Monitor.json and update "org": "sampleorg" and replace sample org with your org name
### Request Application Consent
```
If you have single org
1. Run **[Application_Consent.ps1](./Application_Consent.ps1)** and provide values for the following (a sample invocation is shown after these steps)
```
DocuSignEnvironment: Enter value as Developer or Production
IntegrationKey: Enter DocuSign App Integration Key
```
2. Script will construct a URI value matching the "DocuSignEnvironment". This path differs depending on whether your app is in the development environment or in production.
For the developer demo environment, the base URI is https://account-d.docusign.com/oauth/auth
For the production platform, the base URI is https://account.docusign.com/oauth/auth
[
{
"org": "sampleorg1",
"lastRunEndCursor": "",
"lastRun": ""
}
]
3. The script will prompt you to log in to your DocuSign account, where you will be presented with a request to grant signature and impersonation permissions to your app, as shown in the screenshot.
If you have multiple org's
4. Click on Accept. After you grant permission, you'll be able to use the OAuth JWT Grant flow to impersonate that user and make API calls.
![consent](./images/consent.png)
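A sample invocation of the consent helper (a sketch; the Integration Key below is a placeholder) for the developer/demo environment looks like this:
```
.\Application_Consent.ps1 -DocuSignEnvironment "Developer" -IntegrationKey "<your Integration Key GUID>"
```
The script starts a local listener on `http://localhost:8080/authorization-code/callback`, so the redirect URI added in step 8 of the pre-requisites must match it.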
[
{
"org": "sampleorg1",
"lastRunEndCursor": "",
"lastRun": ""
},
{
"org": "sampleorg2",
"lastRunEndCursor": "",
"lastRun": ""
}
]
```
**Note**
It's a one-time step to collect consent
## Deploy the Function App template
<a href="https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FDocuSign-SecurityEvents%2Fazuredeploy_dotcomtenants.json" target="_blank">
<img src="https://aka.ms/deploytoazurebutton"/>
</a>
<a href="https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FDataConnectors%2FDocuSign-SecurityEvents%2Fazuredeploy_dotgovtenants.json" target="_blank">
<img src="https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/1-CONTRIBUTION-GUIDE/images/deploytoazuregov.png"/>
</a>
## Configuration Steps to Deploy Function App
1. Click on Deploy to Azure/Deploy to Azure Gov button
1. Click on Deploy to Azure (For both Commercial & Azure GOV)
<a href="https://aka.ms/sentinel-docusignconnector-azuredeploy" target="_blank">
<img src="https://aka.ms/deploytoazurebutton"/>
</a>
2. Select the preferred **Subscription**, **Resource Group** and **Location**
**Note**
Best practice: Create a new Resource Group while deploying; all the resources of your custom Data connector will reside in the newly created Resource Group
3. Enter the following value in the ARM template deployment
```
"DocuSignAccessToken": This is the DocuSign OAuth Token
"DocuSign Integration Key": DocuSign App Integration Key
"DocuSignAdminUserGUID" : Admin User Name GUID
"DocuSignAccountAPIID" : DocuSign Account API ID
"Workspace Id": Azure Log Analytics Workspace Id
"Workspace Key": Azure Log Analytics Workspace Key
"CustomLogTableName": Azure Log Analytics Custom Log Table Name
"Function Schedule": The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule. The default **Time Interval** is set to pull
the last ten (10) minutes of data.
```
## Post Deployment Steps
1. After successful deployment go to your Resource Group and search for storage account, named - `docusign<<uniqueid>>` and upload previously edited json files under "docusign-monitor" container
```
ORGS.json
lastrun-Monitor.json
```
1. **Important**
After successful deployment, navigate to the Resource Group and search for the storage account named `<<FunctionAppName>><<uniqueid>>`, then upload the previously saved file **"DocuSignRSAPrivateKey.key"** to the "docusign-monitor" container
2. DocuSignAccessToken and Workspace Key will be placed as "Secrets" in the Azure KeyVault `docusignkv<<uniqueid>>` with only Azure Function access policy. If you want to see/update these secrets,
2. DocuSignIntegrationKey, DocuSignAdminUserGUID, DocuSignAccountID and Workspace Key will be placed as "Secrets" in the Azure KeyVault `<<FunctionAppName>><<uniqueid>>` with only Azure Function access policy. If you want to see/update these secrets,
```
a. Go to Azure KeyVault "docusignkv<<uniqueid>>"
a. Go to Azure KeyVault "<<FunctionAppName>><<uniqueid>>"
b. Click on "Access Policies" under Settings
c. Click on "Add Access Policy"
i. Configure from template : Secret Management
@ -115,34 +100,74 @@ Following are the configuration steps to deploy Data connector.
d. Click "Save"
```
After granting permissions, if you want to update/change the value of any Secrets:
**Step 1: Update existing Secret Value**
```
a. Go to Azure KeyVault "<<FunctionAppName>><<uniqueid>>"
b. Click on "Secrets" and select "Secret Name"
c. Click on "New Version" to create a new version of the existing secret.
d. Copy "Secret Identifier"
```
**Step 2: Update KeyVault Reference in Azure Function**
```
a. Go to your Resource Group --> Click on Function App `<<FunctionAppName>><<uniqueid>>`
b. Click on Function App "Configuration" under Settings
c. Click on the environment variable that has a value in KeyVault under "Application Settings"
d. Update value @Microsoft.KeyVault(SecretUri=<<Step 1 copied Secret Identifier URI>>).
e. Before clicking OK, make sure the status is "Resolved"
```
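For illustration only (the secret version is a placeholder, and the `vault.azure.net` DNS suffix shown here applies to the commercial cloud), a filled-in Key Vault reference for the Workspace Key application setting would look like:
```
@Microsoft.KeyVault(SecretUri=https://<<FunctionAppName>><<uniqueid>>.vault.azure.net/secrets/LogAnalyticsWorkspaceKey/<secret-version>)
```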
3. The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule. This sample demonstrates a simple use case of calling your function based on your schedule provided while deploying. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly update environment variable **"Schedule**" (post deployment) to prevent overlapping data ingestion.
3. The `TimerTrigger` makes it incredibly easy to have your functions executed on a schedule. The default **Time Interval** is set to pull the last ten (10) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly and update the environment variable **"Schedule"** to prevent overlapping data ingestion.
```
a. Go to your Resource Group --> Click on Function App `docusign<<uniqueid>>`
a. Go to your Resource Group --> Click on Function App `<<FunctionAppName>><<uniqueid>>`
b. Click on Function App "Configuration" under Settings
c. Click on "Schedule" under "Application Settings"
d. Update your own schedule using cron expression.
```
**Note: For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)(See the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 10 minutes is `0 */10 * * * *`. This, in plain text, means: "When seconds is equal to 0, minutes is divisible by 10, for any hour, day of the month, month, day of the week, or year".**
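For example, under the same six-field format, a couple of alternative schedules (illustrative values, not defaults) would be:
```
0 */30 * * * *   # run every 30 minutes
0 0 * * * *      # run at the top of every hour
```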
4. Verify Temp folder path
```
a. Go to your Resource Group --> Click on Function App `docusign<<uniqueid>>`
b. Click on "Advanced Tools" under Development Tools
c. Click on Go --> You will be redirected to Web App --> Check Temp folder path.
d. It can be either C:\local\Temp\ or D:\local\Temp\.
```
5. After finding Temp folder path
```
a. Go to your Resource Group --> Click on Function App `docusign<<uniqueid>>`
b. Click on "Configuration" under Settings
c. Click on "TMPDIR" under "Application Settings"
d. Update Drive (C//D) based on your findings from Step 9.
```
**Note: Make sure the value in "TMPDIR" doesnt have "\\" at the end.**
4. To target your DocuSign Environment, update the Environment Variable "DocuSignEnvironment"
```
a. Go to your Resource Group --> Click on Function App `<<FunctionAppName>><<uniqueid>>`
b. Click on Function App "Configuration" under Settings
c. Click on "DocuSignEnvironment" under "Application Settings"
d. By Default its "Developer". Update your target environment
Ex: Developer or Production`
6. **For Azure Gov customers only**, You will see additional environment variable "Azure Tenant" under "Configuration" --> "Application Settings" and its default
value is ".us"
```
5. You can switch DocuSign Users ingestion On/Off using the "NeedDocuSignUsers" boolean Environment Variable
```
a. Go to your Resource Group --> Click on Function App `<<FunctionAppName>><<uniqueid>>`
b. Click on Function App "Configuration" under Settings
c. Click on "NeedDocuSignUsers" under "Application Settings"
d. By default its "True", you can set it "False" if you dont want DocuSign Users information into your LA Workspace
Currently this Function App supports "Azure Gov(.US)" tenants
Ex: https://portal.azure.us
```
6. To interact with the DocuSign UserInfo API, update the Environment Variable "DocuSignUserInfoBaseURI" with the base URI
```
a. Go to your Resource Group --> Click on Function App `<<FunctionAppName>><<uniqueid>>`
b. Click on Function App "Configuration" under Settings
c. Click on "DocuSignUserInfoBaseURI" under "Application Settings"
d. By Default its "https://demo.docusign.net" targeting Demo envionment `
```
7. You can edit/update LA Table Name for DocuSign SecurityEvents
```
a. Go to your Resource Group --> Click on Function App `<<FunctionAppName>><<uniqueid>>`
b. Click on Function App "Configuration" under Settings
c. Click on "LATableDSMAPI" under "Application Settings"
d. By default its "DocuSignSecurityEvents"
```
8. You can edit/update LA Table Name for DocuSign Users
```
a. Go to your Resource Group --> Click on Function App `<<FunctionAppName>><<uniqueid>>`
b. Click on Function App "Configuration" under Settings
c. Click on "LATableDSUsers" under "Application Settings"
d. By default its "DocuSignUsers"
```

View file

@ -9,11 +9,25 @@
"description": "Specifies the name of the Function App."
}
},
"DocuSignAccessToken": {
"defaultValue": "Enter the DocuSign OAuth Token",
"DocuSignIntegrationKey": {
"defaultValue": "Enter the DocuSign Integration Key",
"type": "string",
"metadata": {
"description": "Specifies DocuSign OAuth Token."
"description": "Specifies DocuSign Integration Key."
}
},
"DocuSignAdminUserGUID": {
"defaultValue": "Enter the DocuSign Admin User GUID",
"type": "string",
"metadata": {
"description": "Specifies DocuSign Admin User GUID"
}
},
"DocuSignAccountAPIID": {
"defaultValue": "Enter the DocuSign Account ID",
"type": "string",
"metadata": {
"description": "Specifies DocuSign Account ID"
}
},
"WorkspaceId": {
@ -29,28 +43,19 @@
"metadata": {
"description": "Specifies the Azure Log Analytics Workspace Key."
}
},
"CustomLogTableName": {
"type": "string",
"defaultValue": "DocuSignSecurityEvents",
"metadata": {
"description": "Specifies the Azure Log Analytics Custom Log Table Name."
}
},
"FunctionSchedule": {
"type": "string",
"defaultValue": "0 */10 * * * *",
"metadata": {
"description": "For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)(See the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 1 hour is `0 0 * * * *`. This, in plain text, means: When seconds is equal to 0, minutes is divisible by 5, for any hour, day of the month, month, day of the week, or year"
}
}
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"KeyVaultName": "[tolower(concat('docusignkv', uniqueString(resourceGroup().id, subscription().id)))]",
"DocuSignOAuthToken": "DocuSignOAuthToken",
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id), uniqueString(subscription().id))]",
"StorageAccountName":"[concat(substring(variables('FunctionName'), 0, 20), 'sa')]",
"KeyVaultName": "[concat(substring(variables('FunctionName'), 0, 20), 'kv')]",
"DocuSignIntegrationKey": "DocuSignIntegrationKey",
"DocuSignAdminUserGUID" : "DocuSignAdminUserGUID",
"DocuSignAccountAPIID" : "DocuSignAccountAPIID",
"LogAnalyticsWorkspaceKey": "LogAnalyticsWorkspaceKey",
"StorageContainerName": "docusign-monitor"
"StorageContainerName": "docusign-monitor",
"StorageSuffix":"[environment().suffixes.storage]",
"LogAnaltyicsUri":"[replace(environment().portal, 'https://portal', concat('https://', toLower(parameters('WorkspaceId')), '.ods.opinsights'))]"
},
"resources": [
{
@ -67,7 +72,7 @@
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"name": "[variables('StorageAccountName')]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
@ -119,9 +124,9 @@
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"name": "[concat(variables('StorageAccountName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('StorageAccountName')))]"
],
"sku": {
"name": "Standard_LRS",
@ -140,9 +145,9 @@
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"name": "[concat(variables('StorageAccountName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('StorageAccountName')))]"
],
"sku": {
"name": "Standard_LRS",
@ -161,7 +166,7 @@
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('StorageAccountName')))]",
"[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
@ -174,33 +179,44 @@
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true
"alwaysOn": true,
"siteConfig": {
"powerShellVersion": "~7"
}
},
"resources": [
{
"apiVersion": "2018-11-01",
"type": "config",
"name": "appsettings",
"name": "appsettings",
"dependsOn": [
"[concat('Microsoft.Web/sites/', variables('FunctionName'))]",
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('DocuSignOAuthToken'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('DocuSignIntegrationKey'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('DocuSignAdminUserGUID'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('DocuSignAccountAPIID'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('LogAnalyticsWorkspaceKey'))]"
],
"properties": {
"FUNCTIONS_EXTENSION_VERSION": "~3",
"FUNCTIONS_WORKER_RUNTIME": "powershell",
"FUNCTIONS_WORKER_RUNTIME": "powershell",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('StorageAccountName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('StorageAccountName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('StorageAccountName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('StorageAccountName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"WEBSITE_CONTENTSHARE": "[toLower(variables('FunctionName'))]",
"DocuSignOAuthAccessToken": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('DocuSignOAuthToken')).secretUriWithVersion, ')')]",
"TMPDIR": "D:\\local\\Temp",
"DocuSignIntegrationKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('DocuSignIntegrationKey')).secretUriWithVersion, ')')]",
"DocuSignAdminUserGUID": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('DocuSignAdminUserGUID')).secretUriWithVersion, ')')]",
"DocuSignAccountAPIID": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('DocuSignAccountAPIID')).secretUriWithVersion, ')')]",
"WorkspaceId": "[parameters('WorkspaceId')]",
"WorkspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('LogAnalyticsWorkspaceKey')).secretUriWithVersion, ')')]",
"Schedule": "[parameters('FunctionSchedule')]",
"CustomLogTableName": "[parameters('CustomLogTableName')]",
"Schedule": "0 */10 * * * *",
"LATableDSMAPI": "DocuSignSecurityEvents",
"LATableDSUsers": "DocuSignUsers",
"DocuSignEnvironment": "Developer",
"NeedDocuSignUsers":"True",
"LAURI": "[variables('LogAnaltyicsUri')]",
"DocuSignUserInfoBaseURI" : "https://demo.docusign.net",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/DocuSign-SecurityEvents/AzureFunctionDocuSignMonitor/DocuSignMonitorTimerTrigger.zip?raw=true"
}
}
@ -240,18 +256,48 @@
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('DocuSignOAuthToken')]",
"name": "[variables('DocuSignIntegrationKey')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('DocuSignAccessToken')]",
"value": "[parameters('DocuSignIntegrationKey')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
},
},
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('DocuSignAdminUserGUID')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('DocuSignAdminUserGUID')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
},
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('DocuSignAccountAPIID')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('DocuSignAccountAPIID')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
},
{
"type": "secrets",
"apiVersion": "2016-10-01",
@ -269,26 +315,13 @@
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.net')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"name": "[concat(variables('StorageAccountName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('StorageAccountName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"properties": {
"publicAccess": "None"
@ -297,10 +330,10 @@
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"name": "[concat(variables('StorageAccountName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('StorageAccountName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"properties": {
"publicAccess": "None"
@ -309,10 +342,10 @@
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), concat('/default/', variables('StorageContainerName')))]",
"name": "[concat(variables('StorageAccountName'), concat('/default/', variables('StorageContainerName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('StorageAccountName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"properties": {
"publicAccess": "None"
@ -321,10 +354,10 @@
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"name": "[concat(variables('StorageAccountName'), '/default/', tolower(variables('StorageAccountName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('StorageAccountName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('StorageAccountName'))]"
],
"properties": {
"shareQuota": 5120

View file

@ -1,335 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"FunctionName": {
"defaultValue": "DocuSign",
"type": "string",
"metadata": {
"description": "Specifies the name of the Function App."
}
},
"DocuSignAccessToken": {
"defaultValue": "Enter the DocuSign OAuth Token",
"type": "string",
"metadata": {
"description": "Specifies DocuSign OAuth Token."
}
},
"WorkspaceId": {
"type": "string",
"defaultValue": "<WorkspaceId>",
"metadata": {
"description": "Specifies the Azure Log Analytics Workspace Id."
}
},
"WorkspaceKey": {
"type": "string",
"defaultValue": "<WorkspaceKey>",
"metadata": {
"description": "Specifies the Azure Log Analytics Workspace Key."
}
},
"CustomLogTableName": {
"type": "string",
"defaultValue": "DocuSignSecurityEvents",
"metadata": {
"description": "Specifies the Azure Log Analytics Custom Log Table Name."
}
},
"FunctionSchedule": {
"type": "string",
"defaultValue": "0 */10 * * * *",
"metadata": {
"description": "For a `TimerTrigger` to work, you provide a schedule in the form of a [cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)(See the link for full details). A cron expression is a string with 6 separate expressions which represent a given schedule via patterns. The pattern we use to represent every 1 hour is `0 0 * * * *`. This, in plain text, means: When seconds is equal to 0, minutes is divisible by 5, for any hour, day of the month, month, day of the week, or year"
}
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"KeyVaultName": "[tolower(concat('docusignkv', uniqueString(resourceGroup().id, subscription().id)))]",
"DocuSignOAuthToken": "DocuSignOAuthToken",
"LogAnalyticsWorkspaceKey": "LogAnalyticsWorkspaceKey",
"StorageContainerName": "docusign-monitor"
},
"resources": [
{
"type": "Microsoft.Insights/components",
"apiVersion": "2015-05-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"kind": "web",
"properties": {
"Application_Type": "web",
"ApplicationId": "[variables('FunctionName')]"
}
},
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[tolower(variables('FunctionName'))]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"kind": "StorageV2",
"properties": {
"networkAcls": {
"bypass": "AzureServices",
"virtualNetworkRules": [
],
"ipRules": [
],
"defaultAction": "Allow"
},
"supportsHttpsTrafficOnly": true,
"encryption": {
"services": {
"file": {
"keyType": "Account",
"enabled": true
},
"blob": {
"keyType": "Account",
"enabled": true
}
},
"keySource": "Microsoft.Storage"
}
}
},
{
"type": "Microsoft.Web/serverfarms",
"apiVersion": "2018-02-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"sku": {
"name": "Y1",
"tier": "Dynamic"
},
"kind": "functionapp",
"properties": {
"name": "[variables('FunctionName')]",
"workerSize": "0",
"workerSizeId": "0",
"numberOfWorkers": "1"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": [
]
},
"deleteRetentionPolicy": {
"enabled": false
}
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]"
],
"sku": {
"name": "Standard_LRS",
"tier": "Standard"
},
"properties": {
"cors": {
"corsRules": [
]
}
}
},
{
"type": "Microsoft.Web/sites",
"apiVersion": "2018-11-01",
"name": "[variables('FunctionName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts', tolower(variables('FunctionName')))]",
"[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"[resourceId('Microsoft.Insights/components', variables('FunctionName'))]"
],
"kind": "functionapp",
"identity": {
"type": "SystemAssigned"
},
"properties": {
"name": "[variables('FunctionName')]",
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('FunctionName'))]",
"httpsOnly": true,
"clientAffinityEnabled": true,
"alwaysOn": true
},
"resources": [
{
"apiVersion": "2018-11-01",
"type": "config",
"name": "appsettings",
"dependsOn": [
"[concat('Microsoft.Web/sites/', variables('FunctionName'))]",
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('DocuSignOAuthToken'))]",
"[resourceId('Microsoft.KeyVault/vaults/secrets', variables('KeyVaultName'), variables('LogAnalyticsWorkspaceKey'))]"
],
"properties": {
"FUNCTIONS_EXTENSION_VERSION": "~3",
"FUNCTIONS_WORKER_RUNTIME": "powershell",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.usgovcloudapi.net')]",
"WEBSITE_CONTENTAZUREFILECONNECTIONSTRING": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=', listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.usgovcloudapi.net')]",
"WEBSITE_CONTENTSHARE": "[toLower(variables('FunctionName'))]",
"DocuSignOAuthAccessToken": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('DocuSignOAuthToken')).secretUriWithVersion, ')')]",
"TMPDIR": "D:\\local\\Temp",
"WorkspaceId": "[parameters('WorkspaceId')]",
"WorkspaceKey": "[concat('@Microsoft.KeyVault(SecretUri=', reference(variables('LogAnalyticsWorkspaceKey')).secretUriWithVersion, ')')]",
"Schedule": "[parameters('FunctionSchedule')]",
"CustomLogTableName": "[parameters('CustomLogTableName')]",
"AZURE_TENANT": ".us",
"WEBSITE_RUN_FROM_PACKAGE": "https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/DocuSign-SecurityEvents/AzureFunctionDocuSignMonitor/DocuSignMonitorTimerTrigger.zip?raw=true"
}
}
]
},
{
"type": "Microsoft.KeyVault/vaults",
"apiVersion": "2016-10-01",
"name": "[variables('KeyVaultName')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"sku": {
"family": "A",
"name": "Standard"
},
"tenantId": "[subscription().tenantId]",
"accessPolicies": [
{
"tenantId": "[subscription().tenantId]",
"objectId": "[reference(resourceId('Microsoft.Web/sites', variables('FunctionName')),'2019-08-01', 'full').identity.principalId]",
"permissions": {
"secrets": [ "get",
"list"
]
}
}
],
"enabledForDeployment": false,
"enabledForDiskEncryption": false,
"enabledForTemplateDeployment": true,
"enableSoftDelete": true
},
"resources": [
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('DocuSignOAuthToken')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('DocuSignAccessToken')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
},
{
"type": "secrets",
"apiVersion": "2016-10-01",
"name": "[variables('LogAnalyticsWorkspaceKey')]",
"dependsOn": [
"[resourceId('Microsoft.KeyVault/vaults/', variables('KeyVaultName'))]"
],
"properties": {
"value": "[parameters('WorkspaceKey')]",
"contentType": "string",
"attributes": {
"enabled": true
}
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.us')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-hosts')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/azure-webjobs-secrets')]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), concat('/default/', variables('StorageContainerName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/blobServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"publicAccess": "None"
}
},
{
"type": "Microsoft.Storage/storageAccounts/fileServices/shares",
"apiVersion": "2019-06-01",
"name": "[concat(variables('FunctionName'), '/default/', tolower(variables('FunctionName')))]",
"dependsOn": [
"[resourceId('Microsoft.Storage/storageAccounts/fileServices', variables('FunctionName'), 'default')]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('FunctionName'))]"
],
"properties": {
"shareQuota": 5120
}
}
]
}
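The template above keeps secret material out of the Function App configuration: the Key Vault access policy grants the site's system-assigned identity get/list on secrets, and the DocuSignOAuthAccessToken and WorkspaceKey app settings hold @Microsoft.KeyVault(SecretUri=...) references that App Service resolves at runtime. From the Python function's perspective the resolved values arrive as ordinary environment variables. The packaged DocuSignMonitorTimerTrigger code is not part of this diff, so the snippet below is only a minimal sketch of that consumption pattern; the setting names mirror the app settings defined above.

import os

# Minimal sketch, not the packaged DocuSignMonitor code: App Service resolves the
# @Microsoft.KeyVault(SecretUri=...) references before the Python worker starts,
# so the function simply reads the resolved values from its environment.
workspace_id = os.environ["WorkspaceId"]
workspace_key = os.environ["WorkspaceKey"]                # backed by the LogAnalyticsWorkspaceKey secret
docusign_token = os.environ["DocuSignOAuthAccessToken"]   # backed by the DocuSign token secret
table_name = os.environ["CustomLogTableName"]
schedule = os.environ["Schedule"]                         # CRON expression used by the timer trigger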

Binary files added (image content is not shown in this diff view):
DataConnectors/DocuSign-SecurityEvents/images/2IntegrationKey.png (new file, 57 KiB)
(additional image file, 30 KiB; path not shown in this view)
DataConnectors/DocuSign-SecurityEvents/images/RSAKeyPair.png (new file, 31 KiB)
DataConnectors/DocuSign-SecurityEvents/images/consent.png (new file, 26 KiB)

View file

@@ -135,7 +135,7 @@
},
{
"title": "Step 3 - Deploy the Azure Resource Manager (ARM) Template",
"description": "Use this method for automated deployment of the ESET Enterprise Inspector connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelesetenterpriseinspectorazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , enter the **Enterprise Inspector base URL** and the **first ID** to start ingesting detections from.\n - The defailt starting ID is **0**. This means that all detections will be ingested. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
"description": "Use this method for automated deployment of the ESET Enterprise Inspector connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESETEnterpriseInspector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , enter the **Enterprise Inspector base URL** and the **first ID** to start ingesting detections from.\n - The defailt starting ID is **0**. This means that all detections will be ingested. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."
}
]
}
}

View file

@@ -5,7 +5,8 @@
"FunctionName": {
"defaultValue": "EsetEI",
"type": "string",
"maxLength": 24
"minLength": 1,
"maxLength": 11
},
"WorkspaceID": {
"type": "string",
@@ -47,7 +48,9 @@
}
},
"variables": {
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]"
"FunctionName": "[concat(toLower(parameters('FunctionName')), uniqueString(resourceGroup().id))]",
"StorageSuffix": "[environment().suffixes.storage]",
"LogAnaltyicsUri": "[replace(environment().portal, 'https://portal', concat('https://', toLower(parameters('WorkspaceID')), '.ods.opinsights'))]"
},
"resources": [
{
@@ -169,7 +172,7 @@
"FUNCTIONS_WORKER_RUNTIME": "python",
"APPINSIGHTS_INSTRUMENTATIONKEY": "[reference(resourceId('Microsoft.insights/components', variables('FunctionName')), '2015-05-01').InstrumentationKey]",
"APPLICATIONINSIGHTS_CONNECTION_STRING": "[reference(resourceId('microsoft.insights/components', variables('FunctionName')), '2015-05-01').ConnectionString]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=core.windows.net')]",
"AzureWebJobsStorage": "[concat('DefaultEndpointsProtocol=https;AccountName=', toLower(variables('FunctionName')),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', toLower(variables('FunctionName'))), '2019-06-01').keys[0].value, ';EndpointSuffix=',toLower(variables('StorageSuffix')))]",
"workspaceId": "[parameters('WorkspaceId')]",
"workspaceKey": "[parameters('WorkspaceKey')]",
"baseUrl": "[parameters('baseUrl')]",
@@ -178,24 +181,12 @@
"domainLogin": "[parameters('domainLogin')]",
"verifySsl": "[parameters('verifySsl')]",
"startFromID": "[parameters('startFromID')]",
"logAnalyticsUri": "[variables('LogAnaltyicsUri')]",
"WEBSITE_RUN_FROM_PACKAGE": "https://aka.ms/sentinel-esetenterpriseinspector-functionapp"
}
}
]
},
{
"type": "Microsoft.Web/sites/hostNameBindings",
"apiVersion": "2018-11-01",
"name": "[concat(variables('FunctionName'), '/', variables('FunctionName'), '.azurewebsites.net')]",
"location": "[resourceGroup().location]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', variables('FunctionName'))]"
],
"properties": {
"siteName": "[variables('FunctionName')]",
"hostNameType": "Verified"
}
},
{
"type": "Microsoft.Storage/storageAccounts/blobServices/containers",
"apiVersion": "2019-06-01",

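The changes to the ESET template above make it cloud-agnostic: the hard-coded .azurewebsites.net host-name binding is removed, the storage connection string takes its suffix from environment().suffixes.storage, and a new logAnalyticsUri app setting points the function at the workspace's own ODS endpoint instead of a fixed *.ods.opinsights.azure.com host. The packaged function code is not shown in this diff, so the sketch below only illustrates, as an assumption, how such a setting is typically consumed with the Azure Monitor HTTP Data Collector API signing scheme used by many Function-based connectors; the function and variable names are illustrative.

import base64, datetime, hashlib, hmac, os
import requests

def post_to_log_analytics(body: bytes, log_type: str) -> int:
    # Sketch of a Data Collector API call; logAnalyticsUri comes from the app
    # setting added above, e.g. https://<workspaceId>.ods.opinsights.azure.us
    workspace_id = os.environ["workspaceId"]
    shared_key = os.environ["workspaceKey"]
    uri = os.environ["logAnalyticsUri"].rstrip("/") + "/api/logs?api-version=2016-04-01"

    rfc1123_date = datetime.datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")
    string_to_sign = f"POST\n{len(body)}\napplication/json\nx-ms-date:{rfc1123_date}\n/api/logs"
    signature = base64.b64encode(
        hmac.new(base64.b64decode(shared_key),
                 string_to_sign.encode("utf-8"),
                 hashlib.sha256).digest()).decode()

    headers = {
        "Content-Type": "application/json",
        "Authorization": f"SharedKey {workspace_id}:{signature}",
        "Log-Type": log_type,            # data lands in the <log_type>_CL custom table
        "x-ms-date": rfc1123_date,
    }
    return requests.post(uri, data=body, headers=headers).status_code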
Some files in this commit are not shown because too many files changed.