Merge pull request #172 from Azure/zhzhao8888/feature/forensics_mod
Initial check in for forensics python modules
This commit is contained in:
Коммит
df24aff12f
|
@ -0,0 +1,18 @@
|
|||
Azure-Sentinel-Utilities Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
MIT License Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
@ -0,0 +1,16 @@
|
|||
# $Id: README.txt 2019-06-05 $
|
||||
# Author: Zhipeng Zhao <zhzhao@microsoft.com>
|
||||
# Copyright: .
|
||||
|
||||
"""
|
||||
AZURE SENTINEL NOTEBOOKS PYTHON TOOLS
|
||||
======================================
|
||||
This package is developed to support Azure Sentinel Notebooks. It is in an early preview stage, so please provide feedback, report bugs, and suggest new features.
|
||||
|
||||
INSTALL
|
||||
=======
|
||||
!pip install Azure-Sentinel-Utilities
|
||||
|
||||
"""
|
||||
|
||||
__docformat__ = 'restructuredtext'
|
|
@ -0,0 +1,7 @@
|
|||
"""
|
||||
SentinelAnomalyLookup: This package is developed for Azure Sentinel Anomaly lookup
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .anomaly_lookup import *
|
||||
from .anomaly_lookup_view_helper import *
|
|
@ -0,0 +1,241 @@
|
|||
"""
|
||||
Anomaly Lookup:
|
||||
This module provides process flow functions for anomaly lookup. Method - run is the main entry point.
|
||||
It has dependency on .NET library: Microsoft.Azure.CIS.Notebooks.AnomalyLookup.
|
||||
"""
|
||||
|
||||
import clr
|
||||
clr.AddReference("System")
|
||||
clr.AddReference('Newtonsoft.Json')
|
||||
clr.AddReference("Microsoft.Azure.CIS.Notebooks.AnomalyLookup")
|
||||
|
||||
import datetime as dt
|
||||
import pandas as pd
|
||||
from pandas.io.json import json_normalize
|
||||
import copy
|
||||
import sys
|
||||
import json
|
||||
import ipywidgets as widgets
|
||||
from ipywidgets import Button, GridBox, Layout, ButtonStyle
|
||||
from azure.loganalytics import LogAnalyticsDataClient
|
||||
from azure.loganalytics.models import QueryBody
|
||||
from System import *
|
||||
from Microsoft.Azure.CIS.Notebooks.AnomalyLookup import *
|
||||
from Microsoft.Azure.CIS.Notebooks.LogHelper import *
|
||||
from .anomaly_lookup_view_helper import *
|
||||
|
||||
|
||||
class AnomalyLookup(object):
    """Anomaly lookup process flow for an Azure Sentinel Log Analytics workspace.

    ``run`` is the main entry point.  Relies on the .NET helpers ``KqlLibrary``
    and ``AILogger`` loaded through pythonnet (``clr``), and on an
    ``azure.loganalytics.LogAnalyticsDataClient`` for query execution.
    ``display`` is assumed to be the IPython/Jupyter display function
    (notebook context) — TODO confirm.
    """

    def __init__(self, workspace_id, la_data_client):
        # workspace_id: Log Analytics workspace (customer) id.
        # la_data_client: authenticated LogAnalyticsDataClient used for all queries.
        self.workspace_id = workspace_id
        self.la_data_client = la_data_client
        self.logger = AILogger()  # fixed: dropped stray trailing semicolon

    def query_table_list(self):
        """Return a DataFrame listing the tables available in the workspace."""
        return self.query_loganalytics(KqlLibrary.ListTables())

    def query_loganalytics(self, query):
        """Run a KQL query and return the first result table as a DataFrame."""
        res = self.la_data_client.query(self.workspace_id, QueryBody(query=query))
        # fixed: the local used to be named `json`, shadowing the imported module
        payload = res.as_dict()
        cols = json_normalize(payload['tables'][0], 'columns')
        df = json_normalize(payload['tables'][0], 'rows')
        if df.shape[0] != 0:
            df.columns = cols.name
        return df

    def construct_related_queries(self, dfAnomalies):
        """Build one concatenated KQL string retrieving the raw rows behind each anomaly.

        Returns None when dfAnomalies is empty.
        """
        if dfAnomalies.shape[0] == 0:
            return None

        queries = ''
        for tbl in dfAnomalies.Table.unique():
            # fixed: .ix was removed in modern pandas; .loc is the label-based equivalent
            curTableAnomalies = dfAnomalies.loc[dfAnomalies.Table == tbl, :]
            query = """{tbl} | where TimeGenerated > ago(30d) | where ingestion_time() > datetime({maxTimestamp})-1d and ingestion_time() < datetime({maxTimestamp}) | where {entCol} has "{qEntity}" | where """.format(**{
                'tbl': tbl,
                'qTimestamp': curTableAnomalies.qTimestamp.iloc[0],
                'maxTimestamp': curTableAnomalies.maxTimestamp.iloc[0],
                'entCol': curTableAnomalies.entCol.iloc[0],
                'qEntity': curTableAnomalies.qEntity.iloc[0]
            })

            for j, row in curTableAnomalies.iterrows():
                query += " {col} == to{colType}(\"{colVal}\") or".format(**{
                    'col': row.colName,
                    # default to string when the anomaly row carries no colType
                    'colType': (row.colType) if 'colType' in row.keys() else 'string',
                    'colVal': row.colVal
                })

            query = query[:-2]  # drop the last or
            query += " | take 1000"  # limit the output size
            query = query.replace("\\", "\\\\")  # escape backslashes for KQL

            queries += query
        return queries

    def get_timewindow(self, qEntity, qTimestamp, entCol, tbl):
        """Find the relevant (min, delta, max, long-min) analysis time window.

        Scans backwards day by day (then hour by hour for very recent activity)
        until the entity appears in the table.  All four values are None when
        the entity is not found in the scanned ranges.
        """
        winStart = 0
        minTimestamp = None
        delta = None
        maxTimestamp = None
        longMinTimestamp = None
        # day-granularity scan over the last 30 days
        for f in range(-30, 0, 1):
            dfTimeRange = self.query_loganalytics(KqlLibrary.TimeWindowQuery(tbl, qTimestamp, entCol, qEntity, f, f + 1, 'd'))
            if dfTimeRange.shape[0] > 0:
                winStart = f
                break

        dtQTimestamp = pd.to_datetime(qTimestamp)
        ind2now = dt.datetime.utcnow() - dtQTimestamp
        if winStart < -3:
            # enough day-level history: keep day granularity
            if ind2now > dt.timedelta(days=1):
                delta = '1d'
                maxTimestamp = dtQTimestamp + dt.timedelta(days=1)
            else:
                delta = '1d'
                maxTimestamp = dt.datetime.now()
            longMinTimestamp = maxTimestamp + dt.timedelta(days=winStart)
            minTimestamp = maxTimestamp + dt.timedelta(days=max([-6, winStart]))
        elif winStart < 0:  # switch to hours
            winStartH = -5
            for f in range(-3 * 24, -5, 1):
                dfTimeRange = self.query_loganalytics(KqlLibrary.TimeWindowQuery(tbl, qTimestamp, entCol, qEntity, f, f + 1, 'h'))
                if dfTimeRange.shape[0] > 0:
                    winStartH = f
                    break
            if winStartH < -5:
                if ind2now > dt.timedelta(hours=1):
                    delta = '1h'
                    maxTimestamp = dtQTimestamp + dt.timedelta(hours=1)
                else:
                    delta = '1h'
                    maxTimestamp = dt.datetime.now()
                minTimestamp = maxTimestamp + dt.timedelta(hours=winStartH)
                longMinTimestamp = minTimestamp

        return minTimestamp, delta, maxTimestamp, longMinTimestamp

    def run(self, qTimestamp, qEntity, tables):
        """Main entry point: look up anomalies for qEntity around qTimestamp.

        tables may be an empty list, in which case all workspace tables are
        scanned.  Returns (anomalies DataFrame, related-queries KQL string).
        """
        progress_bar = AnomalyLookupViewHelper.define_int_progress_bar()
        display(progress_bar)  # notebook-only: IPython display

        # list tables if not given
        if len(tables) == 0:
            tables = self.query_loganalytics(KqlLibrary.ListTables())
            tables = tables.TableName.tolist()

        progress_bar.value += 1

        # find the column in which the query entity appears in each table -
        # assumption that it appears in just one column
        tables2search = []
        for tbl in tables:
            print(tbl)  # progress feedback in the notebook
            entInTable = self.query_loganalytics(KqlLibrary.IsEntityInTable(tbl, qTimestamp, qEntity))
            if entInTable.shape[0] > 0:
                # fixed: .ix -> .loc; `is not None` over the type(None) comparison
                entCol = [col for col in entInTable.select_dtypes('object').columns[1:] if
                          entInTable.loc[0, col] is not None and entInTable.loc[:, col].str.contains(qEntity,
                                                                                                    case=False).all()]
                if len(entCol) > 0:
                    entCol = entCol[0]
                    tables2search.append({'table': tbl, 'entCol': entCol})

        progress_bar.value += 2

        # for each table, find the time window to query on
        for tbl in tables2search:
            tbl['minTimestamp'], tbl['delta'], tbl['maxTimestamp'], tbl['longMinTimestamp'] = self.get_timewindow(qEntity, qTimestamp, tbl['entCol'], tbl['table'])

        progress_bar.value += 1

        # identify all the categorical columns per table on which we will find anomalies
        categoricalCols = []
        for tbl in tables2search:
            dfCols = self.query_loganalytics(KqlLibrary.IsCatColumn(tbl['table']))
            for col in dfCols.ColumnName:
                dfIsCat = self.query_loganalytics(KqlLibrary.IsCatHeuristic(tbl['table'], col))
                if dfIsCat.shape[0] > 0:
                    catColInfo = copy.deepcopy(tbl)
                    catColInfo['col'] = col
                    categoricalCols.append(catColInfo)

        progress_bar.value += 2

        # find short-term anomalous categories
        shortTermAnomalies = []
        for colInfo in categoricalCols:
            minTime = colInfo['minTimestamp'].strftime('%Y-%m-%dT%H:%M:%S.%f')
            maxTime = colInfo['maxTimestamp'].strftime('%Y-%m-%dT%H:%M:%S.%f')
            curAnomalies = self.query_loganalytics(KqlLibrary.AvgStdAnomalyDetection(colInfo['table'], colInfo['col'], colInfo['entCol'], qEntity, minTime, maxTime, qTimestamp, colInfo['delta']))
            for j, row in curAnomalies.iterrows():
                stAnomalyInfo = copy.deepcopy(colInfo)
                stAnomalyInfo['colType'] = row['colType']
                # escape backslashes and double quotes so the value is safe inside KQL
                stAnomalyInfo['colVal'] = row['colVal'].replace("\\", "\\\\").replace('"', "'")
                stAnomalyInfo['befAvg'] = row['befAvg']
                stAnomalyInfo['befStd'] = row['befStd']
                stAnomalyInfo['aftAvg'] = row['aftAvg']
                stAnomalyInfo['dist'] = row['dist']
                shortTermAnomalies.append(stAnomalyInfo)

        progress_bar.value += 2

        # find long-term anomalous categories
        longTermAnomalies = []
        for anomalousCat in shortTermAnomalies:
            minTimestamp = anomalousCat['minTimestamp'].strftime('%Y-%m-%dT%H:%M:%S.%f')
            maxTimestamp = anomalousCat['maxTimestamp'].strftime('%Y-%m-%dT%H:%M:%S.%f')
            longMinTimestamp = anomalousCat['longMinTimestamp'].strftime('%Y-%m-%dT%H:%M:%S.%f')
            curLongTermAnomalies = self.query_loganalytics(KqlLibrary.LongWinAnomalyDetection(
                anomalousCat['table'],
                anomalousCat['col'],
                anomalousCat['entCol'],
                qEntity,
                anomalousCat['colType'],
                anomalousCat['colVal'],
                minTimestamp,
                maxTimestamp,
                qTimestamp,
                longMinTimestamp,
                anomalousCat['befAvg'],
                anomalousCat['befStd'],
                anomalousCat['aftAvg'],
                anomalousCat['dist'],
                anomalousCat['delta']))
            longTermAnomalies.append(curLongTermAnomalies)

        if len(longTermAnomalies) > 0:
            anomalies = pd.concat(longTermAnomalies, axis=0)
        else:
            anomalies = pd.DataFrame()

        progress_bar.value += 1
        queries = self.construct_related_queries(anomalies)
        progress_bar.close()
        # keep the JSON-serialized result for the feedback logger
        self.anomaly = str(anomalies.to_json(orient='records'))

        return anomalies, queries

    def is_result_true_positive(self, button):
        """Button callback: persist the analyst's true/false-positive verdict."""
        val = self.is_tp.value
        if val:
            result = self.logger.IsResultTruePositive('AnomalyLookup', val, self.anomaly)
            if result:  # fixed: `== True` comparison
                print('saved')

    def ask_is_entity_compromised(self):
        """Render the 'Is this entity compromised?' radio buttons and a Save button."""
        label_tp = widgets.Label(value='Is this entity compromised?')
        self.is_tp = widgets.RadioButtons(options=['Yes', 'No'], value=None, description='', disabled=False)
        save_tp = widgets.Button(description='Save', disabled=False, style=AnomalyLookupViewHelper.define_button_style(), layout=AnomalyLookupViewHelper.define_button_layout(), icon='save')
        save_tp.on_click(self.is_result_true_positive)
        display(label_tp)
        display(self.is_tp)
        display(save_tp)
|
|
@ -0,0 +1,18 @@
|
|||
"""
|
||||
Anomaly Lookup View Helper:
|
||||
This module provides helper methods for UI components.
|
||||
"""
|
||||
|
||||
import os
|
||||
import ipywidgets as widgets
|
||||
from ipywidgets import Button, GridBox, Layout, ButtonStyle, IntProgress
|
||||
|
||||
class AnomalyLookupViewHelper(object):
    """Factory helpers for the ipywidgets UI components used by AnomalyLookup."""

    @staticmethod
    def define_int_progress_bar():
        """Return the 0-10 progress bar shown while the lookup runs.

        FIX: the original methods had neither `self` nor @staticmethod, so they
        worked only when called on the class and crashed on an instance.
        """
        return widgets.IntProgress(value=0, min=0, max=10, step=1, description='Loading:', bar_style='success', orientation='horizontal', position='top')

    @staticmethod
    def define_button_style():
        """Return the shared button style (white background, blue font)."""
        return ButtonStyle(button_color='#FFF', font_color='blue')

    @staticmethod
    def define_button_layout():
        """Return the shared button layout (auto width, thin bordered bar)."""
        return Layout(width='auto', height='27px', border='2px solid black')
|
|
@ -0,0 +1,11 @@
|
|||
"""
|
||||
SentinelAzure: This package is developed for initializing and manipulating Python client objects of Azure Resource Management
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .azure_aad_helper import *
|
||||
from .azure_compute_helper import *
|
||||
from .azure_network_helper import *
|
||||
from .azure_resource_helper import *
|
||||
from .azure_storage_helper import *
|
||||
from .azure_loganalytics_helper import *
|
|
@ -0,0 +1,37 @@
|
|||
"""
|
||||
Azure AAD Helper:
|
||||
This module provides helper methods to authenticate users into ARM using either 'service principal' or 'user id/password'.
|
||||
"""
|
||||
|
||||
from azure.common.credentials import ServicePrincipalCredentials
|
||||
from azure.common.credentials import UserPassCredentials
|
||||
from azure.mgmt.resource import ResourceManagementClient
|
||||
from azure.mgmt.compute import ComputeManagementClient
|
||||
from azure.mgmt.network import NetworkManagementClient
|
||||
from azure.mgmt.storage import StorageManagementClient
|
||||
|
||||
class AADHelper:
    """Authenticate to Azure Resource Manager and build management clients.

    Supported auth methods: 'Service Principal' and 'User ID Password'.
    """

    def authenticate(self, auth_method, **kwargs):
        """Authenticate and return (compute, network, resource, storage) clients.

        kwargs must include 'subscription_id' plus the fields required by
        `get_credentials` for the chosen method.  Raises ValueError when
        auth_method is not recognized.
        """
        creds = self.get_credentials(auth_method, **kwargs)
        return self.initialize_azure_clients(kwargs['subscription_id'], creds)

    def get_credentials(self, auth_method, **kwargs):
        """Return ARM credentials for the given method, or None when unknown.

        'Service Principal' needs client_id / secret / tenant_id;
        'User ID Password' needs user_id / password.
        """
        if auth_method == 'Service Principal':
            credentials = ServicePrincipalCredentials(client_id=kwargs['client_id'], secret=kwargs['secret'], tenant=kwargs['tenant_id'])
        elif auth_method == 'User ID Password':
            credentials = UserPassCredentials(username=kwargs['user_id'], password=kwargs['password'])
        else:
            credentials = None
        return credentials

    def initialize_azure_clients(self, subscription_id, credentials):
        """Build the four ARM management clients for the subscription.

        FIX: the original raised UnboundLocalError when credentials was None
        (the `return` referenced variables assigned only inside the `if`);
        fail fast with a clear error instead.
        """
        if credentials is None:
            raise ValueError('credentials must not be None; check the authentication method and its arguments')
        resource_client = ResourceManagementClient(credentials, subscription_id)
        compute_client = ComputeManagementClient(credentials, subscription_id)
        network_client = NetworkManagementClient(credentials, subscription_id)
        storage_client = StorageManagementClient(credentials, subscription_id)
        return compute_client, network_client, resource_client, storage_client

# end of the class
|
|
@ -0,0 +1,237 @@
|
|||
"""
|
||||
Azure Compute Helper:
|
||||
This module provides helper methods to initialize and manipulate ComputeClient object.
|
||||
VM, VM Extension, and VM snapshots are the focal points.
|
||||
"""
|
||||
|
||||
from azure.mgmt.compute.models import DiskCreateOption
|
||||
from azure.mgmt.compute import ComputeManagementClient
|
||||
import azure.mgmt.compute.models
|
||||
import json
|
||||
import pandas as pd
|
||||
import requests
|
||||
|
||||
|
||||
|
||||
class VMExtensionProperties:
    """Base container for the settings needed to deploy a VM custom-script extension.

    Subclasses fill in the OS-specific publisher/type/version values; the base
    class only establishes neutral defaults.
    """

    def __init__(self):
        # Identity of the extension handler (filled in by subclasses).
        self.type_publisher = ''
        self.extension_type = ''
        self.type_handler_version = ''
        self.api_version = ''
        # Target resource type is always a virtual machine.
        self.resource_type = 'virtualMachines'
        # Script payload: the command itself plus the files it needs.
        self.command_key = 'commandToExecute'
        self.command_to_execute = ''
        self.file_uris = []
        # Settings dictionaries sent to ARM; protected settings stay encrypted.
        self.protected_settings = {}
        self.settings = {}
|
||||
|
||||
class WindowsVMExtensionProperties(VMExtensionProperties):
    """Custom-script extension settings for Windows VMs."""

    def __init__(self, command_to_execute, file_uris):
        super(WindowsVMExtensionProperties, self).__init__()
        # Publisher/type pair identifies the Windows CustomScriptExtension handler.
        self.type_publisher = 'Microsoft.Compute'
        self.extension_type = 'CustomScriptExtension'
        self.type_handler_version = '1.9'
        self.api_version = '2018-06-01'
        # Script payload supplied by the caller.
        self.command_to_execute = command_to_execute
        self.file_uris = file_uris
        # The command goes into protected settings so it is not exposed in plain text.
        self.protected_settings[self.command_key] = command_to_execute
        self.settings['fileUris'] = file_uris
|
||||
|
||||
class LinuxVMExtensionProperties(VMExtensionProperties):
    """Custom-script extension settings for Linux VMs."""

    def __init__(self, command_to_execute, file_uris):
        super(LinuxVMExtensionProperties, self).__init__()
        # Publisher/type pair identifies the Linux CustomScript handler.
        self.type_publisher = 'Microsoft.Azure.Extensions'
        self.extension_type = 'CustomScript'
        self.type_handler_version = '2.0'
        self.api_version = '2015-06-15'
        # Script payload supplied by the caller.
        self.command_to_execute = command_to_execute
        self.file_uris = file_uris
        # The command goes into protected settings so it is not exposed in plain text.
        self.protected_settings[self.command_key] = command_to_execute
        self.settings['fileUris'] = file_uris
|
||||
|
||||
class ComputeHelper:
    """Wraps ComputeManagementClient: VM, VM-extension and snapshot operations."""

    def __init__(self, compute_client, resource_group):
        # compute_client: authenticated ComputeManagementClient.
        # resource_group: name of the resource group containing the target VMs.
        self.compute_client = compute_client
        self.resource_group = resource_group

    def get_vm_disk_names(self, vm_name):
        """Return the names of all disks attached to the VM ([] when unavailable)."""
        vm = self.compute_client.virtual_machines.get(self.resource_group, vm_name, expand='instanceView')
        if vm is not None and vm.instance_view is not None:
            return [d.name for d in vm.instance_view.disks]
        return []

    def create_snapshot_async(self, **kwargs):
        """Create a copy-snapshot of a managed disk and wait for it.

        kwargs: selected_disk (disk name), snapshot_name.
        """
        managed_disk = self.compute_client.disks.get(self.resource_group, kwargs['selected_disk'])
        async_snapshot_creation = self.compute_client.snapshots.create_or_update(
            self.resource_group,
            kwargs['snapshot_name'],
            {
                'location': managed_disk.location,
                'creation_data': {
                    'create_option': 'Copy',
                    'source_uri': managed_disk.id
                }
            }
        )
        return async_snapshot_creation.result()

    def generate_snapshot_sas_url_async(self, **kwargs):
        """Grant read access to the snapshot and return its SAS URL.

        kwargs: snapshot_name, int_seconds (access duration).
        """
        async_snapshot_export = self.compute_client.snapshots.grant_access(
            self.resource_group,
            kwargs['snapshot_name'],
            'read',
            kwargs['int_seconds'])
        result = async_snapshot_export.result()
        return result.access_sas

    def create_snapshot_and_generate_sas_url(self, **kwargs):
        """Create a snapshot and return its SAS URL; None when creation fails."""
        snapshot = self.create_snapshot_async(**kwargs)
        if snapshot is not None and snapshot.provisioning_state == 'Succeeded':
            return self.generate_snapshot_sas_url_async(**kwargs)
        return None

    def get_vm_list(self):
        """Return all VMs in the subscription."""
        return self.compute_client.virtual_machines.list_all()

    def get_vm_and_vm_extensions(self, vm_name):
        """Return (vm, extensions) for the VM, or (None, None) when not found."""
        vm = self.compute_client.virtual_machines.get(self.resource_group, vm_name, expand='instanceView')
        if vm is not None:
            return vm, vm.instance_view.extensions
        return None, None

    def get_vm(self, vm_name):
        """Return the VM model without the instance view."""
        return self.compute_client.virtual_machines.get(self.resource_group, vm_name)

    def has_vm_extensions(self, vm):
        """True when the VM instance view reports an extensions list."""
        try:
            return vm.instance_view.extensions is not None
        except AttributeError:  # fixed: bare except; only a missing attribute is expected
            return False

    def get_customscript_extensions(self, vm):
        """Return the CustomScript extensions on the VM (possibly empty), or None."""
        try:
            exts = vm.instance_view.extensions
            if exts is not None:
                return [ext for ext in exts if ext.type == 'Microsoft.Azure.Extensions.CustomScript']
            return None
        except AttributeError:  # fixed: bare except
            return None

    def delete_vm_extension_async(self, vm_name, vm_extension_name):
        """Delete a VM extension and wait for the operation to finish."""
        async_vm_extension_delete = self.compute_client.virtual_machine_extensions.delete(self.resource_group, vm_name, vm_extension_name)
        return async_vm_extension_delete.result()

    def check_vm_extension_installability(self, vm):
        """Return (ok, reason) telling whether a custom-script extension can be installed."""
        if not self.has_vm_agent(vm):
            return False, 'No guest agent on the VM, VM Extension cannot be installed'
        exts = self.get_customscript_extensions(vm)
        # BUG FIX: the original tested `exts is not None`, but an empty list
        # (no CustomScript extension installed) is also "not None" and wrongly
        # blocked installation.  Test for a non-empty list instead.
        if exts:
            return False, 'VM has custom script extension installed already, need to delete the VM extension first to continue'
        return True, ""

    def has_vm_agent(self, vm):
        """True when the VM reports a guest agent in its instance view."""
        try:
            return vm.instance_view.vm_agent is not None
        except AttributeError:  # fixed: bare except
            return False

    def initialize_vm_extension(self, vm_extension_properties, vm_location):
        """Build a VirtualMachineExtension model from a VMExtensionProperties bundle."""
        vm_extension = azure.mgmt.compute.models.VirtualMachineExtension(
            location=vm_location,
            publisher=vm_extension_properties.type_publisher,
            virtual_machine_extension_type=vm_extension_properties.extension_type,
            type_handler_version=vm_extension_properties.type_handler_version,
            auto_upgrade_minor_version=True,
            settings=vm_extension_properties.settings,
            protected_settings=vm_extension_properties.protected_settings
        )
        return vm_extension

    def create_vm_extension_async(self, vm_name, vm_extension_name, vm_extension):
        """Install (or update) the extension on the VM and wait for the result."""
        async_vm_extension_creation = self.compute_client.virtual_machine_extensions.create_or_update(
            self.resource_group,
            vm_name,
            vm_extension_name,
            vm_extension
        )
        return async_vm_extension_creation.result()

    def get_uploaded_result(self, upload_container_path):
        """Download the JSON document uploaded by the extension; None on any failure.

        Strips any garbage before the first '{' (scripts may echo extra text).
        """
        try:
            response = requests.get(upload_container_path)
            response.encoding = response.apparent_encoding
            start_of_json = response.text.index('{')
            raw_json = response.text[start_of_json:]
            return json.loads(raw_json)
        except Exception as e:
            # best-effort download; report and return None
            print(e)
            return None

    def create_availability_set(self, vm_location, avset_name='myAVSet'):
        """Create an 'Aligned' availability set in the helper's resource group.

        BUG FIX: the original referenced undefined names (`vm_location`, `this`,
        `group_name`) and always raised NameError; it now takes the location as
        a parameter, uses self, and returns the operation poller.
        """
        avset_params = {
            'location': vm_location,
            'sku': {'name': 'Aligned'},
            'platform_fault_domain_count': 3
        }
        return self.compute_client.availability_sets.create_or_update(
            self.resource_group,
            avset_name,
            avset_params
        )

    def create_vm(self, nic, **kwargs):
        """Create a VM from a VHD and block until creation completes.

        kwargs: vm_name, vm_location, user_name, password, snapshot_name,
        os_type, stroage_account_name, blob_container_name.
        NOTE(review): the 'stroage_account_name' key is misspelled but kept
        as-is, since existing callers pass it under that name.
        """
        create_vm_async = self.compute_client.virtual_machines.create_or_update(
            self.resource_group,
            kwargs['vm_name'],
            azure.mgmt.compute.models.VirtualMachine(
                location=kwargs['vm_location'],
                os_profile=azure.mgmt.compute.models.OSProfile(
                    admin_username=kwargs['user_name'],
                    admin_password=kwargs['password'],
                    computer_name=kwargs['vm_name'],
                ),
                hardware_profile=azure.mgmt.compute.models.HardwareProfile(
                    vm_size=azure.mgmt.compute.models.VirtualMachineSizeTypes.standard_b2s
                ),
                network_profile=azure.mgmt.compute.models.NetworkProfile(
                    network_interfaces=[
                        azure.mgmt.compute.models.NetworkInterfaceReference(
                            id=nic.id,
                            primary=True
                        ),
                    ],
                ),
                storage_profile=azure.mgmt.compute.models.StorageProfile(
                    os_disk=azure.mgmt.compute.models.OSDisk(
                        caching=azure.mgmt.compute.models.CachingTypes.none,
                        create_option=azure.mgmt.compute.models.DiskCreateOptionTypes.from_image,
                        name=kwargs['snapshot_name'],
                        os_type=kwargs['os_type'],
                        vhd=azure.mgmt.compute.models.VirtualHardDisk(
                            uri='https://{0}.blob.core.windows.net/{1}/forensics.vhd'.format(kwargs['stroage_account_name'], kwargs['blob_container_name'])
                        ),
                        image=azure.mgmt.compute.models.VirtualHardDisk(
                            uri='https://{0}.blob.core.windows.net/{1}/abcd.vhd'.format(kwargs['stroage_account_name'], kwargs['blob_container_name'])
                        ),
                    ),
                ),
            ),
        )
        create_vm_async.wait()

    def delete_vm_async(self, vm_name):
        """Delete the VM and wait for the operation result."""
        async_vm_delete = self.compute_client.virtual_machines.delete(self.resource_group, vm_name)
        return async_vm_delete.result()

# end of the class
|
|
@ -0,0 +1,19 @@
|
|||
"""
|
||||
Azure LogAnalytics Helper:
|
||||
This module provides helper methods to initialize and manipulate LogAnalyticsManagementClient object.
|
||||
Workspace is the focal point.
|
||||
"""
|
||||
|
||||
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
|
||||
|
||||
class LogAnalyticsHelper(object):
    """Thin wrapper around LogAnalyticsManagementClient; workspaces are the focal point."""

    def __init__(self, la_client):
        # la_client: an authenticated LogAnalyticsManagementClient.
        self.la_client = la_client

    def get_workspace_name_list(self):
        """Return the names of all workspaces in the subscription, sorted alphabetically."""
        names = [workspace.name for workspace in self.la_client.workspaces.list()]
        names.sort()
        return names

    def get_workspace_id(self, workspace_name):
        """Return the customer (workspace) id of the workspace with the given name.

        Raises StopIteration when no workspace matches.
        """
        matches = (workspace.customer_id
                   for workspace in self.la_client.workspaces.list()
                   if workspace.name == workspace_name)
        return next(matches)

# end of the class
|
|
@ -0,0 +1,93 @@
|
|||
"""
|
||||
Azure Network Helper:
|
||||
This module provides helper methods to initialize and manipulate NetworkManagementClient object.
|
||||
"""
|
||||
|
||||
from azure.mgmt.network import NetworkManagementClient
|
||||
|
||||
class NetworkHelper:
    """Wraps NetworkManagementClient to provision the network pieces a VM needs.

    Resource names ('myIPAddress', 'myVNet', 'mySubnet', 'myIPConfig') are fixed.
    """

    def __init__(self, network_client, nic_name):
        # network_client: authenticated NetworkManagementClient.
        # nic_name: name of the network interface this helper manages.
        self.network_client = network_client
        self.nic_name = nic_name

    def get_nic(self, resource_group):
        """Fetch the managed NIC from the given resource group."""
        return self.network_client.network_interfaces.get(
            resource_group,
            self.nic_name
        )

    def prepare_network_for_vm_creation(self, resource_group, vm_location):
        """Provision public IP, VNet, subnet and NIC needed before creating a VM."""
        self.create_public_ip_address(resource_group, vm_location)
        self.create_vnet(resource_group, vm_location)
        self.create_subnet(resource_group)
        self.create_nic(resource_group, vm_location)

    def create_public_ip_address(self, resource_group, vm_location):
        """Create a dynamic public IP address named 'myIPAddress' and wait for it."""
        ip_params = {
            'location': vm_location,
            'public_ip_allocation_method': 'Dynamic'
        }
        poller = self.network_client.public_ip_addresses.create_or_update(
            resource_group,
            'myIPAddress',
            ip_params
        )
        return poller.result()

    def create_vnet(self, resource_group, vm_location):
        """Create virtual network 'myVNet' (10.0.0.0/16) and wait for completion."""
        vnet_params = {
            'location': vm_location,
            'address_space': {'address_prefixes': ['10.0.0.0/16']}
        }
        poller = self.network_client.virtual_networks.create_or_update(
            resource_group,
            'myVNet',
            vnet_params
        )
        return poller.result()

    def create_subnet(self, resource_group):
        """Create subnet 'mySubnet' (10.0.0.0/24) inside 'myVNet' and wait."""
        subnet_params = {'address_prefix': '10.0.0.0/24'}
        poller = self.network_client.subnets.create_or_update(
            resource_group,
            'myVNet',
            'mySubnet',
            subnet_params
        )
        return poller.result()

    def create_nic(self, resource_group, vm_location):
        """Create the NIC wired to 'mySubnet' and 'myIPAddress' and wait."""
        subnet_info = self.network_client.subnets.get(
            resource_group,
            'myVNet',
            'mySubnet'
        )
        public_ip = self.network_client.public_ip_addresses.get(
            resource_group,
            'myIPAddress'
        )
        nic_params = {
            'location': vm_location,
            'ip_configurations': [{
                'name': 'myIPConfig',
                'public_ip_address': public_ip,
                'subnet': {'id': subnet_info.id}
            }]
        }
        poller = self.network_client.network_interfaces.create_or_update(
            resource_group,
            self.nic_name,
            nic_params
        )
        return poller.result()

# end of the class
|
|
@ -0,0 +1,21 @@
|
|||
"""
|
||||
Azure Resource Helper:
|
||||
This module provides helper methods to initialize and manipulate ResourceManagementClient object.
|
||||
Resource Group is the focal point.
|
||||
"""
|
||||
|
||||
from azure.mgmt.resource import ResourceManagementClient
|
||||
|
||||
class ResourceHelper:
    """Wraps ResourceManagementClient; the resource group is the focal point."""

    def __init__(self, resource_client, resource_group):
        # resource_client: authenticated ResourceManagementClient.
        # resource_group: name of the resource group this helper manages.
        self.resource_client = resource_client
        self.resource_group = resource_group

    def create_resource_group(self, resource_group_location):
        """Create (or update) the resource group in the given location.

        FIX: the original assigned the operation result to an unused local and
        returned nothing; returning it is backward-compatible (callers
        previously received None and ignored it).
        """
        resource_group_params = {'location': resource_group_location}
        return self.resource_client.resource_groups.create_or_update(
            self.resource_group,
            resource_group_params
        )

# end of the class
|
|
@ -0,0 +1,80 @@
|
|||
"""
|
||||
Azure Storage Helper:
|
||||
This module provides helper methods to initialize and manipulate StorageManagementClient object.
|
||||
Storage account and blob service are the focal points.
|
||||
"""
|
||||
|
||||
from datetime import *
|
||||
import time
|
||||
import azure.mgmt.storage.models
|
||||
from azure.mgmt.storage import StorageManagementClient
|
||||
from azure.storage.blob import BlockBlobService, PageBlobService, AppendBlobService
|
||||
from azure.storage.blob.models import BlobBlock, ContainerPermissions, ContentSettings
|
||||
|
||||
class StorageHelper:
|
||||
def __init__(self, storage_client):
|
||||
self.storage_client = storage_client
|
||||
|
||||
def is_storage_account_name_available(self, storage_account_name):
|
||||
return self.storage_client.storage_accounts.check_name_availability(storage_account_name)
|
||||
|
||||
def create_storage_account_async(self, storage_account_name, resource_group, **kwargs):
|
||||
storage_params = azure.mgmt.storage.models.StorageAccountCreateParameters(
|
||||
sku=azure.mgmt.storage.models.Sku(name='standard_lrs'),
|
||||
kind=azure.mgmt.storage.models.Kind.storage,
|
||||
location=kwargs['storage_location']
|
||||
)
|
||||
async_storage_creation = self.storage_client.storage_accounts.create(
|
||||
resource_group,
|
||||
storage_account_name,
|
||||
storage_params
|
||||
)
|
||||
storage_account = async_storage_creation.result()
|
||||
|
||||
def get_storage_account_names(self, resource_group):
|
||||
storage_account_list = self.storage_client.storage_accounts.list_by_resource_group(resource_group)
|
||||
return [item.name for item in storage_account_list]
|
||||
|
||||
def get_storage_account_properties(self, storage_account_name, resource_group):
|
||||
return self.storage_client.storage_accounts.get_properties(resource_group, storage_account_name)
|
||||
|
||||
def get_storage_account_key(self, storage_account_name, resource_group):
|
||||
storage_keys = self.storage_client.storage_accounts.list_keys(resource_group, storage_account_name)
|
||||
if storage_keys is not None:
|
||||
return {v.key_name: v.value for v in storage_keys.keys}['key1']
|
||||
else:
|
||||
return None
|
||||
|
||||
def initialize_block_blob_service(self, storage_account_name, storage_key, blob_container_name):
|
||||
self.storage_account_name = storage_account_name
|
||||
self.storage_key = storage_key
|
||||
self.blob_container_name = blob_container_name
|
||||
|
||||
self.block_blob_service = BlockBlobService(account_name=self.storage_account_name, account_key=self.storage_key)
|
||||
|
||||
def create_blob_container(self):
|
||||
return self.block_blob_service.create_container(self.blob_container_name)
|
||||
|
||||
def get_blob_container(self):
|
||||
containers = self.block_blob_service.list_containers(self.blob_container_name)
|
||||
return next(c for c in containers if c.name == self.blob_container_name)
|
||||
|
||||
def copy_vhd(self, file_name, file_path):
|
||||
status = self.block_blob_service.copy_blob(self.blob_container_name, file_name, file_path)
|
||||
if status.status == 'pending':
|
||||
time.sleep(120)
|
||||
|
||||
def generate_blob_container_sas_url(self, expiration_in_days):
|
||||
container_permission = ContainerPermissions(read=True, write=True, list=True)
|
||||
return self.block_blob_service.generate_container_shared_access_signature(container_name = self.blob_container_name, permission=container_permission, protocol='https', start=datetime.now(), expiry=datetime.now() + timedelta(days=expiration_in_days))
|
||||
|
||||
def build_upload_container_path(self, target_os_type, sas_url):
    """Compose the full blob URL for piresults.json under the OS-type folder."""
    blob_path = '{}/piresults.json'.format(target_os_type.lower())
    return 'https://{0}.blob.core.windows.net/{1}/{2}?{3}'.format(
        self.storage_account_name, self.blob_container_name, blob_path, sas_url)
|
||||
|
||||
def get_container_name_list(self, resource_group, storage_account_name, blob_container_name):
    """List every container name in the given storage account.

    Side effect, kept from the original flow: looks up the account key and
    (re)initializes ``self.block_blob_service`` before listing.
    """
    account_key = self.get_storage_account_key(storage_account_name, resource_group)
    self.initialize_block_blob_service(storage_account_name, account_key, blob_container_name)
    return [container.name for container in self.block_blob_service.list_containers()]
|
||||
|
||||
# end of the class
|
|
@ -0,0 +1,7 @@
|
|||
"""
|
||||
SentinelGitHub: This package is developed for GitHub related functionalities.
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .github_helper import *
|
||||
from .query_helper import *
|
|
@ -0,0 +1,34 @@
|
|||
"""
|
||||
GitHub Helper:
|
||||
This module provides helper methods to access content in GitHub.
|
||||
"""
|
||||
|
||||
import requests
|
||||
import json
|
||||
class GitHubHelper(object):
    """Fetches the deployed hunting queries from the Azure Sentinel GitHub repo."""

    def __init__(self):
        """Download and parse DeployedQueries.json; stores the list on self.queries."""
        github_query_url = "https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Hunting%20Queries/DeployedQueries.json"
        response = requests.get(github_query_url)
        # Let requests sniff the encoding from the payload before decoding.
        response.encoding = response.apparent_encoding
        self.queries = json.loads(response.text)

    def get_queries(self):
        """Return the list of query names, or None when nothing was loaded / on error."""
        if self.queries is not None:
            try:
                return [query['name'] for query in self.queries]
            except Exception as e:
                print(e)
        return None

    def get_github_query(self, query_name):
        """Return the KQL text of the query called *query_name*, or None if absent."""
        if self.queries is None:
            return None
        try:
            matches = [query for query in self.queries if query['name'] == query_name]
            # The original checked `entry is not None` (always true for a list
            # comprehension) and indexed entry[0] unconditionally, raising —
            # and then swallowing — IndexError for unknown names.
            if matches:
                return matches[0]['query']
        except Exception as e:
            print(e)
        return None
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
"""
|
||||
Query Helper:
|
||||
This module returns constructed Kusto queries (KQL).
|
||||
"""
|
||||
|
||||
import json
|
||||
class QueryHelper(object):
    """Builds Kusto (KQL) query strings for Sentinel notebooks."""

    @staticmethod
    def get_query_for_hunting_bookmark(start_date, end_date, max_record=20):
        """Return a KQL query for HuntingBookmark rows in (start_date, end_date].

        The original def had neither ``self`` nor ``@staticmethod``, so it
        only worked when invoked on the class; the decorator keeps that
        usage and additionally makes instance calls safe.
        """
        return ('HuntingBookmark | where EventTime > datetime("{}") and '
                'EventTime <= datetime("{}") | take {}').format(start_date, end_date, max_record)
|
|
@ -0,0 +1,6 @@
|
|||
"""
|
||||
SentinelLog: This package provides log functionalities.
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .log_helper import *
|
|
@ -0,0 +1,75 @@
|
|||
"""
|
||||
Log Helper:
|
||||
This module provides log functionalities through Azure Application Insights.
|
||||
It has dependency on .NET library: Microsoft.Azure.CIS.Notebooks.LogHelper
|
||||
"""
|
||||
|
||||
import clr
|
||||
clr.AddReference('Microsoft.Azure.CIS.Notebooks.LogHelper')
|
||||
clr.AddReference('Microsoft.ApplicationInsights')
|
||||
|
||||
import os
|
||||
import ipywidgets as widgets
|
||||
from ipywidgets import Button, GridBox, Layout, ButtonStyle, IntProgress
|
||||
from IPython.display import HTML
|
||||
from Microsoft.Azure.CIS.Notebooks.LogHelper import *
|
||||
|
||||
class LogHelper(object):
    """Telemetry and feedback-UI helper backed by the .NET AILogger
    (Microsoft.Azure.CIS.Notebooks.LogHelper via pythonnet).
    """

    def __init__(self, reset, env_dir, env_dict, notebook_name):
        """Create the Application Insights logger and capture environment info.

        Parameters
        ----------
        reset : bool
            When True, ignore a tenant_domain cached in *env_dir*.
        env_dir : mapping
            Environment mapping; must provide 'USER' and 'PWD'.
        env_dict : mapping
            Fallback mapping that may carry 'tenant_domain'.
        notebook_name : str
            Name of the running notebook, used in telemetry.
        """
        self.logger = AILogger()
        self.user = ''
        self.pwd = ''
        self.tenant_domain = ''
        self.notebook_name = ''
        self.get_env(reset, env_dir, env_dict, notebook_name)

    def get_env(self, reset, env_dir, env_dict, notebook_name):
        """Populate user/pwd/tenant_domain from the environment mappings."""
        self.notebook_name = notebook_name
        self.user = env_dir['USER']
        self.pwd = env_dir['PWD']
        # Prefer the cached tenant_domain unless a reset was requested.
        if not reset and 'tenant_domain' in env_dir:
            self.tenant_domain = env_dir['tenant_domain']
        elif 'tenant_domain' in env_dict:
            self.tenant_domain = env_dict['tenant_domain']

    def count_page_view(self):
        """Record one notebook-executed event in Application Insights.

        (The .NET API's method name 'CountNotebookExectued' is misspelled
        upstream and must be kept as-is.)
        """
        self.logger.CountNotebookExectued(self.tenant_domain, self.notebook_name, self.user, self.pwd)

    def provide_feedback(self, button):
        """Button callback: persist the free-text feedback, if any was entered."""
        val = self.give_feedback.value
        if val:
            result = self.logger.ProvideFeedback(self.tenant_domain, self.notebook_name, self.user, self.pwd, val)
            # assumes the .NET call returns a plain bool — TODO confirm
            if result:
                print('saved')

    def is_notebook_helpful(self, button):
        """Button callback: persist the Yes/No helpfulness answer, if chosen."""
        val = self.is_helpful.value
        if val:
            result = self.logger.IsNotebookHelpful(self.tenant_domain, self.notebook_name, self.user, self.pwd, val)
            if result:
                print('saved')

    def ask_is_helpful(self):
        """Render the 'Is this notebook helpful?' radio buttons plus a Save button.

        NOTE(review): relies on the notebook-injected ``display`` builtin;
        this module only imports HTML from IPython.display.
        """
        label_helpful = widgets.Label(value='Is this notebook helpful?')
        self.is_helpful = widgets.RadioButtons(options=['Yes', 'No'], value=None, description='', disabled=False)
        save_helpful = widgets.Button(description='Save', disabled=False, style=self.define_button_style(), layout=self.define_button_layout(), icon='save')
        save_helpful.on_click(self.is_notebook_helpful)
        display(label_helpful)
        display(self.is_helpful)
        display(save_helpful)

    def ask_feedback(self):
        """Render the free-text feedback textarea plus a Save button."""
        label_feedback = widgets.Label(value='Please let us know what do you think about this notebook (Limit to 500 characters, text only):')
        self.give_feedback = widgets.Textarea(layout=self.define_textarea_layout(), value='', placeholder='Thank you for your thoughts', description='', disabled=False, rows=6)
        save_feedback = widgets.Button(description='Save', disabled=False, style=self.define_button_style(), layout=self.define_button_layout(), icon='save')
        save_feedback.on_click(self.provide_feedback)
        display(label_feedback)
        display(self.give_feedback)
        display(save_feedback)

    def define_button_style(self):
        """Shared white/blue style for Save buttons."""
        return ButtonStyle(button_color='#FFF', font_color='blue')

    def define_button_layout(self):
        """Shared bordered layout for Save buttons."""
        return Layout(width='auto', height='27px', border='2px solid black')

    def define_textarea_layout(self):
        """Layout for the feedback textarea."""
        return Layout(width='600px', border='solid')
|
|
@ -0,0 +1,6 @@
|
|||
"""
|
||||
SentinelPlot: This package provides helper functionalities to plot charts
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .plot_helper import *
|
|
@ -0,0 +1,47 @@
|
|||
"""
|
||||
Plot Helper:
|
||||
This module provides plot functionalities through various Python plot packages.
|
||||
"""
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
import matplotlib.dates as mdates
|
||||
from datetime import datetime
|
||||
|
||||
class PlotHelper(object):
    """Chart helpers for Sentinel notebooks (matplotlib-based)."""

    @staticmethod
    def plot_timeline(dates, events):
        """Draw a stem-style timeline of *events* at *dates* and show it.

        The original def had neither ``self`` nor ``@staticmethod``; the
        decorator preserves the existing ``PlotHelper.plot_timeline(...)``
        call style and additionally allows instance calls.

        Parameters
        ----------
        dates : sequence of datetime-like
            X positions of the markers.
        events : sequence of str
            Labels; truncated to 20 characters for display.
        """
        # Truncate long labels so annotations stay readable.
        length = 20
        short_events = list(map(lambda name: name if len(name) <= length else name[0:length] + '...', events))
        # Alternate annotation heights so neighbouring labels don't collide.
        levels = np.tile([-9, 9, -7, 7, -5, 5, -3, 3, -1, 1], int(np.ceil(len(dates)/10)))[:len(dates)]

        # Create figure and plot a stem plot with the date
        fig, axis = plt.subplots(figsize=(20, 4), constrained_layout=True)
        axis.set(title="Hunting Bookmarks Timeline")

        markerline, stemline, baseline = axis.stem(dates, levels,
                                                   linefmt="C1-", markerfmt="C5^", basefmt="C2-",
                                                   label="Bookmark")

        plt.setp(markerline, mec="k", mfc="w", zorder=3)

        # Shift the markers to the baseline by replacing the y-data by zeros.
        markerline.set_ydata(np.zeros(len(dates)))

        # Annotate each stem, above or below the baseline depending on sign.
        vert = np.array(['top', 'bottom'])[(levels > 0).astype(int)]
        for d, l, r, va in zip(dates, levels, short_events, vert):
            axis.annotate(r, xy=(d, l), xytext=(-3, np.sign(l)*3),
                          textcoords="offset points", va=va, ha="right")

        # Format the x axis with weekly ticks.
        axis.get_xaxis().set_major_locator(mdates.DayLocator(interval=7))
        axis.get_xaxis().set_major_formatter(mdates.DateFormatter("%Y-%m-%d %H:%M"))
        plt.setp(axis.get_xticklabels(), rotation=40, ha="right")

        # Remove the y axis and three of the spines — only time matters.
        axis.get_yaxis().set_visible(False)
        for spine in ["left", "top", "right"]:
            axis.spines[spine].set_visible(False)

        axis.margins(y=0.1)
        plt.show()
|
|
@ -0,0 +1,8 @@
|
|||
"""
|
||||
SentinelRegi: This package is developed for viewing registry entries by leveraging ipywidgets, using MVC pattern
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .regi_helper import *
|
||||
from .regi_view_helper import *
|
||||
from .regi_controller import *
|
|
@ -0,0 +1,105 @@
|
|||
"""
|
||||
Regi Controller:
|
||||
This controller module takes user actions and renders corresponding widgets.
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
from IPython.display import display, HTML
|
||||
import ipywidgets as widgets
|
||||
from ipywidgets import Button, GridBox, Layout, ButtonStyle, IntProgress
|
||||
from .regi_view_helper import *
|
||||
|
||||
class RegiController(object):
    """MVC controller for browsing a registry hive with ipywidgets.

    Takes click events from key buttons / breadcrumbs and re-renders the
    subkey list, value table and navigation path.
    """

    def __init__(self, regi_helper):
        """Keep a reference to the model (RegiHelper) and seed the breadcrumb path."""
        self.root = regi_helper.root
        self.regi_helper = regi_helper
        # path[0] is the breadcrumb icon placeholder, path[1] the hive root ('').
        self.path = [RegiViewHelper.define_icon(RegiViewHelper.content_type_breadcrumb), '']

    def display(self):
        """Build the widget tree for the first render and return the top-level box."""
        self.progress_bar = RegiViewHelper.define_int_progress_bar()
        self.nav_breadcrumbs = widgets.HBox()
        self.button_keys = widgets.VBox()
        self.value_details = widgets.VBox()
        self.box_data = widgets.HBox()
        self.box_top = widgets.VBox()

        self.update()
        return self.box_top

    def on_key_click(self, button):
        """Click handler for a key button: navigate into that key and re-render."""
        # Reset the current key on the model.
        self.progress_bar = RegiViewHelper.define_int_progress_bar()
        self.regi_helper.set_current_key(button.description)

        # Build navigator path and update.
        self.build_breadcrumb_path(button.description)

        self.update()
        return self.box_top

    def update(self):
        """Re-render breadcrumbs, subkey buttons and the value table."""
        display(self.progress_bar)
        self.progress_bar.value += 1
        self.construct_breadcrumb(self.nav_breadcrumbs)
        self.progress_bar.value += 2
        self.process_button_data(self.button_keys, RegiViewHelper.content_type_keys)
        self.progress_bar.value += 3
        self.process_tabular_data(self.value_details)
        self.progress_bar.value += 3

        self.box_data.children = [self.button_keys, self.value_details]
        self.box_top.children = [self.nav_breadcrumbs, self.box_data]
        self.progress_bar.close()

    def process_button_data(self, box, content_type):
        """Fill *box* with one button per subkey of the current key."""
        buttons = []
        buttons.append(RegiViewHelper.create_first_button(content_type, RegiViewHelper.define_icon(content_type)))

        for key_name, key_path in self.regi_helper.get_current_subkey_name_path_tuples():
            button = RegiViewHelper.create_button(RegiViewHelper.button_type_content, RegiViewHelper.define_icon(content_type), key_path)
            if content_type == RegiViewHelper.content_type_values:
                # NOTE(review): on_value_click is not defined on this class;
                # this branch would fail if content_type_values were passed —
                # currently only content_type_keys is used.
                button.on_click(self.on_value_click)
            else:
                button.on_click(self.on_key_click)

            buttons.append(button)
        # End of for loop

        box.children = tuple(buttons)
        box.layout = RegiViewHelper.define_box_layout()

    def process_tabular_data(self, box):
        """Render the current key's values as an HTML table inside *box*."""
        data_table = []
        try:
            if self.regi_helper.current_key.values_number() != 0:
                for value in self.regi_helper.get_value_list():
                    data_table.append([value.name(), value.value_type_str(), value.value()])
                # end of value loop
        except Exception as e:
            # The original used a bare `except:` that printed sys.exc_info()
            # although `sys` is never imported in this module, so the handler
            # itself raised NameError; print the caught exception directly.
            print(e)

        # -1 means "never truncate cell text" here; newer pandas wants None.
        pd.set_option('display.max_colwidth', -1)
        df = pd.DataFrame(data_table, columns=RegiViewHelper.define_value_table_columns())
        box.children = tuple([widgets.HTML(RegiViewHelper.get_summary_table_style() + df.to_html(classes="df", escape=True))])
        box.layout = RegiViewHelper.define_box_layout()

    def build_breadcrumb_path(self, new_key):
        """Append *new_key* to the path, or truncate back to it when revisiting."""
        if new_key in self.path:
            index = self.path.index(new_key)
            self.path = self.path[:index + 1]
        else:
            self.path.append(new_key)

    def construct_breadcrumb(self, box):
        """Fill *box* with one navigation button per visited key."""
        buttons = []
        icon_type = RegiViewHelper.define_icon(RegiViewHelper.content_type_breadcrumb)
        buttons.append(RegiViewHelper.create_first_button(RegiViewHelper.content_type_breadcrumb, icon_type))

        for key_path in self.path[1:]:
            button = RegiViewHelper.create_button(RegiViewHelper.button_type_navigator, icon_type, key_path)
            button.on_click(self.on_key_click)
            buttons.append(button)
        # End of for loop

        box.children = tuple(buttons)
        box.layout = RegiViewHelper.define_box_layout()
|
|
@ -0,0 +1,48 @@
|
|||
"""
|
||||
Regi Helper:
|
||||
This module is built on the top of Python Registry. It knows the registry data object.
|
||||
"""
|
||||
|
||||
from Registry import Registry
|
||||
from Registry.RegistryParse import ParseException
|
||||
|
||||
class RegiHelper(object):
    """Model wrapper around python-registry: tracks a hive and a current key."""

    # Prefix the library uses for the hive root; stripped from displayed paths.
    root_path = 'ROOT'

    def __init__(self, hive_path):
        """Open the hive file at *hive_path* and start at its root key."""
        self.hive = Registry.Registry(hive_path)
        self.root = self.hive.root()
        self.current_key = self.root

    def get_current_subkey_name_path_tuples(self):
        """Return (name, path) tuples for the current key's subkeys.

        Paths beginning with the 'ROOT' prefix have that prefix plus the
        following separator (5 characters) stripped so they can be fed back
        into ``Registry.open()``.
        """
        if self.current_key is not None and self.current_key.subkeys_number() != 0:
            tup_list = [(k.name(), k.path()) for k in self.current_key.subkeys()]
            return [(n, p[5:]) if self.root_path in p else (n, p) for (n, p) in tup_list]
        else:
            return []

    def find_key_by_path(self, key_path):
        """Open and return the key at *key_path*; blank means the hive root.

        Exits the process when the key does not exist (matching the
        original intent — the old handler referenced `sys` without it ever
        being imported in this module, so it raised NameError instead, and
        its message mentioned an unrelated 'Run key').
        """
        try:
            if key_path.strip() == '':
                return self.root
            else:
                return self.hive.open(key_path)
        except Registry.RegistryKeyNotFoundException:
            import sys
            print("Couldn't find registry key '{}'. Exiting...".format(key_path))
            sys.exit(-1)

    def set_current_key(self, key_path):
        """Point the browser at the key found at *key_path*."""
        self.current_key = self.find_key_by_path(key_path)

    def get_value_name_list(self):
        """Return the names of the current key's values ([] when it has none)."""
        if self.current_key.values_number() != 0:
            return [v.name() for v in self.current_key.values()]
        else:
            return []

    def get_value_list(self):
        """Return the current key's value objects ([] when it has none)."""
        if self.current_key.values_number() != 0:
            return list(self.current_key.values())
        else:
            return []
|
|
@ -0,0 +1,70 @@
|
|||
"""
|
||||
Regi View Helper:
|
||||
This module provides helper methods for UI components.
|
||||
"""
|
||||
|
||||
import ipywidgets as widgets
|
||||
from ipywidgets import Button, GridBox, Layout, ButtonStyle, IntProgress
|
||||
|
||||
class RegiViewHelper(object):
    """Static factory/style helpers for the registry-browser widgets.

    The originals were defined without ``self`` and only ever invoked as
    ``RegiViewHelper.method(...)``; ``@staticmethod`` makes that explicit
    and also makes instance calls safe.
    """

    # Kinds of buttons rendered.
    button_type_content = 'content'
    button_type_navigator = 'navigator'

    root_path = 'ROOT'

    # Section headers for the widget areas.
    content_type_keys = 'KEYS'
    content_type_values = 'VALUES'
    content_type_breadcrumb = 'KEYS VIEWED'

    @staticmethod
    def get_summary_table_style():
        """CSS applied to the pandas-generated value table."""
        return '''<style> .df th { text-align:center; font-size:large; padding-left:10px; padding-right:10px} tbody tr:nth-child(even) { background-color: #f2f2f2; } td { padding-left: 10px; padding-right: 5px; }</style>'''

    @staticmethod
    def define_box_layout():
        """Layout shared by the button boxes and the table box."""
        return Layout(display='flex', align_items='flex-start', border='solid', margin='5px')

    @staticmethod
    def define_button_layout(button_type):
        """Per-button-type widget layout (content vs. navigator)."""
        if button_type == RegiViewHelper.button_type_content:
            return Layout(width='auto', height='25px')
        elif button_type == RegiViewHelper.button_type_navigator:
            return Layout(width='auto', height='27px', border='2px solid black')

    @staticmethod
    def define_icon(content_type):
        """Font-awesome icon name for a section header."""
        if content_type == RegiViewHelper.content_type_keys:
            return 'key'
        elif content_type == RegiViewHelper.content_type_values:
            return 'list'
        elif content_type == RegiViewHelper.content_type_breadcrumb:
            return 'play'

    @staticmethod
    def define_button_style(button_type):
        """Per-button-type colors."""
        if button_type == RegiViewHelper.button_type_content:
            return ButtonStyle(button_color='#FFF', font_color='blue')
        elif button_type == RegiViewHelper.button_type_navigator:
            return ButtonStyle(button_color='#d0d0ff')

    @staticmethod
    def create_first_button(content_type, icon_type):
        """Disabled header button that labels a section of buttons."""
        # Default avoids UnboundLocalError for unrecognized content types,
        # which the original would have raised at the widgets.Button call.
        tooltip = ''
        if content_type == RegiViewHelper.content_type_keys:
            tooltip = 'Click to get lists of the subkeys and values under this subkey'
        elif content_type == RegiViewHelper.content_type_values:
            tooltip = 'Click to get details of the value'
        elif content_type == RegiViewHelper.content_type_breadcrumb:
            tooltip = 'Click to go back to the key'

        return widgets.Button(description=content_type + ' :',
                              disabled=True,
                              layout=Layout(width='auto', height='27px'),
                              icon=icon_type,
                              style=ButtonStyle(align_content='center', font_weight='bold'),
                              tooltip=tooltip)

    @staticmethod
    def create_button(button_type, icon_type, path):
        """Clickable button whose description is a registry key path."""
        return widgets.Button(description=path,
                              layout=RegiViewHelper.define_button_layout(button_type),
                              style=RegiViewHelper.define_button_style(button_type),
                              icon=icon_type)

    @staticmethod
    def define_value_table_columns():
        """Column headers for the value table."""
        return ['Name', 'Type', 'Data']

    @staticmethod
    def define_int_progress_bar():
        """Standard 10-step loading bar."""
        return widgets.IntProgress(value=0, min=0, max=10, step=1, description='Loading:', bar_style='success', orientation='horizontal', position='top')
|
|
@ -0,0 +1,7 @@
|
|||
"""
|
||||
SentinelUtils: This package provides utility methods in general
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .config_reader import ConfigReader
|
||||
from .version_management import *
|
|
@ -0,0 +1,17 @@
|
|||
"""
|
||||
Config Reader:
|
||||
This module is used to read JSON config file populated by Azure Notebooks API.
|
||||
"""
|
||||
|
||||
import json
|
||||
class ConfigReader:
|
||||
def read_config_values(filePath):
|
||||
with open(filePath) as json_file:
|
||||
if json_file:
|
||||
json_config = json.load(json_file)
|
||||
return (json_config["tenant_id"],
|
||||
json_config["subscription_id"],
|
||||
json_config["resource_group"],
|
||||
json_config["workspace_id"],
|
||||
json_config["workspace_name"])
|
||||
return None
|
|
@ -0,0 +1,58 @@
|
|||
"""
|
||||
Version Management:
|
||||
This module is used to validate installed Python packages that are required by Azure Sentinel Notebooks.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import pkg_resources
|
||||
from pkg_resources import DistributionNotFound, VersionConflict
|
||||
|
||||
class VersionInformation:
    """Plain record describing one package's (or the interpreter's) version check."""

    # Package or product name, e.g. 'Python' or a pip distribution name.
    name = ''
    # Version actually installed / running.
    current_version = ''
    # Minimum version the notebooks require.
    required_version = ''
    # True when the installed version satisfies the requirement.
    requirement_met = False
    # Human-readable explanation when the requirement is not met.
    message = ''
|
||||
|
||||
class ModuleVersionCheck:
    """Validates the Python interpreter and installed packages against the
    versions Azure Sentinel notebooks require."""

    def validate_python(self, required_version):
        """Check the running interpreter against *required_version* ('X.Y[.Z]').

        Returns a populated VersionInformation. Bug fix: the original built
        the failure message from the *class* attributes of VersionInformation
        (always '' / False) instead of the instance it had just filled in, so
        a message was always produced and never contained the product name.
        """
        version = VersionInformation()
        version.name = 'Python'
        version.current_version = sys.version
        version.required_version = required_version
        version.requirement_met = sys.version_info >= tuple(int(x) for x in required_version.split("."))
        version.message = '' if version.requirement_met else version.name + required_version + ' is required'
        return version

    def validate_installed_modules(self, module_list):
        """Check each 'name>=version' spec in *module_list* via pkg_resources.

        Returns a list of VersionInformation, one per spec.
        """
        module_versions = []
        for mod_info in module_list:
            version = VersionInformation()
            version.name, version.required_version = mod_info.split(">=")
            try:
                pkg_resources.require(mod_info)
                version.requirement_met = True
            except Exception:
                version.requirement_met = False
                try:
                    version.message = str(sys.exc_info()[0].report)
                except Exception:
                    version.message = 'Unknown error'
                finally:
                    # Azure meta-packages routinely pin conflicting versions;
                    # treat a pure VersionConflict on them as acceptable.
                    if 'azure' in version.name and version.message.find("VersionConflict") >= 0:
                        version.requirement_met = True
                    elif version.message == '' or version.message.find("DistributionNotFound") < 0:
                        version = self.get_version_information(version, mod_info)

            module_versions.append(version)
        return module_versions

    def get_version_information(self, version, mod_info):
        """Fill version.current_version from the active working set, if found."""
        mod_list = pkg_resources.WorkingSet()
        items = list(filter(lambda x: x.project_name.startswith(version.name), mod_list))
        if len(items) > 0:
            version.current_version = items[0].version
        return version
|
||||
|
||||
# end of the class
|
|
@ -0,0 +1,8 @@
|
|||
"""
|
||||
SentinelVHD: This package is developed for viewing MFT entries by leveraging ipywidgets, using MVC pattern
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .file_helper import *
|
||||
from .mft_helper import *
|
||||
from .mft_view_helper import *
|
|
@ -0,0 +1,74 @@
|
|||
"""
|
||||
File Helper:
|
||||
This module provides helper methods to download and display files
|
||||
|
||||
It has dependency on .NET library for downloading files.
|
||||
"""
|
||||
|
||||
import clr
|
||||
clr.AddReference("System")
|
||||
clr.AddReference("System.Collections")
|
||||
clr.AddReference("System.Runtime")
|
||||
clr.AddReference("Microsoft.Azure.Storage.Common")
|
||||
clr.AddReference("Microsoft.Azure.Storage.Blob")
|
||||
clr.AddReference("Microsoft.Azure.KeyVault.Core")
|
||||
clr.AddReference("Newtonsoft.Json")
|
||||
clr.AddReference("Microsoft.Azure.CIS.DiskLib")
|
||||
clr.AddReference("Microsoft.Azure.CIS.DiskLib.Ntfs")
|
||||
clr.AddReference("Microsoft.Azure.CIS.DiskLib.Vhd")
|
||||
clr.AddReference("Microsoft.Azure.CIS.DiskLib.Vhd.Accessors")
|
||||
clr.AddReference("PyHelper")
|
||||
|
||||
import sys
|
||||
import clr
|
||||
import os
|
||||
import json
|
||||
from datetime import datetime,timedelta
|
||||
import ipywidgets as widgets
|
||||
import System
|
||||
from System import *
|
||||
from System.Collections.Generic import *
|
||||
from Newtonsoft.Json import *
|
||||
from Microsoft.Azure.CIS.DiskLib import *
|
||||
from Microsoft.Azure.CIS.DiskLib.Ntfs import *
|
||||
from Microsoft.Azure.CIS.DiskLib.Vhd import *
|
||||
from Microsoft.Azure.CIS.DiskLib.Vhd.Accessors import *
|
||||
from MftExportUtilityLib import *
|
||||
|
||||
class FileHelper(object):
    """Static helpers to download files from an NTFS image (via the .NET
    MftExportUtil library) and to display / convert their metadata.

    All methods were defined without ``self`` and called via the class;
    ``@staticmethod`` makes that explicit and instance-call safe.
    """

    @staticmethod
    def download_file(file_segment, ntfs):
        """Download the file whose MFT segment number (hex string) is
        *file_segment*; returns the local file path."""
        return MftExportUtil.DownloadFile(ntfs, int(file_segment, 16))[0]

    @staticmethod
    def download_file_internal(file_segment, ntfs):
        """Duplicate of download_file, kept for backward compatibility.

        NOTE(review): identical body to download_file; candidate for
        removal once callers are confirmed.
        """
        return MftExportUtil.DownloadFile(ntfs, int(file_segment, 16))[0]

    @staticmethod
    def download_files(file_segment_list, ntfs):
        """Download every segment in *file_segment_list* with a progress bar;
        returns the list of local file paths."""
        progress_bar = FileHelper.define_int_progress_bar(len(file_segment_list))
        display(progress_bar)
        file_path_list = []
        for file_segment in file_segment_list:
            file_path_list.append(FileHelper.download_file(file_segment, ntfs))
            progress_bar.value += 1
        progress_bar.close()
        return file_path_list

    @staticmethod
    def display_file(file_path):
        """Print the file's text content, ignoring undecodable bytes."""
        with open(file_path, 'r', encoding="utf8", errors='ignore') as f:
            print(f.read())

    @staticmethod
    def convert_windows_file_time(win_time):
        """Convert a Windows FILETIME (100 ns ticks since 1601-01-01) to a
        datetime; returns '' for the sentinel value 0."""
        if win_time == 0:
            return ''
        # 100-nanosecond ticks -> microseconds.
        return datetime(1601, 1, 1) + timedelta(microseconds=win_time / 10)

    @staticmethod
    def convert_decimal_to_hexadecimal(attr_name, val):
        """Render numeric attribute values as hex; leave bools, non-numbers
        and *time* attributes untouched.

        NOTE(review): a genuine float value would raise ValueError in the
        'x' format spec — unchanged from the original; presumably the MFT
        attributes are always ints.
        """
        if not isinstance(val, (int, float)) or isinstance(val, bool) or 'time' in attr_name:
            return val
        return "0x{:02x}".format(val)

    @staticmethod
    def define_int_progress_bar(max_step):
        """Progress bar with one step per file to download."""
        return widgets.IntProgress(value=0, min=0, max=max_step, step=1, description='Downloading:', bar_style='success', orientation='horizontal', position='top')
|
|
@ -0,0 +1,324 @@
|
|||
"""
|
||||
MFT Helper:
|
||||
This module consists of 3 classes:
|
||||
MFT Helper which knows MFT,
|
||||
MFT controller which takes user actions and renders corresponding widgets, and
|
||||
MFT Model Helper which knows the MFT data object.
|
||||
|
||||
It has dependency on .NET libraries:
|
||||
System
|
||||
System.Collections
|
||||
System.Runtime
|
||||
Microsoft.Azure.Storage.Common
|
||||
Microsoft.Azure.Storage.Blob
|
||||
Microsoft.Azure.KeyVault.Core
|
||||
Newtonsoft.Json
|
||||
Microsoft.Azure.CIS.DiskLib
|
||||
Microsoft.Azure.CIS.DiskLib.Ntfs
|
||||
Microsoft.Azure.CIS.DiskLib.Vhd
|
||||
Microsoft.Azure.CIS.DiskLib.Vhd.Accessors
|
||||
PyHelper
|
||||
"""
|
||||
|
||||
import clr
|
||||
clr.AddReference("System")
|
||||
clr.AddReference("System.Collections")
|
||||
clr.AddReference("System.Runtime")
|
||||
clr.AddReference("Microsoft.Azure.Storage.Common")
|
||||
clr.AddReference("Microsoft.Azure.Storage.Blob")
|
||||
clr.AddReference("Microsoft.Azure.KeyVault.Core")
|
||||
clr.AddReference("Newtonsoft.Json")
|
||||
clr.AddReference("Microsoft.Azure.CIS.DiskLib")
|
||||
clr.AddReference("Microsoft.Azure.CIS.DiskLib.Ntfs")
|
||||
clr.AddReference("Microsoft.Azure.CIS.DiskLib.Vhd")
|
||||
clr.AddReference("Microsoft.Azure.CIS.DiskLib.Vhd.Accessors")
|
||||
clr.AddReference("PyHelper")
|
||||
|
||||
import sys
|
||||
import clr
|
||||
import os
|
||||
import json
|
||||
import pandas as pd
|
||||
import ipywidgets as widgets
|
||||
from ipywidgets import Button, GridBox, Layout, ButtonStyle
|
||||
import System
|
||||
from System import *
|
||||
from System.Collections.Generic import *
|
||||
from Newtonsoft.Json import *
|
||||
from Microsoft.Azure.CIS.DiskLib import *
|
||||
from Microsoft.Azure.CIS.DiskLib.Ntfs import *
|
||||
from Microsoft.Azure.CIS.DiskLib.Vhd import *
|
||||
from Microsoft.Azure.CIS.DiskLib.Vhd.Accessors import *
|
||||
from MftExportUtilityLib import *
|
||||
from .mft_view_helper import *
|
||||
from .file_helper import *
|
||||
|
||||
class MFTHelper(object):
    """Knows the MFT: opens a VHD page blob, selects an MBR partition and
    positions on one MFT entry (the 'root' of the current browser view)."""

    # Names used to select one of the four MBR partition slots.
    p1 = 'Partition1'
    p2 = 'Partition2'
    p3 = 'Partition3'
    p4 = 'Partition4'
    # MFT segment number (hex string) of the NTFS root directory.
    root_segment_low = "0x5"

    def __init__(self, segment_start, sas_url, partition = None):
        """Open the VHD behind *sas_url*; when *partition* is given, scan it
        up to MFT segment *segment_start* (hex string; None means the NTFS
        root, 0x5). Uses the .NET DiskLib types via pythonnet."""
        self.uri = Uri(sas_url)
        self.sas_url = sas_url
        r = Reader(PageBlobReader.Create(self.uri))
        self.mbr = r.GetMasterBootRecord()
        self.root = None
        if segment_start is None: segment_start = self.root_segment_low
        self.segment = segment_start
        # NOTE(review): copies the class attribute onto the instance — a
        # no-op in effect; presumably meant to freeze the constant per
        # instance. Confirm before removing.
        self.root_segment_low = self.root_segment_low
        self.selected_partition = partition

        if partition is not None:
            if partition == self.p1:
                part = self.mbr.Partition1
            elif partition == self.p2:
                part = self.mbr.Partition2
            elif partition == self.p3:
                part = self.mbr.Partition3
            elif partition == self.p4:
                part = self.mbr.Partition4
            self.scan_partition(part)

    def get_partitions(self):
        """Return the four MBR partition slots as {name: hex(partition type)}."""
        return [{self.p1 : hex(self.mbr.Partition1.PartitionType)},
                {self.p2 : hex(self.mbr.Partition2.PartitionType)},
                {self.p3 : hex(self.mbr.Partition3.PartitionType)},
                {self.p4 : hex(self.mbr.Partition4.PartitionType)}]

    def scan_partition(self, partition):
        """Open the NTFS volume at the partition's first sector (512-byte
        sectors) and set self.root to the MFT entry at self.segment."""
        ntfs_accessor = PageBlobReader.Create(self.uri, partition.LBAFirstSector *512)
        self.ntfs = NtfsData(ntfs_accessor)

        # Walk the MFT sequentially until the requested segment number —
        # the enumerator does not support random access.
        i = 0
        for mft_entry in self.ntfs.Mft:
            if i < int(self.segment,16):
                i += 1
                continue
            self.root = mft_entry
            break
|
||||
|
||||
|
||||
class MFTController(object):
|
||||
def __init__(self, mft_helper, reset_path = True):
|
||||
self.root_segment_low = mft_helper.root_segment_low
|
||||
self.root = mft_helper.root
|
||||
self.selected_partition = mft_helper.selected_partition
|
||||
self.sas_url = mft_helper.sas_url
|
||||
if reset_path == True:
|
||||
self.path = [MFTViewHelper.content_type_breadcrumb, 'ROOT [{}]'.format(mft_helper.segment)]
|
||||
|
||||
def display(self):
|
||||
self.progress_bar = MFTViewHelper.define_int_progress_bar()
|
||||
self.nav_breadcrumbs = widgets.HBox()
|
||||
self.button_folders = widgets.VBox()
|
||||
self.button_files = widgets.VBox()
|
||||
self.table_files = widgets.VBox()
|
||||
self.box_folders = widgets.HBox()
|
||||
self.box_files = widgets.HBox()
|
||||
self.box_top = widgets.VBox()
|
||||
|
||||
self.tab_details = widgets.Tab()
|
||||
|
||||
self.update(None)
|
||||
return self.box_top
|
||||
|
||||
def on_folder_click(self, button):
|
||||
self.button_description = button.description
|
||||
name, seg_num_low = self.parse_button_description(self.button_description)
|
||||
|
||||
# Initialize the helper again
|
||||
self.progress_bar = MFTViewHelper.define_int_progress_bar()
|
||||
mft_helper = MFTHelper(seg_num_low, self.sas_url, self.selected_partition)
|
||||
self.__init__(mft_helper, False)
|
||||
|
||||
# Build navigator path and update
|
||||
self.build_breadcrumb_path(self.button_description)
|
||||
self.update(seg_num_low)
|
||||
return self.box_top
|
||||
|
||||
def on_file_click(self, button):
|
||||
self.button_description = button.description
|
||||
name, seg_num_low = self.parse_button_description(self.button_description)
|
||||
|
||||
# Initialize the helper again
|
||||
self.progress_bar = MFTViewHelper.define_int_progress_bar()
|
||||
mft_helper = MFTHelper(seg_num_low, self.sas_url, self.selected_partition)
|
||||
self.__init__(mft_helper, False)
|
||||
|
||||
self.update(seg_num_low, True)
|
||||
return self.box_top
|
||||
|
||||
def update(self, seg_num_low, update_details_only = False):
|
||||
display(self.progress_bar)
|
||||
self.progress_bar.value += 1
|
||||
if update_details_only == False:
|
||||
self.construct_breadcrumb(self.nav_breadcrumbs)
|
||||
self.progress_bar.value += 1
|
||||
self.process_button_data(self.button_folders, MFTViewHelper.content_type_folders)
|
||||
self.progress_bar.value += 2
|
||||
self.process_button_data(self.button_files, MFTViewHelper.content_type_files)
|
||||
self.progress_bar.value += 3
|
||||
self.process_tabular_data(self.table_files, MFTViewHelper.content_type_files)
|
||||
self.progress_bar.value += 1
|
||||
|
||||
if seg_num_low is not None:
|
||||
self.process_tab_attrs(self.tab_details, seg_num_low)
|
||||
self.progress_bar.value += 1
|
||||
|
||||
self.box_folders.children = [self.button_folders, self.tab_details]
|
||||
self.box_files.children = [self.button_files, self.table_files]
|
||||
self.box_top.children = [self.nav_breadcrumbs, self.box_folders, self.box_files]
|
||||
self.progress_bar.close()
|
||||
|
||||
def construct_breadcrumb(self, box):
|
||||
buttons = []
|
||||
icon_type = 'play'
|
||||
buttons.append(MFTViewHelper.create_first_button(MFTViewHelper.content_type_breadcrumb, icon_type))
|
||||
|
||||
for node in self.path[1:]:
|
||||
name, seg_num_low = self.parse_button_description(node)
|
||||
button = MFTViewHelper.create_button(MFTViewHelper.button_type_navigator, icon_type, name, seg_num_low)
|
||||
button.on_click(self.on_folder_click)
|
||||
buttons.append(button)
|
||||
# End of for loop
|
||||
|
||||
box.children = tuple(buttons)
|
||||
box.layout = MFTViewHelper.define_box_layout()
|
||||
|
||||
def process_button_data(self, box, content_type):
|
||||
buttons = []
|
||||
icon_type = content_type.lower()[:-1]
|
||||
buttons.append(MFTViewHelper.create_first_button(content_type, icon_type))
|
||||
|
||||
for node in self.root.Children:
|
||||
if content_type == MFTViewHelper.content_type_files:
|
||||
if node.IsDirectory: continue
|
||||
else:
|
||||
if not node.IsDirectory: continue
|
||||
if node.FileNameData.Flags == 2: continue
|
||||
|
||||
button = MFTViewHelper.create_button(MFTViewHelper.button_type_content, icon_type, node.Name, hex(node.IndexEntry.FileReference.SegmentNumberLowPart))
|
||||
if content_type == MFTViewHelper.content_type_files:
|
||||
button.on_click(self.on_file_click)
|
||||
else:
|
||||
button.on_click(self.on_folder_click)
|
||||
|
||||
buttons.append(button)
|
||||
# End of for loop
|
||||
|
||||
box.children = tuple(buttons)
|
||||
box.layout = MFTViewHelper.define_box_layout()
|
||||
|
||||
def process_tabular_data(self, box, content_type):
    """Render a summary table of self.root.Children as styled HTML in *box*."""
    columns, rows = MFTModelHelper.get_summary_table_data(content_type, self.root.Children)

    frame = pd.DataFrame(rows, columns=columns)
    rendered = MFTViewHelper.get_summary_table_style() + frame.to_html(classes="df", escape=True)
    box.children = (widgets.HTML(rendered),)
    box.layout = MFTViewHelper.define_box_layout()
||||
def parse_button_description(self, button_description):
    """Split a button caption of the form 'name [seg_num]' into its parts.

    Returns a (name, seg_num_low) tuple with surrounding whitespace removed
    from both pieces.
    """
    pieces = button_description.strip().split('[')
    name = pieces[0].strip()
    seg_num_low = pieces[1][:-1].strip()
    return name, seg_num_low
||||
def build_breadcrumb_path(self, new_node):
    """Update the breadcrumb trail for a navigation to *new_node*.

    Revisiting a node already on the trail truncates everything after it;
    a new node is appended at the end.
    """
    try:
        pos = self.path.index(new_node)
    except ValueError:
        self.path.append(new_node)
    else:
        self.path = self.path[:pos + 1]
||||
def process_tab_attrs(self, tab_container, seg_num_low):
    """Populate *tab_container* with one tab per attribute of self.root.

    Each tab is titled with the attribute's type code (leading '$'
    stripped) and holds an Output widget listing the fields selected by
    MFTModelHelper.map_mftattr_to_attr_list for that type code.

    seg_num_low is accepted for caller compatibility but is not used in
    the body.
    """
    tab_titles = []
    outputs = []

    try:
        for mft_attr in self.root.Attributes:
            type_name = str(mft_attr.TypeCode).replace('$', '')
            tab_titles.append(type_name)
            output = widgets.Output()
            attr_names = MFTModelHelper.map_mftattr_to_attr_list(type_name)
            with output:
                for attr_name in attr_names:
                    val = getattr(mft_attr, attr_name)
                    print(attr_name + ': ' + str(FileHelper.convert_decimal_to_hexadecimal(attr_name, val)))
            outputs.append(output)
    except Exception:
        # Narrowed from a bare except (which also swallowed SystemExit and
        # KeyboardInterrupt); report the error and render the partial tabs.
        print(sys.exc_info()[1])

    tab_container.children = outputs
    for i, title in enumerate(tab_titles):
        tab_container.set_title(i, title)
    tab_container.layout = MFTViewHelper.define_box_layout()
    tab_container.selected_index = 0
||||
class MFTModelHelper(object):
    """Static helpers that turn parsed MFT entries into view-ready data."""

    # Fields shown for every attribute type.
    _COMMON_ATTRS = ['HeaderClusterOffset', 'TypeCode', 'Name', 'IsResident']

    # Display fields keyed by the string form of the MFT attribute type code.
    # (The original if/elif chain had a duplicate, unreachable
    # 'IndexRoot(0x90)' branch; the dispatch table removes it.)
    _ATTRS_BY_TYPE_CODE = {
        # attrs for folder and files
        'StdInfo(0x10)': ['CreationTime', 'LastModificationTime', 'LastChangeTime', 'LastAccessTime',
                          'FileAttributes', 'OwnerId', 'SecurityId', 'USN'] + _COMMON_ATTRS,
        # attrs for folder and files
        'FileName(0x30)': ['FileReference', 'FileSize', 'CreationTime', 'LastModificationTime',
                           'LastChangeTime', 'LastAccessTime', 'FileAttributes', 'ReparsePointTag',
                           'FileNameLength', 'FileName', 'Flags', 'IsDirectory', 'IsViewIndex'] + _COMMON_ATTRS,
        # attrs for folder
        'ObjectId(0x40)': ['ObjectId'] + _COMMON_ATTRS,
        # attrs for folder
        'IndexRoot(0x90)': ['BlocksPerIndexBuffer', 'BytesPerIndexBuffer', 'CollationRule',
                            'IndexedAttributeType'] + _COMMON_ATTRS,
        # attrs for folder
        'IndexAlloc(0xa0)': list(_COMMON_ATTRS),
        # attrs for folder
        'Bitmap(0xb0)': list(_COMMON_ATTRS),
        # attrs for folder and files
        'LoggedUtilityStream(0x100)': list(_COMMON_ATTRS),
        # attrs for files
        'AttrList(0x20)': list(_COMMON_ATTRS),
        # attrs for files
        'Data(0x80)': ['DataStream', 'LowestVcn', 'HighestVcn', 'Flags', 'DataSize'] + _COMMON_ATTRS,
        # attrs for files
        'EAInfo(0xd0)': ['PackedEASize', 'NeedEACount', 'UnpackedEASize'] + _COMMON_ATTRS,
        # attrs for files
        'EA(0xe0)': ['DataStream', 'RunList'] + _COMMON_ATTRS,
    }

    @staticmethod
    def get_summary_table_data(content_type, mft_entries):
        """Build (column_list, data_table) rows summarizing *mft_entries*.

        content_type 'FILES' keeps only file nodes; any other value keeps
        only directory nodes whose FileNameData.Flags != 2. At most 10000
        entries are scanned (filtered or not) to keep the UI responsive.
        """
        column_list = ["Offset", "Seg. Num.", "Size", "Created", "Accessed", "Modified", "Changed"]
        data_table = []

        for count, node in enumerate(mft_entries, start=1):
            if count > 10000:
                break
            if content_type == 'FILES':
                if node.IsDirectory:
                    continue
            else:
                if not node.IsDirectory:
                    continue
                if node.FileNameData.Flags == 2:
                    continue

            data_table.append([node.DiskPhysicalOffset,
                               hex(node.IndexEntry.FileReference.SegmentNumberLowPart),
                               node.FileNameData.FileSize,
                               FileHelper.convert_windows_file_time(node.FileNameData.CreationTime),
                               FileHelper.convert_windows_file_time(node.FileNameData.LastAccessTime),
                               FileHelper.convert_windows_file_time(node.FileNameData.LastModificationTime),
                               FileHelper.convert_windows_file_time(node.FileNameData.LastChangeTime)])

        return column_list, data_table

    @staticmethod
    def map_mftattr_to_attr_list(type_code):
        """Return the display-attribute names for *type_code*.

        Returns a fresh list per call (matching the original literal
        returns), or None when the type code is not recognized.
        """
        attrs = MFTModelHelper._ATTRS_BY_TYPE_CODE.get(str(type_code))
        return list(attrs) if attrs is not None else None
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
"""
|
||||
MFT View Helper:
|
||||
This module provides helper methods for UI components.
|
||||
"""
|
||||
|
||||
import ipywidgets as widgets
|
||||
from ipywidgets import Button, GridBox, Layout, ButtonStyle, IntProgress
|
||||
|
||||
class MFTViewHelper(object):
    """Static factories for the ipywidgets controls used by the MFT browser UI."""

    # Button categories: regular content buttons vs. breadcrumb navigators.
    button_type_content = 'content'
    button_type_navigator = 'navigator'

    # Content categories rendered by the browser.
    content_type_folders = 'FOLDERS'
    content_type_files = 'FILES'
    content_type_breadcrumb = 'FOLDERS VIEWED'

    @staticmethod
    def get_summary_table_style():
        """Return the CSS block applied to the pandas-generated summary table."""
        return '''<style> .df th { text-align:center; font-size:large; padding-left:10px; padding-right:10px} tbody tr:nth-child(even) { background-color: #f2f2f2; } td { padding-left: 10px; padding-right: 5px; }</style>'''

    @staticmethod
    def define_box_layout():
        """Shared flex Layout for the folder/file/breadcrumb boxes."""
        return Layout(display='flex', align_items='flex-start', border='solid', margin='5px')

    @staticmethod
    def define_button_layout(button_type):
        """Layout for a button of *button_type*; None for unknown types."""
        if button_type == MFTViewHelper.button_type_content:
            return Layout(width='auto', height='25px')
        if button_type == MFTViewHelper.button_type_navigator:
            return Layout(width='auto', height='27px', border='2px solid black')
        return None

    @staticmethod
    def define_button_style(button_type):
        """ButtonStyle for a button of *button_type*; None for unknown types."""
        if button_type == MFTViewHelper.button_type_content:
            return ButtonStyle(button_color='#FFF', font_color='blue')
        if button_type == MFTViewHelper.button_type_navigator:
            return ButtonStyle(button_color='#d0d0ff')
        return None

    @staticmethod
    def create_first_button(content_type, icon_type):
        """Create the disabled header button that labels a content box."""
        if content_type == MFTViewHelper.content_type_folders:
            tooltip = 'Click to get lists of the subfolders and files under this folder'
        elif content_type == MFTViewHelper.content_type_files:
            tooltip = 'Click to get details of the file'
        elif content_type == MFTViewHelper.content_type_breadcrumb:
            tooltip = 'Click to go back to the folder'
        else:
            # Bug fix: an unrecognized content_type previously fell through
            # with tooltip unassigned and raised UnboundLocalError.
            tooltip = ''

        return widgets.Button(description=content_type + ' :',
                              disabled=True,
                              layout=Layout(width='auto', height='27px'),
                              icon=icon_type,
                              style=ButtonStyle(align_content='center', font_weight='bold'),
                              tooltip=tooltip)

    @staticmethod
    def create_button(button_type, icon_type, name, seg_num_low):
        """Create a clickable button captioned 'name [seg_num_low]'."""
        return widgets.Button(description=name.strip() + ' [' + seg_num_low + ']',
                              layout=MFTViewHelper.define_button_layout(button_type),
                              style=MFTViewHelper.define_button_style(button_type),
                              icon=icon_type)

    @staticmethod
    def define_int_progress_bar():
        """Create the standard 0-10 'Loading:' progress bar."""
        return widgets.IntProgress(value=0, min=0, max=10, step=1, description='Loading:', bar_style='success', orientation='horizontal', position='top')
|
|
@ -0,0 +1,6 @@
|
|||
"""
|
||||
SentinelWidgets: This package provides helper functionalities for UI components.
|
||||
"""
|
||||
|
||||
# __init__.py
|
||||
from .widget_view_helper import *
|
|
@ -0,0 +1,145 @@
|
|||
"""
|
||||
Widget View Helper:
|
||||
This module provides helper methods for UI controls and components.
|
||||
"""
|
||||
|
||||
import os
|
||||
import ipywidgets as widgets
|
||||
from ipywidgets import Button, GridBox, Layout, ButtonStyle, IntProgress
|
||||
from IPython.display import HTML
|
||||
|
||||
class WidgetViewHelper(object):
    """Static helpers that build ipywidgets controls and HTML snippets for
    the Azure Sentinel forensics notebooks."""

    @staticmethod
    def set_env(reset, env_dir, env_dict):
        """Fill *env_dict*, reusing *env_dir* values or prompting the user.

        For each key already in env_dict: when reset is False and the key
        exists in env_dir, reuse that value (echoing it); otherwise prompt
        via input() and mirror the answer into os.environ. Returns the
        updated env_dict.
        """
        for key in env_dict.keys():
            if not reset and key in env_dir:
                env_dict[key] = env_dir[key]
                print(key + '=' + env_dir[key])
            else:
                ip = input(key + ': ')
                os.environ[key] = ip
                env_dict[key] = ip

        return env_dict

    @staticmethod
    def select_vm(compute):
        """Dropdown of VM names from *compute*, sorted, first item selected."""
        vm_names = sorted(vm.name for vm in compute.get_vm_list())
        return widgets.Dropdown(options=vm_names, value=vm_names[0], description='VM:')

    @staticmethod
    def select_managed_disk(compute, vm_name):
        """Dropdown of managed-disk names attached to *vm_name*."""
        disk_list = compute.get_vm_disk_names(vm_name)
        return widgets.Dropdown(options=disk_list, value=disk_list[0], description='Disk:')

    @staticmethod
    def select_account_creation():
        """Dropdown: create a new storage account or reuse an existing one."""
        # Option strings are kept verbatim; callers may compare against them.
        storage_account_creation = ['Creating new account', 'Using exist account']
        return widgets.Dropdown(options=storage_account_creation, value=storage_account_creation[0], description='Storage Account Creation:')

    @staticmethod
    def select_blob_container_creation():
        """Dropdown: create a new blob container or reuse an existing one."""
        blob_container_creation = ['Creating new container', 'Using exist container']
        return widgets.Dropdown(options=blob_container_creation, value=blob_container_creation[0], description='Blob Container Creation:')

    @staticmethod
    def select_os():
        """Dropdown to choose the target OS type (Windows default)."""
        os_type_list = ['Windows', 'Linux']
        return widgets.Dropdown(options=os_type_list, value=os_type_list[0], description='OS Type:')

    @staticmethod
    def check_storage_account_name_availability(storage):
        """Prompt for a storage account name; return it if available, else None."""
        storage_account_name = input('Storage Account Name:')
        name_availability = storage.is_storage_account_name_available(storage_account_name)
        return storage_account_name if name_availability.name_available else None

    @staticmethod
    def create_storage_account_and_get_key(storage, storage_account_name, resource_group_for_storage):
        """Kick off storage-account creation, then return the account key.

        The async creation handle is intentionally not awaited (the
        original code bound it to an unused variable and ignored it too).
        """
        storage_location = input('Storage Location:')
        storage.create_storage_account_async(storage_account_name, resource_group_for_storage, **{'storage_location': storage_location})
        return storage.get_storage_account_key(storage_account_name, resource_group_for_storage)

    @staticmethod
    def select_storage_account(storage, resource_group_for_storage):
        """Dropdown of existing storage accounts in the resource group."""
        storage_account_list = storage.get_storage_account_names(resource_group_for_storage)
        return widgets.Dropdown(options=storage_account_list, value=storage_account_list[0], description='Existing Storage Accounts:')

    @staticmethod
    def select_blob_container(storage, resource_group_for_storage, storage_account_name):
        """Dropdown of blob containers in the given storage account."""
        blob_container_list = storage.get_container_name_list(resource_group_for_storage, storage_account_name, None)
        return widgets.Dropdown(options=blob_container_list, value=blob_container_list[0], description='Blob Containers:')

    @staticmethod
    def select_log_analytics_workspace(loganalytics):
        """Dropdown of Log Analytics workspace names."""
        workspace_name_list = loganalytics.get_workspace_name_list()
        return widgets.Dropdown(options=workspace_name_list, value=workspace_name_list[0], description='Workspace:')

    @staticmethod
    def select_multiple_tables(anomaly_lookup):
        """Multi-select of table names returned by *anomaly_lookup*."""
        table_list = anomaly_lookup.query_table_list()
        tables = sorted(table_list.TableName.tolist())
        # NOTE(review): ipywidgets' SelectMultiple trait is 'rows', not 'row';
        # the original kwarg is preserved — confirm intent before changing.
        return widgets.SelectMultiple(options=tables, row=len(tables), value=[], description='Tables:')

    @staticmethod
    def generate_upload_container_path(storage, os_type, sas_expiration_in_days):
        """Return the SAS-signed upload container path for *os_type*."""
        sas_url = storage.generate_blob_container_sas_url(sas_expiration_in_days)
        upload_container_path = storage.build_upload_container_path(os_type, sas_url)
        return upload_container_path

    @staticmethod
    def get_vm_extension_properties(os_type, upload_container_path, user_id=None):
        """Return (command_to_execute, file_list) for the VM extension.

        Supports 'Windows', 'Linux', and 'DSVM'. Raises ValueError for any
        other os_type (previously this fell through and raised
        UnboundLocalError on the return statement).
        """
        if os_type == 'Windows':
            command_to_execute = 'powershell -File installNotebookExtension.ps1 "{0}" >> out.txt'.format(upload_container_path)
            file_list = ['https://sentinelnotebooks.blob.core.windows.net/piwindowsstorage/installNotebookExtension.ps1', 'https://sentinelnotebooks.blob.core.windows.net/piwindowsstorage/piextension.zip']
        elif os_type == 'Linux':
            command_to_execute = './piondemand.sh "' + upload_container_path + '"'
            file_list = ['https://sentinelnotebooks.blob.core.windows.net/pilinuxstorage/piondemand.sh','https://sentinelnotebooks.blob.core.windows.net/pilinuxstorage/release/ondemand/stable/pilinux.ondemand.tar.bz2']
        elif os_type == 'DSVM':
            command_to_execute = './azureforensics.sh {0}'.format(user_id)
            file_list = ['https://sentinelnotebooks.blob.core.windows.net/forensicsnotebooks/azureforensics.sh','https://sentinelnotebooks.blob.core.windows.net/forensicsnotebooks/vhdexplorer.tar']
        else:
            raise ValueError('Unsupported os_type: {0}'.format(os_type))

        return command_to_execute, file_list

    @staticmethod
    def define_int_progress_bar():
        """Create the standard 0-10 'Loading:' progress bar."""
        return widgets.IntProgress(value=0, min=0, max=10, step=1, description='Loading:', bar_style='success', orientation='horizontal', position='top')

    # Copy text to Clipboard
    @staticmethod
    def copy_to_clipboard(url, text_body, label_text):
        """Return an HTML snippet with a link that copies *text_body* to the
        clipboard and then opens *url* in a popup window.

        The snippet embeds text_body into a hidden readonly input and uses
        document.execCommand("copy") on click.
        """
        html_str = (
            """<!DOCTYPE html>
<html><body>
<input id="sentinel_text_for_copy" type="text" readonly style="font-weight: bold; border: none; width:1px;" size = '"""
            + str(len(text_body))
            + """' value='"""
            + text_body
            + """'>
<a target="_new" href="javascript:void(0);" onclick="sentinel_copy()">""" + label_text + """</a>
<script>
var sentinel_win = null
function sentinel_copy() {
    var copyText = document.getElementById("sentinel_text_for_copy");
    copyText.select();
    document.execCommand("copy");

    var w = screen.width - 300;
    var h = screen.height - 300;
    params = 'width='+w+',height='+h
    sentinel_win = window.open('"""
            + url
            + """', 'sentinel_win', params);
}

</script>
</body></html>"""
        )

        return html_str

    @staticmethod
    def construct_url_for_log_analytics_logs(tenant_domain, subscription_id, resource_group, workspace_name):
        """Return the Azure-portal deep link to a workspace's Logs blade."""
        return 'https://ms.portal.azure.com/#@{0}/resource/subscriptions/{1}/resourceGroups/{2}/providers/Microsoft.OperationalInsights/workspaces/{3}/logs'.format(tenant_domain, subscription_id, resource_group, workspace_name)

    @staticmethod
    def display_html(inner_html):
        """Render *inner_html* in the notebook.

        NOTE(review): relies on IPython's display() being available in the
        notebook global namespace; it is not imported by this module.
        """
        display(HTML(inner_html))

    @staticmethod
    def pick_start_and_end_date():
        """Show two date pickers and return (start_date, end_date) widgets."""
        start_date = widgets.DatePicker(description='Pick a start date', disabled=False)
        end_date = widgets.DatePicker(description='Pick a end date', disabled=False)
        display(start_date)
        display(end_date)
        return start_date, end_date

    @staticmethod
    def select_multiple_items(label, item_name):
        """Show a labeled free-text area for entering one item per line."""
        label_item = widgets.Label(value=label)
        items = widgets.Textarea(value='', placeholder='One per line: \n 0x7ae3 \n 0x7ae6', description=item_name, disabled=False, rows=5)
        display(label_item)
        display(items)
        return items
|
|
@ -0,0 +1,36 @@
|
|||
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# Packaging script for the Azure-Sentinel-Utilities helper package.
# NOTE(review): 're' appears unused in this script — confirm before removing.
import re
import setuptools

# Single runtime dependency: the all-in-one 'azure' meta-package.
INSTALL_REQUIRES = ['azure>=4.0.0']

#with open("README.rst", "r") as f:
#    long_description_text = f.read()
# The full license text is read verbatim and passed to setuptools below.
with open("LICENSE.txt", "r") as fh:
    license_txt = fh.read()

setuptools.setup(
    name="Azure-Sentinel-Utilities",
    version="0.1.125",
    author="Azure Sentinel Notebooks Devs",
    author_email="zhzhao@microsoft.com",
    # NOTE(review): "suggets" looks like a typo for "suggest"; left as-is
    # because it is published package metadata — fix in a metadata update.
    description="AZURE SENTINEL NOTEBOOKS PYTHON TOOLS: This package is developed to support Azure Sentinel Notebooks. It is in an early preview stage so please provide feedback, report bugs, and suggets for new features.",
    #long_description='',
    #long_description_content_type="text/x-rst",
    # NOTE(review): 'license' conventionally holds a short identifier such
    # as "MIT", not the full license text — confirm before changing.
    license=license_txt,
    url="https://github.com/Azure/Azure-Sentinel",
    python_requires='>=3.6',
    packages=setuptools.find_packages(),
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3.6",
        "Operating System :: OS Independent",
    ],
    install_requires=INSTALL_REQUIRES,
    keywords=['security', 'azure', 'sentinel'],
    zip_safe=False,
)
|
Загрузка…
Ссылка в новой задаче