This commit is contained in:
Emma Rose 2019-10-29 13:56:18 -04:00
Parent ddba69159a 18c65ce4e2
Commit b875356bf4
No key found matching this signature
GPG key ID: 1486642516ED3535
40 changed files with 413 additions and 905 deletions

View file

@@ -139,3 +139,12 @@ rebuild: clean build-from-cwd
.PHONY: new-alert
new-alert: ## Create an example alert and working alert unit test
python tests/alert_templater.py
.PHONY: run-docs
run-docs: stop-docs ## Start container to serve up documentation for development
docker-compose -f docker/compose/dev-docs.yml up -d
@echo "Visit http://localhost:8000 - Feel free to update source code and then refresh webpage!"
.PHONY: stop-docs
stop-docs: ## Stop the docs development container if running
-docker-compose -f docker/compose/dev-docs.yml stop

View file

@@ -147,7 +147,7 @@ class AlertGeoModel(AlertTask):
summary = alert.summary(new_alert)
alert_dict = self.createAlertDict(
summary, 'geomodel', ['geomodel'], events, 'INFO')
summary, 'geomodel', ['geomodel'], events, 'WARNING')
# TODO: When we update to Python 3.7+, change to asdict(alert_produced)
alert_dict['details'] = {

View file

@@ -0,0 +1,4 @@
[options]
threshold_count = 1
search_depth_min = 60
host_exclusions = foo.example.com,bar.example.com

View file

@@ -11,10 +11,14 @@ from mozdef_util.query_models import SearchQuery, TermMatch
import re
class AlertLdapPasswordSpray(AlertTask):
class AlertLdapBruteforceGlobal(AlertTask):
def main(self):
self.parse_config('ldap_password_spray.conf', ['threshold_count', 'search_depth_min'])
self.parse_config('ldap_bruteforce_global.conf', ['threshold_count', 'search_depth_min', 'host_exclusions'])
search_query = SearchQuery(minutes=int(self.config.search_depth_min))
for host_exclusion in self.config.host_exclusions.split(","):
search_query.add_must_not([TermMatch("details.server", host_exclusion)])
search_query.add_must([
TermMatch('category', 'ldap'),
TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS')
@@ -24,7 +28,7 @@ class AlertLdapPasswordSpray(AlertTask):
self.walkAggregations(threshold=int(self.config.threshold_count))
def onAggregation(self, aggreg):
category = 'ldap'
category = 'bruteforce'
tags = ['ldap']
severity = 'WARNING'
email_list = set()
@@ -41,7 +45,7 @@ class AlertLdapPasswordSpray(AlertTask):
# if len(email_list) == 0:
# return None
summary = 'LDAP Password Spray Attack in Progress from {0} targeting the following account(s): {1}'.format(
summary = 'Global LDAP Bruteforce Attack in Progress from {0} targeting the following account(s): {1}'.format(
aggreg['value'],
", ".join(sorted(email_list)[:10])
)
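For reference, a minimal standalone sketch (not part of the commit) of how the new host_exclusions option feeds the query, using the mozdef_util query models shown above and the values from ldap_bruteforce_global.conf:

from mozdef_util.query_models import SearchQuery, TermMatch

# values as they would be read from ldap_bruteforce_global.conf
host_exclusions = "foo.example.com,bar.example.com"
search_depth_min = 60

search_query = SearchQuery(minutes=search_depth_min)
# each excluded LDAP server becomes a must-not clause, so its failed
# binds never count toward the aggregation threshold
for host_exclusion in host_exclusions.split(","):
    search_query.add_must_not([TermMatch("details.server", host_exclusion)])
search_query.add_must([
    TermMatch("category", "ldap"),
    TermMatch("details.response.error", "LDAP_INVALID_CREDENTIALS"),
])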

View file

View file

@@ -10,9 +10,9 @@ from lib.alerttask import AlertTask
from mozdef_util.query_models import SearchQuery, TermMatch
class AlertLdapBruteforce(AlertTask):
class AlertLdapBruteforceUser(AlertTask):
def main(self):
self.parse_config('ldap_bruteforce.conf', ['threshold_count', 'search_depth_min', 'host_exclusions'])
self.parse_config('ldap_bruteforce_user.conf', ['threshold_count', 'search_depth_min', 'host_exclusions'])
search_query = SearchQuery(minutes=int(self.config.search_depth_min))
search_query.add_must_not(TermMatch('details.user', ''))
search_query.add_must([
@@ -28,7 +28,7 @@ class AlertLdapBruteforce(AlertTask):
self.walkAggregations(threshold=int(self.config.threshold_count))
def onAggregation(self, aggreg):
category = 'ldap'
category = 'bruteforce'
tags = ['ldap']
severity = 'WARNING'
client_list = set()

View file

@@ -1,3 +0,0 @@
[options]
threshold_count = 1
search_depth_min = 60

View file

@@ -0,0 +1,2 @@
[options]
ip_whitelist = 169.254.169.254

View file

@@ -13,6 +13,8 @@ import netaddr
class AlertProxyDropIP(AlertTask):
def main(self):
self.parse_config("proxy_drop_ip.conf", ["ip_whitelist"])
search_query = SearchQuery(minutes=20)
search_query.add_must(
@@ -28,6 +30,9 @@ class AlertProxyDropIP(AlertTask):
search_query.add_must([QueryStringMatch("details.host: {}".format(ip_regex))])
for ip in self.config.ip_whitelist.split(","):
search_query.add_must_not([TermMatch("details.host", ip)])
self.filtersManual(search_query)
self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
self.walkAggregations(threshold=1)
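The whitelist added here follows the same pattern; a minimal sketch (assuming the conf value above and the mozdef_util query models) of how the AWS metadata address is kept out of the aggregation:

from mozdef_util.query_models import SearchQuery, TermMatch

ip_whitelist = "169.254.169.254"  # from proxy_drop_ip.conf
search_query = SearchQuery(minutes=20)
for ip in ip_whitelist.split(","):
    # whitelisted hosts are excluded before aggregating by source IP
    search_query.add_must_not([TermMatch("details.host", ip)])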

View file

@@ -1,66 +0,0 @@
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
import logging
import sys
from lib.alerttask import AlertTask
from mozdef_util.query_models import SearchQuery, TermMatch
logger = logging.getLogger(__name__)
def setup_logging():
logger = logging.getLogger()
h = logging.StreamHandler(sys.stdout)
logger.setLevel(logging.DEBUG)
return logger
class AlertCloudtrailLoggingDisabled(AlertTask):
def _configureKombu(self):
"""Override the normal behavior of this in order to run in lambda."""
pass
def alertToMessageQueue(self, alertDict):
"""Override the normal behavior of this in order to run in lambda."""
pass
def main(self):
# How many minutes back in time would you like to search?
search_query = SearchQuery(minutes=15)
# What would you like to search for?
# search_query.add_must([
# TermMatch('source', 'cloudtrail'),
# TermMatch('details.eventname', 'DescribeTable')
# ])
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
# Useful tag and severity rankings for your alert.
tags = ['cloudtrail', 'aws', 'cloudtrailpagerduty']
severity = 'CRITICAL'
# What message should surface in the user interface when this fires?
summary = 'The alert fired!'
return self.createAlertDict(summary, category, tags, [event], severity)
# Learn more about MozDef alerts by exploring the "Alert class!"
def handle(event, context):
logger = setup_logging()
logger.debug('Function initialized.')
a = AlertCloudtrailLoggingDisabled()
return a.main()

View file

@@ -1,12 +0,0 @@
from mozdef_util.plugin_set import PluginSet
from mozdef_util.utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata

View file

@@ -1,549 +0,0 @@
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
import collections
import json
import kombu
import os
import sys
import socket
import netaddr
from configlib import getConfig, OptionParser
from datetime import datetime
from collections import Counter
from celery import Task
from celery.utils.log import get_task_logger
from config import RABBITMQ, ES, ALERT_PLUGINS
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import TermMatch, ExistsMatch
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from lib.alert_plugin_set import AlertPluginSet
# utility functions used by AlertTask.mostCommon
# determine most common values
# in a list of dicts
def keypaths(nested):
""" return a list of nested dict key paths
like: [u'_source', u'details', u'program']
"""
for key, value in nested.items():
if isinstance(value, collections.Mapping):
for subkey, subvalue in keypaths(value):
yield [key] + subkey, subvalue
else:
yield [key], value
def dictpath(path):
""" split a string representing a
nested dictionary path key.subkey.subkey
"""
for i in path.split("."):
yield "{0}".format(i)
def getValueByPath(input_dict, path_string):
"""
Gets data/value from a dictionary using a dotted accessor-string
http://stackoverflow.com/a/7534478
path_string can be key.subkey.subkey.subkey
"""
return_data = input_dict
for chunk in path_string.split("."):
return_data = return_data.get(chunk, {})
return return_data
def hostname_from_ip(ip):
try:
reversed_dns = socket.gethostbyaddr(ip)
return reversed_dns[0]
except socket.herror:
return None
def add_hostname_to_ip(ip, output_format, require_internal=True):
ip_obj = netaddr.IPNetwork(ip)[0]
if require_internal and not ip_obj.is_private():
return ip
hostname = hostname_from_ip(ip)
if hostname is None:
return ip
else:
return output_format.format(ip, hostname)
class AlertTask(Task):
abstract = True
def __init__(self):
self.alert_name = self.__class__.__name__
self.main_query = None
# Used to store any alerts that were thrown
self.alert_ids = []
# List of events
self.events = None
# List of aggregations
# e.g. when aggregField is email: [{value:'evil@evil.com',count:1337,events:[...]}, ...]
self.aggregations = None
self.log.debug("starting {0}".format(self.alert_name))
self.log.debug(RABBITMQ)
self.log.debug(ES)
self._configureKombu()
self._configureES()
# We want to select all event indices
# and filter out the window based on timestamp
# from the search query
self.event_indices = ["events-*"]
def classname(self):
return self.__class__.__name__
@property
def log(self):
return get_task_logger("%s.%s" % (__name__, self.alert_name))
def parse_config(self, config_filename, config_keys):
myparser = OptionParser()
self.config = None
(self.config, args) = myparser.parse_args([])
for config_key in config_keys:
temp_value = getConfig(config_key, "", config_filename)
setattr(self.config, config_key, temp_value)
def _discover_task_exchange(self):
"""Use configuration information to understand the message queue protocol.
return: amqp, sqs
"""
return getConfig("mqprotocol", "amqp", None)
def __build_conn_string(self):
exchange_protocol = self._discover_task_exchange()
if exchange_protocol == "amqp":
connString = "amqp://{0}:{1}@{2}:{3}//".format(
RABBITMQ["mquser"],
RABBITMQ["mqpassword"],
RABBITMQ["mqserver"],
RABBITMQ["mqport"],
)
return connString
elif exchange_protocol == "sqs":
connString = "sqs://{}".format(getConfig("alertSqsQueueUrl", None, None))
if connString:
connString = connString.replace('https://','')
return connString
def _configureKombu(self):
"""
Configure kombu for amqp or sqs
"""
try:
connString = self.__build_conn_string()
self.mqConn = kombu.Connection(connString)
if connString.find('sqs') == 0:
self.mqConn.transport_options['region'] = os.getenv('DEFAULT_AWS_REGION', 'us-west-2')
self.alertExchange = kombu.Exchange(
name=RABBITMQ["alertexchange"], type="topic", durable=True
)
self.alertExchange(self.mqConn).declare()
alertQueue = kombu.Queue(
os.getenv('OPTIONS_ALERTSQSQUEUEURL').split('/')[4], exchange=self.alertExchange
)
else:
self.alertExchange = kombu.Exchange(
name=RABBITMQ["alertexchange"], type="topic", durable=True
)
self.alertExchange(self.mqConn).declare()
alertQueue = kombu.Queue(
RABBITMQ["alertqueue"], exchange=self.alertExchange
)
alertQueue(self.mqConn).declare()
self.mqproducer = self.mqConn.Producer(serializer="json")
self.log.debug("Kombu configured")
except Exception as e:
self.log.error(
"Exception while configuring kombu for alerts: {0}".format(e)
)
def _configureES(self):
"""
Configure elasticsearch client
"""
try:
self.es = ElasticsearchClient(ES["servers"])
self.log.debug("ES configured")
except Exception as e:
self.log.error("Exception while configuring ES for alerts: {0}".format(e))
def mostCommon(self, listofdicts, dictkeypath):
"""
Given a list containing dictionaries,
return the most common entries
along a key path separated by .
i.e. dictkey.subkey.subkey
returned as a list of tuples
[(value,count),(value,count)]
"""
inspectlist = list()
path = list(dictpath(dictkeypath))
for i in listofdicts:
for k in list(keypaths(i)):
if not (set(k[0]).symmetric_difference(path)):
inspectlist.append(k[1])
return Counter(inspectlist).most_common()
def alertToMessageQueue(self, alertDict):
"""
Send alert to the kombu based message queue. The default is rabbitmq.
"""
try:
# cherry pick items from the alertDict to send to the alerts messageQueue
mqAlert = dict(severity="INFO", category="")
if "severity" in alertDict:
mqAlert["severity"] = alertDict["severity"]
if "category" in alertDict:
mqAlert["category"] = alertDict["category"]
if "utctimestamp" in alertDict:
mqAlert["utctimestamp"] = alertDict["utctimestamp"]
if "eventtimestamp" in alertDict:
mqAlert["eventtimestamp"] = alertDict["eventtimestamp"]
mqAlert["summary"] = alertDict["summary"]
self.log.debug(mqAlert)
ensurePublish = self.mqConn.ensure(
self.mqproducer, self.mqproducer.publish, max_retries=10
)
ensurePublish(
alertDict,
exchange=self.alertExchange,
routing_key=RABBITMQ["alertqueue"],
)
self.log.debug("alert sent to the alert queue")
except Exception as e:
self.log.error(
"Exception while sending alert to message queue: {0}".format(e)
)
def alertToES(self, alertDict):
"""
Send alert to elasticsearch
"""
try:
res = self.es.save_alert(body=alertDict)
self.log.debug("alert sent to ES")
self.log.debug(res)
return res
except Exception as e:
self.log.error("Exception while pushing alert to ES: {0}".format(e))
def tagBotNotify(self, alert):
"""
Tag alert to be excluded based on severity
If 'ircchannel' is set in an alert, we automatically notify mozdefbot
"""
alert["notify_mozdefbot"] = True
if alert["severity"] == "NOTICE" or alert["severity"] == "INFO":
alert["notify_mozdefbot"] = False
# If an alert sets a specific ircchannel, then we should probably always notify in mozdefbot
if (
"ircchannel" in alert and alert["ircchannel"] != "" and alert["ircchannel"] is not None
):
alert["notify_mozdefbot"] = True
return alert
def saveAlertID(self, saved_alert):
"""
Save alert to self so we can analyze it later
"""
self.alert_ids.append(saved_alert["_id"])
def filtersManual(self, query):
"""
Configure filters manually
query is a search query object with date_timedelta populated
"""
# Don't fire on already alerted events
duplicate_matcher = TermMatch("alert_names", self.determine_alert_classname())
if duplicate_matcher not in query.must_not:
query.add_must_not(duplicate_matcher)
self.main_query = query
def determine_alert_classname(self):
alert_name = self.classname()
# Allow alerts like the generic alerts (one python alert that represents many 'alerts')
# to customize the alert name
if hasattr(self, "custom_alert_name"):
alert_name = self.custom_alert_name
return alert_name
def executeSearchEventsSimple(self):
"""
Execute the search for simple events
"""
return self.main_query.execute(self.es, indices=self.event_indices)
def searchEventsSimple(self):
"""
Search events matching filters, store events in self.events
"""
try:
results = self.executeSearchEventsSimple()
self.events = results["hits"]
self.log.debug(self.events)
except Exception as e:
self.log.error("Error while searching events in ES: {0}".format(e))
def searchEventsAggregated(self, aggregationPath, samplesLimit=5):
"""
Search events, aggregate matching ES filters by aggregationPath,
store them in self.aggregations as a list of dictionaries
keys:
value: the text value that was found in the aggregationPath
count: the hitcount of the text value
events: the sampled list of events that matched
allevents: the unsampled, total list of matching events
aggregationPath can be key.subkey.subkey to specify a path to a dictionary value
relative to the _source that's returned from elastic search.
ex: details.sourceipaddress
"""
# We automatically add the key that we're matching on
# for aggregation, as a query requirement
aggreg_key_exists = ExistsMatch(aggregationPath)
if aggreg_key_exists not in self.main_query.must:
self.main_query.add_must(aggreg_key_exists)
try:
esresults = self.main_query.execute(self.es, indices=self.event_indices)
results = esresults["hits"]
# List of aggregation values that can be counted/summarized by Counter
# Example: ['evil@evil.com','haxoor@noob.com', 'evil@evil.com'] for an email aggregField
aggregationValues = []
for r in results:
aggregationValues.append(getValueByPath(r["_source"], aggregationPath))
# [{value:'evil@evil.com',count:1337,events:[...]}, ...]
aggregationList = []
for i in Counter(aggregationValues).most_common():
idict = {"value": i[0], "count": i[1], "events": [], "allevents": []}
for r in results:
if getValueByPath(r["_source"], aggregationPath) == i[0]:
# copy events detail into this aggregation up to our samples limit
if len(idict["events"]) < samplesLimit:
idict["events"].append(r)
# also copy all events to a non-sampled list
# so we mark all events as alerted and don't re-alert
idict["allevents"].append(r)
aggregationList.append(idict)
self.aggregations = aggregationList
self.log.debug(self.aggregations)
except Exception as e:
self.log.error("Error while searching events in ES: {0}".format(e))
def walkEvents(self, **kwargs):
"""
Walk through events, provide some methods to hook in alerts
"""
if len(self.events) > 0:
for i in self.events:
alert = self.onEvent(i, **kwargs)
if alert:
alert = self.tagBotNotify(alert)
self.log.debug(alert)
alert = self.alertPlugins(alert)
alertResultES = self.alertToES(alert)
self.tagEventsAlert([i], alertResultES)
self.alertToMessageQueue(alert)
self.hookAfterInsertion(alert)
self.saveAlertID(alertResultES)
# did we not match anything?
# can also be used as an alert trigger
if len(self.events) == 0:
alert = self.onNoEvent(**kwargs)
if alert:
alert = self.tagBotNotify(alert)
self.log.debug(alert)
alertResultES = self.alertToES(alert)
self.alertToMessageQueue(alert)
self.hookAfterInsertion(alert)
self.saveAlertID(alertResultES)
def walkAggregations(self, threshold, config=None):
"""
Walk through aggregations, provide some methods to hook in alerts
"""
if len(self.aggregations) > 0:
for aggregation in self.aggregations:
if aggregation["count"] >= threshold:
aggregation["config"] = config
alert = self.onAggregation(aggregation)
if alert:
alert = self.tagBotNotify(alert)
self.log.debug(alert)
alert = self.alertPlugins(alert)
alertResultES = self.alertToES(alert)
# even though we only sample events in the alert
# tag all events as alerted to avoid re-alerting
# on events we've already processed.
self.tagEventsAlert(aggregation["allevents"], alertResultES)
self.alertToMessageQueue(alert)
self.saveAlertID(alertResultES)
def alertPlugins(self, alert):
"""
Send alerts through a plugin system
"""
plugin_dir = os.path.join(os.path.dirname(__file__), "../plugins")
plugin_set = AlertPluginSet(plugin_dir, ALERT_PLUGINS)
alertDict = plugin_set.run_plugins(alert)[0]
return alertDict
def createAlertDict(
self,
summary,
category,
tags,
events,
severity="NOTICE",
url=None,
ircchannel=None,
):
"""
Create an alert dict
"""
alert = {
"utctimestamp": toUTC(datetime.now()).isoformat(),
"severity": severity,
"summary": summary,
"category": category,
"tags": tags,
"events": [],
"ircchannel": ircchannel,
}
if url:
alert["url"] = url
for e in events:
alert["events"].append(
{
"documentindex": e["_index"],
"documentsource": e["_source"],
"documentid": e["_id"],
}
)
self.log.debug(alert)
return alert
def onEvent(self, event, *args, **kwargs):
"""
To be overridden by children to run their code
to be used when creating an alert using an event
must return an alert dict or None
"""
pass
def onNoEvent(self, *args, **kwargs):
"""
To be overridden by children to run their code
when NOTHING matches a filter,
which can be used to trigger on the absence of
events, much like a dead man's switch.
This is to be used when creating an alert using an event
must return an alert dict or None
"""
pass
def onAggregation(self, aggregation):
"""
To be overridden by children to run their code
to be used when creating an alert using an aggregation
must return an alert dict or None
"""
pass
def hookAfterInsertion(self, alert):
"""
To be overridden by children to run their code
after an alert has been inserted
"""
pass
def tagEventsAlert(self, events, alertResultES):
"""
Update the event with the alertid/index
and update the alert_names on the event itself so it's
not re-alerted
"""
try:
for event in events:
if "alerts" not in event["_source"]:
event["_source"]["alerts"] = []
event["_source"]["alerts"].append(
{"index": alertResultES["_index"], "id": alertResultES["_id"]}
)
if "alert_names" not in event["_source"]:
event["_source"]["alert_names"] = []
event["_source"]["alert_names"].append(self.determine_alert_classname())
self.es.save_event(
index=event["_index"], body=event["_source"], doc_id=event["_id"]
)
# We refresh here to ensure our changes to the events will show up for the next search query results
self.es.refresh(event["_index"])
except Exception as e:
self.log.error("Error while updating events in ES: {0}".format(e))
def main(self):
"""
To be overridden by children to run their code
"""
pass
def run(self, *args, **kwargs):
"""
Main method launched by celery periodically
"""
try:
self.main(*args, **kwargs)
self.log.debug("finished")
except Exception as e:
self.log.exception("Exception in main() method: {0}".format(e))
def parse_json_alert_config(self, config_file):
"""
Helper function to parse an alert config file
"""
alert_dir = os.path.join(os.path.dirname(__file__), "..")
config_file_path = os.path.abspath(os.path.join(alert_dir, config_file))
json_obj = {}
with open(config_file_path, "r") as fd:
try:
json_obj = json.load(fd)
except ValueError:
sys.stderr.write("FAILED to open the configuration file\n")
return json_obj
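As a quick illustration of the dotted-path helpers in the file removed above (a sketch, not part of the commit), getValueByPath walks nested dictionaries one key at a time and returns an empty dict for missing paths:

def getValueByPath(input_dict, path_string):
    # walk the dict along a dotted accessor string like 'a.b.c'
    return_data = input_dict
    for chunk in path_string.split("."):
        return_data = return_data.get(chunk, {})
    return return_data

event = {"_source": {"details": {"program": "sshd"}}}
assert getValueByPath(event, "_source.details.program") == "sshd"
assert getValueByPath(event, "_source.details.missing") == {}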

View file

@@ -1,82 +0,0 @@
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from celery.schedules import crontab, timedelta
import time
import logging
import os
ALERTS = {
# 'pythonfile.pythonclass':{'schedule': crontab(minute='*/10')},
# 'pythonfile.pythonclass':{'schedule': timedelta(minutes=10),'kwargs':dict(hostlist=['nsm3', 'nsm5'])},
}
ALERT_PLUGINS = [
# 'relative pythonfile name (exclude the .py) - EX: sso_dashboard',
]
ALERT_ACTIONS = [
# 'relative pythonfile name (exclude the .py) - EX: sso_dashboard',
]
RABBITMQ = {
'mqserver': 'localhost',
'mquser': 'guest',
'mqpassword': 'guest',
'mqport': 5672,
'alertexchange': 'alerts',
'alertqueue': 'mozdef.alert'
}
if os.getenv('OPTIONS_ESSERVERS'):
ES = {
'servers': [os.getenv('OPTIONS_ESSERVERS')]
}
else:
ES = {
'servers': ['http://localhost:9200']
}
RESTAPI_URL = "http://rest:8081"
# Leave empty for no auth
RESTAPI_TOKEN = ""
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'simple': {
'format': '%(levelname)s %(message)s',
'datefmt': '%y %b %d, %H:%M:%S',
},
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d: %(message)s'
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'celery': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': 'celery.log',
'formatter': 'standard',
'maxBytes': 1024 * 1024 * 100, # 100 mb
},
},
'loggers': {
'celery': {
'handlers': ['celery', 'console'],
'level': 'INFO',
},
}
}
logging.Formatter.converter = time.gmtime

View file

@@ -1,9 +0,0 @@
from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about whether ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)

View file

@@ -1,3 +1,6 @@
[options]
db_download_location = https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz
db_location = /opt/mozdef/envs/mozdef/data/GeoLite2-City.mmdb
db_download_location = https://updates.maxmind.com/geoip/databases/GeoLite2-City/update
# Set to empty string for no auth
account_id = ""
license_key = ""

View file

@@ -9,11 +9,9 @@ import sys
import os
from configlib import getConfig, OptionParser
import urllib.request
import urllib.error
import urllib.parse
import requests
import tempfile
import tarfile
import gzip
from mozdef_util.geo_ip import GeoIP
from mozdef_util.utilities.logger import logger, initLogger
@@ -21,18 +19,22 @@ from mozdef_util.utilities.logger import logger, initLogger
def fetch_db_data(db_download_location):
logger.debug('Fetching db data from ' + db_download_location)
response = urllib.request.urlopen(db_download_location)
db_raw_data = response.read()
auth_creds = None
if options.account_id != '' and options.license_key != '':
logger.debug('Using credentials for maxmind')
auth_creds = (options.account_id, options.license_key)
response = requests.get(db_download_location, auth=auth_creds)
if not response.ok:
raise Exception("Received bad response from maxmind server: {0}".format(response.text))
db_raw_data = response.content
with tempfile.NamedTemporaryFile(mode='wb') as temp:
logger.debug('Writing compressed gzip to temp file: ' + temp.name)
temp.write(db_raw_data)
temp.flush()
logger.debug('Extracting gzip data from ' + temp.name)
tar = tarfile.open(temp.name)
for tarinfo in tar:
if tarinfo.name.endswith('GeoLite2-City.mmdb'):
extracted_file = tar.extractfile(tarinfo.name)
return extracted_file.read()
gfile = gzip.GzipFile(temp.name, "rb")
data = gfile.read()
return data
def save_db_data(save_path, db_data):
@@ -64,6 +66,9 @@ def initConfig():
options.db_download_location = getConfig('db_download_location', '', options.configfile)
options.db_location = getConfig('db_location', '', options.configfile)
options.account_id = getConfig('account_id', '', options.configfile)
options.license_key = getConfig('license_key', '', options.configfile)
if __name__ == '__main__':
parser = OptionParser()
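Conceptually, the new fetch path boils down to the sketch below (a simplification, not the commit's exact code: it decompresses in memory with gzip.decompress instead of going through a temp file, and the credential placeholders stand in for the account_id/license_key conf options added above):

import gzip
import requests

db_download_location = "https://updates.maxmind.com/geoip/databases/GeoLite2-City/update"
auth_creds = ("ACCOUNT_ID", "LICENSE_KEY")  # placeholders for the conf options

response = requests.get(db_download_location, auth=auth_creds)
if not response.ok:
    raise Exception("Received bad response from maxmind server: {0}".format(response.text))
# the update endpoint serves a gzipped .mmdb rather than the old .tar.gz bundle
db_data = gzip.decompress(response.content)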

View file

@@ -0,0 +1,11 @@
---
version: '3.7'
services:
docs:
image: dldl/sphinx-server
networks:
- default
volumes:
- ../../docs/source:/web
ports:
- 8000:8000

View file

@@ -16,7 +16,6 @@ RUN \
glibc-devel \
gcc \
libstdc++ \
libffi-devel \
zlib-devel \
libcurl-devel \
openssl \

View file

@@ -31,7 +31,6 @@ RUN \
gcc \
gcc-c++ \
libstdc++ \
libffi-devel \
zlib-devel \
nodejs && \
yum clean all && \

View file

@@ -1,5 +0,0 @@
Code
****
.. include:: plugins.rst
.. include:: actions.rst

View file

@@ -0,0 +1,11 @@
Development
===========
.. toctree::
:maxdepth: 2
alert_development_guide
plugins
actions
mozdef_util
cicd

View file

@@ -1,8 +1,3 @@
.. MozDef documentation master file, created by
sphinx-quickstart on Wed Mar 19 10:09:37 2014.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Table of Contents
=================
@@ -13,25 +8,14 @@ Table of Contents
introduction
demo
installation
alert_development_guide
mozdef_util
screenshots
usage
cloud_deployment
advanced_settings
code
cicd
development
references
contributors
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
License
=======

View file

@@ -86,30 +86,3 @@ Status
------
MozDef has been in production at Mozilla since 2014 where we are using it to process over 300 million events per day.
Roadmap
-------
Initial Release:
* Facilitate replacing base SIEM functionality including log input, event management, search, alerts, basic correlations
* Enhance the incident workflow UI to enable realtime collaboration
* Enable basic plug-ins to the event input stream for metadata, additional parsing, categorization and basic machine learning
* Support as many common event/log shippers as possible with repeatable recipes
* Base integration into Mozilla's defense mechanisms for automation
* 3D visualizations of threat actors
* Fine tuning of interactions between meteor, mongo, dc.js
Recently implemented:
* Support for OIDC authentication/authorization
* Docker containers for each service
* Updates to support recent (breaking) versions of Elasticsearch
Future (join us!):
* Correlation through machine learning, AI
* Enhanced search for alerts, events, attackers within the MozDef UI
* Integration into common defense mechanisms used outside Mozilla

View file

@@ -1,5 +1,8 @@
References
==========
* GeoModel
* `v0.1 Specification <geomodel/specifications/v0_1.html>`_
.. toctree::
:maxdepth: 2
geomodel/specifications/v0_1
screenshots

View file

@@ -10,6 +10,10 @@ import { Session } from 'meteor/session';
import { Tracker } from 'meteor/tracker';
if (Meteor.isClient) {
let pageIsUnloading = false;
document.addEventListener('readystatechange', event => {
pageIsUnloading = true;
});
//events that could fire in any sub template
Template.layout.events({
"click .ipmenu-copy": function(e,t){
@@ -77,7 +81,7 @@ if (Meteor.isClient) {
// Verify a user is actually logged in and Meteor is running
if ((Meteor.user() !== null) && (Meteor.status().connected)) {
// Status 0 means the request failed (CORS denies access)
if (xhrInstance.readyState == 4 && (xhrInstance.status == 302 || xhrInstance.status == 0)) {
if (xhrInstance.readyState == 4 && (xhrInstance.status == 302 || xhrInstance.status == 0) && !pageIsUnloading) {
location.reload();
}
}

View file

@@ -372,10 +372,6 @@ caption, legend {
border-color: var(--ack-edit-border-color);
}
.btn-primary {
height: 30px;
}
input[type="search"] {
border-radius: 15px;
}

View file

@@ -8,6 +8,7 @@ import json
from datetime import datetime
from platform import node
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.utilities.key_exists import key_exists
def isIPv4(ip):
@@ -267,6 +268,9 @@ class message(object):
'{destinationipaddress}:'
'{destinationport}'
).format(**newmessage['details'])
if key_exists('details.tls', newmessage):
newmessage['details']['tls_encrypted'] = newmessage['details']['tls']
del(newmessage['details']['tls'])
return (newmessage, metadata)
if logtype == 'ssh':
@@ -514,17 +518,18 @@ class message(object):
newmessage['details']['client'] = 'unknown'
if 'service' not in newmessage['details']:
newmessage['details']['service'] = 'unknown'
if 'success' not in newmessage['details']:
newmessage['details']['success'] = 'unknown'
if 'error_msg' not in newmessage['details']:
newmessage['details']['error_msg'] = ''
newmessage['summary'] = (
'{sourceipaddress} -> '
'{destinationipaddress}:'
'{destinationport} '
'request {request_type} '
'success {success}'
'request {request_type}'
).format(**newmessage['details'])
if 'success' in newmessage['details']:
newmessage['summary'] += ' success {0}'.format(newmessage['details']['success'])
else:
newmessage['summary'] += ' success unknown'
return (newmessage, metadata)
if logtype == 'ntlm':
@@ -544,17 +549,17 @@ class message(object):
del(newmessage['details']['username'])
else:
newmessage['details']['ntlm']['username'] = 'unknown'
if 'success' not in newmessage['details']:
newmessage['details']['success'] = 'unknown'
if 'status' not in newmessage['details']:
newmessage['details']['status'] = 'unknown'
newmessage['summary'] = (
'NTLM: {sourceipaddress} -> '
'{destinationipaddress}:'
'{destinationport} '
'success {success} '
'status {status}'
).format(**newmessage['details'])
if 'success' in newmessage['details']:
newmessage['summary'] += 'success {0} status {1}'.format(newmessage['details']['success'],newmessage['details']['status'])
else:
newmessage['summary'] += 'success unknown status {0}'.format(newmessage['details']['status'])
return (newmessage, metadata)
if logtype == 'smb_files':

View file

@@ -28,7 +28,14 @@ class message(object):
'details.requestparameters.callerreference',
'details.requestparameters.description',
'details.requestparameters.describehostsrequest',
'details.requestparameters.describeflowlogsrequest',
'details.requestparameters.describeflowlogsrequest.filter.value',
'details.requestparameters.describenatgatewaysrequest',
'details.requestparameters.describevpcendpointsrequest',
'details.requestparameters.describevpcendpointsrequest.filter',
'details.requestparameters.describevpcendpointsrequest.filter.value',
'details.requestparameters.describevpcendpointsrequest.vpcendpointid',
'details.requestparameters.describevpcendpointserviceconfigurationsrequest',
'details.requestparameters.disableapitermination',
'details.requestparameters.distributionconfig.callerreference',
'details.requestparameters.domainname',
@@ -44,6 +51,7 @@ class message(object):
'details.responseelements.role',
'details.responseelements.policy',
'details.requestparameters.rule',
'details.responseelements.createddate',
'details.responseelements.credentials',
'details.responseelements.subnets',
'details.responseelements.endpoint',

View file

@@ -43,42 +43,30 @@ class message(object):
def onMessage(self, message, metadata):
if 'details' in message:
if 'sourceipaddress' in message['details']:
ipText = message['details']['sourceipaddress']
if isIP(ipText):
ip = netaddr.IPNetwork(ipText)[0]
if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
'''lookup geoip info'''
message['details']['sourceipgeolocation'] = self.ipLocation(ipText)
# Add a geo_point coordinates if latitude and longitude exist
if 'latitude' in message['details']['sourceipgeolocation'] and 'longitude' in message['details']['sourceipgeolocation']:
message['details']['sourceipgeopoint'] = '{0},{1}'.format(
message['details']['sourceipgeolocation']['latitude'],
message['details']['sourceipgeolocation']['longitude']
)
keys = ['source', 'destination']
for key in keys:
ip_key = '{0}ipaddress'.format(key)
if ip_key in message['details']:
ipText = message['details'][ip_key]
if isIP(ipText):
ip = netaddr.IPNetwork(ipText)[0]
if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
'''lookup geoip info'''
geo_key = '{0}ipgeolocation'.format(key)
message['details'][geo_key] = self.ipLocation(ipText)
# Add a geo_point coordinates if latitude and longitude exist
if 'latitude' in message['details'][geo_key] and 'longitude' in message['details'][geo_key]:
if message['details'][geo_key]['latitude'] and message['details'][geo_key]['latitude'] != '' and \
message['details'][geo_key]['longitude'] and message['details'][geo_key]['longitude'] != '':
geopoint_key = '{0}ipgeopoint'.format(key)
message['details'][geopoint_key] = '{0},{1}'.format(
message['details'][geo_key]['latitude'],
message['details'][geo_key]['longitude']
)
else:
# invalid ip sent in the field
# if we send on, Elasticsearch will error, so set it
# to a valid, yet meaningless value
message['details']['sourceipaddress'] = '0.0.0.0'
if 'destinationipaddress' in message['details']:
ipText = message['details']['destinationipaddress']
if isIP(ipText):
ip = netaddr.IPNetwork(ipText)[0]
if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
'''lookup geoip info'''
message['details']['destinationipgeolocation'] = self.ipLocation(ipText)
# Add a geo_point coordinates if latitude and longitude exist
if 'latitude' in message['details']['destinationipgeolocation'] and 'longitude' in message['details']['destinationipgeolocation']:
message['details']['destinationipgeopoint'] = '{0},{1}'.format(
message['details']['destinationipgeolocation']['latitude'],
message['details']['destinationipgeolocation']['longitude']
)
else:
# invalid ip sent in the field
# if we send on, Elasticsearch will error, so set it
# to a valid, yet meaningless value
message['details']['destinationipaddress'] = '0.0.0.0'
else:
# invalid ip sent in the field
# if we send on, Elasticsearch will error, so set it
# to a valid, yet meaningless value
message['details'][ip_key] = '0.0.0.0'
return (message, metadata)

31 mq/plugins/ldap_fixup.py Normal file
View file

@@ -0,0 +1,31 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from mozdef_util.utilities.key_exists import key_exists
class message(object):
def __init__(self):
'''
rewrites ldap's details.tls field and sets source
'''
self.registration = ['LDAP-Humanizer', 'ldap']
self.priority = 5
def onMessage(self, message, metadata):
# check for category like 'ldap' and rename the tls field
if key_exists('category', message):
data = message.get('category')
if data == 'ldap':
if key_exists('details.tls', message):
message['details']['tls_encrypted'] = message['details']['tls']
del(message['details']['tls'])
if 'source' not in message:
message['source'] = 'ldap'
return (message, metadata)
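A quick usage sketch of the plugin above (the import path matches the new unit test later in this commit):

from mq.plugins.ldap_fixup import message

plugin = message()
msg = {'category': 'ldap', 'details': {'tls': 'true'}}
fixed, _metadata = plugin.onMessage(msg, {})
# details.tls has been renamed to details.tls_encrypted and source set
assert fixed['details'] == {'tls_encrypted': 'true'}
assert fixed['source'] == 'ldap'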

View file

@@ -3,6 +3,8 @@
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from mozdef_util.utilities.key_exists import key_exists
class message(object):
def __init__(self):
@@ -11,16 +13,30 @@ class message(object):
set the priority if you have a preference for order of plugins to run. 0 goes first, 100 is assumed/default if not sent
'''
# get zoom event data
self.registration = ['zoom']
self.registration = ['zoom_host']
self.priority = 2
def onMessage(self, message, metadata):
# check for messages we have vetted as n/a and prevalent
# from a sec standpoint and drop them
# from a sec standpoint and drop them; also rewrite fields
# to drop unnecessary expansion
# ganglia monitor daemon
if 'details' in message and isinstance(message['details'], dict):
if 'topic' in message['details']['payload']['object']:
# omit "topic" field
if key_exists('details.payload.object.topic', message):
del message['details']['payload']['object']['topic']
# rewrite summary to be more informative
message['summary'] = ""
if key_exists('details.event', message):
message['summary'] = "zoom: {0}".format(message['details']['event'])
if key_exists('details.payload.object.participant.user_name', message):
message['summary'] += " triggered by user {0}".format(message['details']['payload']['object']['participant']['user_name'])
elif key_exists('details.payload.operator', message):
message['summary'] += " triggered by user {0}".format(message['details']['payload']['operator'])
# drop duplicated account_id field
if key_exists('details.payload.account_id', message) and key_exists('details.payload.object.account_id', message):
if message['details']['payload']['account_id'] == message['details']['payload']['object']['account_id']:
del message['details']['payload']['object']['account_id']
return (message, metadata)

View file

@@ -1,62 +1,43 @@
amqp==2.2.2
anyjson==0.3.3
appdirs==1.4.0
boto3==1.7.67
bottle==0.12.4
celery==4.1.0
celery[sqs]==4.1.0
cffi==1.9.1
configlib==2.0.4
configparser==3.5.0b2
cryptography==2.3.1
dnspython==1.15.0
docutils==0.14
duo-client==4.1
elasticsearch==6.3.1
elasticsearch-dsl==6.3.1
enum34==1.1.6
futures==3.1.1
geoip2==2.5.0
GitPython==2.1.3
glob2==0.4.1
google-api-python-client==1.4.0
hjson==2.0.2
httplib2==0.13.0
idna==2.6
ipaddr==2.2.0
ipaddress==1.0.17
ipwhois==0.15.0
jmespath==0.9.3
kombu==4.1.0
meld3==1.0.2
mozdef-client==1.0.11
mozdef-util==3.0.4
netaddr==0.7.19
nose==1.3.7
oauth2client==1.4.12
packaging==16.8
pyasn1==0.1.9
pyasn1-modules==0.0.5
pyOpenSSL==18.0.0
pycurl==7.43.0.2
pycparser==2.17
pymongo==3.6.1
pynsive==0.2.6
pyparsing==2.1.10
python-dateutil==2.6.1
pytz==2017.3
requests-jwt==0.5.3
requests==2.20.0
requests-futures==0.9.7
requests==2.22.0
rsa==3.1.4
setuptools-scm==1.11.1
simplejson==3.13.2
six==1.11.0
slackclient==1.0.9
supervisor==4.0.3
tzlocal==1.4
uritemplate==0.6
urllib3==1.24.3
uwsgi==2.0.17.1
tldextract==2.2.0
websocket-client==0.44.0

View file

@@ -111,7 +111,7 @@ class TestAlertGeoModel(GeoModelTest):
'country': 'US',
'latitude': 37.773972,
'longitude': -122.431297,
'observed': '2017-01-01T05:44:00+00:00',
'observed': _NOW - timedelta(minutes=16),
'geopoint': '37.773972,-122.431297',
},
'destination': {
@@ -120,13 +120,13 @@ class TestAlertGeoModel(GeoModelTest):
'country': 'CA',
'latitude': 43.6529,
'longitude': -79.3849,
'observed': '2017-01-01T06:00:00+00:00',
'observed': _NOW,
'geopoint': '43.6529,-79.3849',
},
}
],
},
'severity': 'INFO',
'severity': 'WARNING',
'tags': ['geomodel'],
}
@@ -276,7 +276,7 @@ class TestOnePreviousLocality(GeoModelTest):
'country': 'CA',
'latitude': 43.6529,
'longitude': -79.3849,
'observed': '2017-01-01T05:55:00+00:00',
'observed': _NOW - timedelta(minutes=5),
'geopoint': '43.6529,-79.3849',
},
'destination': {
@@ -285,13 +285,13 @@ class TestOnePreviousLocality(GeoModelTest):
'country': 'US',
'latitude': 37.773972,
'longitude': -122.431297,
'observed': '2017-01-01T06:00:00+00:00',
'observed': _NOW,
'geopoint': '37.773972,-122.431297',
},
}
],
},
'severity': 'INFO',
'severity': 'WARNING',
'tags': ['geomodel'],
}
@@ -357,7 +357,7 @@ class TestInitialLocalityPositiveAlert(GeoModelTest):
'country': 'US',
'latitude': 37.773972,
'longitude': -122.431297,
'observed': '2017-01-01T05:57:00+00:00',
'observed': _NOW - timedelta(minutes=3),
'geopoint': '37.773972,-122.431297',
},
'destination': {
@@ -366,13 +366,13 @@ class TestInitialLocalityPositiveAlert(GeoModelTest):
'country': 'CA',
'latitude': 43.6529,
'longitude': -79.3849,
'observed': '2017-01-01T06:00:00+00:00',
'observed': _NOW,
'geopoint': '43.6529,-79.3849',
},
}
],
},
'severity': 'INFO',
'severity': 'WARNING',
'tags': ['geomodel'],
}
@@ -454,7 +454,7 @@ class TestSameCitiesOutsideRange(GeoModelTest):
'category': 'geomodel',
'summary': 'tester1 seen in Sherbrooke,CA then Sherbrooke,CA',
'details': {'username': 'tester1'},
'severity': 'INFO',
'severity': 'WARNING',
'tags': ['geomodel'],
}
@@ -518,7 +518,7 @@ class TestMultipleEventsInWindow(GeoModelTest):
'country': 'CA',
'latitude': 43.6529,
'longitude': -79.3849,
'observed': '2017-01-01T05:56:00+00:00',
'observed': _NOW - timedelta(minutes=4),
'geopoint': '43.6529,-79.3849',
},
'destination': {
@@ -527,13 +527,13 @@ class TestMultipleEventsInWindow(GeoModelTest):
'country': 'US',
'latitude': 37.773972,
'longitude': -122.431297,
'observed': '2017-01-01T05:59:00+00:00',
'observed': _NOW - timedelta(minutes=1),
'geopoint': '37.773972,-122.431297',
},
}
],
},
'severity': 'INFO',
'severity': 'WARNING',
'tags': ['geomodel'],
}
@@ -660,7 +660,7 @@ class TestSameCitiesFarAway(GeoModelTest):
'country': 'US',
'latitude': 43.6614,
'longitude': -70.2553,
'observed': '2017-01-01T05:57:00+00:00',
'observed': _NOW - timedelta(minutes=3),
'geopoint': '43.6614,-70.2553',
},
'destination': {
@@ -669,13 +669,13 @@ class TestSameCitiesFarAway(GeoModelTest):
'country': 'US',
'latitude': 45.5234,
'longitude': -122.6762,
'observed': '2017-01-01T06:00:00+00:00',
'observed': _NOW,
'geopoint': '45.5234,-122.6762',
},
}
],
},
'severity': 'INFO',
'severity': 'WARNING',
'tags': ['geomodel'],
}
@@ -787,7 +787,7 @@ class TestMultipleImpossibleJourneys(GeoModelTest):
'country': 'US',
'latitude': 45.5234,
'longitude': -122.6762,
'observed': '2017-01-01T05:55:00+00:00',
'observed': _NOW - timedelta(minutes=5),
'geopoint': '45.5234,-122.6762',
},
'destination': {
@@ -796,7 +796,7 @@ class TestMultipleImpossibleJourneys(GeoModelTest):
'country': 'CA',
'latitude': 43.6529,
'longitude': -79.3843,
'observed': '2017-01-01T05:58:00+00:00',
'observed': _NOW - timedelta(minutes=2),
'geopoint': '43.6529,-79.3843',
},
},
@@ -807,7 +807,7 @@ class TestMultipleImpossibleJourneys(GeoModelTest):
'country': 'CA',
'latitude': 43.6529,
'longitude': -79.3843,
'observed': '2017-01-01T05:58:00+00:00',
'observed': _NOW - timedelta(minutes=2),
'geopoint': '43.6529,-79.3843',
},
'destination': {
@@ -816,7 +816,7 @@ class TestMultipleImpossibleJourneys(GeoModelTest):
'country': 'RU',
'latitude': 59.9343,
'longitude': 30.3351,
'observed': '2017-01-01T06:00:00+00:00',
'observed': _NOW,
'geopoint': '59.9343,30.3351',
},
},

View file

@@ -8,8 +8,8 @@ from .negative_alert_test_case import NegativeAlertTestCase
from .alert_test_suite import AlertTestSuite
class TestAlertLdapPasswordSpray(AlertTestSuite):
alert_filename = "ldap_password_spray"
class TestAlertLdapBruteforceGlobal(AlertTestSuite):
alert_filename = "ldap_bruteforce_global"
# This event is the default positive event that will cause the
# alert to trigger
default_event = {
@@ -26,6 +26,7 @@ class TestAlertLdapPasswordSpray(AlertTestSuite):
]
}
],
"server": "ldap.example.com",
"response": {
"error": 'LDAP_INVALID_CREDENTIALS',
}
@@ -35,17 +36,17 @@ class TestAlertLdapPasswordSpray(AlertTestSuite):
# This alert is the expected result from running this task
default_alert = {
"category": "ldap",
"category": "bruteforce",
"tags": ["ldap"],
"severity": "WARNING",
"summary": "LDAP Password Spray Attack in Progress from 1.2.3.4 targeting the following account(s): jsmith@example.com",
"summary": "Global LDAP Bruteforce Attack in Progress from 1.2.3.4 targeting the following account(s): jsmith@example.com",
}
# This alert is the expected result from this task against multiple matching events
default_alert_aggregated = AlertTestSuite.copy(default_alert)
default_alert_aggregated[
"summary"
] = "LDAP Password Spray Attack in Progress from 1.2.3.4 targeting the following account(s): jsmith@example.com"
] = "Global LDAP Bruteforce Attack in Progress from 1.2.3.4 targeting the following account(s): jsmith@example.com"
test_cases = []
@@ -74,6 +75,15 @@ class TestAlertLdapPasswordSpray(AlertTestSuite):
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event["_source"]["details"]["server"] = "foo.example.com"
test_cases.append(
NegativeAlertTestCase(
description="Negative test with default negative event", events=events
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event["_source"]["category"] = "bad"

View file

@@ -8,8 +8,8 @@ from .negative_alert_test_case import NegativeAlertTestCase
from .alert_test_suite import AlertTestSuite
class TestAlertLdapBruteforce(AlertTestSuite):
alert_filename = "ldap_bruteforce"
class TestAlertLdapBruteforceUser(AlertTestSuite):
alert_filename = "ldap_bruteforce_user"
# This event is the default positive event that will cause the
# alert to trigger
default_event = {
@@ -37,7 +37,7 @@ class TestAlertLdapBruteforce(AlertTestSuite):
# This alert is the expected result from running this task
default_alert = {
"category": "ldap",
"category": "bruteforce",
"tags": ["ldap"],
"severity": "WARNING",
"summary": "LDAP Bruteforce Attack in Progress against user (jsmith@example.com) from the following source ip(s): 1.2.3.4",

View file

@@ -64,6 +64,15 @@ class TestAlertProxyDropIP(AlertTestSuite):
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event["_source"]["details"]["host"] = "169.254.169.254"
test_cases.append(
NegativeAlertTestCase(
description="Negative test with default negative event", events=events
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event["_source"]["details"]["host"] = "1.idonotexist.com"

View file

@@ -779,6 +779,8 @@ class TestBroFixup(object):
self.verify_metadata(metadata)
assert toUTC(MESSAGE['ts']).isoformat() == result['utctimestamp']
assert toUTC(MESSAGE['ts']).isoformat() == result['timestamp']
assert 'tls' not in result['details']
assert result['details']['tls_encrypted'] == 'false'
assert result['summary'] == 'SMTP: 63.245.214.155 -> 128.199.139.6:25'
def test_smtp_log2(self):
@@ -818,6 +820,8 @@ class TestBroFixup(object):
assert 'from' not in result['details']
assert 'to' not in result['details']
assert 'msg_id' not in result['details']
assert 'tls' not in result['details']
assert result['details']['tls_encrypted'] == 'false'
assert result['summary'] == 'SMTP: 63.245.214.155 -> 128.199.139.6:25'
def test_smtp_unicode(self):
@@ -1850,10 +1854,10 @@ class TestBroFixup(object):
self.verify_metadata(metadata)
assert toUTC(MESSAGE['ts']).isoformat() == result['utctimestamp']
assert toUTC(MESSAGE['ts']).isoformat() == result['timestamp']
assert 'success' not in result['details']
for key in MESSAGE.keys():
if not key.startswith('id.'):
assert key in result['details']
assert MESSAGE[key] == result['details'][key]
assert result['summary'] == '10.26.40.121 -> 10.22.69.21:88 request TGS success unknown'
def test_kerberos_log2(self):
@@ -1872,7 +1876,7 @@ class TestBroFixup(object):
"request_type":"AS",
"client":"valid_client_principal/VLADG.NET",
"service":"krbtgt/VLADG.NET",
"success":'true',
"success":'True',
"till":1421708111.0,
"cipher":"aes256-cts-hmac-sha1-96",
"forwardable":'false',
@@ -1885,11 +1889,12 @@ class TestBroFixup(object):
self.verify_metadata(metadata)
assert toUTC(MESSAGE['ts']).isoformat() == result['utctimestamp']
assert toUTC(MESSAGE['ts']).isoformat() == result['timestamp']
assert MESSAGE['success'] == result['details']['success']
for key in MESSAGE.keys():
if not key.startswith('id.'):
assert key in result['details']
assert MESSAGE[key] == result['details'][key]
assert result['summary'] == '192.168.1.31 -> 192.168.1.32:88 request AS success true'
assert result['summary'] == '192.168.1.31 -> 192.168.1.32:88 request AS success True'
def test_kerberos_log3(self):
event = {
@@ -1907,7 +1912,7 @@ class TestBroFixup(object):
"request_type":"TGS",
"client":"valid_client_principal/VLADG.NET",
"service":"krbtgt/VLADG.NET",
"success":'false',
"success":'False',
"error_msg":"TICKET NOT RENEWABLE",
"till":1421708111.0,
"forwardable":'false',
@@ -1920,11 +1925,12 @@ class TestBroFixup(object):
self.verify_metadata(metadata)
assert toUTC(MESSAGE['ts']).isoformat() == result['utctimestamp']
assert toUTC(MESSAGE['ts']).isoformat() == result['timestamp']
assert MESSAGE['success'] == result['details']['success']
for key in MESSAGE.keys():
if not key.startswith('id.'):
assert key in result['details']
assert MESSAGE[key] == result['details'][key]
assert result['summary'] == '192.168.1.31 -> 192.168.1.32:88 request TGS success false'
assert result['summary'] == '192.168.1.31 -> 192.168.1.32:88 request TGS success False'
def test_ntlm_log(self):
event = {
@@ -1942,7 +1948,7 @@ class TestBroFixup(object):
"username":"T-W864-IX-018$",
"hostname":"T-W864-IX-018",
"domainname":"RELENG",
"success":'true',
"success":'True',
"status":"SUCCESS",
}
event['MESSAGE'] = json.dumps(MESSAGE)
@@ -1957,7 +1963,7 @@ class TestBroFixup(object):
assert MESSAGE['domainname'] == result['details']['ntlm']['domainname']
assert MESSAGE['success'] == result['details']['success']
assert MESSAGE['status'] == result['details']['status']
assert result['summary'] == 'NTLM: 10.26.40.48 -> 10.22.69.18:445 success true status SUCCESS'
assert result['summary'] == 'NTLM: 10.26.40.48 -> 10.22.69.18:445 success True status SUCCESS'
def test_ntlm_log2(self):
event = {
@@ -1983,7 +1989,7 @@ class TestBroFixup(object):
assert 'username' in result['details']['ntlm']
assert 'hostname' in result['details']['ntlm']
assert 'domainname' in result['details']['ntlm']
assert 'success' in result['details']
assert 'success' not in result['details']
assert 'status' in result['details']
assert result['summary'] == 'NTLM: 10.26.40.48 -> 10.22.69.18:445 success unknown status unknown'

View file

@@ -0,0 +1,36 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from mq.plugins.ldap_fixup import message
class TestLdapFixupPlugin():
def setup(self):
self.plugin = message()
def test_ldap_fixup_plugin(self):
msg = {
'summary': 'LDAP-Humanizer:45582:1.1.1.1',
'hostname': 'random.host.com',
'category': 'ldap',
'details': {
'tls': 'true',
'authenticated': 'true',
}
}
(retmessage, retmeta) = self.plugin.onMessage(msg, {})
expected_message = {
'summary': 'LDAP-Humanizer:45582:1.1.1.1',
'hostname': 'random.host.com',
'category': 'ldap',
'source': 'ldap',
'details': {
'tls_encrypted': 'true',
'authenticated': 'true',
}
}
assert retmessage == expected_message
assert retmeta == {}

View file

@@ -12,7 +12,9 @@ class TestZoomFixupPlugin():
def test_topic_removal(self):
msg = {
'summary': 'zoom_event',
'source': 'api_aws_lambda',
'hostname': 'zoom_host',
'details': {
'event': 'meeting.ended',
'payload': {
@@ -29,7 +31,9 @@ class TestZoomFixupPlugin():
(retmessage, retmeta) = self.plugin.onMessage(msg, {})
expected_message = {
'summary': 'zoom: meeting.ended',
'source': 'api_aws_lambda',
'hostname': 'zoom_host',
'details': {
'event': 'meeting.ended',
'payload': {
@@ -44,3 +48,131 @@ class TestZoomFixupPlugin():
}
assert retmessage == expected_message
assert retmeta == {}
def test_summary_user_name(self):
msg = {
'summary': 'zoom_event',
'source': 'api_aws_lambda',
'hostname': 'zoom_host',
'details': {
'event': 'meeting.sharing_ended',
'payload': {
'object': {
'topic': 'zoomroom',
'account_id': 'ABCDEFG123456',
'id': '123456789',
'type': '4',
'uuid': 'aodij/OWIE9241048=',
"participant": {
'user_id': '12039103',
'user_name': 'Random User',
}
}
}
}
}
(retmessage, retmeta) = self.plugin.onMessage(msg, {})
expected_message = {
'summary': 'zoom: meeting.sharing_ended triggered by user Random User',
'source': 'api_aws_lambda',
'hostname': 'zoom_host',
'details': {
'event': 'meeting.sharing_ended',
'payload': {
'object': {
'account_id': 'ABCDEFG123456',
'id': '123456789',
'type': '4',
'uuid': 'aodij/OWIE9241048=',
"participant": {
'user_id': '12039103',
'user_name': 'Random User',
}
}
}
}
}
assert retmessage == expected_message
assert retmeta == {}
def test_summary_operator(self):
msg = {
'summary': 'zoom_event',
'source': 'api_aws_lambda',
'hostname': 'zoom_host',
'details': {
'event': 'meeting.created',
'payload': {
'account_id': 'ABCDEFG123456',
'operator': 'randomuser@randomco.com',
'operator_id': '12o3i-294jo24jad',
'object': {
'id': '123456789',
'type': '2',
'uuid': 'aodij/OWIE9241048=',
}
}
}
}
(retmessage, retmeta) = self.plugin.onMessage(msg, {})
expected_message = {
'summary': 'zoom: meeting.created triggered by user randomuser@randomco.com',
'source': 'api_aws_lambda',
'hostname': 'zoom_host',
'details': {
'event': 'meeting.created',
'payload': {
'account_id': 'ABCDEFG123456',
'operator': 'randomuser@randomco.com',
'operator_id': '12o3i-294jo24jad',
'object': {
'id': '123456789',
'type': '2',
'uuid': 'aodij/OWIE9241048=',
}
}
}
}
assert retmessage == expected_message
assert retmeta == {}
def test_remove_duplicate_account_id(self):
msg = {
'summary': 'zoom_event',
'source': 'api_aws_lambda',
'hostname': 'zoom_host',
'details': {
'event': 'meeting.created',
'payload': {
'account_id': 'ABCDEFG123456',
'object': {
'account_id': 'ABCDEFG123456',
'id': '123456789',
'type': '2',
'uuid': 'aodij/OWIE9241048='
}
}
}
}
(retmessage, retmeta) = self.plugin.onMessage(msg, {})
expected_message = {
'summary': 'zoom: meeting.created',
'source': 'api_aws_lambda',
'hostname': 'zoom_host',
'details': {
'event': 'meeting.created',
'payload': {
'account_id': 'ABCDEFG123456',
'object': {
'id': '123456789',
'type': '2',
'uuid': 'aodij/OWIE9241048='
}
}
}
}
assert retmessage == expected_message
assert retmeta == {}