More updates to alerts on anomalies from proxy logs

This commit is contained in:
Michal Purzynski 2019-02-21 18:03:10 -08:00
Parent 6bfb92e79a
Commit 7ca51bbb75
5 changed files: 122 additions and 160 deletions

Просмотреть файл

@ -15,47 +15,46 @@ class AlertProxyDropIP(AlertTask):
def main(self):
    """Query the last 20 minutes of proxy logs for TCP_DENIED events
    whose destination looks like a bare IP address (optionally behind
    http:// or https://)."""
    search_query = SearchQuery(minutes=20)
    search_query.add_must(
        [
            TermMatch("category", "proxy"),
            TermMatch("details.proxyaction", "TCP_DENIED"),
        ]
    )
    # Match on 1.1.1.1, http://1.1.1.1, or https://1.1.1.1
    # This will over-match on short 3-char domains like foo.bar.baz.com,
    # but those get weeded out in onAggregation.
    # Raw string: avoids the invalid-escape DeprecationWarning for \. and \/
    # (the runtime pattern value is unchanged).
    ip_regex = r"/.*\..{1,3}\..{1,3}\..{1,3}(:.*|\/.*)/"
    search_query.add_must(
        [QueryStringMatch("details.destination: {}".format(ip_regex))]
    )
    self.filtersManual(search_query)
    # Aggregate per source IP, keeping at most 10 sample events each.
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    # Alert on every aggregation (threshold of one matching event).
    self.walkAggregations(threshold=1)
# Set alert properties # Set alert properties
def onAggregation(self, aggreg):
    """Build the alert for one aggregated source IP.

    The Lucene search has a slight potential for over-matches, so every
    destination is double-checked here against a strict IP pattern before
    being included; returns None when only over-matches were found.
    """
    ip_pattern = re.compile(
        r"^(http:\/\/|https:\/\/|)\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
    )
    dropped = {
        ev["_source"]["details"]["destination"]
        for ev in aggreg["allevents"]
        if ip_pattern.search(ev["_source"]["details"]["destination"])
    }
    # If it's all over-matches, don't throw the alert
    if not dropped:
        return None
    summary = (
        "Suspicious Proxy DROP event(s) detected from {0} to the following "
        "IP-based destination(s): {1}"
    ).format(aggreg["value"], ",".join(sorted(dropped)))
    return self.createAlertDict(
        summary, "squid", ["squid", "proxy"], aggreg["events"], "WARNING"
    )

Просмотреть файл

@ -12,47 +12,41 @@ from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
class AlertProxyDropNonStandardPort(AlertTask): class AlertProxyDropNonStandardPort(AlertTask):
def main(self):
    """Query proxy logs for denied CONNECT requests to non-standard ports.

    Ports listed under 'excludedports' in the config are whitelisted and
    never alerted on.
    """
    self.parse_config("proxy_drop_non_standard_port.conf", ["excludedports"])
    query = SearchQuery(minutes=20)
    query.add_must(
        [
            TermMatch("category", "proxy"),
            TermMatch("details.proxyaction", "TCP_DENIED"),
            TermMatch("details.method", "CONNECT"),
        ]
    )
    # Exclude every whitelisted port from the comma-separated config value.
    for excluded in self.config.excludedports.split(","):
        query.add_must_not([TermMatch("details.destinationport", excluded)])
    self.filtersManual(query)
    # Aggregate per source IP, keeping at most 10 sample events each.
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    # Alert on every aggregation — it makes sense to alert every time here.
    self.walkAggregations(threshold=1)
def onAggregation(self, aggreg):
    """Emit an alert summarizing the denied non-standard-port destinations
    seen from one aggregated source IP."""
    blocked = sorted(
        {ev["_source"]["details"]["destination"] for ev in aggreg["allevents"]}
    )
    summary = (
        "Suspicious Proxy DROP event(s) detected from {0} to the following "
        "non-std port destination(s): {1}"
    ).format(aggreg["value"], ",".join(blocked))
    # Create the alert object based on these properties
    return self.createAlertDict(
        summary, "squid", ["squid", "proxy"], aggreg["events"], "WARNING"
    )

Просмотреть файл

@ -6,34 +6,26 @@
# Copyright (c) 2014 Mozilla Corporation # Copyright (c) 2014 Mozilla Corporation
from urlparse import urlparse
from lib.alerttask import AlertTask from lib.alerttask import AlertTask
from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch from mozdef_util.query_models import SearchQuery, TermMatch
class AlertProxyExfilDomains(AlertTask): class AlertProxyExfilDomains(AlertTask):
def main(self):
    """Query the last 20 minutes of proxy logs for traffic to any of the
    configured exfiltration domains."""
    self.parse_config("proxy_exfil_domains.conf", ["exfil_domains"])
    query = SearchQuery(minutes=20)
    query.add_must([TermMatch("category", "proxy")])
    # Match any of the configured exfil domains (comma-separated list).
    for exfil_domain in self.config.exfil_domains.split(","):
        query.add_should([TermMatch("details.host", exfil_domain)])
    self.filtersManual(query)
    # Aggregate per source IP, keeping at most 10 sample events each.
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    # Alert on every aggregation — it makes sense to alert every time here.
    self.walkAggregations(threshold=1)
@ -43,25 +35,18 @@ class AlertProxyExfilDomains(AlertTask):
def onAggregation(self, aggreg):
    """Emit an alert for one source IP's hits against exfil domains.

    aggreg['value'] is the aggregated source IP, aggreg['allevents'] holds
    the raw matching events, aggreg['events'] the sampled ones.
    """
    observed = {ev["_source"]["details"]["host"] for ev in aggreg["allevents"]}
    summary = (
        "Suspicious Proxy event(s) detected from {0} to the following "
        "exfil domain(s): {1}"
    ).format(aggreg["value"], ",".join(sorted(observed)))
    # Create the alert object based on these properties
    return self.createAlertDict(
        summary, "squid", ["squid", "proxy"], aggreg["events"], "WARNING"
    )

Просмотреть файл

@ -17,47 +17,33 @@ class TestAlertProxyDropNonStandardPort(AlertTestSuite):
# Baseline event matching the alert's query: a denied CONNECT through the
# proxy to a non-standard destination port.
default_event = {
    "_type": "event",
    "_source": {
        "category": "proxy",
        "details": {
            "sourceipaddress": "1.2.3.4",
            "destination": "evil.com:1337",
            "destinationport": "1337",
            "proxyaction": "TCP_DENIED",
            "tcpaction": "CONNECT",
        },
    },
}

# Second destination/port pair from the same source, used to exercise
# aggregation across multiple destinations.
default_event2 = AlertTestSuite.copy(default_event)
default_event2["_source"]["details"]["destination"] = "evil.com:6667"
default_event2["_source"]["details"]["destinationport"] = "6667"

# This alert is the expected result from running this task
default_alert = {
    "category": "squid",
    "tags": ["squid", "proxy"],
    "severity": "WARNING",
    "summary": "Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following non-std port destination(s): evil.com:1337",
}

# Expected alert when both destinations aggregate into a single summary.
default_alert_aggregated = AlertTestSuite.copy(default_alert)
default_alert_aggregated[
    "summary"
] = "Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following non-std port destination(s): evil.com:1337,evil.com:6667"

test_cases = []
@ -65,7 +51,7 @@ class TestAlertProxyDropNonStandardPort(AlertTestSuite):
PositiveAlertTestCase( PositiveAlertTestCase(
description="Positive test with default events and default alert expected", description="Positive test with default events and default alert expected",
events=AlertTestSuite.create_events(default_event, 1), events=AlertTestSuite.create_events(default_event, 1),
expected_alert=default_alert expected_alert=default_alert,
) )
) )
@ -75,20 +61,23 @@ class TestAlertProxyDropNonStandardPort(AlertTestSuite):
PositiveAlertTestCase( PositiveAlertTestCase(
description="Positive test with default events and default alert expected", description="Positive test with default events and default alert expected",
events=events1 + events2, events=events1 + events2,
expected_alert=default_alert_aggregated expected_alert=default_alert_aggregated,
)
)
test_cases.append(
NegativeAlertTestCase(
description="Positive test with default events and default alert expected",
events=AlertTestSuite.create_events(default_negative_event, 1),
) )
) )
events = AlertTestSuite.create_events(default_event, 10) events = AlertTestSuite.create_events(default_event, 10)
for event in events: for event in events:
event['_source']['category'] = 'bad' event["_source"]["details"]["destinationport"] = "443"
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with events with whitelisted port",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event["_source"]["category"] = "bad"
test_cases.append( test_cases.append(
NegativeAlertTestCase( NegativeAlertTestCase(
description="Negative test case with events with incorrect category", description="Negative test case with events with incorrect category",
@ -98,23 +87,14 @@ class TestAlertProxyDropNonStandardPort(AlertTestSuite):
events = AlertTestSuite.create_events(default_event, 10) events = AlertTestSuite.create_events(default_event, 10)
for event in events: for event in events:
event['_source']['tags'] = 'bad tag example' event["_source"][
"utctimestamp"
] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
event["_source"][
"receivedtimestamp"
] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
test_cases.append( test_cases.append(
NegativeAlertTestCase( NegativeAlertTestCase(
description="Negative test case with events with incorrect tags", description="Negative test case with old timestamp", events=events
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({
'minutes': 241})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({
'minutes': 241})
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with old timestamp",
events=events,
) )
) )

Просмотреть файл

@ -16,36 +16,40 @@ class TestProxyExfilDomains(AlertTestSuite):
# Baseline event matching the alert's query: proxy traffic to a known
# exfiltration domain.
default_event = {
    "_type": "event",
    "_source": {
        "category": "proxy",
        "details": {"sourceipaddress": "1.2.3.4", "host": "pastebin.com"},
    },
}

# This event is an alternate destination that we'd want to aggregate
default_event2 = AlertTestSuite.copy(default_event)
default_event2["_source"]["details"]["host"] = "www.sendspace.com"

# This event is the default negative event that will not cause the
# alert to trigger
default_negative_event = AlertTestSuite.copy(default_event)
default_negative_event["_source"]["details"]["host"] = "foo.mozilla.com"

# This alert is the expected result from running this task
default_alert = {
    "category": "squid",
    "tags": ["squid", "proxy"],
    "severity": "WARNING",
    "summary": "Suspicious Proxy event(s) detected from 1.2.3.4 to the following exfil domain(s): pastebin.com",
}

# Expected alert for the alternate destination on its own.
default_alert2 = {
    "category": "squid",
    "tags": ["squid", "proxy"],
    "severity": "WARNING",
    "summary": "Suspicious Proxy event(s) detected from 1.2.3.4 to the following exfil domain(s): www.sendspace.com",
}

# This alert is the expected result from this task against multiple matching events
default_alert_aggregated = AlertTestSuite.copy(default_alert)
default_alert_aggregated[
    "summary"
] = "Suspicious Proxy event(s) detected from 1.2.3.4 to the following exfil domain(s): pastebin.com,www.sendspace.com"

test_cases = []
@ -53,7 +57,7 @@ class TestProxyExfilDomains(AlertTestSuite):
PositiveAlertTestCase( PositiveAlertTestCase(
description="Positive test with default events and default alert expected", description="Positive test with default events and default alert expected",
events=AlertTestSuite.create_events(default_event, 1), events=AlertTestSuite.create_events(default_event, 1),
expected_alert=default_alert expected_alert=default_alert,
) )
) )
@ -61,7 +65,15 @@ class TestProxyExfilDomains(AlertTestSuite):
PositiveAlertTestCase( PositiveAlertTestCase(
description="Positive test with default events and default alert expected - dedup", description="Positive test with default events and default alert expected - dedup",
events=AlertTestSuite.create_events(default_event, 2), events=AlertTestSuite.create_events(default_event, 2),
expected_alert=default_alert expected_alert=default_alert,
)
)
test_cases.append(
PositiveAlertTestCase(
description="Positive test with default events and default alert expected - dedup",
events=AlertTestSuite.create_events(default_event2, 2),
expected_alert=default_alert2,
) )
) )
@ -71,7 +83,7 @@ class TestProxyExfilDomains(AlertTestSuite):
PositiveAlertTestCase( PositiveAlertTestCase(
description="Positive test with default events and default alert expected - different destinations", description="Positive test with default events and default alert expected - different destinations",
events=events1 + events2, events=events1 + events2,
expected_alert=default_alert_aggregated expected_alert=default_alert_aggregated,
) )
) )
@ -84,7 +96,7 @@ class TestProxyExfilDomains(AlertTestSuite):
events = AlertTestSuite.create_events(default_event, 10) events = AlertTestSuite.create_events(default_event, 10)
for event in events: for event in events:
event['_source']['category'] = 'bad' event["_source"]["category"] = "bad"
test_cases.append( test_cases.append(
NegativeAlertTestCase( NegativeAlertTestCase(
description="Negative test case with events with incorrect category", description="Negative test case with events with incorrect category",
@ -94,22 +106,14 @@ class TestProxyExfilDomains(AlertTestSuite):
events = AlertTestSuite.create_events(default_event, 10) events = AlertTestSuite.create_events(default_event, 10)
for event in events: for event in events:
event['_source']['tags'] = 'bad tag example' event["_source"][
"utctimestamp"
] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
event["_source"][
"receivedtimestamp"
] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
test_cases.append( test_cases.append(
NegativeAlertTestCase( NegativeAlertTestCase(
description="Negative test case with events with incorrect tags", description="Negative test case with old timestamp", events=events
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({
'minutes': 241})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({
'minutes': 241})
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with old timestamp",
events=events,
) )
) )