More updates to alerts on anomalies from proxy logs

This commit is contained in:
Michal Purzynski 2019-02-21 18:03:10 -08:00
Parent 6bfb92e79a
Commit 7ca51bbb75
5 changed files with 122 additions and 160 deletions

View file

@ -15,47 +15,46 @@ class AlertProxyDropIP(AlertTask):
def main(self):
search_query = SearchQuery(minutes=20)
search_query.add_must([
TermMatch('category', 'squid'),
TermMatch('tags', 'squid'),
TermMatch('details.proxyaction', 'TCP_DENIED/-')
])
search_query.add_must(
[
TermMatch("category", "proxy"),
TermMatch("details.proxyaction", "TCP_DENIED"),
]
)
# Match on 1.1.1.1, http://1.1.1.1, or https://1.1.1.1
# This will over-match on short 3-char domains like foo.bar.baz.com, but will get weeded out below
ip_regex = '/.*\..{1,3}\..{1,3}\..{1,3}(:.*|\/.*)/'
search_query.add_must([
QueryStringMatch('details.destination: {}'.format(ip_regex))
])
ip_regex = "/.*\..{1,3}\..{1,3}\..{1,3}(:.*|\/.*)/"
search_query.add_must(
[QueryStringMatch("details.destination: {}".format(ip_regex))]
)
self.filtersManual(search_query)
self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
self.walkAggregations(threshold=1)
def onAggregation(self, aggreg):
    """Build the alert for one source IP's aggregated proxy DROP events.

    Returns None when every hit was an over-match (no true IP-based
    destination), otherwise the dict produced by createAlertDict().
    """
    # Set alert properties
    category = "squid"
    tags = ["squid", "proxy"]
    severity = "WARNING"

    # Lucene search has a slight potential for overmatches, so double-check
    # with this pattern to ensure it's truly an IP before we add the
    # destination to our dropped list.
    pattern = r"^(http:\/\/|https:\/\/|)\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
    dropped_destinations = set()

    for event in aggreg["allevents"]:
        if re.search(pattern, event["_source"]["details"]["destination"]):
            dropped_destinations.add(event["_source"]["details"]["destination"])

    # If it's all over-matches, don't throw the alert
    if len(dropped_destinations) == 0:
        return None

    summary = "Suspicious Proxy DROP event(s) detected from {0} to the following IP-based destination(s): {1}".format(
        aggreg["value"], ",".join(sorted(dropped_destinations))
    )

    return self.createAlertDict(summary, category, tags, aggreg["events"], severity)

View file

@ -12,47 +12,41 @@ from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
class AlertProxyDropNonStandardPort(AlertTask):
    """Alert on proxy CONNECT requests denied on non-standard ports.

    Ports listed in proxy_drop_non_standard_port.conf ("excludedports")
    are considered standard and are filtered out of the search.
    """

    def main(self):
        self.parse_config("proxy_drop_non_standard_port.conf", ["excludedports"])

        search_query = SearchQuery(minutes=20)
        search_query.add_must(
            [
                TermMatch("category", "proxy"),
                TermMatch("details.proxyaction", "TCP_DENIED"),
                TermMatch("details.method", "CONNECT"),
            ]
        )

        # Only notify on ports that are NOT in the excluded list from config.
        for port in self.config.excludedports.split(","):
            search_query.add_must_not([TermMatch("details.destinationport", port)])

        self.filtersManual(search_query)

        # Aggregate on the client address, keeping at most 10 sample events.
        self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
        # Alert every time there is at least one matching event.
        self.walkAggregations(threshold=1)

    def onAggregation(self, aggreg):
        """Create one alert per source IP listing all denied destinations."""
        category = "squid"
        tags = ["squid", "proxy"]
        severity = "WARNING"

        destinations = set()
        for event in aggreg["allevents"]:
            destinations.add(event["_source"]["details"]["destination"])

        summary = "Suspicious Proxy DROP event(s) detected from {0} to the following non-std port destination(s): {1}".format(
            aggreg["value"], ",".join(sorted(destinations))
        )

        # Create the alert object based on these properties
        return self.createAlertDict(summary, category, tags, aggreg["events"], severity)

View file

@ -6,34 +6,26 @@
# Copyright (c) 2014 Mozilla Corporation
from urlparse import urlparse
from lib.alerttask import AlertTask
from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch
from mozdef_util.query_models import SearchQuery, TermMatch
class AlertProxyExfilDomains(AlertTask):
def main(self):
    """Search proxy logs for traffic to configured exfiltration domains.

    Domains come from proxy_exfil_domains.conf ("exfil_domains");
    matching events are aggregated per source IP for onAggregation().
    """
    self.parse_config("proxy_exfil_domains.conf", ["exfil_domains"])

    search_query = SearchQuery(minutes=20)
    search_query.add_must([TermMatch("category", "proxy")])

    # Only notify on the domains listed in the config.
    for domain in self.config.exfil_domains.split(","):
        search_query.add_should([TermMatch("details.host", domain)])

    self.filtersManual(search_query)

    # Aggregate on the client address, keeping at most 10 sample events.
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    # Alert every time there is at least one matching event.
    self.walkAggregations(threshold=1)
@ -43,25 +35,18 @@ class AlertProxyExfilDomains(AlertTask):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'squid'
tags = ['squid', 'proxy']
severity = 'WARNING'
category = "squid"
tags = ["squid", "proxy"]
severity = "WARNING"
exfil_domains = set()
for event in aggreg['allevents']:
try:
domain = urlparse(event['_source']['details']['destination']).netloc
except Exception:
# We already have a domain, not a URL
target = event['_source']['details']['destination'].split(':')
domain = target[0]
for event in aggreg["allevents"]:
domain = event["_source"]["details"]["host"]
exfil_domains.add(domain)
summary = 'Suspicious Proxy event(s) detected from {0} to the following exfil domain(s): {1}'.format(
aggreg['value'],
",".join(sorted(exfil_domains))
summary = "Suspicious Proxy event(s) detected from {0} to the following exfil domain(s): {1}".format(
aggreg["value"], ",".join(sorted(exfil_domains))
)
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
return self.createAlertDict(summary, category, tags, aggreg["events"], severity)

View file

@ -17,47 +17,33 @@ class TestAlertProxyDropNonStandardPort(AlertTestSuite):
# Fixtures for TestAlertProxyDropNonStandardPort (class header outside this hunk).
# This event is the default positive event that will cause the alert to trigger.
default_event = {
    "_type": "event",
    "_source": {
        "category": "proxy",
        "details": {
            "sourceipaddress": "1.2.3.4",
            "destination": "evil.com:1337",
            "destinationport": "1337",
            "proxyaction": "TCP_DENIED",
            "tcpaction": "CONNECT",
        },
    },
}

# A second denied destination used to exercise aggregation across ports.
default_event2 = AlertTestSuite.copy(default_event)
default_event2["_source"]["details"]["destination"] = "evil.com:6667"
default_event2["_source"]["details"]["destinationport"] = "6667"

# This alert is the expected result from running this task
default_alert = {
    "category": "squid",
    "tags": ["squid", "proxy"],
    "severity": "WARNING",
    "summary": "Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following non-std port destination(s): evil.com:1337",
}

# Expected alert when both destinations aggregate into one summary.
default_alert_aggregated = AlertTestSuite.copy(default_alert)
default_alert_aggregated[
    "summary"
] = "Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following non-std port destination(s): evil.com:1337,evil.com:6667"

test_cases = []
# Test cases for TestAlertProxyDropNonStandardPort (events1/events2 are built
# in context lines outside this hunk).
test_cases.append(
    PositiveAlertTestCase(
        description="Positive test with default events and default alert expected",
        events=AlertTestSuite.create_events(default_event, 1),
        expected_alert=default_alert,
    )
)

test_cases.append(
    PositiveAlertTestCase(
        description="Positive test with default events and default alert expected",
        events=events1 + events2,
        expected_alert=default_alert_aggregated,
    )
)

# Events on a whitelisted (excluded) port must not alert.
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
    event["_source"]["details"]["destinationport"] = "443"
test_cases.append(
    NegativeAlertTestCase(
        description="Negative test case with events with whitelisted port",
        events=events,
    )
)

# Events with the wrong category must not alert.
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
    event["_source"]["category"] = "bad"
test_cases.append(
    NegativeAlertTestCase(
        description="Negative test case with events with incorrect category",
        events=events,
    )
)

# Events older than the 20-minute search window must not alert.
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
    event["_source"][
        "utctimestamp"
    ] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
    event["_source"][
        "receivedtimestamp"
    ] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
test_cases.append(
    NegativeAlertTestCase(
        description="Negative test case with old timestamp", events=events
    )
)

View file

@ -16,36 +16,40 @@ class TestProxyExfilDomains(AlertTestSuite):
# Fixtures for TestProxyExfilDomains (class header outside this hunk).
# This event is the default positive event that will cause the alert to trigger.
default_event = {
    "_type": "event",
    "_source": {
        "category": "proxy",
        "details": {"sourceipaddress": "1.2.3.4", "host": "pastebin.com"},
    },
}

# This event is an alternate destination that we'd want to aggregate
default_event2 = AlertTestSuite.copy(default_event)
default_event2["_source"]["details"]["host"] = "www.sendspace.com"

# This event is the default negative event that will not cause the
# alert to trigger
default_negative_event = AlertTestSuite.copy(default_event)
default_negative_event["_source"]["details"]["host"] = "foo.mozilla.com"

# This alert is the expected result from running this task
default_alert = {
    "category": "squid",
    "tags": ["squid", "proxy"],
    "severity": "WARNING",
    "summary": "Suspicious Proxy event(s) detected from 1.2.3.4 to the following exfil domain(s): pastebin.com",
}

# Expected alert when only the alternate destination matches.
default_alert2 = {
    "category": "squid",
    "tags": ["squid", "proxy"],
    "severity": "WARNING",
    "summary": "Suspicious Proxy event(s) detected from 1.2.3.4 to the following exfil domain(s): www.sendspace.com",
}

# This alert is the expected result from this task against multiple matching events
default_alert_aggregated = AlertTestSuite.copy(default_alert)
default_alert_aggregated[
    "summary"
] = "Suspicious Proxy event(s) detected from 1.2.3.4 to the following exfil domain(s): pastebin.com,www.sendspace.com"

test_cases = []
# Test cases for TestProxyExfilDomains (events1/events2 are built in context
# lines outside this hunk).
test_cases.append(
    PositiveAlertTestCase(
        description="Positive test with default events and default alert expected",
        events=AlertTestSuite.create_events(default_event, 1),
        expected_alert=default_alert,
    )
)

test_cases.append(
    PositiveAlertTestCase(
        description="Positive test with default events and default alert expected - dedup",
        events=AlertTestSuite.create_events(default_event, 2),
        expected_alert=default_alert,
    )
)

test_cases.append(
    PositiveAlertTestCase(
        description="Positive test with default events and default alert expected - dedup",
        events=AlertTestSuite.create_events(default_event2, 2),
        expected_alert=default_alert2,
    )
)

test_cases.append(
    PositiveAlertTestCase(
        description="Positive test with default events and default alert expected - different destinations",
        events=events1 + events2,
        expected_alert=default_alert_aggregated,
    )
)

# Events with the wrong category must not alert.
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
    event["_source"]["category"] = "bad"
test_cases.append(
    NegativeAlertTestCase(
        description="Negative test case with events with incorrect category",
        events=events,
    )
)

# Events older than the 20-minute search window must not alert.
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
    event["_source"][
        "utctimestamp"
    ] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
    event["_source"][
        "receivedtimestamp"
    ] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
test_cases.append(
    NegativeAlertTestCase(
        description="Negative test case with old timestamp", events=events
    )
)