From 1b5a3066fed06e8d7a63f5f7d80f677932a582e8 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Thu, 25 Apr 2019 20:46:46 -0400 Subject: [PATCH 01/88] Started to lay out a new alert plugin for enriching alerts with information about the physical source of IPs --- alerts/plugins/ip_source_enrichment.json.conf | 2 + alerts/plugins/ip_source_enrichment.py | 93 +++++++++++++++++++ 2 files changed, 95 insertions(+) create mode 100644 alerts/plugins/ip_source_enrichment.json.conf create mode 100644 alerts/plugins/ip_source_enrichment.py diff --git a/alerts/plugins/ip_source_enrichment.json.conf b/alerts/plugins/ip_source_enrichment.json.conf new file mode 100644 index 00000000..2c63c085 --- /dev/null +++ b/alerts/plugins/ip_source_enrichment.json.conf @@ -0,0 +1,2 @@ +{ +} diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py new file mode 100644 index 00000000..169b7c74 --- /dev/null +++ b/alerts/plugins/ip_source_enrichment.py @@ -0,0 +1,93 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# Copyright (c) 2014 Mozilla Corporation + + +CONFIG_FILE = './ip_source_enrichment.json.conf' + + +def _load_config(file_path): + '''Private + + Read and parse a file from disk as JSON into a dictionary. + ''' + + return {} + + +class _RegexMatchRule(object): + '''Private + + A rule defining a mapping from a regular expression matching a + set of IP addresses to a format string accepting one parameter. + + Produced by EnrichIPs given a plugin configuration. + ''' + + def __init__(self, regex, fmt): + '''Initialize the rule with a regular expression and + format string. + ''' + + self._ip_regex = regex + self._format_string = fmt + + + def matches(self, input_str): + '''Produces a list of strings each produced by formatting + the configured format string with any IPs found in the + provided input. + ''' + + return [] + + +class _CIDRMatchRule(object): + '''Private + + A rule defining a mapping from a CIDR address to a format string + that accepts one parameter. + + Produced by EnrichIPs given a plugin configuration. + ''' + + def __init__(self, cidr_str, fmt): + '''Initialize the rule with an IPv4 or IPv6 CIDR address + string and format string. + ''' + + self._cidr = cidr_str + self._format_string = fmt + + + def matches(self, input_str): + '''Produces a list of strings each produced by formatting + the configured format string with any IPs found in the + provided input. + ''' + + return [] + + +class EnrichIPs(object): + '''Add information to alerts containing IP addresses that describes + the source location of the IP address if it can be determined based + on a configured mapping. + ''' + + def __init__(self): + '''Initialize the plugin from a configuration in preparation to + match alerts. + ''' + + self.configuration = _load_config(CONFIG_FILE) + + + def enrich(self, alert): + '''Enrich alerts containing IP addresses with information about + the location from which those IPs originate. + Returns a modified alert. 
+ ''' + + return alert From 02def8c2e53d5d1a2de41e6e348bc38bf8f43ff1 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 29 Apr 2019 20:42:20 -0400 Subject: [PATCH 02/88] Vim swp files --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 5928b709..92392c31 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,4 @@ cloudy_mozdef/aws_parameters.json cloudy_mozdef/aws_parameters.sh docs/source/_build docs/source/_static +*.swp From d3ba77a8865a2e67ad596e921d9f28041dc86296 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 29 Apr 2019 20:42:37 -0400 Subject: [PATCH 03/88] Refactoring code to match the preferred interface --- alerts/plugins/ip_source_enrichment.json.conf | 12 ++ alerts/plugins/ip_source_enrichment.py | 126 ++++++++---------- 2 files changed, 70 insertions(+), 68 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.json.conf b/alerts/plugins/ip_source_enrichment.json.conf index 2c63c085..337c0caf 100644 --- a/alerts/plugins/ip_source_enrichment.json.conf +++ b/alerts/plugins/ip_source_enrichment.json.conf @@ -1,2 +1,14 @@ { + "known": [ + { + "ipVersion": 4, + "range": "8.32.0.0/16", + "format": "{1} is in OFFICE1." + }, + { + "ipVersion": 6, + "range": "4a00:7a49:232::/48", + "format": "{1} is in OFFICE2." + } + ] } diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 169b7c74..2be311b5 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -3,10 +3,41 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2014 Mozilla Corporation +import netaddr + CONFIG_FILE = './ip_source_enrichment.json.conf' +def _isIPv4(ip): + try: + return netaddr.valid_ipv4(ip) + except: + return False + + +def _isIPv6(ip): + try: + return netaddr.valid_ipv6(ip) + except: + return False + + +def _find_ip_addresses(string): + '''List all of the IPv4 and IPv6 addresses found in a string.''' + + return [] + + +def enrich(self, alert, known_ips): + '''Add information to alerts containing IP addresses that describes + the source location of the IP address if it can be determined based + on a configured mapping. + ''' + + return alert + + def _load_config(file_path): '''Private @@ -16,78 +47,37 @@ def _load_config(file_path): return {} -class _RegexMatchRule(object): - '''Private +class message(object): + '''Alert plugin interface that handles messages (alerts). + This plugin will look for IP addresses in any of the values of an + alert dictionary. For each IP address found, it will append some + text to the summary of the alert to provide more information + about where the IP originates from if it is recognized. - A rule defining a mapping from a regular expression matching a - set of IP addresses to a format string accepting one parameter. + The expected format of the configuration file, + `ip_source_enrichment.json.conf`, is as follows: - Produced by EnrichIPs given a plugin configuration. - ''' - - def __init__(self, regex, fmt): - '''Initialize the rule with a regular expression and - format string. - ''' - - self._ip_regex = regex - self._format_string = fmt - - - def matches(self, input_str): - '''Produces a list of strings each produced by formatting - the configured format string with any IPs found in the - provided input. - ''' - - return [] - - -class _CIDRMatchRule(object): - '''Private - - A rule defining a mapping from a CIDR address to a format string - that accepts one parameter. 
- - Produced by EnrichIPs given a plugin configuration. - ''' - - def __init__(self, cidr_str, fmt): - '''Initialize the rule with an IPv4 or IPv6 CIDR address - string and format string. - ''' - - self._cidr = cidr_str - self._format_string = fmt - - - def matches(self, input_str): - '''Produces a list of strings each produced by formatting - the configured format string with any IPs found in the - provided input. - ''' - - return [] - - -class EnrichIPs(object): - '''Add information to alerts containing IP addresses that describes - the source location of the IP address if it can be determined based - on a configured mapping. + ```json + { + "known": [ + { + "ipVersion": 4, + "range": "1.2.3.4/8", + "format": "IPv4 {1} is known" + }, + { + "ipVersion": 6, + "range": "1a2b:3c4d:123::/48", + "format": "IPv6 {1} is known" + } + ] + } + ``` ''' def __init__(self): - '''Initialize the plugin from a configuration in preparation to - match alerts. - ''' - - self.configuration = _load_config(CONFIG_FILE) + self._config = _load_config(CONFIG_FILE) - def enrich(self, alert): - '''Enrich alerts containing IP addresses with information about - the location from which those IPs originate. - Returns a modified alert. - ''' - - return alert + def onMessage(self, message): + return enrich(message, self._configj) From 46c2979d8e999beba19658c78a7ce9b6b678a6f2 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 29 Apr 2019 21:45:44 -0400 Subject: [PATCH 04/88] Implement some simple code to find IP addresses in strings using regular expressions --- alerts/plugins/ip_source_enrichment.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 2be311b5..8f1690be 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -26,7 +26,10 @@ def _isIPv6(ip): def _find_ip_addresses(string): '''List all of the IPv4 and IPv6 addresses found in a string.''' - return [] + ipv4_rx = '(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)' + ipv6_rx = '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))' + + return re.findall(ipv4_rx, string) + re.findall(ipv6_rx, string) def enrich(self, alert, known_ips): From c22a1e6d45cea81c96634b3ca60dc6e6bceb1950 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Tue, 30 Apr 2019 20:05:00 -0400 Subject: [PATCH 05/88] Wrote some unit tests for the ip_source_enrichment alert plugin --- .../plugins/test_ip_source_enrichment.py | 95 +++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 tests/alerts/plugins/test_ip_source_enrichment.py diff --git a/tests/alerts/plugins/test_ip_source_enrichment.py b/tests/alerts/plugins/test_ip_source_enrichment.py new file mode 100644 index 00000000..62de3705 --- /dev/null +++ b/tests/alerts/plugins/test_ip_source_enrichment.py @@ -0,0 +1,95 @@ +import json +import sys + 
+ +plugin_path = os.path.join(os.path.dirname(__file__), '../../../alerts/plugins') +sys.path.append(plugin_path) + +from ip_source_enrichment import enrich + + +known_ips = [ + { + 'ipVersion': 4, + 'range': '255.0.1.0/8', + 'format': '{1} known', + }, + { + 'ipVersion': 6, + 'range': 'a02b:0db8:beef::/48', + 'format': '{1} known', + } +] + +alert_with_ipv4 = { + 'category': 'bro', + 'tags': ['portscan'], + 'summary': 'this is a test alert', + 'details': { + 'sourceipaddress': '255.0.1.2', + 'destinationipaddress': '192.168.0.1', + 'ports': [22, 9001, 25505, 65534] + } +} + +alert_with_ipv6 = { + 'category': 'bro', + 'tags': ['test'], + 'summary': 'Another test alert', + 'deails': { + 'sourceipaddress': 'a02b:0db8:beef:32cc:4122:0000', + 'destinationipaddress': 'abcd:beef:3232:9001:0000:1234', + 'port': [22, 9001, 24404, 65532] + } +} + +alert_with_ipv4_in_summary = { + 'category': 'test', + 'tags': ['ip', 'in', 'summary'], + 'summary': 'Testing:255.0.1.232 is a random IP in a poorly formatted string', + 'details': {} +} + +alert_with_ipv6_in_summary = { + 'category': 'test', + 'tags': ['ip', 'in', 'summary'], + 'summary': 'Found IPs ["a02b:0db8:beef:32cc:4122:0000"]', + 'details': {} +} + + +class TestIPSourceEnrichment(object): + def test_ipv4_addrs_enriched(self): + enriched = enrich(alert_with_ipv4, known_ips) + + assert '255.0.1.2 known' in enriched['summary'] + + + def test_ipv6_addrs_enriched(self): + enriched = enrich(alert_with_ipv6, known_ips) + + assert 'a02b:0db8:beef:32cc:4122:0000 known' in enriched['summary'] + + + def test_ipv4_addrs_in_summary_enriched(self): + enriched = enrich(alert_with_ipv4_in_summary, known_ips) + + assert '255.0.1.232 known' in enriched['summary'] + + + def test_ipv6_addrs_in_summary_enriched(self): + enriched = enrich(alert_with_ipv6_in_summary, known_ips) + + assert 'a02b:0db8:beef:32cc:4122:0000 known' in enriched['summary'] + + + def test_unrecognized_ipv4_addrs_not_enriched(self): + enriched = enrich(alert_with_ipv4, known_ips) + + assert '192.168.0.1 known' not in enriched['summary'] + + + def test_unrecognized_ipv6_addrs_not_enriched(self): + enriched = enrich(alert_with_ipv6, known_ips) + + assert 'abcd:beef:3232:9001:0000:1234 known' not in enriched['summary'] From 58d6da7d3112d6146bab710323acbc80569b1788 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Tue, 30 Apr 2019 20:34:18 -0400 Subject: [PATCH 06/88] Fixed up syntax, removed self argument from enrich function --- alerts/plugins/ip_source_enrichment.py | 16 +++++++++++----- .../alerts/plugins/test_ip_source_enrichment.py | 9 +-------- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 8f1690be..d89a72d6 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -3,10 +3,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2014 Mozilla Corporation +import os +import re + import netaddr -CONFIG_FILE = './ip_source_enrichment.json.conf' +CONFIG_FILE = os.path.join( + os.path.dirname(__file__), + 'ip_source_enrichment.json.conf') def _isIPv4(ip): @@ -32,12 +37,12 @@ def _find_ip_addresses(string): return re.findall(ipv4_rx, string) + re.findall(ipv6_rx, string) -def enrich(self, alert, known_ips): +def enrich(alert, known_ips): '''Add information to alerts containing IP addresses that describes the source location of the IP address if it can be determined based on a configured mapping. 
''' - + return alert @@ -81,6 +86,7 @@ class message(object): def __init__(self): self._config = _load_config(CONFIG_FILE) - def onMessage(self, message): - return enrich(message, self._configj) + known_ips = self._config.get('known', []) + + return enrich(message, known_ips) diff --git a/tests/alerts/plugins/test_ip_source_enrichment.py b/tests/alerts/plugins/test_ip_source_enrichment.py index 62de3705..d850e887 100644 --- a/tests/alerts/plugins/test_ip_source_enrichment.py +++ b/tests/alerts/plugins/test_ip_source_enrichment.py @@ -1,13 +1,11 @@ -import json +import os import sys - plugin_path = os.path.join(os.path.dirname(__file__), '../../../alerts/plugins') sys.path.append(plugin_path) from ip_source_enrichment import enrich - known_ips = [ { 'ipVersion': 4, @@ -64,31 +62,26 @@ class TestIPSourceEnrichment(object): assert '255.0.1.2 known' in enriched['summary'] - def test_ipv6_addrs_enriched(self): enriched = enrich(alert_with_ipv6, known_ips) assert 'a02b:0db8:beef:32cc:4122:0000 known' in enriched['summary'] - def test_ipv4_addrs_in_summary_enriched(self): enriched = enrich(alert_with_ipv4_in_summary, known_ips) assert '255.0.1.232 known' in enriched['summary'] - def test_ipv6_addrs_in_summary_enriched(self): enriched = enrich(alert_with_ipv6_in_summary, known_ips) assert 'a02b:0db8:beef:32cc:4122:0000 known' in enriched['summary'] - def test_unrecognized_ipv4_addrs_not_enriched(self): enriched = enrich(alert_with_ipv4, known_ips) assert '192.168.0.1 known' not in enriched['summary'] - def test_unrecognized_ipv6_addrs_not_enriched(self): enriched = enrich(alert_with_ipv6, known_ips) From 3148479ffdb113ba194c167e2bab7ac85f03759b Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 6 May 2019 18:45:12 -0400 Subject: [PATCH 07/88] Fixup --- alerts/plugins/ip_source_enrichment.json.conf | 4 +- alerts/plugins/ip_source_enrichment.py | 51 ++++++++++++++----- .../plugins/test_ip_source_enrichment.py | 38 ++++++++------ 3 files changed, 61 insertions(+), 32 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.json.conf b/alerts/plugins/ip_source_enrichment.json.conf index 337c0caf..c0c57e8e 100644 --- a/alerts/plugins/ip_source_enrichment.json.conf +++ b/alerts/plugins/ip_source_enrichment.json.conf @@ -3,12 +3,12 @@ { "ipVersion": 4, "range": "8.32.0.0/16", - "format": "{1} is in OFFICE1." + "format": "{0} is in OFFICE1." }, { "ipVersion": 6, "range": "4a00:7a49:232::/48", - "format": "{1} is in OFFICE2." + "format": "{0} is in OFFICE2." } ] } diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index d89a72d6..1dbca0de 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2014 Mozilla Corporation +from operator import add import os import re @@ -14,20 +15,6 @@ CONFIG_FILE = os.path.join( 'ip_source_enrichment.json.conf') -def _isIPv4(ip): - try: - return netaddr.valid_ipv4(ip) - except: - return False - - -def _isIPv6(ip): - try: - return netaddr.valid_ipv6(ip) - except: - return False - - def _find_ip_addresses(string): '''List all of the IPv4 and IPv6 addresses found in a string.''' @@ -43,6 +30,42 @@ def enrich(alert, known_ips): on a configured mapping. 
''' + def find_ips(value): + if isinstance(value, str): + return _find_ip_addresses(value) + + if isinstance(value, list) or isinstance(value, tuple): + found = [find_ips(item) for item in value] + return reduce(add, found, []) + + if isinstance(value, dict): + found = [find_ips(item) for item in value.values()] + return reduce(add, found, []) + + return [] + + ips = find_ips(alert) + + alert = alert.copy() + + for ip in set(ips): + if netaddr.valid_ipv6(ip): + ip = ip[0] + + ip_address = netaddr.IPAddress(ip) + + if isinstance(ip_address, tuple): + ip_address = netaddr.IPAddress(ip_address[0]) + + matching_descriptions = filter( + lambda known: ip_address in netaddr.IPSet([known['range']]), + known_ips) + + for desc in matching_descriptions: + enriched = desc['format'].format(ip) + + alert['summary'] += '; ' + enriched + return alert diff --git a/tests/alerts/plugins/test_ip_source_enrichment.py b/tests/alerts/plugins/test_ip_source_enrichment.py index d850e887..2c6176ab 100644 --- a/tests/alerts/plugins/test_ip_source_enrichment.py +++ b/tests/alerts/plugins/test_ip_source_enrichment.py @@ -6,16 +6,22 @@ sys.path.append(plugin_path) from ip_source_enrichment import enrich + +good_ipv4 = '255.0.1.2' +good_ipv6 = '3001:4d9c:b29:12f0::1' +bad_ipv4 = '192.168.0.1' +bad_ipv6 = '2001:db8:a0b:12f0::1' + known_ips = [ { 'ipVersion': 4, - 'range': '255.0.1.0/8', - 'format': '{1} known', + 'range': good_ipv4 + '/8', + 'format': '{0} known', }, { 'ipVersion': 6, - 'range': 'a02b:0db8:beef::/48', - 'format': '{1} known', + 'range': good_ipv6 + '/64', + 'format': '{0} known', } ] @@ -24,8 +30,8 @@ alert_with_ipv4 = { 'tags': ['portscan'], 'summary': 'this is a test alert', 'details': { - 'sourceipaddress': '255.0.1.2', - 'destinationipaddress': '192.168.0.1', + 'sourceipaddress': good_ipv4, + 'destinationipaddress': bad_ipv4, 'ports': [22, 9001, 25505, 65534] } } @@ -35,8 +41,8 @@ alert_with_ipv6 = { 'tags': ['test'], 'summary': 'Another test alert', 'deails': { - 'sourceipaddress': 'a02b:0db8:beef:32cc:4122:0000', - 'destinationipaddress': 'abcd:beef:3232:9001:0000:1234', + 'sourceipaddress': good_ipv6, + 'destinationipaddress': bad_ipv6, 'port': [22, 9001, 24404, 65532] } } @@ -44,14 +50,14 @@ alert_with_ipv6 = { alert_with_ipv4_in_summary = { 'category': 'test', 'tags': ['ip', 'in', 'summary'], - 'summary': 'Testing:255.0.1.232 is a random IP in a poorly formatted string', + 'summary': 'Testing:{0} is a random IP in a poorly formatted string'.format(good_ipv4), 'details': {} } alert_with_ipv6_in_summary = { 'category': 'test', 'tags': ['ip', 'in', 'summary'], - 'summary': 'Found IPs ["a02b:0db8:beef:32cc:4122:0000"]', + 'summary': 'Found IPs ["{0}"]'.format(good_ipv6), 'details': {} } @@ -60,29 +66,29 @@ class TestIPSourceEnrichment(object): def test_ipv4_addrs_enriched(self): enriched = enrich(alert_with_ipv4, known_ips) - assert '255.0.1.2 known' in enriched['summary'] + assert '{0} known'.format(good_ipv4) in enriched['summary'] def test_ipv6_addrs_enriched(self): enriched = enrich(alert_with_ipv6, known_ips) - assert 'a02b:0db8:beef:32cc:4122:0000 known' in enriched['summary'] + assert '{0} known'.format(good_ipv6) in enriched['summary'] def test_ipv4_addrs_in_summary_enriched(self): enriched = enrich(alert_with_ipv4_in_summary, known_ips) - assert '255.0.1.232 known' in enriched['summary'] + assert '{0} known'.format(good_ipv4) in enriched['summary'] def test_ipv6_addrs_in_summary_enriched(self): enriched = enrich(alert_with_ipv6_in_summary, known_ips) - assert 'a02b:0db8:beef:32cc:4122:0000 
known' in enriched['summary'] + assert '{0} known'.format(good_ipv6) in enriched['summary'] def test_unrecognized_ipv4_addrs_not_enriched(self): enriched = enrich(alert_with_ipv4, known_ips) - assert '192.168.0.1 known' not in enriched['summary'] + assert '{0} known'.format(bad_ipv4) not in enriched['summary'] def test_unrecognized_ipv6_addrs_not_enriched(self): enriched = enrich(alert_with_ipv6, known_ips) - assert 'abcd:beef:3232:9001:0000:1234 known' not in enriched['summary'] + assert '{0} known'.format(bad_ipv6) not in enriched['summary'] From 656bf199108067b706cb302057e4f3f80076a463 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Wed, 8 May 2019 12:55:52 -0400 Subject: [PATCH 08/88] Use IPs that are easier to test against --- tests/alerts/plugins/test_ip_source_enrichment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/alerts/plugins/test_ip_source_enrichment.py b/tests/alerts/plugins/test_ip_source_enrichment.py index 2c6176ab..133a2a7f 100644 --- a/tests/alerts/plugins/test_ip_source_enrichment.py +++ b/tests/alerts/plugins/test_ip_source_enrichment.py @@ -8,9 +8,9 @@ from ip_source_enrichment import enrich good_ipv4 = '255.0.1.2' -good_ipv6 = '3001:4d9c:b29:12f0::1' +good_ipv6 = '3001:4d9c:b29:12f0::' bad_ipv4 = '192.168.0.1' -bad_ipv6 = '2001:db8:a0b:12f0::1' +bad_ipv6 = '2001:db8:a0b:12f0::' known_ips = [ { From 1d95a8f25edaeee092a84a6e912439911333dae7 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Wed, 8 May 2019 12:56:17 -0400 Subject: [PATCH 09/88] IPv6 regex returns a tuple, so we need to parse the first item out --- alerts/plugins/ip_source_enrichment.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 1dbca0de..1dcdb806 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -21,7 +21,12 @@ def _find_ip_addresses(string): ipv4_rx = '(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)' ipv6_rx = '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))' - return re.findall(ipv4_rx, string) + re.findall(ipv6_rx, string) + ipv4 = re.findall(ipv4_rx, string) + ipv6 = map( + lambda match: match[0] if isinstance(match, tuple) else match, + re.findall(ipv6_rx, string)) + + return ipv4 + ipv6 def enrich(alert, known_ips): @@ -49,14 +54,8 @@ def enrich(alert, known_ips): alert = alert.copy() for ip in set(ips): - if netaddr.valid_ipv6(ip): - ip = ip[0] - ip_address = netaddr.IPAddress(ip) - if isinstance(ip_address, tuple): - ip_address = netaddr.IPAddress(ip_address[0]) - matching_descriptions = filter( lambda known: ip_address in netaddr.IPSet([known['range']]), known_ips) From 5a6cc454cb62930df284a5026b9c8918742a87d5 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 13 May 2019 12:35:53 -0400 Subject: [PATCH 10/88] Fixed syntax --- 
alerts/plugins/ip_source_enrichment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 1dcdb806..35ed22c2 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -23,8 +23,8 @@ def _find_ip_addresses(string): ipv4 = re.findall(ipv4_rx, string) ipv6 = map( - lambda match: match[0] if isinstance(match, tuple) else match, - re.findall(ipv6_rx, string)) + lambda match: match[0] if isinstance(match, tuple) else match, + re.findall(ipv6_rx, string)) return ipv4 + ipv6 From e2e5978ea85d9351d811f0834cc3111741604892 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 13 May 2019 17:18:05 -0400 Subject: [PATCH 11/88] Follow standard for json config file naming --- ...ip_source_enrichment.json.conf => ip_source_enrichment.json} | 0 alerts/plugins/ip_source_enrichment.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename alerts/plugins/{ip_source_enrichment.json.conf => ip_source_enrichment.json} (100%) diff --git a/alerts/plugins/ip_source_enrichment.json.conf b/alerts/plugins/ip_source_enrichment.json similarity index 100% rename from alerts/plugins/ip_source_enrichment.json.conf rename to alerts/plugins/ip_source_enrichment.json diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 35ed22c2..c043e0ca 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -12,7 +12,7 @@ import netaddr CONFIG_FILE = os.path.join( os.path.dirname(__file__), - 'ip_source_enrichment.json.conf') + 'ip_source_enrichment.json') def _find_ip_addresses(string): From 27f80e447750b4eccb205d3b46106172cf13e38e Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 13 May 2019 17:19:57 -0400 Subject: [PATCH 12/88] removed unused ipVersion from config; add 'site' field that will be made distinct entry in alert details --- alerts/plugins/ip_source_enrichment.json | 4 ++-- alerts/plugins/ip_source_enrichment.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.json b/alerts/plugins/ip_source_enrichment.json index c0c57e8e..3d6e2ce5 100644 --- a/alerts/plugins/ip_source_enrichment.json +++ b/alerts/plugins/ip_source_enrichment.json @@ -1,13 +1,13 @@ { "known": [ { - "ipVersion": 4, "range": "8.32.0.0/16", + "site": "OFFICE1", "format": "{0} is in OFFICE1." }, { - "ipVersion": 6, "range": "4a00:7a49:232::/48", + "site": "OFFICE2", "format": "{0} is in OFFICE2." 
} ] diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index c043e0ca..f0833344 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -91,13 +91,13 @@ class message(object): { "known": [ { - "ipVersion": 4, "range": "1.2.3.4/8", + "site": "office1", "format": "IPv4 {1} is known" }, { - "ipVersion": 6, "range": "1a2b:3c4d:123::/48", + "site": "office2", "format": "IPv6 {1} is known" } ] From b6f48f50a6925e15c17523756caa28ebe546c09e Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 13 May 2019 17:26:50 -0400 Subject: [PATCH 13/88] Add the name of the office/vpn/whatever to details.site --- alerts/plugins/ip_source_enrichment.py | 1 + tests/alerts/plugins/test_ip_source_enrichment.py | 10 +++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index f0833344..33260c3b 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -63,6 +63,7 @@ def enrich(alert, known_ips): for desc in matching_descriptions: enriched = desc['format'].format(ip) + alert['details']['site'] = desc['site'] alert['summary'] += '; ' + enriched return alert diff --git a/tests/alerts/plugins/test_ip_source_enrichment.py b/tests/alerts/plugins/test_ip_source_enrichment.py index 133a2a7f..b70d78a5 100644 --- a/tests/alerts/plugins/test_ip_source_enrichment.py +++ b/tests/alerts/plugins/test_ip_source_enrichment.py @@ -14,13 +14,13 @@ bad_ipv6 = '2001:db8:a0b:12f0::' known_ips = [ { - 'ipVersion': 4, 'range': good_ipv4 + '/8', + 'site': 'office1', 'format': '{0} known', }, { - 'ipVersion': 6, 'range': good_ipv6 + '/64', + 'site': 'office2', 'format': '{0} known', } ] @@ -40,7 +40,7 @@ alert_with_ipv6 = { 'category': 'bro', 'tags': ['test'], 'summary': 'Another test alert', - 'deails': { + 'details': { 'sourceipaddress': good_ipv6, 'destinationipaddress': bad_ipv6, 'port': [22, 9001, 24404, 65532] @@ -67,21 +67,25 @@ class TestIPSourceEnrichment(object): enriched = enrich(alert_with_ipv4, known_ips) assert '{0} known'.format(good_ipv4) in enriched['summary'] + assert enriched['details']['site'] == 'office1' def test_ipv6_addrs_enriched(self): enriched = enrich(alert_with_ipv6, known_ips) assert '{0} known'.format(good_ipv6) in enriched['summary'] + assert enriched['details']['site'] == 'office2' def test_ipv4_addrs_in_summary_enriched(self): enriched = enrich(alert_with_ipv4_in_summary, known_ips) assert '{0} known'.format(good_ipv4) in enriched['summary'] + assert enriched['details']['site'] == 'office1' def test_ipv6_addrs_in_summary_enriched(self): enriched = enrich(alert_with_ipv6_in_summary, known_ips) assert '{0} known'.format(good_ipv6) in enriched['summary'] + assert enriched['details']['site'] == 'office2' def test_unrecognized_ipv4_addrs_not_enriched(self): enriched = enrich(alert_with_ipv4, known_ips) From 3fb2c046ee33c621054ed3d24b9f47b00827e749 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Tue, 14 May 2019 19:25:28 -0400 Subject: [PATCH 14/88] Make 'site' a parameter to format --- alerts/plugins/ip_source_enrichment.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 33260c3b..bc4d7280 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -61,7 +61,7 @@ def enrich(alert, known_ips): known_ips) for desc in matching_descriptions: 
- enriched = desc['format'].format(ip) + enriched = desc['format'].format(ip, desc['site']) alert['details']['site'] = desc['site'] alert['summary'] += '; ' + enriched @@ -83,7 +83,8 @@ class message(object): This plugin will look for IP addresses in any of the values of an alert dictionary. For each IP address found, it will append some text to the summary of the alert to provide more information - about where the IP originates from if it is recognized. + about where the IP originates from if it is recognized. It will + also add a `details.site` value containing the value of `site`. The expected format of the configuration file, `ip_source_enrichment.json.conf`, is as follows: @@ -94,16 +95,20 @@ class message(object): { "range": "1.2.3.4/8", "site": "office1", - "format": "IPv4 {1} is known" + "format": "IPv4 {0} is from {1}" }, { "range": "1a2b:3c4d:123::/48", "site": "office2", - "format": "IPv6 {1} is known" + "format": "IPv6 {0} is from {1}" } ] } ``` + + The format string can accept zero to two parameters. The first + will be the IP address found and the second will be the + value of the corresponding 'site'. ''' def __init__(self): From 91d7fe21e39ff1dc3e19181c56c0dc61de0ba193 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Thu, 16 May 2019 13:45:18 -0400 Subject: [PATCH 15/88] Document and test for a more detailed format for listing sites --- alerts/plugins/ip_source_enrichment.py | 23 +++++++++++++++++-- .../plugins/test_ip_source_enrichment.py | 12 ++++++---- 2 files changed, 29 insertions(+), 6 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index bc4d7280..ae0b9412 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -83,8 +83,7 @@ class message(object): This plugin will look for IP addresses in any of the values of an alert dictionary. For each IP address found, it will append some text to the summary of the alert to provide more information - about where the IP originates from if it is recognized. It will - also add a `details.site` value containing the value of `site`. + about where the IP originates from if it is recognized. The expected format of the configuration file, `ip_source_enrichment.json.conf`, is as follows: @@ -109,6 +108,26 @@ class message(object): The format string can accept zero to two parameters. The first will be the IP address found and the second will be the value of the corresponding 'site'. 
+ + The modified alert will have a `details.sites` field added to it, + with the following form: + + ```json + { + "details": { + "sites": [ + { + "ip": "1.2.3.4", + "site": "office1" + }, + { + "ip": "1a2b:3c4d:123::", + "site": "office2" + } + ] + } + } + ``` ''' def __init__(self): diff --git a/tests/alerts/plugins/test_ip_source_enrichment.py b/tests/alerts/plugins/test_ip_source_enrichment.py index b70d78a5..20ca2a57 100644 --- a/tests/alerts/plugins/test_ip_source_enrichment.py +++ b/tests/alerts/plugins/test_ip_source_enrichment.py @@ -67,25 +67,29 @@ class TestIPSourceEnrichment(object): enriched = enrich(alert_with_ipv4, known_ips) assert '{0} known'.format(good_ipv4) in enriched['summary'] - assert enriched['details']['site'] == 'office1' + assert len(enriched['details']['sites']) == 1 + assert enriched['details']['sites'][0]['site'] == 'office1' def test_ipv6_addrs_enriched(self): enriched = enrich(alert_with_ipv6, known_ips) assert '{0} known'.format(good_ipv6) in enriched['summary'] - assert enriched['details']['site'] == 'office2' + assert len(enriched['details']['sites']) == 1 + assert enriched['details']['sites'][0]['site'] == 'office2' def test_ipv4_addrs_in_summary_enriched(self): enriched = enrich(alert_with_ipv4_in_summary, known_ips) assert '{0} known'.format(good_ipv4) in enriched['summary'] - assert enriched['details']['site'] == 'office1' + assert len(enriched['details']['sites']) == 1 + assert enriched['details']['sites'][0]['site'] == 'office1' def test_ipv6_addrs_in_summary_enriched(self): enriched = enrich(alert_with_ipv6_in_summary, known_ips) assert '{0} known'.format(good_ipv6) in enriched['summary'] - assert enriched['details']['site'] == 'office2' + assert len(enriched['details']['sites']) == 1 + assert enriched['details']['sites'][0]['site'] == 'office2' def test_unrecognized_ipv4_addrs_not_enriched(self): enriched = enrich(alert_with_ipv4, known_ips) From c4ac61f24d6d9ab654037830ba7219ac75b0b6f5 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Thu, 16 May 2019 13:54:18 -0400 Subject: [PATCH 16/88] Satisfy tests --- alerts/plugins/ip_source_enrichment.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index ae0b9412..fa361a52 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -49,23 +49,30 @@ def enrich(alert, known_ips): return [] + + def ip_in_range(ip): + return lambda known: netaddr.IPAddress(ip) in netaddr.IPSet([known['range']]) + + ips = find_ips(alert) alert = alert.copy() + + alert['details']['sites'] = [] for ip in set(ips): - ip_address = netaddr.IPAddress(ip) - - matching_descriptions = filter( - lambda known: ip_address in netaddr.IPSet([known['range']]), - known_ips) + matching_descriptions = filter(ip_in_range(ip), known_ips) for desc in matching_descriptions: enriched = desc['format'].format(ip, desc['site']) - - alert['details']['site'] = desc['site'] + alert['summary'] += '; ' + enriched + alert['details']['sites'].append({ + 'ip': ip, + 'site': desc['site'], + }) + return alert From 5417830513c54f68629ff4b0f1b2c57963724e2c Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Fri, 17 May 2019 13:17:11 -0700 Subject: [PATCH 17/88] Remove reference to OIDCDiscoveryURL --- cloudy_mozdef/cloudformation/mozdef-instance.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-instance.yml 
b/cloudy_mozdef/cloudformation/mozdef-instance.yml index 293729d2..0d64bdab 100644 --- a/cloudy_mozdef/cloudformation/mozdef-instance.yml +++ b/cloudy_mozdef/cloudformation/mozdef-instance.yml @@ -166,7 +166,6 @@ Resources: # Future support will be added for cognito backed authentication. client_id=${OIDCClientId} client_secret=${OIDCClientSecret} - discovery_url=${OIDCDiscoveryURL} backend=http://meteor:3000 redirect_uri_path=/redirect_uri httpsredir=no @@ -533,4 +532,4 @@ Outputs: LoadBalancerDNSName: Description: The DNS name of the ALB hosting MozDef. If using OIDC or SSL point your DNS at this. Value: - Fn::GetAtt: [ MozDefElasticLoadBalancingV2LoadBalancer, DNSName ] \ No newline at end of file + Fn::GetAtt: [ MozDefElasticLoadBalancingV2LoadBalancer, DNSName ] From 3c52992250a68d213319d7ec2983162227e5d61f Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Mon, 20 May 2019 10:17:52 -0700 Subject: [PATCH 18/88] make ci build on reinforce branch as well temporarily --- cloudy_mozdef/ci/deploy | 1 + cloudy_mozdef/experiments/vpcflows-pipeline.yml | 0 2 files changed, 1 insertion(+) delete mode 100644 cloudy_mozdef/experiments/vpcflows-pipeline.yml diff --git a/cloudy_mozdef/ci/deploy b/cloudy_mozdef/ci/deploy index 3840206e..dfb5982c 100644 --- a/cloudy_mozdef/ci/deploy +++ b/cloudy_mozdef/ci/deploy @@ -25,6 +25,7 @@ echo " Head Ref : ${CODEBUILD_WEBHOOK_HEAD_REF}" echo " Trigger : ${CODEBUILD_WEBHOOK_TRIGGER}" if [[ "branch/master" == "${CODEBUILD_WEBHOOK_TRIGGER}" \ + || "branch/reinforce2019" == "${CODEBUILD_WEBHOOK_TRIGGER}" \ || "${CODEBUILD_WEBHOOK_TRIGGER}" =~ ^tag\/v[0-9]+\.[0-9]+\.[0-9]+(\-(prod|pre|testing))?$ ]]; then echo "Codebuild is ubuntu 14.04. Installing packer in order to compensate. Someone should build a CI docker container \;)." wget -nv https://releases.hashicorp.com/packer/1.3.5/packer_1.3.5_linux_amd64.zip diff --git a/cloudy_mozdef/experiments/vpcflows-pipeline.yml b/cloudy_mozdef/experiments/vpcflows-pipeline.yml deleted file mode 100644 index e69de29b..00000000 From f8910079221d4ce8434779609c9c82490cfdec44 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Mon, 20 May 2019 11:15:57 -0700 Subject: [PATCH 19/88] add cloudformation to create alert writers environment --- cloudy_mozdef/experiments/alert-writer.yml | 63 ++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 cloudy_mozdef/experiments/alert-writer.yml diff --git a/cloudy_mozdef/experiments/alert-writer.yml b/cloudy_mozdef/experiments/alert-writer.yml new file mode 100644 index 00000000..51324952 --- /dev/null +++ b/cloudy_mozdef/experiments/alert-writer.yml @@ -0,0 +1,63 @@ +AWSTemplateFormatVersion: 2010-09-09 +Description: Setup an alert writers environment for use with MozDef for AWS. Note this is PoC only. +Parameters: + VpcId: + Type: AWS::EC2::VPC::Id + Description: 'The VPC ID of the VPC to deploy in (Example : vpc-abcdef12)' + PublicSubnetIds: + Type: List + Description: 'A comma delimited list of public subnet IDs (Example: subnet-abcdef12,subnet-bcdef123)' + MozDefSecurityGroup: + Type: AWS::EC2::SecurityGroup::Id + Description: The security group the MozDef instance runs in. This is needed to access ES. + ESUrl: + Type: String + Description: 'The location of elasticsearch deployed in managed-es.' 
+Resources: + MozDefLayer: + Type: AWS::Lambda::LayerVersion + Properties: + LayerName: MozDef + Description: Mozilla Enterprise Defense Platform Dependencies + Content: + S3Bucket: public.us-west-2.security.allizom.org + S3Key: mozdef-lambda-layer/layer-latest.zip + CompatibleRuntimes: + - python2.7 + LicenseInfo: 'MPL 2.0' + LambdalertIAMRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: lambda.amazonaws.com + Action: sts:AssumeRole + ManagedPolicyArns: + - arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole + AlertWritersEnv: + Type: "AWS::Lambda::Function" + Properties: + Handler: "lambdalert.handle" + Role: + Fn::GetAtt: + - "LambdalertIAMRole" + - "Arn" + Code: + S3Bucket: public.us-west-2.security.allizom.org + S3Key: mozdef-lambda-layer/function-latest.zip + Layers: + - !Ref MozDefLayer + Environment: + Variables: + OPTIONS_ESSERVERS: !Ref ESUrl + OPTIONS_MQPROTOCOL: sqs + VpcConfig: + SecurityGroupIds: + - !Ref MozDefSecurityGroup + SubnetIds: !Ref PublicSubnetIds + ReservedConcurrentExecutions: 1 + Runtime: "python2.7" + Timeout: 120 From 1c2b76b239ae90b2dd9398d2b13c6493cddd83b8 Mon Sep 17 00:00:00 2001 From: Phrozyn Date: Mon, 20 May 2019 13:27:02 -0500 Subject: [PATCH 20/88] removes dashboard version as this breaks kibana due to integer type being passed --- .../mozdef_bootstrap/files/dashboards/all_events_area.json | 1 - .../mozdef_bootstrap/files/dashboards/category_pie_graph.json | 1 - .../files/dashboards/destinationip_bar_graph.json | 1 - .../mozdef_bootstrap/files/dashboards/sourceip_bar_graph.json | 1 - 4 files changed, 4 deletions(-) diff --git a/docker/compose/mozdef_bootstrap/files/dashboards/all_events_area.json b/docker/compose/mozdef_bootstrap/files/dashboards/all_events_area.json index 7dbe3f11..3c84a2e0 100644 --- a/docker/compose/mozdef_bootstrap/files/dashboards/all_events_area.json +++ b/docker/compose/mozdef_bootstrap/files/dashboards/all_events_area.json @@ -9,7 +9,6 @@ "visState": "{\"title\":\"All Events Area\",\"type\":\"area\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"utctimestamp per second\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"utctimestamp\",\"interval\":\"s\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}],\"listeners\":{}}", "uiStateJSON": "{}", "description": "", - "version": 1, "kibanaSavedObjectMeta": { "searchSourceJSON": 
"{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}" } diff --git a/docker/compose/mozdef_bootstrap/files/dashboards/category_pie_graph.json b/docker/compose/mozdef_bootstrap/files/dashboards/category_pie_graph.json index c2e6aa5a..4b6ce338 100644 --- a/docker/compose/mozdef_bootstrap/files/dashboards/category_pie_graph.json +++ b/docker/compose/mozdef_bootstrap/files/dashboards/category_pie_graph.json @@ -9,7 +9,6 @@ "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{},\"schema\":\"metric\",\"type\":\"count\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"field\":\"category\",\"order\":\"desc\",\"orderBy\":\"1\",\"size\":5},\"schema\":\"segment\",\"type\":\"terms\"}],\"listeners\":{},\"params\":{\"addLegend\":true,\"addTooltip\":true,\"isDonut\":false,\"legendPosition\":\"right\",\"type\":\"pie\"},\"title\":\"Category Pie Graph\",\"type\":\"pie\"}", "uiStateJSON": "{}", "description": "", - "version": 1, "kibanaSavedObjectMeta": { "searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}" } diff --git a/docker/compose/mozdef_bootstrap/files/dashboards/destinationip_bar_graph.json b/docker/compose/mozdef_bootstrap/files/dashboards/destinationip_bar_graph.json index cd6b4221..8fc5d7ed 100644 --- a/docker/compose/mozdef_bootstrap/files/dashboards/destinationip_bar_graph.json +++ b/docker/compose/mozdef_bootstrap/files/dashboards/destinationip_bar_graph.json @@ -9,7 +9,6 @@ "visState": "{\"title\":\"DestinationIP Bar Graph\",\"type\":\"histogram\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"details.destinationipaddress: Descending\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.destinationipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}", "uiStateJSON": "{}", "description": "", - "version": 1, "kibanaSavedObjectMeta": { "searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}" } diff --git a/docker/compose/mozdef_bootstrap/files/dashboards/sourceip_bar_graph.json b/docker/compose/mozdef_bootstrap/files/dashboards/sourceip_bar_graph.json index 9f729265..3e8277f9 100644 --- a/docker/compose/mozdef_bootstrap/files/dashboards/sourceip_bar_graph.json +++ b/docker/compose/mozdef_bootstrap/files/dashboards/sourceip_bar_graph.json @@ -9,7 +9,6 @@ "visState": "{\"title\":\"SourceIP Bar 
Graph\",\"type\":\"histogram\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"details.sourceipaddress: Descending\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.sourceipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}", "uiStateJSON": "{}", "description": "", - "version": 1, "kibanaSavedObjectMeta": { "searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}" } From 9207a18dca0d8b35678f6aae42bedf9b2080aa29 Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Mon, 20 May 2019 17:02:16 -0700 Subject: [PATCH 21/88] Initial VPC Flow log template --- .../cloudformation/mozdef-vpc-flow-logs.yml | 113 ++++++++++++++++++ 1 file changed, 113 insertions(+) create mode 100644 cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml diff --git a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml new file mode 100644 index 00000000..529c10c9 --- /dev/null +++ b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml @@ -0,0 +1,113 @@ +AWSTemplateFormatVersion: 2010-09-09 +Description: Pipeline to send VPC Flow Logs to MozDef +Parameters: + VpcId: + Type: AWS::EC2::VPC::Id + Default: vpc-dc8eacb4 + Description: 'The VPC ID of the VPC to deploy in (Example : vpc-abcdef12)' +Resources: + MozDefVPCFlowLogsSQSQueue: + Type: AWS::SQS::Queue + Properties: + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + LogGroup: + Type: AWS::Logs::LogGroup + Properties: + RetentionInDays: 1 + FlowLogRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: vpc-flow-logs.amazonaws.com + Action: sts:AssumeRole + Policies: + - PolicyName: AllowWriteCloudWatchLogs + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: + - logs:CreateLogGroup + - logs:CreateLogStream + - logs:PutLogEvents + - logs:DescribeLogGroups + - logs:DescribeLogStreams + Resource: "*" + FlowLog: + Type: AWS::EC2::FlowLog + Properties: + DeliverLogsPermissionArn: !GetAtt FlowLogRole.Arn + LogDestination: !GetAtt LogGroup.Arn + ResourceId: !Ref VpcId + ResourceType: VPC + TrafficType: ALL + FlowLogProcessorRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: lambda.amazonaws.com + Action: sts:AssumeRole + ManagedPolicyArns: + - 
arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole + Policies: + - PolicyName: AllowSendToSQS + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: + - sqs:DeleteMessage + - sqs:DeleteMessageBatch + - sqs:GetQueueAttributes + - sqs:GetQueueUrl + - sqs:SendMessage + - sqs:SendMessageBatch + Resource: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + FlowLogProcessor: + Type: AWS::Lambda::Function + Properties: + Code: + ZipFile: | + import os + def lambda_handler(event, context): + print(os.getenv('SQS_ARN')) + Description: Transform VPC Flow logs into MozDef events + Environment: + Variables: + SQS_ARN: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + Handler: index.lambda_handler + MemorySize: 128 + Role: !GetAtt FlowLogProcessorRole.Arn + Runtime: python3.7 + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + Timeout: 30 + FlowLogSubscriptionFilter: + Type: AWS::Logs::SubscriptionFilter + Properties: + DestinationArn: !GetAtt FlowLogProcessor.Arn + FilterPattern: "" + LogGroupName: String + RoleArn: String +Outputs: + MozDefVPCFlowLogsSQSQueueArn: + Description: ARN of the MozDef VPC Flow Logs SQS Queue + Value: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + MozDefVPCFlowLogsSQSQueueName: + Description: Name of the MozDef VPC Flow Logs SQS Queue + Value: !GetAtt MozDefVPCFlowLogsSQSQueue.QueueName From 0c46872d246005206f5473701e07ac8f2a91acdb Mon Sep 17 00:00:00 2001 From: A Smith Date: Tue, 21 May 2019 12:32:15 -0500 Subject: [PATCH 22/88] Adding mozdef logo to doc source image folder --- docs/source/images/moz_defense-platform_01.png | Bin 0 -> 18973 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/source/images/moz_defense-platform_01.png diff --git a/docs/source/images/moz_defense-platform_01.png b/docs/source/images/moz_defense-platform_01.png new file mode 100644 index 0000000000000000000000000000000000000000..25c52c9134cd5cb99a9279923e5b238c8fff1857 GIT binary patch literal 18973 zcmeIZXH-+$*Dt&&2h>Bc14_|jMd`g4Q4s`05Rr~RKuSPL2rUFe;V4K@kzPeWK{_PV z1Vlgx5~KwPArv8ykc1)x2<1lo-}}Db?|AO><=Ko8_E>wbx#pZ}mfu=y?&s!ahWv+4 z9RdJ=-{{tL3jpAa0sxNB2YA>m0y9II?0+IbHy#98`nd-|9|nK`eK$WB&>17}Ll2Mz z=%HJve>X@A0E{q3*Y)p(j4u&)Aq39;Bb?p)x5r+2P#O*N1_N)I6_CS|m@!36GkZ zT}V?IbetcM^Wtn}_e|(m4Gs@&61zyhB(Ri2@`cATVp@ROiq!puMR{mtJo;@}aI8$&#*ig;~ z#UB*fvF_g~x7Ul?7}bPYp=Y99H-mw5<+sU*o$zL%$VOrx@t8>ZX4I%Y z2VMv`&cM6|E)KVG0aJUy0}2svsGYsSa>+I;`n_bI%>pAEGd|LawtdP_OhYlhybVJf zc@Vodxy4E&S93orlnD|+j7wz#H^jKcP_+XE$8*il{^9yQtclrDQ zZcATvZJC7!Kwb!EtDKm7B>*U?3X*uy6*19_qpN3c*mnlOnp#Tt5(uVPSZMvxQA6zV zY}@6_Mn31>Ykh)R*8M$6@-03~dwXX51(5Ir3UC2H1nLt~STBL=sp{+Zs*-J_;#F$i z`2NEfSQxuMQ)6q6wsA#E>^$Kqqph~9HzrSn_RWA*nCrR)Te~HaZAFWD6U%ZKy37Os zw_yJ!7_psZJHnOFR|3k$ybk*5R#i+ZsqYXmk9(mfJAaTyYK$J|0(<90sZ0oWi3sqT zR6Kikb5lGEY#Eus4bfGdU|JvnbB)`2?Li@aVt}5`8i6JiA)z(YM;WP}V^Vre2rqKI z6$-*Hk~q-AF&_Gr@ei@inX6Enx?_~>(RbMgfE&kvSkLdkx6*INiiBSC0~bLyM3VHu ziELfl2oG3Ow(5@AAPyTC#h7U~zuzGohb*0uKBS$CoTHm|fuE9JoB|4eQAoed2}CWq zoH0LK-?ZVKsj`$Sjb8hB1I@Ked>;0SquFOATa5)J&S+-F(u9D^8GxKgJct4Sjouyy zImxI;TP=`yhEg_hEq;k(vkSH?B@E5m3I5zT=cl|iX+OdilEXReB-d*IARgiFW7xzq zzCgEpZ6g`gZR9z{R^4xYDAem{ajab?uYgY7PD>Gk!}04sfbqtg^UhBuhaR2jY|ENW zVZpx-hK8g{0dlq23~=sjzs!SRkqk@$81F)6;7U}c-pwkF@kBd4b#J>a?wg8|R0YF) zmNPk9TMf(%TE?D$S?hkbLlEBSt_Qc8FZF4bbS0KF29*i-SA}93#M+(7#^PD7D8~bU z;tdDEFJ<9ch@$K|k7^0$dn(e5tEY_&@A!J9e9uf|l4>1-&A~W7RC^MN@NlQ3cs8waO~&rE&BL=0(dS^L1wwSY^-t|XAF7+P?A z??fcGKk{^I@)y0~(7KLo6&~MIk&UJg$h!0HE_;0Y5eIx=cKZo;W>r4^ez-}I3 zS=NCs(lDL?wgM-_)_(0Ge09Yu>KEaE*qex?1 
[git binary patch data omitted]

From: Gene Wood Date: Tue, 21 May 2019 10:39:53 -0700 Subject: [PATCH 23/88] Emit SQS URL --- cloudy_mozdef/cloudformation/mozdef-sqs.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-sqs.yml b/cloudy_mozdef/cloudformation/mozdef-sqs.yml index 674b7320..6bae873a 100644 --- a/cloudy_mozdef/cloudformation/mozdef-sqs.yml +++ b/cloudy_mozdef/cloudformation/mozdef-sqs.yml @@ -52,6 +52,9 @@ Outputs: SQSQueueName: Description: Name of the SQS Queue that MozDef will consume events from Value: !GetAtt MozDefSQSQueue.QueueName + SQSQueueUrl: + Description: URL of the SQS Queue that MozDef will consume events from + Value: !Ref MozDefSQSQueue AlertTaskSQSQueueArn: Description: ARN of the SQS Queue that MozDef will consume events from Value: !GetAtt MozDefSQSAlertTaskQueue.Arn @@ -60,6 +63,4 @@ Outputs: Value: !GetAtt MozDefSQSAlertTaskQueue.QueueName AlertTaskSQSQueueUrl: Description: The SQS queue url for the alerttask exchange as used in kombu. 
- Value: - !Join ['', ['https://', 'sqs', ".", !Ref "AWS::Region",".amazonaws.com", "/" -, !Ref "AWS::AccountId", "/",!GetAtt MozDefSQSAlertTaskQueue.QueueName]] \ No newline at end of file + Value: !Ref MozDefSQSAlertTaskQueue From 151c7fc09f8491722dfcd6f0d66fccc3c9eff1fe Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Tue, 21 May 2019 10:41:53 -0700 Subject: [PATCH 24/88] Add VPC flow log transformer Also change to using the SQS queue created by the parent instead of creating our own --- .../cloudformation/mozdef-parent.yml | 15 ++- .../cloudformation/mozdef-vpc-flow-logs.yml | 96 +++++++++++++++---- 2 files changed, 89 insertions(+), 22 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-parent.yml b/cloudy_mozdef/cloudformation/mozdef-parent.yml index 3e1dbad4..9be2e527 100644 --- a/cloudy_mozdef/cloudformation/mozdef-parent.yml +++ b/cloudy_mozdef/cloudformation/mozdef-parent.yml @@ -215,6 +215,19 @@ Resources: - Key: stack Value: !Ref AWS::StackName TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-cloudtrail.yml ] ] + MozDefVPCFlowLogs: + Type: AWS::CloudFormation::Stack + Properties: + Parameters: + VpcId: !Ref VpcId + MozDefSQSQueueArn: !GetAtt MozDefSQS.Outputs.SQSQueueArn + MozDefSQSQueueUrl: !GetAtt MozDefSQS.Outputs.SQSQueueUrl + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-vpc-flow-logs.yml ] ] CloudFormationLambdaIAMRole: Type: AWS::IAM::Role Properties: @@ -312,4 +325,4 @@ Resources: Outputs: LoadBalancerDNSName: Description: The DNS name of the ALB hosting MozDef. If using OIDC or SSL point your DNS at this. If using basic auth no DNS is necessary. 
- Value: !GetAtt MozDefInstance.Outputs.LoadBalancerDNSName \ No newline at end of file + Value: !GetAtt MozDefInstance.Outputs.LoadBalancerDNSName diff --git a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml index 529c10c9..68b10c2e 100644 --- a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml +++ b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml @@ -5,15 +5,13 @@ Parameters: Type: AWS::EC2::VPC::Id Default: vpc-dc8eacb4 Description: 'The VPC ID of the VPC to deploy in (Example : vpc-abcdef12)' + MozDefSQSQueueUrl: + Type: String + Description: 'The SQS URL to send MozDef structured events to for consumption' + MozDefSQSQueueArn: + Type: String + Description: 'The SQS ARN to send MozDef structured events to for consumption' Resources: - MozDefVPCFlowLogsSQSQueue: - Type: AWS::SQS::Queue - Properties: - Tags: - - Key: application - Value: mozdef - - Key: stack - Value: !Ref AWS::StackName LogGroup: Type: AWS::Logs::LogGroup Properties: @@ -74,19 +72,69 @@ Resources: - sqs:GetQueueUrl - sqs:SendMessage - sqs:SendMessageBatch - Resource: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + Resource: !Ref MozDefSQSQueueArn FlowLogProcessor: Type: AWS::Lambda::Function Properties: Code: ZipFile: | - import os + import os, boto3, gzip, base64, json, socket, sys + from datetime import datetime + + PROTO_NUM_MAP = {num: name[8:] for name, num in vars(socket).items() if name.startswith("IPPROTO")} + def lambda_handler(event, context): - print(os.getenv('SQS_ARN')) + client = boto3.client('sqs') + raw_data = event.get('awslogs', {}).get('data') + data = json.loads( + gzip.decompress(base64.b64decode(raw_data)).decode('utf-8')) + entries = [] + for log_event in [x for x in data.get('logEvents', []) if x['extractedFields']['log_status'] == 'OK']: + # TODO : Do we want to do something with log_status NODATA and SKIPDATA events? 
+ fields = log_event['extractedFields'] + message = dict( + category='vpc-flow', + hostname=socket.getfqdn(), + processid=os.getpid(), + processname=sys.argv[0], + severity='INFO', + source='vpc_flow') + message['utctimestamp'] = datetime.utcfromtimestamp( + int(data['timestamp'] / 1000)).strftime('%Y-%m-%dT%H:%M:%S+00:00') + message['summary'] = '{srcaddr}:{srcport} -> {dstaddr}:{dstport} {bytes} bytes {action}'.format(**fields) + message['details'] = dict( + destinationipaddress=fields['dstaddr'], + destinationport=fields['dstport'], + sourceipaddress=fields['srcaddr'], + sourceport=fields['srcport'], + success=fields['action'] == 'ACCEPT', + capture_window_start=datetime.utcfromtimestamp( + fields['start']).strftime('%Y-%m-%dT%H:%M:%S+00:00'), + capture_window_end=datetime.utcfromtimestamp( + fields['end']).strftime('%Y-%m-%dT%H:%M:%S+00:00'), + version=fields['version'], + pkts=fields['packets'], + proto=PROTO_NUM_MAP.get(int(fields['protocol']), 'unknown').lower(), + recipientaccountid=fields['account_id'], + interface_id=fields['interface_id'], + bytes=fields['bytes']) + entry = dict( + Id=log_event['id'], + MessageBody=json.dumps(message)) + entries.append(entry) + if len(entries) == 10: + response = client.send_message_batch( + QueueUrl=os.getenv('SQS_URL'), + Entries=entries) + # TODO : Process the response and do something about failures + del entries[:] + response = client.send_message_batch( + QueueUrl=os.getenv('SQS_URL'), + Entries=entries) Description: Transform VPC Flow logs into MozDef events Environment: Variables: - SQS_ARN: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + SQS_URL: !Ref MozDefSQSQueueUrl Handler: index.lambda_handler MemorySize: 128 Role: !GetAtt FlowLogProcessorRole.Arn @@ -97,17 +145,23 @@ Resources: - Key: stack Value: !Ref AWS::StackName Timeout: 30 + FlowLogProcessorPermission: + Type: AWS::Lambda::Permission + Properties: + Action: lambda:InvokeFunction + FunctionName: !GetAtt FlowLogProcessor.Arn + Principal: !Join [ '.', [ 'logs', !Ref 'AWS::Region', 'amazonaws.com' ] ] + SourceAccount: !Ref 'AWS::AccountId' + SourceArn: !GetAtt LogGroup.Arn + # LogGroup.Arn claims to be a value like + # arn:aws:logs:us-west-1:123456789012:log-group:/mystack-testgroup-12ABC1AB12A1:* + # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-logs-loggroup.html#aws-resource-logs-loggroup-return-values + # Which should be correct for SourceArn which is expecting something like + # arn:aws:logs:region:123456789123:log-group:TestLambda:* + # https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/SubscriptionFilters.html#LambdaFunctionExample FlowLogSubscriptionFilter: Type: AWS::Logs::SubscriptionFilter Properties: DestinationArn: !GetAtt FlowLogProcessor.Arn FilterPattern: "" - LogGroupName: String - RoleArn: String -Outputs: - MozDefVPCFlowLogsSQSQueueArn: - Description: ARN of the MozDef VPC Flow Logs SQS Queue - Value: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn - MozDefVPCFlowLogsSQSQueueName: - Description: Name of the MozDef VPC Flow Logs SQS Queue - Value: !GetAtt MozDefVPCFlowLogsSQSQueue.QueueName + LogGroupName: !Ref LogGroup From 88a43b942aa2240bb5df74a821d7cd929db47887 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Tue, 21 May 2019 20:42:40 -0400 Subject: [PATCH 25/88] Implement _load_config to just naively try to open and parse the config file specified; not going to supply a default because we probably want to know if the file doenst exist --- alerts/plugins/ip_source_enrichment.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff 
--git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index fa361a52..ba5a46df 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -82,7 +82,8 @@ def _load_config(file_path): Read and parse a file from disk as JSON into a dictionary. ''' - return {} + with open(file_path) as config_file: + return json.load(config_file) class message(object): From e191cb2e4a2dafc0f46772145de1783a4ebbd3ea Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Tue, 21 May 2019 20:43:41 -0400 Subject: [PATCH 26/88] Resolving PEP 8 errors --- alerts/plugins/ip_source_enrichment.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index ba5a46df..5c4fc351 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -49,15 +49,13 @@ def enrich(alert, known_ips): return [] - def ip_in_range(ip): return lambda known: netaddr.IPAddress(ip) in netaddr.IPSet([known['range']]) - ips = find_ips(alert) alert = alert.copy() - + alert['details']['sites'] = [] for ip in set(ips): @@ -65,7 +63,7 @@ def enrich(alert, known_ips): for desc in matching_descriptions: enriched = desc['format'].format(ip, desc['site']) - + alert['summary'] += '; ' + enriched alert['details']['sites'].append({ From 353799ca2dd818e3b36f77b8e7d7553b7b7a1dc2 Mon Sep 17 00:00:00 2001 From: A Smith Date: Wed, 22 May 2019 10:17:25 -0500 Subject: [PATCH 27/88] Adding Logo to Readme Opinions/criticism/Suggestions welcome. --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index ce0a83c8..9162588e 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ [![Build Status](https://travis-ci.org/mozilla/MozDef.svg?branch=master)](https://travis-ci.org/mozilla/MozDef) [![Documentation Status](https://readthedocs.org/projects/mozdef/badge/?version=latest)](http://mozdef.readthedocs.io/en/latest/?badge=latest) -# MozDef: Mozilla Enterprise Defense Platform +# MozDef: ![LOGO](docs/source/images/moz_defense-platform_01.png) ## Documentation: @@ -36,4 +36,4 @@ The Mozilla Enterprise Defense Platform (MozDef) seeks to automate the security MozDef is in production at Mozilla where we are using it to process over 300 million events per day. 
-[1]: https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/v1.38.5/mozdef-parent.yml \ No newline at end of file +[1]: https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/v1.38.5/mozdef-parent.yml From 1530df18184934de285e6c443886627df56d057d Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 22 May 2019 15:00:52 -0500 Subject: [PATCH 28/88] Add commented out ports line for mongodb --- docker/compose/docker-compose.yml | 98 ++++++++++++++++--------------- 1 file changed, 50 insertions(+), 48 deletions(-) diff --git a/docker/compose/docker-compose.yml b/docker/compose/docker-compose.yml index ae476280..1696138c 100644 --- a/docker/compose/docker-compose.yml +++ b/docker/compose/docker-compose.yml @@ -1,54 +1,6 @@ --- version: '3.7' services: - nginx: - image: mozdef/mozdef_nginx - build: - context: ../../ - dockerfile: docker/compose/nginx/Dockerfile - cache_from: - - mozdef/mozdef_nginx - - mozdef_nginx:latest - restart: always - command: /usr/sbin/nginx - depends_on: - - kibana - - meteor - ports: - - 80:80 - - 8080:8080 - - 9090:9090 - # - 8081:8081 - networks: - - default - mongodb: - image: mozdef/mozdef_mongodb - build: - context: ../../ - dockerfile: docker/compose/mongodb/Dockerfile - cache_from: - - mozdef/mozdef_mongodb - - mozdef_mongodb:latest - restart: always - command: /usr/bin/mongod --smallfiles --config /etc/mongod.conf - volumes: - - mongodb:/var/lib/mongo - networks: - - default - kibana: - image: mozdef/mozdef_kibana - build: - context: ../../ - dockerfile: docker/compose/kibana/Dockerfile - cache_from: - - mozdef/mozdef_kibana - - mozdef_kibana:latest - restart: always - command: bin/kibana --elasticsearch=http://elasticsearch:9200 - depends_on: - - elasticsearch - networks: - - default elasticsearch: image: mozdef/mozdef_elasticsearch build: @@ -82,6 +34,56 @@ services: # - 15672:15672 # Admin interface networks: - default + mongodb: + image: mozdef/mozdef_mongodb + build: + context: ../../ + dockerfile: docker/compose/mongodb/Dockerfile + cache_from: + - mozdef/mozdef_mongodb + - mozdef_mongodb:latest + restart: always + command: /usr/bin/mongod --smallfiles --config /etc/mongod.conf + volumes: + - mongodb:/var/lib/mongo + # ports: + # - 3002:3002 + networks: + - default + kibana: + image: mozdef/mozdef_kibana + build: + context: ../../ + dockerfile: docker/compose/kibana/Dockerfile + cache_from: + - mozdef/mozdef_kibana + - mozdef_kibana:latest + restart: always + command: bin/kibana --elasticsearch=http://elasticsearch:9200 + depends_on: + - elasticsearch + networks: + - default + nginx: + image: mozdef/mozdef_nginx + build: + context: ../../ + dockerfile: docker/compose/nginx/Dockerfile + cache_from: + - mozdef/mozdef_nginx + - mozdef_nginx:latest + restart: always + command: /usr/sbin/nginx + depends_on: + - kibana + - meteor + ports: + - 80:80 + - 8080:8080 + - 9090:9090 + # - 8081:8081 + networks: + - default # MozDef Specific Containers base: From 1f16a97b6a46e017fafaccc85c1932c7f9e642d6 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 22 May 2019 15:16:11 -0500 Subject: [PATCH 29/88] Call cidr_merge on ipblocklist ips --- cron/createIPBlockList.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cron/createIPBlockList.py 
b/cron/createIPBlockList.py index ecefd2fb..ed68a061 100755 --- a/cron/createIPBlockList.py +++ b/cron/createIPBlockList.py @@ -137,12 +137,13 @@ def main(): {"$project": {"address": 1}}, {"$limit": options.iplimit} ]) - IPList = [] + ips = [] for ip in ipCursor: - IPList.append(ip['address']) + ips.append(ip['address']) + uniq_ranges = netaddr.cidr_merge(ips) # to text with open(options.outputfile, 'w') as outputfile: - for ip in IPList: + for ip in uniq_ranges: outputfile.write("{0}\n".format(ip)) outputfile.close() # to s3? From 76475b59ac6d797aea104ddc84296ceaba809ab5 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Thu, 23 May 2019 15:31:18 -0500 Subject: [PATCH 30/88] Fix kibana protocol for relative urls --- meteor/imports/helpers.js | 3 ++- meteor/imports/themes/side_nav_dark/menu.js | 16 ---------------- 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/meteor/imports/helpers.js b/meteor/imports/helpers.js index 14c79b3a..48b7d22f 100644 --- a/meteor/imports/helpers.js +++ b/meteor/imports/helpers.js @@ -42,8 +42,9 @@ resolveKibanaURL = function(url){ if ( kibanaURL.hostname == 'relative' ){ // we were passed something like OPTIONS_METEOR_KIBANAURL=http://relative:9090/app/kibana // so lets figure out where we should be - dnsURL=new URL(document.URL); + dnsURL = new URL(document.URL); kibanaURL.hostname = dnsURL.hostname; + kibanaURL.protocol = dnsURL.protocol; } return kibanaURL; }; diff --git a/meteor/imports/themes/side_nav_dark/menu.js b/meteor/imports/themes/side_nav_dark/menu.js index bb3b80d7..d94c6d2e 100644 --- a/meteor/imports/themes/side_nav_dark/menu.js +++ b/meteor/imports/themes/side_nav_dark/menu.js @@ -13,22 +13,6 @@ Template.side_nav_menu.helpers( { //subscription has records? return features.find().count() > 0; }, - resolveKibanaURL: function( url ) { - // special function just for the menu - // to adjust the kibana URL if we are told to make it 'relative' - // to whatever DNS name we are running on - // i.e. pass in http://relative:9090/app/kibana - // when the running dns is something.com - // and we will set the hostname to something.com instead of 'relative' - var kibanaURL = new URL( url ); - if ( kibanaURL.hostname == 'relative' ) { - // we were passed something like OPTIONS_METEOR_KIBANAURL=http://relative:9090/app/kibana - // so lets figure out where we should be - dnsURL = new URL( document.URL ); - kibanaURL.hostname = dnsURL.hostname; - } - return kibanaURL; - }, // loads kibana dashboards kibanadashboards: function() { Meteor.call( 'loadKibanaDashboards' ); From 27934287919e5f5f77f438932c5a89db4682017f Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Sun, 19 May 2019 08:40:09 -0700 Subject: [PATCH 31/88] fix passing via header --- docker/compose/mozdef_cognito_proxy/files/default.conf | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/docker/compose/mozdef_cognito_proxy/files/default.conf b/docker/compose/mozdef_cognito_proxy/files/default.conf index e0d412d3..8ee2b5d1 100644 --- a/docker/compose/mozdef_cognito_proxy/files/default.conf +++ b/docker/compose/mozdef_cognito_proxy/files/default.conf @@ -22,8 +22,10 @@ server { set_by_lua_block $user { if os.getenv("OIDC_CLIENT_ID") == "Unset" then + ngx.log(ngx.NOTICE, 'OIDC authentication is not in use. Logging in as sample user.') return "mozdefuser@sample.com" else + ngx.log(ngx.NOTICE, 'OIDC authentication in use. 
Attempting to parse headers.') local resp = {headers=nil, body=nil} local json_safe = require "cjson.safe" local jwt = require "resty.jwt" @@ -37,15 +39,17 @@ server { resp.queryStringParameters = ngx.req.get_uri_args() resp.path = ngx.var.uri if resp['amzn-oidc-data'] ~= nil then - return resp['amzn-oidc-data']['payload']['email'] + local email = resp['amzn-oidc-data']['payload']['email'] + ngx.log(ngx.NOTICE, 'Via header sent to meteor') + return email else ngx.status = 403 end end } auth_basic $auth_basic; - proxy_pass http://$backend; proxy_set_header via $user; + proxy_pass http://$backend; } } @@ -93,9 +97,9 @@ server { end } proxy_set_header Authorization ""; + proxy_set_header via $user; auth_basic $auth_basic; proxy_pass $backend; - proxy_set_header via $user; } error_page 500 502 503 504 /50x.html; location = /50x.html { From f12181a31c263e60f71849b5a2cd0f25e39ab990 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Sun, 19 May 2019 08:42:04 -0700 Subject: [PATCH 32/88] fix nocache --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 266cfccf..95f496a5 100644 --- a/Makefile +++ b/Makefile @@ -13,6 +13,7 @@ NAME := mozdef VERSION := 0.1 BRANCH := master NO_CACHE := ## Pass `--no-cache` in order to disable Docker cache +PARALLEL := --parallel GITHASH := latest ## Pass `$(git rev-parse --short HEAD`) to tag docker hub images as latest git-hash instead TEST_CASE := tests ## Run all (`tests`) or a specific test case (ex `tests/alerts/tests/alerts/test_proxy_drop_exfil_domains.py`) TMPDIR := $(shell mktemp -d ) @@ -65,7 +66,7 @@ build: build-from-cwd .PHONY: build-from-cwd build-from-cwd: ## Build local MozDef images (use make NO_CACHE=--no-cache build to disable caching) - docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(NO_CACHE) $(BUILD_MODE) --parallel + docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(BUILD_MODE) $(PARALLEL) $(NO_CACHE) .PHONY: build-from-github build-from-github: ## Build local MozDef images from the github branch (use make NO_CACHE=--no-cache build to disable caching). 
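A note for readers on the via-header wiring above: with ALB authentication in front of MozDef, the load balancer puts a signed JWT in the x-amzn-oidc-data request header, the Lua block in mozdef_cognito_proxy/files/default.conf pulls the email claim out of that JWT, and nginx forwards it to Meteor as the via header that registerLoginViaHeader() consumes (see the Meteor change in the next patch). The sketch below is illustrative only and is not part of this patch series; it assumes the standard three-segment JWT layout and deliberately skips signature verification, which a real consumer should perform against the ALB's public key.

# Illustrative sketch (not part of these patches): pull the email claim out of
# the JWT an AWS ALB sets in the x-amzn-oidc-data header; this mirrors what the
# resty.jwt code in default.conf does before setting the "via" header.
# NOTE: signature verification is intentionally omitted in this sketch.
import base64
import json


def email_from_oidc_data(header_value):
    """Return the 'email' claim from an x-amzn-oidc-data JWT payload, or None."""
    try:
        payload_b64 = header_value.split('.')[1]
        # JWT segments are base64url encoded and may arrive without padding
        padded = payload_b64 + '=' * (-len(payload_b64) % 4)
        claims = json.loads(base64.urlsafe_b64decode(padded).decode('utf-8'))
    except (IndexError, ValueError):
        return None
    return claims.get('email') if isinstance(claims, dict) else None

On the nginx/OpenResty side the same extraction is done with the resty.jwt library, and the Meteor server simply trusts whatever email the proxy places in the via header, which is why the proxy must stay the only path to the Meteor port.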
From d656ee6e2b6f17516bdbc648ee36cb46a5e11845 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Sun, 19 May 2019 10:03:28 -0700 Subject: [PATCH 33/88] stub out layers dir --- cloudy_mozdef/cloudformation/mozdef-instance.yml | 1 + cloudy_mozdef/experiments/vpcflows-pipeline.yml | 0 cloudy_mozdef/lambda_layer/.gitignore | 2 ++ cloudy_mozdef/lambda_layer/build/.keep | 0 meteor/server/mozdef.js | 4 ++-- 5 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 cloudy_mozdef/experiments/vpcflows-pipeline.yml create mode 100644 cloudy_mozdef/lambda_layer/.gitignore create mode 100644 cloudy_mozdef/lambda_layer/build/.keep diff --git a/cloudy_mozdef/cloudformation/mozdef-instance.yml b/cloudy_mozdef/cloudformation/mozdef-instance.yml index 0d64bdab..a1bd0e9f 100644 --- a/cloudy_mozdef/cloudformation/mozdef-instance.yml +++ b/cloudy_mozdef/cloudformation/mozdef-instance.yml @@ -471,6 +471,7 @@ Resources: SessionCookieName: mozdefMeteor TokenEndpoint: !Ref OIDCTokenEndpoint UserInfoEndpoint: !Ref OIDCUserInfoEndpoint + Scope: "openid email" Order: 1 - Type: forward Order: 2 diff --git a/cloudy_mozdef/experiments/vpcflows-pipeline.yml b/cloudy_mozdef/experiments/vpcflows-pipeline.yml new file mode 100644 index 00000000..e69de29b diff --git a/cloudy_mozdef/lambda_layer/.gitignore b/cloudy_mozdef/lambda_layer/.gitignore new file mode 100644 index 00000000..15cc48dd --- /dev/null +++ b/cloudy_mozdef/lambda_layer/.gitignore @@ -0,0 +1,2 @@ +function-latest.zip +layer-latest.zip \ No newline at end of file diff --git a/cloudy_mozdef/lambda_layer/build/.keep b/cloudy_mozdef/lambda_layer/build/.keep new file mode 100644 index 00000000..e69de29b diff --git a/meteor/server/mozdef.js b/meteor/server/mozdef.js index 5e39159c..5fe0b879 100644 --- a/meteor/server/mozdef.js +++ b/meteor/server/mozdef.js @@ -193,6 +193,7 @@ function registerLoginViaHeader() { //grab the email from the header var userEmail = this.connection.httpHeaders[headerName]; + console.log( 'target header:', userEmail ); //our authentication logic //check for user email header @@ -203,8 +204,7 @@ function registerLoginViaHeader() { error: handleError( "SSO Login failure: email not found in the 'via' http header" ) }; } - - console.log( 'target header:', userEmail ); + console.log( 'handling login request', loginRequest ); //we create a user if needed, and get the userId From 3b59924f0b35fa7733d1f565a0667b596cb216d2 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Sun, 19 May 2019 11:04:07 -0700 Subject: [PATCH 34/88] update scaffold for alert write env --- .../lambda_layer/build/lambdalert.py | 66 +++ .../lambda_layer/build/lib/__init__.py | 0 .../build/lib/alert_plugin_set.py | 12 + .../lambda_layer/build/lib/alerttask.py | 553 ++++++++++++++++++ .../lambda_layer/build/lib/config.py | 78 +++ .../build/lib/deadman_alerttask.py | 9 + 6 files changed, 718 insertions(+) create mode 100644 cloudy_mozdef/lambda_layer/build/lambdalert.py create mode 100644 cloudy_mozdef/lambda_layer/build/lib/__init__.py create mode 100644 cloudy_mozdef/lambda_layer/build/lib/alert_plugin_set.py create mode 100644 cloudy_mozdef/lambda_layer/build/lib/alerttask.py create mode 100644 cloudy_mozdef/lambda_layer/build/lib/config.py create mode 100644 cloudy_mozdef/lambda_layer/build/lib/deadman_alerttask.py diff --git a/cloudy_mozdef/lambda_layer/build/lambdalert.py b/cloudy_mozdef/lambda_layer/build/lambdalert.py new file mode 100644 index 00000000..3721f8bb --- /dev/null +++ b/cloudy_mozdef/lambda_layer/build/lambdalert.py @@ -0,0 +1,66 @@ +#!/usr/bin/env 
python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# Copyright (c) 2017 Mozilla Corporation + +import logging +import sys +from lib.alerttask import AlertTask +from mozdef_util.query_models import SearchQuery, TermMatch + + +logger = logging.getLogger(__name__) + + +def setup_logging(): + logger = logging.getLogger() + h = logging.StreamHandler(sys.stdout) + logger.setLevel(logging.DEBUG) + return logger + + +class AlertCloudtrailLoggingDisabled(AlertTask): + def _configureKombu(self): + """Override the normal behavior of this in order to run in lambda.""" + pass + + def alertToMessageQueue(self, alertDict): + """Override the normal behavior of this in order to run in lambda.""" + pass + + def main(self): + # How many minutes back in time would you like to search? + search_query = SearchQuery(minutes=15) + + # What would you like to search for? + # search_query.add_must([ + # TermMatch('source', 'cloudtrail'), + # TermMatch('details.eventname', 'DescribeTable') + # ]) + + self.filtersManual(search_query) + self.searchEventsSimple() + self.walkEvents() + + def onEvent(self, event): + category = 'AWSCloudtrail' + + # Useful tag and severity rankings for your alert. + tags = ['cloudtrail', 'aws', 'cloudtrailpagerduty'] + severity = 'CRITICAL' + + # What message should surface in the user interface when this fires? + summary = 'The alert fired!' + + return self.createAlertDict(summary, category, tags, [event], severity) + + # Learn more about MozDef alerts by exploring the "Alert class!" + + +def handle(event, context): + logger = setup_logging() + logger.debug('Function initialized.') + a = AlertCloudtrailLoggingDisabled() + return a.main() diff --git a/cloudy_mozdef/lambda_layer/build/lib/__init__.py b/cloudy_mozdef/lambda_layer/build/lib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cloudy_mozdef/lambda_layer/build/lib/alert_plugin_set.py b/cloudy_mozdef/lambda_layer/build/lib/alert_plugin_set.py new file mode 100644 index 00000000..aa8a32b9 --- /dev/null +++ b/cloudy_mozdef/lambda_layer/build/lib/alert_plugin_set.py @@ -0,0 +1,12 @@ +from mozdef_util.plugin_set import PluginSet +from mozdef_util.utilities.logger import logger + + +class AlertPluginSet(PluginSet): + + def send_message_to_plugin(self, plugin_class, message, metadata=None): + if 'utctimestamp' in message and 'summary' in message: + message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary']) + logger.info(message_log_str) + + return plugin_class.onMessage(message), metadata diff --git a/cloudy_mozdef/lambda_layer/build/lib/alerttask.py b/cloudy_mozdef/lambda_layer/build/lib/alerttask.py new file mode 100644 index 00000000..0940379b --- /dev/null +++ b/cloudy_mozdef/lambda_layer/build/lib/alerttask.py @@ -0,0 +1,553 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+# Copyright (c) 2017 Mozilla Corporation + +import collections +import json +import kombu +import os +import sys +import socket +import netaddr + +from configlib import getConfig, OptionParser +from datetime import datetime +from collections import Counter +from celery import Task +from celery.utils.log import get_task_logger +from config import RABBITMQ, ES, ALERT_PLUGINS + +from mozdef_util.utilities.toUTC import toUTC +from mozdef_util.elasticsearch_client import ElasticsearchClient +from mozdef_util.query_models import TermMatch, ExistsMatch + +sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib")) +from lib.alert_plugin_set import AlertPluginSet + + +# utility functions used by AlertTask.mostCommon +# determine most common values +# in a list of dicts +def keypaths(nested): + """ return a list of nested dict key paths + like: [u'_source', u'details', u'program'] + """ + for key, value in nested.iteritems(): + if isinstance(value, collections.Mapping): + for subkey, subvalue in keypaths(value): + yield [key] + subkey, subvalue + else: + yield [key], value + + +def dictpath(path): + """ split a string representing a + nested dictionary path key.subkey.subkey + """ + for i in path.split("."): + yield "{0}".format(i) + + +def getValueByPath(input_dict, path_string): + """ + Gets data/value from a dictionary using a dotted accessor-string + http://stackoverflow.com/a/7534478 + path_string can be key.subkey.subkey.subkey + """ + return_data = input_dict + for chunk in path_string.split("."): + return_data = return_data.get(chunk, {}) + return return_data + + +def hostname_from_ip(ip): + try: + reversed_dns = socket.gethostbyaddr(ip) + return reversed_dns[0] + except socket.herror: + return None + + +def add_hostname_to_ip(ip, output_format, require_internal=True): + ip_obj = netaddr.IPNetwork(ip)[0] + if require_internal and not ip_obj.is_private(): + return ip + hostname = hostname_from_ip(ip) + if hostname is None: + return ip + else: + return output_format.format(ip, hostname) + + +class AlertTask(Task): + + abstract = True + + def __init__(self): + self.alert_name = self.__class__.__name__ + self.main_query = None + + # Used to store any alerts that were thrown + self.alert_ids = [] + + # List of events + self.events = None + # List of aggregations + # e.g. when aggregField is email: [{value:'evil@evil.com',count:1337,events:[...]}, ...] + self.aggregations = None + + self.log.debug("starting {0}".format(self.alert_name)) + self.log.debug(RABBITMQ) + self.log.debug(ES) + + self._configureKombu() + self._configureES() + + # We want to select all event indices + # and filter out the window based on timestamp + # from the search query + self.event_indices = ["events-*"] + + def classname(self): + return self.__class__.__name__ + + @property + def log(self): + return get_task_logger("%s.%s" % (__name__, self.alert_name)) + + def parse_config(self, config_filename, config_keys): + myparser = OptionParser() + self.config = None + (self.config, args) = myparser.parse_args([]) + for config_key in config_keys: + temp_value = getConfig(config_key, "", config_filename) + setattr(self.config, config_key, temp_value) + + def _discover_task_exchange(self): + """Use configuration information to understand the message queue protocol. 
+ return: amqp, sqs + """ + return getConfig("mqprotocol", "amqp", None) + + def __build_conn_string(self): + exchange_protocol = self._discover_task_exchange() + if exchange_protocol == "amqp": + connString = "amqp://{0}:{1}@{2}:{3}//".format( + RABBITMQ["mquser"], + RABBITMQ["mqpassword"], + RABBITMQ["mqserver"], + RABBITMQ["mqport"], + ) + return connString + elif exchange_protocol == "sqs": + connString = "sqs://{}".format(getConfig("alertSqsQueueUrl", None, None)) + if connString: + connString = connString.replace('https://','') + return connString + + def _configureKombu(self): + """ + Configure kombu for amqp or sqs + """ + try: + connString = self.__build_conn_string() + self.mqConn = kombu.Connection(connString) + if connString.find('sqs') == 0: + self.mqConn.transport_options['region'] = os.getenv('DEFAULT_AWS_REGION', 'us-west-2') + self.alertExchange = kombu.Exchange( + name=RABBITMQ["alertexchange"], type="topic", durable=True + ) + self.alertExchange(self.mqConn).declare() + alertQueue = kombu.Queue( + os.getenv('OPTIONS_ALERTSQSQUEUEURL').split('/')[4], exchange=self.alertExchange + ) + else: + self.alertExchange = kombu.Exchange( + name=RABBITMQ["alertexchange"], type="topic", durable=True + ) + self.alertExchange(self.mqConn).declare() + alertQueue = kombu.Queue( + RABBITMQ["alertqueue"], exchange=self.alertExchange + ) + alertQueue(self.mqConn).declare() + self.mqproducer = self.mqConn.Producer(serializer="json") + self.log.debug("Kombu configured") + except Exception as e: + self.log.error( + "Exception while configuring kombu for alerts: {0}".format(e) + ) + + def _configureES(self): + """ + Configure elasticsearch client + """ + try: + self.es = ElasticsearchClient(ES["servers"]) + self.log.debug("ES configured") + except Exception as e: + self.log.error("Exception while configuring ES for alerts: {0}".format(e)) + + def mostCommon(self, listofdicts, dictkeypath): + """ + Given a list containing dictionaries, + return the most common entries + along a key path separated by . + i.e. dictkey.subkey.subkey + returned as a list of tuples + [(value,count),(value,count)] + """ + inspectlist = list() + path = list(dictpath(dictkeypath)) + for i in listofdicts: + for k in list(keypaths(i)): + if not (set(k[0]).symmetric_difference(path)): + inspectlist.append(k[1]) + + return Counter(inspectlist).most_common() + + def alertToMessageQueue(self, alertDict): + """ + Send alert to the kombu based message queue. The default is rabbitmq. 
+ """ + try: + # cherry pick items from the alertDict to send to the alerts messageQueue + mqAlert = dict(severity="INFO", category="") + if "severity" in alertDict: + mqAlert["severity"] = alertDict["severity"] + if "category" in alertDict: + mqAlert["category"] = alertDict["category"] + if "utctimestamp" in alertDict: + mqAlert["utctimestamp"] = alertDict["utctimestamp"] + if "eventtimestamp" in alertDict: + mqAlert["eventtimestamp"] = alertDict["eventtimestamp"] + mqAlert["summary"] = alertDict["summary"] + self.log.debug(mqAlert) + ensurePublish = self.mqConn.ensure( + self.mqproducer, self.mqproducer.publish, max_retries=10 + ) + ensurePublish( + alertDict, + exchange=self.alertExchange, + routing_key=RABBITMQ["alertqueue"], + ) + self.log.debug("alert sent to the alert queue") + except Exception as e: + self.log.error( + "Exception while sending alert to message queue: {0}".format(e) + ) + + def alertToES(self, alertDict): + """ + Send alert to elasticsearch + """ + try: + res = self.es.save_alert(body=alertDict) + self.log.debug("alert sent to ES") + self.log.debug(res) + return res + except Exception as e: + self.log.error("Exception while pushing alert to ES: {0}".format(e)) + + def tagBotNotify(self, alert): + """ + Tag alert to be excluded based on severity + If 'ircchannel' is set in an alert, we automatically notify mozdefbot + """ + alert["notify_mozdefbot"] = True + if alert["severity"] == "NOTICE" or alert["severity"] == "INFO": + alert["notify_mozdefbot"] = False + + # If an alert sets specific ircchannel, then we should probably always notify in mozdefbot + if ( + "ircchannel" in alert and alert["ircchannel"] != "" and alert["ircchannel"] is not None + ): + alert["notify_mozdefbot"] = True + return alert + + def saveAlertID(self, saved_alert): + """ + Save alert to self so we can analyze it later + """ + self.alert_ids.append(saved_alert["_id"]) + + def filtersManual(self, query): + """ + Configure filters manually + + query is a search query object with date_timedelta populated + + """ + # Don't fire on already alerted events + duplicate_matcher = TermMatch("alert_names", self.determine_alert_classname()) + if duplicate_matcher not in query.must_not: + query.add_must_not(duplicate_matcher) + + self.main_query = query + + def determine_alert_classname(self): + alert_name = self.classname() + # Allow alerts like the generic alerts (one python alert but represents many 'alerts') + # can customize the alert name + if hasattr(self, "custom_alert_name"): + alert_name = self.custom_alert_name + return alert_name + + def executeSearchEventsSimple(self): + """ + Execute the search for simple events + """ + return self.main_query.execute(self.es, indices=self.event_indices) + + def searchEventsSimple(self): + """ + Search events matching filters, store events in self.events + """ + try: + results = self.executeSearchEventsSimple() + self.events = results["hits"] + self.log.debug(self.events) + except Exception as e: + self.log.error("Error while searching events in ES: {0}".format(e)) + + def searchEventsAggregated(self, aggregationPath, samplesLimit=5): + """ + Search events, aggregate matching ES filters by aggregationPath, + store them in self.aggregations as a list of dictionaries + keys: + value: the text value that was found in the aggregationPath + count: the hitcount of the text value + events: the sampled list of events that matched + allevents: the unsample, total list of matching events + aggregationPath can be key.subkey.subkey to specify a path to a dictionary value + 
relative to the _source that's returned from elastic search. + ex: details.sourceipaddress + """ + + # We automatically add the key that we're matching on + # for aggregation, as a query requirement + aggreg_key_exists = ExistsMatch(aggregationPath) + if aggreg_key_exists not in self.main_query.must: + self.main_query.add_must(aggreg_key_exists) + + try: + esresults = self.main_query.execute(self.es, indices=self.event_indices) + results = esresults["hits"] + + # List of aggregation values that can be counted/summarized by Counter + # Example: ['evil@evil.com','haxoor@noob.com', 'evil@evil.com'] for an email aggregField + aggregationValues = [] + for r in results: + aggregationValues.append(getValueByPath(r["_source"], aggregationPath)) + + # [{value:'evil@evil.com',count:1337,events:[...]}, ...] + aggregationList = [] + for i in Counter(aggregationValues).most_common(): + idict = {"value": i[0], "count": i[1], "events": [], "allevents": []} + for r in results: + if ( + getValueByPath(r["_source"], aggregationPath).encode( + "ascii", "ignore" + ) == i[0] + ): + # copy events detail into this aggregation up to our samples limit + if len(idict["events"]) < samplesLimit: + idict["events"].append(r) + # also copy all events to a non-sampled list + # so we mark all events as alerted and don't re-alert + idict["allevents"].append(r) + aggregationList.append(idict) + + self.aggregations = aggregationList + self.log.debug(self.aggregations) + except Exception as e: + self.log.error("Error while searching events in ES: {0}".format(e)) + + def walkEvents(self, **kwargs): + """ + Walk through events, provide some methods to hook in alerts + """ + if len(self.events) > 0: + for i in self.events: + alert = self.onEvent(i, **kwargs) + if alert: + alert = self.tagBotNotify(alert) + self.log.debug(alert) + alert = self.alertPlugins(alert) + alertResultES = self.alertToES(alert) + self.tagEventsAlert([i], alertResultES) + self.alertToMessageQueue(alert) + self.hookAfterInsertion(alert) + self.saveAlertID(alertResultES) + # did we not match anything? + # can also be used as an alert trigger + if len(self.events) == 0: + alert = self.onNoEvent(**kwargs) + if alert: + alert = self.tagBotNotify(alert) + self.log.debug(alert) + alertResultES = self.alertToES(alert) + self.alertToMessageQueue(alert) + self.hookAfterInsertion(alert) + self.saveAlertID(alertResultES) + + def walkAggregations(self, threshold, config=None): + """ + Walk through aggregations, provide some methods to hook in alerts + """ + if len(self.aggregations) > 0: + for aggregation in self.aggregations: + if aggregation["count"] >= threshold: + aggregation["config"] = config + alert = self.onAggregation(aggregation) + if alert: + alert = self.tagBotNotify(alert) + self.log.debug(alert) + alert = self.alertPlugins(alert) + alertResultES = self.alertToES(alert) + # even though we only sample events in the alert + # tag all events as alerted to avoid re-alerting + # on events we've already processed. 
+ self.tagEventsAlert(aggregation["allevents"], alertResultES) + self.alertToMessageQueue(alert) + self.saveAlertID(alertResultES) + + def alertPlugins(self, alert): + """ + Send alerts through a plugin system + """ + + plugin_dir = os.path.join(os.path.dirname(__file__), "../plugins") + plugin_set = AlertPluginSet(plugin_dir, ALERT_PLUGINS) + alertDict = plugin_set.run_plugins(alert)[0] + + return alertDict + + def createAlertDict( + self, + summary, + category, + tags, + events, + severity="NOTICE", + url=None, + ircchannel=None, + ): + """ + Create an alert dict + """ + alert = { + "utctimestamp": toUTC(datetime.now()).isoformat(), + "severity": severity, + "summary": summary, + "category": category, + "tags": tags, + "events": [], + "ircchannel": ircchannel, + } + if url: + alert["url"] = url + + for e in events: + alert["events"].append( + { + "documentindex": e["_index"], + "documentsource": e["_source"], + "documentid": e["_id"], + } + ) + self.log.debug(alert) + return alert + + def onEvent(self, event, *args, **kwargs): + """ + To be overriden by children to run their code + to be used when creating an alert using an event + must return an alert dict or None + """ + pass + + def onNoEvent(self, *args, **kwargs): + """ + To be overriden by children to run their code + when NOTHING matches a filter + which can be used to trigger on the absence of + events much like a dead man switch. + This is to be used when creating an alert using an event + must return an alert dict or None + """ + pass + + def onAggregation(self, aggregation): + """ + To be overriden by children to run their code + to be used when creating an alert using an aggregation + must return an alert dict or None + """ + pass + + def hookAfterInsertion(self, alert): + """ + To be overriden by children to run their code + to be used when creating an alert using an aggregation + """ + pass + + def tagEventsAlert(self, events, alertResultES): + """ + Update the event with the alertid/index + and update the alert_names on the event itself so it's + not re-alerted + """ + try: + for event in events: + if "alerts" not in event["_source"]: + event["_source"]["alerts"] = [] + event["_source"]["alerts"].append( + {"index": alertResultES["_index"], "id": alertResultES["_id"]} + ) + + if "alert_names" not in event["_source"]: + event["_source"]["alert_names"] = [] + event["_source"]["alert_names"].append(self.determine_alert_classname()) + + self.es.save_event( + index=event["_index"], body=event["_source"], doc_id=event["_id"] + ) + # We refresh here to ensure our changes to the events will show up for the next search query results + self.es.refresh(event["_index"]) + except Exception as e: + self.log.error("Error while updating events in ES: {0}".format(e)) + + def main(self): + """ + To be overriden by children to run their code + """ + pass + + def run(self, *args, **kwargs): + """ + Main method launched by celery periodically + """ + try: + self.main(*args, **kwargs) + self.log.debug("finished") + except Exception as e: + self.log.exception("Exception in main() method: {0}".format(e)) + + def parse_json_alert_config(self, config_file): + """ + Helper function to parse an alert config file + """ + alert_dir = os.path.join(os.path.dirname(__file__), "..") + config_file_path = os.path.abspath(os.path.join(alert_dir, config_file)) + json_obj = {} + with open(config_file_path, "r") as fd: + try: + json_obj = json.load(fd) + except ValueError: + sys.stderr.write("FAILED to open the configuration file\n") + + return json_obj diff 
--git a/cloudy_mozdef/lambda_layer/build/lib/config.py b/cloudy_mozdef/lambda_layer/build/lib/config.py new file mode 100644 index 00000000..b99be01e --- /dev/null +++ b/cloudy_mozdef/lambda_layer/build/lib/config.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# Copyright (c) 2014 Mozilla Corporation + +from celery.schedules import crontab, timedelta +import time +import logging +import os + +ALERTS = { + # 'pythonfile.pythonclass':{'schedule': crontab(minute='*/10')}, + # 'pythonfile.pythonclass':{'schedule': timedelta(minutes=10),'kwargs':dict(hostlist=['nsm3', 'nsm5'])}, +} + +ALERT_PLUGINS = [ + # 'relative pythonfile name (exclude the .py) - EX: sso_dashboard', +] + +ALERT_ACTIONS = [ + # 'relative pythonfile name (exclude the .py) - EX: sso_dashboard', +] + +RABBITMQ = { + 'mqserver': 'localhost', + 'mquser': 'guest', + 'mqpassword': 'guest', + 'mqport': 5672, + 'alertexchange': 'alerts', + 'alertqueue': 'mozdef.alert' +} + +if os.getenv('OPTIONS_ESSERVERS'): + ES = { + 'servers': [os.getenv('OPTIONS_ESSERVERS')] + } +else: + ES = { + 'servers': ['http://localhost:9200'] + } + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': True, + 'formatters': { + 'simple': { + 'format': '%(levelname)s %(message)s', + 'datefmt': '%y %b %d, %H:%M:%S', + }, + 'standard': { + 'format': '%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d: %(message)s' + } + }, + 'handlers': { + 'console': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler', + 'formatter': 'simple' + }, + 'celery': { + 'level': 'DEBUG', + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': 'celery.log', + 'formatter': 'standard', + 'maxBytes': 1024 * 1024 * 100, # 100 mb + }, + }, + 'loggers': { + 'celery': { + 'handlers': ['celery', 'console'], + 'level': 'INFO', + }, + } +} + +logging.Formatter.converter = time.gmtime diff --git a/cloudy_mozdef/lambda_layer/build/lib/deadman_alerttask.py b/cloudy_mozdef/lambda_layer/build/lib/deadman_alerttask.py new file mode 100644 index 00000000..4bad4824 --- /dev/null +++ b/cloudy_mozdef/lambda_layer/build/lib/deadman_alerttask.py @@ -0,0 +1,9 @@ +from alerttask import AlertTask + + +class DeadmanAlertTask(AlertTask): + + def executeSearchEventsSimple(self): + # We override this method to specify the size as 1 + # since we only care about if ANY events are found or not + return self.main_query.execute(self.es, indices=self.event_indices, size=1) From e169205b227bdf8ef24de8b244ad08be41247f77 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Sun, 19 May 2019 11:05:14 -0700 Subject: [PATCH 35/88] update ignores --- cloudy_mozdef/lambda_layer/.gitignore | 6 +++++- cloudy_mozdef/lambda_layer/build/.gitignore | 2 ++ 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 cloudy_mozdef/lambda_layer/build/.gitignore diff --git a/cloudy_mozdef/lambda_layer/.gitignore b/cloudy_mozdef/lambda_layer/.gitignore index 15cc48dd..ed84d590 100644 --- a/cloudy_mozdef/lambda_layer/.gitignore +++ b/cloudy_mozdef/lambda_layer/.gitignore @@ -1,2 +1,6 @@ function-latest.zip -layer-latest.zip \ No newline at end of file +build/lib/* +build/python/* +lib/* +python/* + diff --git a/cloudy_mozdef/lambda_layer/build/.gitignore b/cloudy_mozdef/lambda_layer/build/.gitignore new file mode 100644 index 00000000..83523a5c --- /dev/null +++ b/cloudy_mozdef/lambda_layer/build/.gitignore @@ 
-0,0 +1,2 @@ +lib/ +python/ \ No newline at end of file From 17345d54928d7592eb8bd1002bec958d13cf953e Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Mon, 20 May 2019 10:17:52 -0700 Subject: [PATCH 36/88] make ci build on reinforce branch as well temporarily --- cloudy_mozdef/ci/deploy | 1 + cloudy_mozdef/experiments/vpcflows-pipeline.yml | 0 2 files changed, 1 insertion(+) delete mode 100644 cloudy_mozdef/experiments/vpcflows-pipeline.yml diff --git a/cloudy_mozdef/ci/deploy b/cloudy_mozdef/ci/deploy index 3840206e..dfb5982c 100644 --- a/cloudy_mozdef/ci/deploy +++ b/cloudy_mozdef/ci/deploy @@ -25,6 +25,7 @@ echo " Head Ref : ${CODEBUILD_WEBHOOK_HEAD_REF}" echo " Trigger : ${CODEBUILD_WEBHOOK_TRIGGER}" if [[ "branch/master" == "${CODEBUILD_WEBHOOK_TRIGGER}" \ + || "branch/reinforce2019" == "${CODEBUILD_WEBHOOK_TRIGGER}" \ || "${CODEBUILD_WEBHOOK_TRIGGER}" =~ ^tag\/v[0-9]+\.[0-9]+\.[0-9]+(\-(prod|pre|testing))?$ ]]; then echo "Codebuild is ubuntu 14.04. Installing packer in order to compensate. Someone should build a CI docker container \;)." wget -nv https://releases.hashicorp.com/packer/1.3.5/packer_1.3.5_linux_amd64.zip diff --git a/cloudy_mozdef/experiments/vpcflows-pipeline.yml b/cloudy_mozdef/experiments/vpcflows-pipeline.yml deleted file mode 100644 index e69de29b..00000000 From 4774ca3e442c6d80c020945c78f27b7683316751 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Mon, 20 May 2019 11:15:57 -0700 Subject: [PATCH 37/88] add cloudformation to create alert writers environment --- cloudy_mozdef/experiments/alert-writer.yml | 63 ++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 cloudy_mozdef/experiments/alert-writer.yml diff --git a/cloudy_mozdef/experiments/alert-writer.yml b/cloudy_mozdef/experiments/alert-writer.yml new file mode 100644 index 00000000..51324952 --- /dev/null +++ b/cloudy_mozdef/experiments/alert-writer.yml @@ -0,0 +1,63 @@ +AWSTemplateFormatVersion: 2010-09-09 +Description: Setup an alert writers environment for use with MozDef for AWS. Note this is PoC only. +Parameters: + VpcId: + Type: AWS::EC2::VPC::Id + Description: 'The VPC ID of the VPC to deploy in (Example : vpc-abcdef12)' + PublicSubnetIds: + Type: List + Description: 'A comma delimited list of public subnet IDs (Example: subnet-abcdef12,subnet-bcdef123)' + MozDefSecurityGroup: + Type: AWS::EC2::SecurityGroup::Id + Description: The security group the MozDef instance runs in. This is needed to access ES. + ESUrl: + Type: String + Description: 'The location of elasticsearch deployed in managed-es.' 
+Resources: + MozDefLayer: + Type: AWS::Lambda::LayerVersion + Properties: + LayerName: MozDef + Description: Mozilla Enterprise Defense Platform Dependencies + Content: + S3Bucket: public.us-west-2.security.allizom.org + S3Key: mozdef-lambda-layer/layer-latest.zip + CompatibleRuntimes: + - python2.7 + LicenseInfo: 'MPL 2.0' + LambdalertIAMRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: lambda.amazonaws.com + Action: sts:AssumeRole + ManagedPolicyArns: + - arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole + AlertWritersEnv: + Type: "AWS::Lambda::Function" + Properties: + Handler: "lambdalert.handle" + Role: + Fn::GetAtt: + - "LambdalertIAMRole" + - "Arn" + Code: + S3Bucket: public.us-west-2.security.allizom.org + S3Key: mozdef-lambda-layer/function-latest.zip + Layers: + - !Ref MozDefLayer + Environment: + Variables: + OPTIONS_ESSERVERS: !Ref ESUrl + OPTIONS_MQPROTOCOL: sqs + VpcConfig: + SecurityGroupIds: + - !Ref MozDefSecurityGroup + SubnetIds: !Ref PublicSubnetIds + ReservedConcurrentExecutions: 1 + Runtime: "python2.7" + Timeout: 120 From 65f125c83ae6d2a5adcad4f75669538125b59f62 Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Mon, 20 May 2019 17:02:16 -0700 Subject: [PATCH 38/88] Initial VPC Flow log template --- .../cloudformation/mozdef-vpc-flow-logs.yml | 113 ++++++++++++++++++ 1 file changed, 113 insertions(+) create mode 100644 cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml diff --git a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml new file mode 100644 index 00000000..529c10c9 --- /dev/null +++ b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml @@ -0,0 +1,113 @@ +AWSTemplateFormatVersion: 2010-09-09 +Description: Pipeline to send VPC Flow Logs to MozDef +Parameters: + VpcId: + Type: AWS::EC2::VPC::Id + Default: vpc-dc8eacb4 + Description: 'The VPC ID of the VPC to deploy in (Example : vpc-abcdef12)' +Resources: + MozDefVPCFlowLogsSQSQueue: + Type: AWS::SQS::Queue + Properties: + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + LogGroup: + Type: AWS::Logs::LogGroup + Properties: + RetentionInDays: 1 + FlowLogRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: vpc-flow-logs.amazonaws.com + Action: sts:AssumeRole + Policies: + - PolicyName: AllowWriteCloudWatchLogs + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: + - logs:CreateLogGroup + - logs:CreateLogStream + - logs:PutLogEvents + - logs:DescribeLogGroups + - logs:DescribeLogStreams + Resource: "*" + FlowLog: + Type: AWS::EC2::FlowLog + Properties: + DeliverLogsPermissionArn: !GetAtt FlowLogRole.Arn + LogDestination: !GetAtt LogGroup.Arn + ResourceId: !Ref VpcId + ResourceType: VPC + TrafficType: ALL + FlowLogProcessorRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: lambda.amazonaws.com + Action: sts:AssumeRole + ManagedPolicyArns: + - arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole + Policies: + - PolicyName: AllowSendToSQS + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: + - sqs:DeleteMessage + - sqs:DeleteMessageBatch + - sqs:GetQueueAttributes + - sqs:GetQueueUrl + - sqs:SendMessage + - sqs:SendMessageBatch + Resource: 
!GetAtt MozDefVPCFlowLogsSQSQueue.Arn + FlowLogProcessor: + Type: AWS::Lambda::Function + Properties: + Code: + ZipFile: | + import os + def lambda_handler(event, context): + print(os.getenv('SQS_ARN')) + Description: Transform VPC Flow logs into MozDef events + Environment: + Variables: + SQS_ARN: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + Handler: index.lambda_handler + MemorySize: 128 + Role: !GetAtt FlowLogProcessorRole.Arn + Runtime: python3.7 + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + Timeout: 30 + FlowLogSubscriptionFilter: + Type: AWS::Logs::SubscriptionFilter + Properties: + DestinationArn: !GetAtt FlowLogProcessor.Arn + FilterPattern: "" + LogGroupName: String + RoleArn: String +Outputs: + MozDefVPCFlowLogsSQSQueueArn: + Description: ARN of the MozDef VPC Flow Logs SQS Queue + Value: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + MozDefVPCFlowLogsSQSQueueName: + Description: Name of the MozDef VPC Flow Logs SQS Queue + Value: !GetAtt MozDefVPCFlowLogsSQSQueue.QueueName From 4d4d9b8dc4f517cad5ffe03e9ae11cb27a415b2d Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Tue, 21 May 2019 10:39:53 -0700 Subject: [PATCH 39/88] Emit SQS URL --- cloudy_mozdef/cloudformation/mozdef-sqs.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-sqs.yml b/cloudy_mozdef/cloudformation/mozdef-sqs.yml index 674b7320..6bae873a 100644 --- a/cloudy_mozdef/cloudformation/mozdef-sqs.yml +++ b/cloudy_mozdef/cloudformation/mozdef-sqs.yml @@ -52,6 +52,9 @@ Outputs: SQSQueueName: Description: Name of the SQS Queue that MozDef will consume events from Value: !GetAtt MozDefSQSQueue.QueueName + SQSQueueUrl: + Description: URL of the SQS Queue that MozDef will consume events from + Value: !Ref MozDefSQSQueue AlertTaskSQSQueueArn: Description: ARN of the SQS Queue that MozDef will consume events from Value: !GetAtt MozDefSQSAlertTaskQueue.Arn @@ -60,6 +63,4 @@ Outputs: Value: !GetAtt MozDefSQSAlertTaskQueue.QueueName AlertTaskSQSQueueUrl: Description: The SQS queue url for the alerttask exchange as used in kombu. 
- Value: - !Join ['', ['https://', 'sqs', ".", !Ref "AWS::Region",".amazonaws.com", "/" -, !Ref "AWS::AccountId", "/",!GetAtt MozDefSQSAlertTaskQueue.QueueName]] \ No newline at end of file + Value: !Ref MozDefSQSAlertTaskQueue From 1bbce41b4b151b05238d6cee531135b605f10820 Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Tue, 21 May 2019 10:41:53 -0700 Subject: [PATCH 40/88] Add VPC flow log transformer Also change to using the SQS queue created by the parent instead of creating our own --- .../cloudformation/mozdef-parent.yml | 15 ++- .../cloudformation/mozdef-vpc-flow-logs.yml | 96 +++++++++++++++---- 2 files changed, 89 insertions(+), 22 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-parent.yml b/cloudy_mozdef/cloudformation/mozdef-parent.yml index 3e1dbad4..9be2e527 100644 --- a/cloudy_mozdef/cloudformation/mozdef-parent.yml +++ b/cloudy_mozdef/cloudformation/mozdef-parent.yml @@ -215,6 +215,19 @@ Resources: - Key: stack Value: !Ref AWS::StackName TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-cloudtrail.yml ] ] + MozDefVPCFlowLogs: + Type: AWS::CloudFormation::Stack + Properties: + Parameters: + VpcId: !Ref VpcId + MozDefSQSQueueArn: !GetAtt MozDefSQS.Outputs.SQSQueueArn + MozDefSQSQueueUrl: !GetAtt MozDefSQS.Outputs.SQSQueueUrl + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-vpc-flow-logs.yml ] ] CloudFormationLambdaIAMRole: Type: AWS::IAM::Role Properties: @@ -312,4 +325,4 @@ Resources: Outputs: LoadBalancerDNSName: Description: The DNS name of the ALB hosting MozDef. If using OIDC or SSL point your DNS at this. If using basic auth no DNS is necessary. 
- Value: !GetAtt MozDefInstance.Outputs.LoadBalancerDNSName \ No newline at end of file + Value: !GetAtt MozDefInstance.Outputs.LoadBalancerDNSName diff --git a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml index 529c10c9..68b10c2e 100644 --- a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml +++ b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml @@ -5,15 +5,13 @@ Parameters: Type: AWS::EC2::VPC::Id Default: vpc-dc8eacb4 Description: 'The VPC ID of the VPC to deploy in (Example : vpc-abcdef12)' + MozDefSQSQueueUrl: + Type: String + Description: 'The SQS URL to send MozDef structured events to for consumption' + MozDefSQSQueueArn: + Type: String + Description: 'The SQS ARN to send MozDef structured events to for consumption' Resources: - MozDefVPCFlowLogsSQSQueue: - Type: AWS::SQS::Queue - Properties: - Tags: - - Key: application - Value: mozdef - - Key: stack - Value: !Ref AWS::StackName LogGroup: Type: AWS::Logs::LogGroup Properties: @@ -74,19 +72,69 @@ Resources: - sqs:GetQueueUrl - sqs:SendMessage - sqs:SendMessageBatch - Resource: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + Resource: !Ref MozDefSQSQueueArn FlowLogProcessor: Type: AWS::Lambda::Function Properties: Code: ZipFile: | - import os + import os, boto3, gzip, base64, json, socket, sys + from datetime import datetime + + PROTO_NUM_MAP = {num: name[8:] for name, num in vars(socket).items() if name.startswith("IPPROTO")} + def lambda_handler(event, context): - print(os.getenv('SQS_ARN')) + client = boto3.client('sqs') + raw_data = event.get('awslogs', {}).get('data') + data = json.loads( + gzip.decompress(base64.b64decode(raw_data)).decode('utf-8')) + entries = [] + for log_event in [x for x in data.get('logEvents', []) if x['extractedFields']['log_status'] == 'OK']: + # TODO : Do we want to do something with log_status NODATA and SKIPDATA events? 
+ fields = log_event['extractedFields'] + message = dict( + category='vpc-flow', + hostname=socket.getfqdn(), + processid=os.getpid(), + processname=sys.argv[0], + severity='INFO', + source='vpc_flow') + message['utctimestamp'] = datetime.utcfromtimestamp( + int(data['timestamp'] / 1000)).strftime('%Y-%m-%dT%H:%M:%S+00:00') + message['summary'] = '{srcaddr}:{srcport} -> {dstaddr}:{dstport} {bytes} bytes {action}'.format(**fields) + message['details'] = dict( + destinationipaddress=fields['dstaddr'], + destinationport=fields['dstport'], + sourceipaddress=fields['srcaddr'], + sourceport=fields['srcport'], + success=fields['action'] == 'ACCEPT', + capture_window_start=datetime.utcfromtimestamp( + fields['start']).strftime('%Y-%m-%dT%H:%M:%S+00:00'), + capture_window_end=datetime.utcfromtimestamp( + fields['end']).strftime('%Y-%m-%dT%H:%M:%S+00:00'), + version=fields['version'], + pkts=fields['packets'], + proto=PROTO_NUM_MAP.get(int(fields['protocol']), 'unknown').lower(), + recipientaccountid=fields['account_id'], + interface_id=fields['interface_id'], + bytes=fields['bytes']) + entry = dict( + Id=log_event['id'], + MessageBody=json.dumps(message)) + entries.append(entry) + if len(entries) == 10: + response = client.send_message_batch( + QueueUrl=os.getenv('SQS_URL'), + Entries=entries) + # TODO : Process the response and do something about failures + del entries[:] + response = client.send_message_batch( + QueueUrl=os.getenv('SQS_URL'), + Entries=entries) Description: Transform VPC Flow logs into MozDef events Environment: Variables: - SQS_ARN: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn + SQS_URL: !Ref MozDefSQSQueueUrl Handler: index.lambda_handler MemorySize: 128 Role: !GetAtt FlowLogProcessorRole.Arn @@ -97,17 +145,23 @@ Resources: - Key: stack Value: !Ref AWS::StackName Timeout: 30 + FlowLogProcessorPermission: + Type: AWS::Lambda::Permission + Properties: + Action: lambda:InvokeFunction + FunctionName: !GetAtt FlowLogProcessor.Arn + Principal: !Join [ '.', [ 'logs', !Ref 'AWS::Region', 'amazonaws.com' ] ] + SourceAccount: !Ref 'AWS::AccountId' + SourceArn: !GetAtt LogGroup.Arn + # LogGroup.Arn claims to be a value like + # arn:aws:logs:us-west-1:123456789012:log-group:/mystack-testgroup-12ABC1AB12A1:* + # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-logs-loggroup.html#aws-resource-logs-loggroup-return-values + # Which should be correct for SourceArn which is expecting something like + # arn:aws:logs:region:123456789123:log-group:TestLambda:* + # https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/SubscriptionFilters.html#LambdaFunctionExample FlowLogSubscriptionFilter: Type: AWS::Logs::SubscriptionFilter Properties: DestinationArn: !GetAtt FlowLogProcessor.Arn FilterPattern: "" - LogGroupName: String - RoleArn: String -Outputs: - MozDefVPCFlowLogsSQSQueueArn: - Description: ARN of the MozDef VPC Flow Logs SQS Queue - Value: !GetAtt MozDefVPCFlowLogsSQSQueue.Arn - MozDefVPCFlowLogsSQSQueueName: - Description: Name of the MozDef VPC Flow Logs SQS Queue - Value: !GetAtt MozDefVPCFlowLogsSQSQueue.QueueName + LogGroupName: !Ref LogGroup From 4c6b83041230d1cb97e628821c61fb120862b629 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Thu, 23 May 2019 16:23:00 -0700 Subject: [PATCH 41/88] initial attempt to break apart reinforce features --- .../cloudformation/mozdef-alert-developer.yml | 63 +++ .../cloudformation/mozdef-credential-leak.yml | 46 +++ .../mozdef-parent-reinforce.yml | 358 ++++++++++++++++++ cloudy_mozdef/cloudformation/mozdef-vpc.yml | 133 +++++++ 
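The FlowLogProcessor Lambda introduced in PATCH 40 above receives CloudWatch Logs subscription events, whose payload arrives base64-encoded and gzip-compressed under awslogs.data. A small self-contained sketch of that unwrapping step, round-tripping a fabricated event so it can be run anywhere:

    import base64
    import gzip
    import json

    def decode_awslogs_event(event):
        """Unwrap a CloudWatch Logs subscription event into the dict that
        carries 'logGroup', 'logStream' and the list of 'logEvents'."""
        raw = event['awslogs']['data']
        return json.loads(gzip.decompress(base64.b64decode(raw)).decode('utf-8'))

    # Fabricated sample record, wrapped the same way CloudWatch Logs wraps it.
    sample = {
        'logGroup': 'example-group',
        'logStream': 'eni-example',
        'logEvents': [{
            'id': '1',
            'timestamp': 1558000000000,
            'message': '2 123456789012 eni-abc 203.0.113.10 10.0.0.9 55022 22 6 10 840 1558000000 1558000060 ACCEPT OK'}]}
    wrapped = {'awslogs': {'data': base64.b64encode(
        gzip.compress(json.dumps(sample).encode('utf-8'))).decode('ascii')}}

    print(decode_awslogs_event(wrapped)['logEvents'][0]['message'])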
cloudy_mozdef/lambda_layer/.gitignore | 3 +- 5 files changed, 602 insertions(+), 1 deletion(-) create mode 100644 cloudy_mozdef/cloudformation/mozdef-alert-developer.yml create mode 100644 cloudy_mozdef/cloudformation/mozdef-credential-leak.yml create mode 100644 cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml create mode 100644 cloudy_mozdef/cloudformation/mozdef-vpc.yml diff --git a/cloudy_mozdef/cloudformation/mozdef-alert-developer.yml b/cloudy_mozdef/cloudformation/mozdef-alert-developer.yml new file mode 100644 index 00000000..51324952 --- /dev/null +++ b/cloudy_mozdef/cloudformation/mozdef-alert-developer.yml @@ -0,0 +1,63 @@ +AWSTemplateFormatVersion: 2010-09-09 +Description: Setup an alert writers environment for use with MozDef for AWS. Note this is PoC only. +Parameters: + VpcId: + Type: AWS::EC2::VPC::Id + Description: 'The VPC ID of the VPC to deploy in (Example : vpc-abcdef12)' + PublicSubnetIds: + Type: List + Description: 'A comma delimited list of public subnet IDs (Example: subnet-abcdef12,subnet-bcdef123)' + MozDefSecurityGroup: + Type: AWS::EC2::SecurityGroup::Id + Description: The security group the MozDef instance runs in. This is needed to access ES. + ESUrl: + Type: String + Description: 'The location of elasticsearch deployed in managed-es.' +Resources: + MozDefLayer: + Type: AWS::Lambda::LayerVersion + Properties: + LayerName: MozDef + Description: Mozilla Enterprise Defense Platform Dependencies + Content: + S3Bucket: public.us-west-2.security.allizom.org + S3Key: mozdef-lambda-layer/layer-latest.zip + CompatibleRuntimes: + - python2.7 + LicenseInfo: 'MPL 2.0' + LambdalertIAMRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: lambda.amazonaws.com + Action: sts:AssumeRole + ManagedPolicyArns: + - arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole + AlertWritersEnv: + Type: "AWS::Lambda::Function" + Properties: + Handler: "lambdalert.handle" + Role: + Fn::GetAtt: + - "LambdalertIAMRole" + - "Arn" + Code: + S3Bucket: public.us-west-2.security.allizom.org + S3Key: mozdef-lambda-layer/function-latest.zip + Layers: + - !Ref MozDefLayer + Environment: + Variables: + OPTIONS_ESSERVERS: !Ref ESUrl + OPTIONS_MQPROTOCOL: sqs + VpcConfig: + SecurityGroupIds: + - !Ref MozDefSecurityGroup + SubnetIds: !Ref PublicSubnetIds + ReservedConcurrentExecutions: 1 + Runtime: "python2.7" + Timeout: 120 diff --git a/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml b/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml new file mode 100644 index 00000000..0463c6fb --- /dev/null +++ b/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml @@ -0,0 +1,46 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: Template to build out users for insecure account this is only used for training and testing. +Parameters: + SNSReceiverArn: + Type: String + Description: The ARN of the SNS topic to post credentials to. Note that this leaks credentials. 
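The SNSReceiverArn parameter above is consumed by a custom resource later in this training-only template whose ServiceToken is the SNS topic itself, so CloudFormation publishes the resource properties, including the freshly created access key pair, to that topic. A rough sketch of what a subscriber might look like, assuming a Lambda subscribed to the topic; the handler is hypothetical, and any real handler must also reply to the pre-signed ResponseURL so the stack does not hang waiting for a response:

    import json
    import urllib.request

    def handler(event, context):
        """Hypothetical SNS-subscribed Lambda receiving the Custom::DataCapture request."""
        for record in event['Records']:
            request = json.loads(record['Sns']['Message'])
            props = request.get('ResourceProperties', {})
            # In the workshop scenario the "leaked" material shows up here.
            print('Received access key id:', props.get('AccessKey'))
            # Always answer CloudFormation so the stack creation can finish.
            body = json.dumps({
                'Status': 'SUCCESS',
                'PhysicalResourceId': 'data-capture-demo',
                'StackId': request['StackId'],
                'RequestId': request['RequestId'],
                'LogicalResourceId': request['LogicalResourceId'],
                'Data': {}}).encode('utf-8')
            req = urllib.request.Request(
                request['ResponseURL'], data=body, method='PUT',
                # An empty content type matches what the pre-signed URL expects.
                headers={'Content-Type': ''})
            urllib.request.urlopen(req)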
+Resources: + IAMUser1: + Type: AWS::IAM::User + Properties: + UserName: Bob213 + Path: / + ManagedPolicyArns: + - arn:aws:iam::aws:policy/AdministratorAccess + IAMUser1Keys: + Type: AWS::IAM::AccessKey + Properties: + UserName: !Ref 'IAMUser1' + CaptureSetupData: + Type: Custom::DataCapture + Version: '1.0' + Properties: + ServiceToken: !Ref SNSReceiverArn + AccessKey: !Ref 'IAMUser1Keys' + SecretAccessKey: !GetAtt 'IAMUser1Keys.SecretAccessKey' + lbURL: !GetAtt 'MyLoadBalancer.DNSName' + AccountID: !Ref 'AWS::AccountId' + MyLoadBalancer: + Type: AWS::ElasticLoadBalancing::LoadBalancer + Properties: + AvailabilityZones: + - us-west-2a + Listeners: + - LoadBalancerPort: '80' + InstancePort: '80' + Protocol: HTTP +Outputs: + AccessKey: + Description: AccessKey + Value: !Ref 'IAMUser1Keys' + SecretAccessKey: + Description: SecretAccessKey + Value: !GetAtt 'IAMUser1Keys.SecretAccessKey' + LBUrl: + Description: lburl + Value: !GetAtt 'MyLoadBalancer.DNSName' diff --git a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml new file mode 100644 index 00000000..95ff4c4f --- /dev/null +++ b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml @@ -0,0 +1,358 @@ +AWSTemplateFormatVersion: 2010-09-09 +Description: Deploy MozDef into AWS +Metadata: + 'AWS::CloudFormation::Interface': + ParameterGroups: + - Label: + default: EC2 Instance + Parameters: + - InstanceType + - KeyName + - SSHIngressCIDR + - Label: + default: Certificate + Parameters: + - ACMCertArn + - Label: + default: OIDC Configuration (optional) If not set this will use basic auth. + Parameters: + - OIDCAuthorizationEndpoint + - OIDCClientId + - OIDCClientSecret + - OIDCIssuer + - OIDCTokenEndpoint + - OIDCUserInfoEndpoint + - Label: + default: Experimental Features + Parameters: + - LeakCredentialSNSArn + ParameterLabels: + InstanceType: + default: EC2 Instance Type + KeyName: + default: EC2 SSH Key Name + SSHIngressCIDR: + default: Inbound SSH allowed IP address CIDR + DomainName: + default: FQDN to host MozDef at + ACMCertArn: + default: ACM Certificate ARN + OIDCAuthorizationEndpoint: + default: OIDC authorization endpoint. + OIDCClientId: + default: OIDC Client ID. + OIDCClientSecret: + default: OIDC Client Secret. + OIDCIssuer: + default: OIDC issuer. + OIDCTokenEndpoint: + default: OIDC oauth token endpoint. + OIDCUserInfoEndpoint: + default: OIDC user info endpoint. +Parameters: + InstanceType: + Type: String + Description: EC2 instance type, e.g. m1.small, m1.large, etc. + Default: m5.large + KeyName: + Type: AWS::EC2::KeyPair::KeyName + Description: Name of an existing EC2 KeyPair to enable SSH access to the web server + SSHIngressCIDR: + Type: String + AllowedPattern: '^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$' + ConstraintDescription: A valid CIDR (e.g. 203.0.113.0/24) + Description: The CIDR of IP addresses from which to allow inbound SSH connections + DomainName: + Type: String + Description: The fully qualified DNS name you will host CloudyMozDef at. + Default: cloudymozdef.security.allizom.org + ACMCertArn: + Type: String + Default: Unset + Description: "The ARN of your pre-issued ACM cert. 
(Example: arn:aws:acm:us-west-2:123456789012:certificate/abcdef01-2345-6789-abcd-ef0123456789)" + OIDCAuthorizationEndpoint: + Type: String + Default: Unset + ConstraintDescription: A valid URL + Description: "The url of the authorization endpoint found for your oidc provider generall found on (Example: https://auth.example.com/.well-known/openid-configuration)" + OIDCClientId: + Type: String + Default: Unset + Description: The client ID that your OIDC provider issues you for your Mozdef instance. + OIDCClientSecret: + Type: String + Default: Unset + Description: The secret that your OIDC provider issues you for your Mozdef instance. + NoEcho: true + OIDCIssuer: + Type: String + Default: Unset + Description: Generally can be found at the .well-known endpoint for your provider. + OIDCTokenEndpoint: + Type: String + Default: Unset + Description: Generally can be found at the .well-known endpoint for your provider. + OIDCUserInfoEndpoint: + Type: String + Default: Unset + Description: Generally can be found at the .well-known endpoint for your provider. + LeakCredentialSNSArn: + Type: String + Description: The arn of the sns topic to post a credential back to from the account. Do not use unless you are deploying this for reinforce workshop. This will attack the MozDef account. +# A RegionMap of AMI IDs is required by AWS Marketplace https://docs.aws.amazon.com/marketplace/latest/userguide/cloudformation.html#aws-cloudformation-template-preparation +# INSERT MAPPING HERE : This template does not work in this state. The mapping is replaced with a working AWS region to AMI ID mapping as well as a variable map with the S3TemplateLocationPrefix by cloudy_mozdef/ci/publish_versioned_templates. The resulting functioning CloudFormation template is uploaded to S3 for the version being built. 
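The placeholder comment above is rewritten at publish time: cloudy_mozdef/ci/publish_versioned_templates uses sed to splice a generated RegionMap of AMI IDs plus a VariableMap (carrying the S3 template location) into the template before it is uploaded to S3. A rough Python equivalent of that splice; the file paths echo the CI script but are illustrative here:

    def inject_mappings(template_path, mappings_path, output_path,
                        marker='# INSERT MAPPING HERE'):
        """Replace the marker comment line with the generated Mappings block,
        mirroring the sed invocation in ci/publish_versioned_templates."""
        with open(mappings_path) as f:
            mappings_block = f.read()
        out_lines = []
        with open(template_path) as f:
            for line in f:
                # Drop the marker line itself and emit the mapping in its place.
                out_lines.append(mappings_block if marker in line else line)
        with open(output_path, 'w') as f:
            f.writelines(out_lines)

    # Example, following the paths used by the CI script:
    # inject_mappings('cloudformation/mozdef-parent-reinforce.yml',
    #                 '/tmp/mozdef-ami-map.txt',
    #                 '/tmp/mozdef-parent-reinforce.yml')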
+Conditions: + LeakACredential: !Not [!Equals [!Ref LeakCredentialSNSArn, ""]] +Resources: + LeakedCredentials: + Condition: LeakACredential + Type: AWS::CloudFormation::Stack + Properties: + Tags: + - Key: application + Value: mozdef + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ] , mozdef-credential-leak.yml ] ] + MozDefVPC: + Type: AWS::CloudFormation::Stack + Properties: + Tags: + - Key: application + Value: mozdef + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ] , mozdef-vpc.yml ] ] + MozDefSecurityGroups: + Type: AWS::CloudFormation::Stack + Properties: + Parameters: + VpcId: !GetAtt MozDefVPC.Outputs.VpcId + SSHIngressCIDR: !Ref SSHIngressCIDR + Tags: + - Key: application + Value: mozdef + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ] , mozdef-security-group.yml ] ] + MozDefIAMRoleAndInstanceProfile: + Type: AWS::CloudFormation::Stack + Properties: + Parameters: + CloudTrailS3BucketName: !GetAtt MozDefCloudTrail.Outputs.CloudTrailS3BucketName + CloudTrailSQSQueueArn: !GetAtt MozDefCloudTrail.Outputs.CloudTrailSQSQueueArn + MozDefSQSQueueArn: !GetAtt MozDefSQS.Outputs.SQSQueueArn + MozDefAlertSqsQueueArn: !GetAtt MozDefSQS.Outputs.AlertTaskSQSQueueArn + # CloudTrailS3BucketIAMRoleArn we leave empty as we will consume CloudTrail logs from our own account + ESServiceLinkedRoleExists: !GetAtt ESServiceLinkedRoleExists.RoleExists + Tags: + - Key: application + Value: mozdef + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], base-iam.yml ] ] + MozDefInstance: + Type: AWS::CloudFormation::Stack + Properties: + Parameters: + VpcId: !GetAtt MozDefVPC.Outputs.VpcId + InstanceType: !Ref InstanceType + KeyName: !Ref KeyName + IamInstanceProfile: !GetAtt MozDefIAMRoleAndInstanceProfile.Outputs.InstanceProfileArn + AutoScaleGroupSubnetIds: !Join [ ',', !GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ] + AMIImageId: !FindInMap [ RegionMap, !Ref 'AWS::Region', HVM64 ] + EFSID: !GetAtt MozDefEFS.Outputs.EFSID + MozDefSecurityGroupId: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId + MozDefLoadBalancerSecurityGroupId: !GetAtt MozDefSecurityGroups.Outputs.MozDefLoadBalancerSecurityGroupId + MozDefACMCertArn: !Ref ACMCertArn + ESURL: !GetAtt MozDefES.Outputs.ElasticsearchURL + KibanaURL: !GetAtt MozDefES.Outputs.ElasticsearchKibanaURL + KibanaDomainOnlyURL: !GetAtt MozDefES.Outputs.ElasticsearchDomainOnlyURL + OIDCClientId: !Ref OIDCClientId + OIDCClientSecret: !Ref OIDCClientSecret + OIDCAuthorizationEndpoint: !Ref OIDCAuthorizationEndpoint + OIDCIssuer: !Ref OIDCIssuer + OIDCTokenEndpoint: !Ref OIDCTokenEndpoint + OIDCUserInfoEndpoint: !Ref OIDCUserInfoEndpoint + CloudTrailSQSNotificationQueueName: !GetAtt MozDefCloudTrail.Outputs.CloudTrailSQSQueueName + MozDefSQSQueueName: !GetAtt MozDefSQS.Outputs.SQSQueueName + DomainName: !Ref DomainName + AlertQueueUrl: !GetAtt MozDefSQS.Outputs.AlertTaskSQSQueueUrl + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-instance.yml ] ] + MozDefES: + Type: AWS::CloudFormation::Stack + DependsOn: MozDefIAMRoleAndInstanceProfile + Properties: + Parameters: + SubnetIds: !Join [ ',', !GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ] + BlockStoreSizeGB: '100' + VpcId: 
!GetAtt MozDefVPC.Outputs.VpcId + MozDefInstanceSecurityGroup: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId + ESInstanceCount: '1' + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-es.yml ] ] + MozDefEFS: + Type: AWS::CloudFormation::Stack + Properties: + Parameters: + VpcId: !GetAtt MozDefVPC.Outputs.VpcId + SubnetList: !Join [ ',', !GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ] + NumberOfSubnets: !GetAtt NumberOfSubnets.Length + MozDefSecurityGroup: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-efs.yml ] ] + MozDefSQS: + Type: AWS::CloudFormation::Stack + Properties: + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-sqs.yml ] ] + MozDefCloudTrail: + Type: AWS::CloudFormation::Stack + Properties: + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-cloudtrail.yml ] ] + MozDefVPCFlowLogs: + Type: AWS::CloudFormation::Stack + Properties: + Parameters: + VpcId: !GetAtt MozDefVPC.Outputs.VpcId + MozDefSQSQueueArn: !GetAtt MozDefSQS.Outputs.SQSQueueArn + MozDefSQSQueueUrl: !GetAtt MozDefSQS.Outputs.SQSQueueUrl + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ], mozdef-vpc-flow-logs.yml ] ] + CloudFormationLambdaIAMRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: + - lambda.amazonaws.com + Action: + - sts:AssumeRole + Policies: + - + PolicyName: AllowLambdaLogging + PolicyDocument: + Version: 2012-10-17 + Statement: + - + Effect: Allow + Action: + - logs:* + - iam:ListRoles + Resource: '*' + GetArrayLengthLambdaFunction: + Type: AWS::Lambda::Function + DependsOn: CloudFormationLambdaIAMRole + # This DependsOn shouldn't be needed because the "Role" value is set to + # "!GetAtt CloudFormationLambdaIAMRole.Arn" but without DependsOn the error + # "Template error: IAM role mozdef-aws-nested-CloudFormationLambdaIAMRole-108UCUPESC6WG doesn't exist" + # occurs on stack creation for this Lambda Function resource. The DependsOn + # prevents the error. 
+ Properties: + Code: + ZipFile: | + import cfnresponse + import secrets, string + def handler(event, context): + length = len(event['ResourceProperties']['Array']) + physical_id = ''.join(secrets.choice(string.ascii_uppercase + string.digits) for i in range(13)) + cfnresponse.send(event, context, cfnresponse.SUCCESS, {'Length': length}, "GetArrayLength-%s" % physical_id) + Handler: index.handler + Runtime: python3.6 + Role: !GetAtt CloudFormationLambdaIAMRole.Arn + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + Timeout: 20 + NumberOfSubnets: + Type: AWS::CloudFormation::CustomResource + Properties: + Array: !Ref PublicSubnetIds + ServiceToken: !GetAtt GetArrayLengthLambdaFunction.Arn + DoesRoleExistLambdaFunction: + Type: AWS::Lambda::Function + DependsOn: CloudFormationLambdaIAMRole + # This DependsOn shouldn't be needed because the "Role" value is set to + # "!GetAtt CloudFormationLambdaIAMRole.Arn" but without DependsOn the error + # "Template error: IAM role mozdef-aws-nested-CloudFormationLambdaIAMRole-108UCUPESC6WG doesn't exist" + # occurs on stack creation for this Lambda Function resource. The DependsOn + # prevents the error. + Properties: + Code: + ZipFile: | + import cfnresponse + import boto3, secrets, string + def handler(event, context): + paginator = boto3.client('iam').get_paginator('list_roles') + args = {'PathPrefix': event['ResourceProperties']['PathPrefix']} if 'PathPrefix' in event['ResourceProperties'] else {} + iterator = paginator.paginate(**args).search( + "Roles[?RoleName == '%s'][]" % event['ResourceProperties']['RoleName']) + response = {'RoleExists': len([x for x in iterator]) > 0} + physical_id = ''.join( + secrets.choice(string.ascii_uppercase + string.digits) for i in + range(13)) + cfnresponse.send(event, context, cfnresponse.SUCCESS, response, + "DoesRoleExist-%s" % physical_id) + Handler: index.handler + Runtime: python3.6 + Role: !GetAtt CloudFormationLambdaIAMRole.Arn + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + Timeout: 20 + ESServiceLinkedRoleExists: + Type: AWS::CloudFormation::CustomResource + Properties: + RoleName: AWSServiceRoleForAmazonElasticsearchService + PathPrefix: '/aws-service-role/es.amazonaws.com/' + ServiceToken: !GetAtt DoesRoleExistLambdaFunction.Arn + MozDefAlertWriterEnv: + Type: AWS::CloudFormation::Stack + Properties: + Parameters: + VpcId: !GetAtt MozDefVPC.Outputs.VpcId + PublicSubnetIds: !Join [ ',', !GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ] + MozDefSecurityGroup: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId + ESUrl: !GetAtt MozDefES.Outputs.ElasticsearchURL + Tags: + - Key: application + Value: mozdef + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ] , mozdef-alert-developer.yml ] ] + MozDefVPCFlowLogs: + Parameters: + VpcId: !GetAtt MozDefVPC.Outputs.VpcId + MozDefSQSQueueArn: !GetAtt MozDefSQS.Outputs.SQSQueueArn + MozDefSQSQueueUrl: !GetAtt MozDefSQS.Outputs.SQSQueueUrl + Tags: + - Key: application + Value: mozdef + TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ] , mozdef-vpc-flow-logs.yml ] ] +Outputs: + LoadBalancerDNSName: + Description: The DNS name of the ALB hosting MozDef. If using OIDC or SSL point your DNS at this. If using basic auth no DNS is necessary. 
+ Value: !GetAtt MozDefInstance.Outputs.LoadBalancerDNSName diff --git a/cloudy_mozdef/cloudformation/mozdef-vpc.yml b/cloudy_mozdef/cloudformation/mozdef-vpc.yml new file mode 100644 index 00000000..57f85226 --- /dev/null +++ b/cloudy_mozdef/cloudformation/mozdef-vpc.yml @@ -0,0 +1,133 @@ +AWSTemplateFormatVersion: "2010-09-09" +Description: "Create a vpc for Mozilla Deployment of Cloudy Mozdef." +Resources: + InternetGateway: + Type: "AWS::EC2::InternetGateway" + Properties: + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + VPC: + Type: "AWS::EC2::VPC" + Properties: + CidrBlock: "10.0.0.0/16" + EnableDnsSupport: True + EnableDnsHostnames: True + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + AttachGateway: + Type: AWS::EC2::VPCGatewayAttachment + Properties: + VpcId: + Ref: VPC + InternetGatewayId: + Ref: InternetGateway + RouteTable: + Type: "AWS::EC2::RouteTable" + Properties: + VpcId: + Ref: VPC + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + DefaultRoute: + Type: AWS::EC2::Route + Properties: + RouteTableId: + Ref: RouteTable + DestinationCidrBlock: 0.0.0.0/0 + GatewayId: + Ref: InternetGateway + Subnet1: + Type: "AWS::EC2::Subnet" + Properties: + AvailabilityZone: + Fn::Select: + - 0 + - Fn::GetAZs: "" + CidrBlock: "10.0.0.0/24" + MapPublicIpOnLaunch: True + VpcId: + Ref: VPC + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + Subnet2: + Type: "AWS::EC2::Subnet" + Properties: + AvailabilityZone: + Fn::Select: + - 1 + - Fn::GetAZs: "" + CidrBlock: "10.0.1.0/24" + MapPublicIpOnLaunch: True + VpcId: + Ref: VPC + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + Subnet3: + Type: "AWS::EC2::Subnet" + Properties: + AvailabilityZone: + Fn::Select: + - 2 + - Fn::GetAZs: "" + CidrBlock: "10.0.2.0/24" + MapPublicIpOnLaunch: True + VpcId: + Ref: VPC + Tags: + - Key: application + Value: mozdef + - Key: stack + Value: !Ref AWS::StackName + RouteAc1: + Type: "AWS::EC2::SubnetRouteTableAssociation" + Properties: + RouteTableId: + Ref: RouteTable + SubnetId: + Ref: Subnet1 + RouteAc2: + Type: "AWS::EC2::SubnetRouteTableAssociation" + Properties: + RouteTableId: + Ref: RouteTable + SubnetId: + Ref: Subnet2 + RouteAc3: + Type: "AWS::EC2::SubnetRouteTableAssociation" + Properties: + RouteTableId: + Ref: RouteTable + SubnetId: + Ref: Subnet3 +Outputs: + VpcId: + Description: The ID of the VPC created. + Value: + Ref: VPC + Subnet1: + Description: The id of subnet1 in the first az. + Value: + Ref: Subnet1 + Subnet2: + Description: The id of subnet2 in the second az. + Value: + Ref: Subnet2 + Subnet3: + Description: The id of subnet3 in the third az. 
+ Value: + Ref: Subnet3 diff --git a/cloudy_mozdef/lambda_layer/.gitignore b/cloudy_mozdef/lambda_layer/.gitignore index ed84d590..a60ff36b 100644 --- a/cloudy_mozdef/lambda_layer/.gitignore +++ b/cloudy_mozdef/lambda_layer/.gitignore @@ -1,6 +1,7 @@ function-latest.zip +layer-latest.zip build/lib/* build/python/* lib/* python/* - +build/lambdalert.py From 84ce9ad987c8f7924d81c89d5b8894574b68d870 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Thu, 23 May 2019 16:27:22 -0700 Subject: [PATCH 42/88] add support for injecting map to reinforce template --- cloudy_mozdef/ci/publish_versioned_templates | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/cloudy_mozdef/ci/publish_versioned_templates b/cloudy_mozdef/ci/publish_versioned_templates index 1f9876c9..1853d3d9 100755 --- a/cloudy_mozdef/ci/publish_versioned_templates +++ b/cloudy_mozdef/ci/publish_versioned_templates @@ -19,10 +19,16 @@ sed '/# INSERT MAPPING HERE.*/{ r '"${AMI_MAP_TEMP_FILE}"' }' cloudformation/mozdef-parent.yml > ${TMPDIR}/mozdef-parent.yml +echo "Injecting the region AMI mapping into the mozdef-parent.yml CloudFormation template" +sed '/# INSERT MAPPING HERE.*/{ + s/# INSERT MAPPING HERE.*//g + r '"${AMI_MAP_TEMP_FILE}"' +}' cloudformation/mozdef-parent.yml > ${TMPDIR}/mozdef-parent-reinforce.yml + echo "Uploading CloudFormation templates to S3 directory ${VERSIONED_BUCKET_URI}/" # Sync all .yml files except mozdef-parent.yml aws s3 sync cloudformation/ ${VERSIONED_BUCKET_URI} --exclude="*" --include="*.yml" --exclude="mozdef-parent.yml" # cp modified mozdef-parent.yml from TMPDIR to S3 aws s3 cp ${TMPDIR}/mozdef-parent.yml ${VERSIONED_BUCKET_URI}/ - +aws s3 cp ${TMPDIR}/mozdef-parent-reinforce.yml ${VERSIONED_BUCKET_URI}/ rm -rf "${TMPDIR}" From e14f3441d0e60ba333fbc5588c9d59d5c938ae81 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Thu, 23 May 2019 16:43:16 -0700 Subject: [PATCH 43/88] fix tabs --- cloudy_mozdef/cloudformation/mozdef-credential-leak.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml b/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml index 0463c6fb..fe7b58b8 100644 --- a/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml +++ b/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml @@ -1,9 +1,9 @@ AWSTemplateFormatVersion: '2010-09-09' Description: Template to build out users for insecure account this is only used for training and testing. Parameters: - SNSReceiverArn: - Type: String - Description: The ARN of the SNS topic to post credentials to. Note that this leaks credentials. + SNSReceiverArn: + Type: String + Description: The ARN of the SNS topic to post credentials to. Note that this leaks credentials. 
Resources: IAMUser1: Type: AWS::IAM::User From 9fd89765caecbb7605d4109dab7a8c015ed8820d Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Thu, 23 May 2019 17:54:43 -0700 Subject: [PATCH 44/88] Fix malformed LogGroup ARN --- cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml index 68b10c2e..4e8cf859 100644 --- a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml +++ b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml @@ -43,7 +43,8 @@ Resources: Type: AWS::EC2::FlowLog Properties: DeliverLogsPermissionArn: !GetAtt FlowLogRole.Arn - LogDestination: !GetAtt LogGroup.Arn + # We can't use !GetAtt LogGroup.Arn because it actually returns and Arn suffixed with ":*" + LogDestination: !Join [ ':', [ 'arn:aws:logs', !Ref 'AWS::Region', !Ref 'AWS::AccountId', 'log-group', !Ref 'LogGroup' ] ] ResourceId: !Ref VpcId ResourceType: VPC TrafficType: ALL @@ -153,12 +154,6 @@ Resources: Principal: !Join [ '.', [ 'logs', !Ref 'AWS::Region', 'amazonaws.com' ] ] SourceAccount: !Ref 'AWS::AccountId' SourceArn: !GetAtt LogGroup.Arn - # LogGroup.Arn claims to be a value like - # arn:aws:logs:us-west-1:123456789012:log-group:/mystack-testgroup-12ABC1AB12A1:* - # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-logs-loggroup.html#aws-resource-logs-loggroup-return-values - # Which should be correct for SourceArn which is expecting something like - # arn:aws:logs:region:123456789123:log-group:TestLambda:* - # https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/SubscriptionFilters.html#LambdaFunctionExample FlowLogSubscriptionFilter: Type: AWS::Logs::SubscriptionFilter Properties: From 310fb8c4ee2fbb93da224d598d418e3a8692bd07 Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Thu, 23 May 2019 21:39:11 -0700 Subject: [PATCH 45/88] Correctly parse flow log message as a space delimited string Cast int field values as ints Don't send empty SQS batches --- .../cloudformation/mozdef-vpc-flow-logs.yml | 56 ++++++++++++------- 1 file changed, 35 insertions(+), 21 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml index 4e8cf859..ccbba81f 100644 --- a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml +++ b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml @@ -83,6 +83,10 @@ Resources: from datetime import datetime PROTO_NUM_MAP = {num: name[8:] for name, num in vars(socket).items() if name.startswith("IPPROTO")} + FIELD_NAMES = [ + 'version', 'account-id', 'interface-id', 'srcaddr', 'dstaddr', 'srcport', + 'dstport', 'protocol', 'packets', 'bytes', 'start', 'end', 'action', + 'log-status'] def lambda_handler(event, context): client = boto3.client('sqs') @@ -90,9 +94,15 @@ Resources: data = json.loads( gzip.decompress(base64.b64decode(raw_data)).decode('utf-8')) entries = [] - for log_event in [x for x in data.get('logEvents', []) if x['extractedFields']['log_status'] == 'OK']: + for log_event_record in data.get('logEvents', ''): + log_event_record_values = log_event_record['message'].split(' ') + log_event = {FIELD_NAMES[i]: log_event_record_values[i] + for i in range(len(FIELD_NAMES))} + if log_event.get('log-status') != 'OK': + print('Skipping {} entry : {}'.format(log_event.get('log-status'), log_event_record['message'])) + continue + # TODO : Do we want to do something with log_status NODATA and SKIPDATA events? 
- fields = log_event['extractedFields'] message = dict( category='vpc-flow', hostname=socket.getfqdn(), @@ -101,37 +111,41 @@ Resources: severity='INFO', source='vpc_flow') message['utctimestamp'] = datetime.utcfromtimestamp( - int(data['timestamp'] / 1000)).strftime('%Y-%m-%dT%H:%M:%S+00:00') - message['summary'] = '{srcaddr}:{srcport} -> {dstaddr}:{dstport} {bytes} bytes {action}'.format(**fields) + int(log_event_record['timestamp'] / 1000)).strftime('%Y-%m-%dT%H:%M:%S+00:00') + message['summary'] = '{srcaddr}:{srcport} -> {dstaddr}:{dstport} {bytes} bytes {action}'.format(**log_event) message['details'] = dict( - destinationipaddress=fields['dstaddr'], - destinationport=fields['dstport'], - sourceipaddress=fields['srcaddr'], - sourceport=fields['srcport'], - success=fields['action'] == 'ACCEPT', + destinationipaddress=log_event['dstaddr'], + destinationport=int(log_event['dstport']), + sourceipaddress=log_event['srcaddr'], + sourceport=int(log_event['srcport']), + success=log_event['action'] == 'ACCEPT', capture_window_start=datetime.utcfromtimestamp( - fields['start']).strftime('%Y-%m-%dT%H:%M:%S+00:00'), + int(log_event['start'])).strftime('%Y-%m-%dT%H:%M:%S+00:00'), capture_window_end=datetime.utcfromtimestamp( - fields['end']).strftime('%Y-%m-%dT%H:%M:%S+00:00'), - version=fields['version'], - pkts=fields['packets'], - proto=PROTO_NUM_MAP.get(int(fields['protocol']), 'unknown').lower(), - recipientaccountid=fields['account_id'], - interface_id=fields['interface_id'], - bytes=fields['bytes']) + int(log_event['end'])).strftime('%Y-%m-%dT%H:%M:%S+00:00'), + version=int(log_event['version']), + pkts=int(log_event['packets']), + proto=PROTO_NUM_MAP.get(int(log_event['protocol']), 'unknown').lower(), + recipientaccountid=log_event['account-id'], + interface_id=log_event['interface-id'], + bytes=int(log_event['bytes'])) entry = dict( - Id=log_event['id'], + Id=log_event_record['id'], MessageBody=json.dumps(message)) entries.append(entry) + print('Going to send entry : {}'.format(entry)) if len(entries) == 10: + print('sending batch') response = client.send_message_batch( QueueUrl=os.getenv('SQS_URL'), Entries=entries) # TODO : Process the response and do something about failures del entries[:] - response = client.send_message_batch( - QueueUrl=os.getenv('SQS_URL'), - Entries=entries) + if len(entries) > 0: + print('sending final batch') + response = client.send_message_batch( + QueueUrl=os.getenv('SQS_URL'), + Entries=entries) Description: Transform VPC Flow logs into MozDef events Environment: Variables: From 679d160d8115da3526642412e52f8c8045f3be2e Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Fri, 24 May 2019 09:36:09 -0700 Subject: [PATCH 46/88] Limit VPC Flow logs to accepted SSH connections --- cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml index ccbba81f..af26f213 100644 --- a/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml +++ b/cloudy_mozdef/cloudformation/mozdef-vpc-flow-logs.yml @@ -172,5 +172,5 @@ Resources: Type: AWS::Logs::SubscriptionFilter Properties: DestinationArn: !GetAtt FlowLogProcessor.Arn - FilterPattern: "" + FilterPattern: '[version, account, eni, source, destination, srcport, destport="22", protocol="6", packets, bytes, windowstart, windowend, action="ACCEPT", flowlogstatus]' LogGroupName: !Ref LogGroup From 8a9f64ea5c3d18055b4660fd49ff06dd0fe6da6f Mon Sep 17 00:00:00 2001 
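PATCH 45 above reworks the transformer to split each raw flow-log record on spaces and zip it against FIELD_NAMES (the default version 2 VPC Flow Log field order), and PATCH 46 narrows the subscription filter to accepted SSH flows (destination port 22, protocol 6, action ACCEPT). The ten-entry flush in that code matches the SQS SendMessageBatch limit of ten messages per call. A small standalone sketch of the parsing step, using a fabricated record:

    import socket

    # Default (version 2) VPC Flow Log fields, in record order.
    FIELD_NAMES = [
        'version', 'account-id', 'interface-id', 'srcaddr', 'dstaddr', 'srcport',
        'dstport', 'protocol', 'packets', 'bytes', 'start', 'end', 'action',
        'log-status']

    # Maps IP protocol numbers to names, e.g. {6: 'TCP', 17: 'UDP', ...}
    PROTO_NUM_MAP = {num: name[8:] for name, num in vars(socket).items()
                     if name.startswith('IPPROTO')}

    def parse_flow_record(message):
        """Turn one space-delimited flow-log line into a field dict."""
        values = message.split(' ')
        return {FIELD_NAMES[i]: values[i] for i in range(len(FIELD_NAMES))}

    record = ('2 123456789012 eni-abc 203.0.113.10 10.0.0.9 55022 22 '
              '6 10 840 1558000000 1558000060 ACCEPT OK')
    fields = parse_flow_record(record)
    print(fields['srcaddr'], '->', fields['dstaddr'],
          PROTO_NUM_MAP.get(int(fields['protocol']), 'unknown').lower(),
          fields['action'])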
From: Gene Wood Date: Fri, 24 May 2019 10:05:56 -0700 Subject: [PATCH 47/88] Error on deploy if OIDC secret missing --- cloudy_mozdef/Makefile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/cloudy_mozdef/Makefile b/cloudy_mozdef/Makefile index 1bf41011..abe22e2b 100644 --- a/cloudy_mozdef/Makefile +++ b/cloudy_mozdef/Makefile @@ -7,6 +7,7 @@ AMI_MAP_TEMP_FILE := /tmp/mozdef-ami-map.txt DEV_STACK_PARAMS_FILENAME := aws_parameters.dev.json # For more information on the rationale behind the code in STACK_PARAMS see https://github.com/aws/aws-cli/issues/2429#issuecomment-441133480 DEV_STACK_PARAMS := $(shell test -e $(DEV_STACK_PARAMS_FILENAME) && python -c 'import json,sys;f=open(sys.argv[1]);print(" ".join([",".join(["%s=\\\"%s\\\""%(k,v) for k,v in x.items()]) for x in json.load(f)]));f.close()' $(DEV_STACK_PARAMS_FILENAME)) +OIDC_CLIENT_ID := $(shell test -e $(DEV_STACK_PARAMS_FILENAME) && python -c 'import json,sys;f=open(sys.argv[1]);print(next((x["ParameterValue"] for x in json.load(f) if x["ParameterKey"]=="OIDCClientId"),""));f.close()' $(DEV_STACK_PARAMS_FILENAME)) # MozDef uses a nested CF stack, the mozdef-parent.yml will tie all child stacks together and load them from S3 # See also mozdef.infosec.mozilla.org bucket S3_DEV_BUCKET_NAME := mozdef.infosec.allizom.org @@ -39,6 +40,7 @@ packer-build-github: ## Build the base AMI with packer create-dev-stack: test ## Create everything you need for a fresh new stack! @export AWS_REGION=$(AWS_REGION) @echo "Make sure you have an environment variable OIDC_CLIENT_SECRET set." + @test -n "$(OIDC_CLIENT_SECRET_PARAM_ARG)" -a -n "$(OIDC_CLIENT_ID)" -o -z "$(OIDC_CLIENT_SECRET_PARAM_ARG)" -a -z "$(OIDC_CLIENT_ID)" aws cloudformation create-stack --stack-name $(STACK_NAME) --template-url $(S3_DEV_STACK_URI)mozdef-parent.yml \ --capabilities CAPABILITY_IAM \ --parameters $(OIDC_CLIENT_SECRET_PARAM_ARG) \ @@ -53,6 +55,7 @@ create-dev-s3-bucket: .PHONY: update-dev-stack update-dev-stack: test ## Updates the nested stack on AWS @export AWS_REGION=$(AWS_REGION) + @test -n "$(OIDC_CLIENT_SECRET_PARAM_ARG)" -a -n "$(OIDC_CLIENT_ID)" -o -z "$(OIDC_CLIENT_SECRET_PARAM_ARG)" -a -z "$(OIDC_CLIENT_ID)" aws cloudformation update-stack --stack-name $(STACK_NAME) --template-url $(S3_DEV_STACK_URI)mozdef-parent.yml \ --capabilities CAPABILITY_IAM \ --parameters $(OIDC_CLIENT_SECRET_PARAM_ARG) \ From 9ee42eaf4a2ded00de5405fc7989713b69a286c2 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Fri, 24 May 2019 13:47:50 -0700 Subject: [PATCH 48/88] fix template generation --- cloudy_mozdef/ci/publish_versioned_templates | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cloudy_mozdef/ci/publish_versioned_templates b/cloudy_mozdef/ci/publish_versioned_templates index 1853d3d9..b99623ca 100755 --- a/cloudy_mozdef/ci/publish_versioned_templates +++ b/cloudy_mozdef/ci/publish_versioned_templates @@ -23,7 +23,7 @@ echo "Injecting the region AMI mapping into the mozdef-parent.yml CloudFormation sed '/# INSERT MAPPING HERE.*/{ s/# INSERT MAPPING HERE.*//g r '"${AMI_MAP_TEMP_FILE}"' -}' cloudformation/mozdef-parent.yml > ${TMPDIR}/mozdef-parent-reinforce.yml +}' cloudformation/mozdef-parent-reinforce.yml > ${TMPDIR}/mozdef-parent-reinforce.yml echo "Uploading CloudFormation templates to S3 directory ${VERSIONED_BUCKET_URI}/" # Sync all .yml files except mozdef-parent.yml From 6a6a1c98d604747c9b558deb0bcaf88593b2f370 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Fri, 24 May 2019 15:08:08 -0700 Subject: [PATCH 49/88] minor syntax fixes --- 
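The Makefile change in PATCH 47 above pulls OIDCClientId out of aws_parameters.dev.json with an inline python -c one-liner, then requires that the OIDC client secret argument and the client ID are either both present or both absent before create-dev-stack or update-dev-stack will run. The same extraction and guard written out as a readable sketch; the parameter file name follows the Makefile, and the rest is illustrative:

    import json
    import os
    import sys

    def get_parameter(params_file, key, default=''):
        """Return the ParameterValue for a ParameterKey in a CloudFormation
        parameters file (a JSON list of ParameterKey/ParameterValue dicts)."""
        try:
            with open(params_file) as f:
                params = json.load(f)
        except FileNotFoundError:
            return default
        return next((p['ParameterValue'] for p in params
                     if p['ParameterKey'] == key), default)

    if __name__ == '__main__':
        client_id = get_parameter('aws_parameters.dev.json', 'OIDCClientId')
        client_secret = os.environ.get('OIDC_CLIENT_SECRET', '')
        # Mirror the Makefile guard: both set, or both unset.
        if bool(client_id) != bool(client_secret):
            sys.exit('Set both OIDCClientId and OIDC_CLIENT_SECRET, or neither.')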
.../cloudformation/mozdef-credential-leak.yml | 2 +- .../mozdef-parent-reinforce.yml | 24 +++++++++++++++---- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml b/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml index fe7b58b8..2eb8b7a3 100644 --- a/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml +++ b/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml @@ -8,7 +8,7 @@ Resources: IAMUser1: Type: AWS::IAM::User Properties: - UserName: Bob213 + UserName: !Join [ '', ['bob123', !Ref AWS::StackId ]] Path: / ManagedPolicyArns: - arn:aws:iam::aws:policy/AdministratorAccess diff --git a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml index 95ff4c4f..df00dd66 100644 --- a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml +++ b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml @@ -49,6 +49,7 @@ Metadata: default: OIDC oauth token endpoint. OIDCUserInfoEndpoint: default: OIDC user info endpoint. + LeakCredentialSNSArn: Arn of the SNS topic to post admin creds to. Parameters: InstanceType: Type: String @@ -101,6 +102,15 @@ Parameters: Description: The arn of the sns topic to post a credential back to from the account. Do not use unless you are deploying this for reinforce workshop. This will attack the MozDef account. # A RegionMap of AMI IDs is required by AWS Marketplace https://docs.aws.amazon.com/marketplace/latest/userguide/cloudformation.html#aws-cloudformation-template-preparation # INSERT MAPPING HERE : This template does not work in this state. The mapping is replaced with a working AWS region to AMI ID mapping as well as a variable map with the S3TemplateLocationPrefix by cloudy_mozdef/ci/publish_versioned_templates. The resulting functioning CloudFormation template is uploaded to S3 for the version being built. 
+Mappings: + RegionMap: + us-west-2: + HVM64: ami-084efd8afd6e93bd1 + us-east-1: + HVM64: ami-0a265236e4027c00d + VariableMap: + Variables: + S3TemplateLocation: https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/reinforce2019/ Conditions: LeakACredential: !Not [!Equals [!Ref LeakCredentialSNSArn, ""]] Resources: @@ -108,6 +118,8 @@ Resources: Condition: LeakACredential Type: AWS::CloudFormation::Stack Properties: + Parameters: + SNSReceiverArn: !Ref LeakCredentialSNSArn Tags: - Key: application Value: mozdef @@ -151,7 +163,7 @@ Resources: InstanceType: !Ref InstanceType KeyName: !Ref KeyName IamInstanceProfile: !GetAtt MozDefIAMRoleAndInstanceProfile.Outputs.InstanceProfileArn - AutoScaleGroupSubnetIds: !Join [ ',', !GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ] + AutoScaleGroupSubnetIds: !Join [ ',', [!GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ]] AMIImageId: !FindInMap [ RegionMap, !Ref 'AWS::Region', HVM64 ] EFSID: !GetAtt MozDefEFS.Outputs.EFSID MozDefSecurityGroupId: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId @@ -181,7 +193,7 @@ Resources: DependsOn: MozDefIAMRoleAndInstanceProfile Properties: Parameters: - SubnetIds: !Join [ ',', !GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ] + SubnetIds: !Join [ ',', [!GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ]] BlockStoreSizeGB: '100' VpcId: !GetAtt MozDefVPC.Outputs.VpcId MozDefInstanceSecurityGroup: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId @@ -197,7 +209,7 @@ Resources: Properties: Parameters: VpcId: !GetAtt MozDefVPC.Outputs.VpcId - SubnetList: !Join [ ',', !GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ] + SubnetList: !Join [ ',', [!GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ]] NumberOfSubnets: !GetAtt NumberOfSubnets.Length MozDefSecurityGroup: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId Tags: @@ -290,7 +302,7 @@ Resources: NumberOfSubnets: Type: AWS::CloudFormation::CustomResource Properties: - Array: !Ref PublicSubnetIds + Array: !Join [ ',', [!GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ]] ServiceToken: !GetAtt GetArrayLengthLambdaFunction.Arn DoesRoleExistLambdaFunction: Type: AWS::Lambda::Function @@ -336,7 +348,7 @@ Resources: Properties: Parameters: VpcId: !GetAtt MozDefVPC.Outputs.VpcId - PublicSubnetIds: !Join [ ',', !GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ] + PublicSubnetIds: !Join [ ',', [!GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ]] MozDefSecurityGroup: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId ESUrl: !GetAtt MozDefES.Outputs.ElasticsearchURL Tags: @@ -344,6 +356,8 @@ Resources: Value: mozdef TemplateURL: !Join [ '', [ !FindInMap [ VariableMap, Variables, S3TemplateLocation ] , mozdef-alert-developer.yml ] ] MozDefVPCFlowLogs: + Type: AWS::CloudFormation::Stack + Properties: Parameters: VpcId: !GetAtt MozDefVPC.Outputs.VpcId MozDefSQSQueueArn: !GetAtt MozDefSQS.Outputs.SQSQueueArn From 6b04f21ab02133fe3c5118bce0f4f45c57267e06 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Fri, 24 May 2019 15:17:26 -0700 
Subject: [PATCH 50/88] remove username --- cloudy_mozdef/cloudformation/mozdef-credential-leak.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml b/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml index 2eb8b7a3..81caeffc 100644 --- a/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml +++ b/cloudy_mozdef/cloudformation/mozdef-credential-leak.yml @@ -8,7 +8,6 @@ Resources: IAMUser1: Type: AWS::IAM::User Properties: - UserName: !Join [ '', ['bob123', !Ref AWS::StackId ]] Path: / ManagedPolicyArns: - arn:aws:iam::aws:policy/AdministratorAccess From ad1322a0f1f2f51f4f890c530fe35073ca9c1489 Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Fri, 24 May 2019 15:33:26 -0700 Subject: [PATCH 51/88] Switch SQS worker from SNS+SQS to just SQS --- cloudy_mozdef/cloudformation/mozdef-instance.yml | 5 ++--- cloudy_mozdef/packer/packer.json | 2 +- docker/compose/docker-compose-cloudy-mozdef.yml | 4 ++-- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-instance.yml b/cloudy_mozdef/cloudformation/mozdef-instance.yml index a1bd0e9f..ead40c03 100644 --- a/cloudy_mozdef/cloudformation/mozdef-instance.yml +++ b/cloudy_mozdef/cloudformation/mozdef-instance.yml @@ -280,9 +280,8 @@ Resources: OPTIONS_TASKEXCHANGE=${CloudTrailSQSNotificationQueueName} path: /opt/mozdef/docker/compose/cloudy_mozdef_mq_cloudtrail.env - content: | - # This is the additional worker reserved for future use OPTIONS_TASKEXCHANGE=${MozDefSQSQueueName} - path: /opt/mozdef/docker/compose/cloudy_mozdef_mq_sns_sqs.env + path: /opt/mozdef/docker/compose/cloudy_mozdef_mq_sqs.env - content: | [Unit] Description=Docker Compose container starter @@ -312,7 +311,7 @@ Resources: - chmod --verbose 600 /opt/mozdef/docker/compose/rabbitmq.env - chmod --verbose 600 /opt/mozdef/docker/compose/cloudy_mozdef.env - chmod --verbose 600 /opt/mozdef/docker/compose/cloudy_mozdef_kibana.env - - chmod --verbose 600 /opt/mozdef/docker/compose/cloudy_mozdef_mq_sns_sqs.env + - chmod --verbose 600 /opt/mozdef/docker/compose/cloudy_mozdef_mq_sqs.env - mkdir --verbose --parents ${EFSMountPoint} - echo '*.* @@127.0.0.1:514' >> /etc/rsyslog.conf - systemctl enable rsyslog diff --git a/cloudy_mozdef/packer/packer.json b/cloudy_mozdef/packer/packer.json index 27f39dbe..b4771c72 100644 --- a/cloudy_mozdef/packer/packer.json +++ b/cloudy_mozdef/packer/packer.json @@ -59,7 +59,7 @@ "cd /opt/mozdef", "sudo git checkout {{ user `github_branch`}}", "sudo git rev-parse HEAD", - "sudo touch docker/compose/cloudy_mozdef.env docker/compose/rabbitmq.env docker/compose/cloudy_mozdef_mq_cloudtrail.env docker/compose/cloudy_mozdef_mq_sns_sqs.env docker/compose/cloudy_mozdef_kibana.env", + "sudo touch docker/compose/cloudy_mozdef.env docker/compose/rabbitmq.env docker/compose/cloudy_mozdef_mq_cloudtrail.env docker/compose/cloudy_mozdef_mq_sqs.env docker/compose/cloudy_mozdef_kibana.env", "sudo sed --in-place s/latest/{{ user `github_branch`}}/g docker/compose/docker-compose-cloudy-mozdef.yml", "sudo docker-compose --file docker/compose/docker-compose-cloudy-mozdef.yml --project-name mozdef pull", "sudo rm --recursive --force --verbose /tmp/* /home/ec2-user/.bash_history /root/.ssh /home/ec2-user/.ssh/known_hosts /home/ec2-user/.ssh/authorized_keys" diff --git a/docker/compose/docker-compose-cloudy-mozdef.yml b/docker/compose/docker-compose-cloudy-mozdef.yml index 319483d4..30f27a21 100644 --- a/docker/compose/docker-compose-cloudy-mozdef.yml +++ 
b/docker/compose/docker-compose-cloudy-mozdef.yml @@ -265,9 +265,9 @@ services: max-size: "10m" env_file: - cloudy_mozdef.env - - cloudy_mozdef_mq_sns_sqs.env + - cloudy_mozdef_mq_sqs.env restart: always - command: bash -c 'python esworker_sns_sqs.py -c esworker_sns_sqs.conf' + command: bash -c 'python esworker_sqs.py -c esworker_sqs.conf' scale: 1 depends_on: - base From 364cfcdb2cf2993e50e3927dd12a8fd2090281b9 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Fri, 24 May 2019 15:40:15 -0700 Subject: [PATCH 52/88] fixup subnet list passed to es --- .../cloudformation/mozdef-parent-reinforce.yml | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml index df00dd66..80e1ca39 100644 --- a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml +++ b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml @@ -102,15 +102,6 @@ Parameters: Description: The arn of the sns topic to post a credential back to from the account. Do not use unless you are deploying this for reinforce workshop. This will attack the MozDef account. # A RegionMap of AMI IDs is required by AWS Marketplace https://docs.aws.amazon.com/marketplace/latest/userguide/cloudformation.html#aws-cloudformation-template-preparation # INSERT MAPPING HERE : This template does not work in this state. The mapping is replaced with a working AWS region to AMI ID mapping as well as a variable map with the S3TemplateLocationPrefix by cloudy_mozdef/ci/publish_versioned_templates. The resulting functioning CloudFormation template is uploaded to S3 for the version being built. -Mappings: - RegionMap: - us-west-2: - HVM64: ami-084efd8afd6e93bd1 - us-east-1: - HVM64: ami-0a265236e4027c00d - VariableMap: - Variables: - S3TemplateLocation: https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/reinforce2019/ Conditions: LeakACredential: !Not [!Equals [!Ref LeakCredentialSNSArn, ""]] Resources: @@ -193,7 +184,10 @@ Resources: DependsOn: MozDefIAMRoleAndInstanceProfile Properties: Parameters: - SubnetIds: !Join [ ',', [!GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ]] + SubnetIds: + - !GetAtt MozDefVPC.Outputs.Subnet1 + - !GetAtt MozDefVPC.Outputs.Subnet2 + - !GetAtt MozDefVPC.Outputs.Subnet3 BlockStoreSizeGB: '100' VpcId: !GetAtt MozDefVPC.Outputs.VpcId MozDefInstanceSecurityGroup: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId From 11122142e040eb3575e657bf0f355db584edc358 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 24 May 2019 19:58:03 -0500 Subject: [PATCH 53/88] Fix watchlist process_alert function --- alerts/get_watchlist.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/alerts/get_watchlist.py b/alerts/get_watchlist.py index e0139f93..cb0157b1 100644 --- a/alerts/get_watchlist.py +++ b/alerts/get_watchlist.py @@ -35,9 +35,9 @@ class AlertWatchList(AlertTask): else: logger.error('The watchlist request failed. 
Status {0}.\n'.format(r)) - def process_alert(self, term): + def process_alert(self): search_query = SearchQuery(minutes=20) - content = QueryStringMatch(str(term)) + content = QueryStringMatch(str(self.watchterm)) search_query.add_must(content) self.filtersManual(search_query) self.searchEventsSimple() From 691ee439b5eef9c85e537a58d214631267aaf134 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Sat, 25 May 2019 18:42:40 -0700 Subject: [PATCH 54/88] fix es subnet mappings --- .../mozdef-parent-reinforce.yml | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml index 80e1ca39..31afbee7 100644 --- a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml +++ b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml @@ -102,6 +102,15 @@ Parameters: Description: The arn of the sns topic to post a credential back to from the account. Do not use unless you are deploying this for reinforce workshop. This will attack the MozDef account. # A RegionMap of AMI IDs is required by AWS Marketplace https://docs.aws.amazon.com/marketplace/latest/userguide/cloudformation.html#aws-cloudformation-template-preparation # INSERT MAPPING HERE : This template does not work in this state. The mapping is replaced with a working AWS region to AMI ID mapping as well as a variable map with the S3TemplateLocationPrefix by cloudy_mozdef/ci/publish_versioned_templates. The resulting functioning CloudFormation template is uploaded to S3 for the version being built. +Mappings: + RegionMap: + us-west-2: + HVM64: ami-084efd8afd6e93bd1 + us-east-1: + HVM64: ami-0a265236e4027c00d + VariableMap: + Variables: + S3TemplateLocation: https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/reinforce2019/ Conditions: LeakACredential: !Not [!Equals [!Ref LeakCredentialSNSArn, ""]] Resources: @@ -184,10 +193,7 @@ Resources: DependsOn: MozDefIAMRoleAndInstanceProfile Properties: Parameters: - SubnetIds: - - !GetAtt MozDefVPC.Outputs.Subnet1 - - !GetAtt MozDefVPC.Outputs.Subnet2 - - !GetAtt MozDefVPC.Outputs.Subnet3 + SubnetIds: !Join [ ',', [!GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ]] BlockStoreSizeGB: '100' VpcId: !GetAtt MozDefVPC.Outputs.VpcId MozDefInstanceSecurityGroup: !GetAtt MozDefSecurityGroups.Outputs.MozDefSecurityGroupId @@ -296,7 +302,10 @@ Resources: NumberOfSubnets: Type: AWS::CloudFormation::CustomResource Properties: - Array: !Join [ ',', [!GetAtt MozDefVPC.Outputs.Subnet1, !GetAtt MozDefVPC.Outputs.Subnet2, !GetAtt MozDefVPC.Outputs.Subnet3 ]] + Array: + - !GetAtt MozDefVPC.Outputs.Subnet1 + - !GetAtt MozDefVPC.Outputs.Subnet2 + - !GetAtt MozDefVPC.Outputs.Subnet3 ServiceToken: !GetAtt GetArrayLengthLambdaFunction.Arn DoesRoleExistLambdaFunction: Type: AWS::Lambda::Function From b22b0dbc7490f84b760193a88d5d25943475d035 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Sat, 25 May 2019 18:44:31 -0700 Subject: [PATCH 55/88] fix accidental addition of template mappings --- cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml index 31afbee7..f63e6997 100644 --- a/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml +++ b/cloudy_mozdef/cloudformation/mozdef-parent-reinforce.yml @@ -102,15 +102,6 @@ Parameters: 
Description: The arn of the sns topic to post a credential back to from the account. Do not use unless you are deploying this for reinforce workshop. This will attack the MozDef account. # A RegionMap of AMI IDs is required by AWS Marketplace https://docs.aws.amazon.com/marketplace/latest/userguide/cloudformation.html#aws-cloudformation-template-preparation # INSERT MAPPING HERE : This template does not work in this state. The mapping is replaced with a working AWS region to AMI ID mapping as well as a variable map with the S3TemplateLocationPrefix by cloudy_mozdef/ci/publish_versioned_templates. The resulting functioning CloudFormation template is uploaded to S3 for the version being built. -Mappings: - RegionMap: - us-west-2: - HVM64: ami-084efd8afd6e93bd1 - us-east-1: - HVM64: ami-0a265236e4027c00d - VariableMap: - Variables: - S3TemplateLocation: https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/reinforce2019/ Conditions: LeakACredential: !Not [!Equals [!Ref LeakCredentialSNSArn, ""]] Resources: From 3d34efc797ebab5084abe7a65bae5997cbdc8abd Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Sat, 25 May 2019 19:31:36 -0700 Subject: [PATCH 56/88] Set deploy script to executable --- cloudy_mozdef/ci/deploy | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 cloudy_mozdef/ci/deploy diff --git a/cloudy_mozdef/ci/deploy b/cloudy_mozdef/ci/deploy old mode 100644 new mode 100755 From 7dca241d42ca82a5af6d4a80e2b8c13c4aa270b4 Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Sat, 25 May 2019 19:32:08 -0700 Subject: [PATCH 57/88] Add make target to set domain name to stack's ALB --- cloudy_mozdef/Makefile | 7 ++++++- cloudy_mozdef/ci/bind_domain_name | 33 +++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) create mode 100755 cloudy_mozdef/ci/bind_domain_name diff --git a/cloudy_mozdef/Makefile b/cloudy_mozdef/Makefile index abe22e2b..4eaa0eb2 100644 --- a/cloudy_mozdef/Makefile +++ b/cloudy_mozdef/Makefile @@ -7,7 +7,8 @@ AMI_MAP_TEMP_FILE := /tmp/mozdef-ami-map.txt DEV_STACK_PARAMS_FILENAME := aws_parameters.dev.json # For more information on the rationale behind the code in STACK_PARAMS see https://github.com/aws/aws-cli/issues/2429#issuecomment-441133480 DEV_STACK_PARAMS := $(shell test -e $(DEV_STACK_PARAMS_FILENAME) && python -c 'import json,sys;f=open(sys.argv[1]);print(" ".join([",".join(["%s=\\\"%s\\\""%(k,v) for k,v in x.items()]) for x in json.load(f)]));f.close()' $(DEV_STACK_PARAMS_FILENAME)) -OIDC_CLIENT_ID := $(shell test -e $(DEV_STACK_PARAMS_FILENAME) && python -c 'import json,sys;f=open(sys.argv[1]);print(next((x["ParameterValue"] for x in json.load(f) if x["ParameterKey"]=="OIDCClientId"),""));f.close()' $(DEV_STACK_PARAMS_FILENAME)) +OIDC_CLIENT_ID := $(shell test -e $(DEV_STACK_PARAMS_FILENAME) && python -c 'import json,sys;f=open(sys.argv[1]);print(next((x["ParameterValue"] for x in json.load(f) if x["ParameterKey"]=="OIDCClientId"),""));f.close()' $(DEV_STACK_PARAMS_FILENAME)) +DOMAIN_NAME := $(shell test -e $(DEV_STACK_PARAMS_FILENAME) && python -c 'import json,sys;f=open(sys.argv[1]);print(next((x["ParameterValue"] for x in json.load(f) if x["ParameterKey"]=="DomainName"),""));f.close()' $(DEV_STACK_PARAMS_FILENAME)) # MozDef uses a nested CF stack, the mozdef-parent.yml will tie all child stacks together and load them from S3 # See also mozdef.infosec.mozilla.org bucket S3_DEV_BUCKET_NAME := mozdef.infosec.allizom.org @@ -96,3 +97,7 @@ diff-dev-templates: .PHONY: diff-prod-templates 
diff-prod-templates: tempdir=`mktemp --directory`; aws s3 sync $(S3_PROD_BUCKET_URI) "$$tempdir" --exclude="*" --include="*.yml"; diff --recursive --unified "$$tempdir" cloudformation; rm -rf "$$tempdir" + +.PHONY: bind-domain-name +bind-domain-name: + ci/bind_domain_name "$(DOMAIN_NAME)" "$(STACK_NAME)" diff --git a/cloudy_mozdef/ci/bind_domain_name b/cloudy_mozdef/ci/bind_domain_name new file mode 100755 index 00000000..2d3f6bc0 --- /dev/null +++ b/cloudy_mozdef/ci/bind_domain_name @@ -0,0 +1,33 @@ +#!/bin/bash + +# Exit on any error +set -e + +DOMAIN_NAME="$1" +STACK_NAME="$2" +test -n "${DOMAIN_NAME}" -a -n "${STACK_NAME}" + +DOMAIN_NAME_ZONE="${DOMAIN_NAME#*.}." +ZONE_ID="$(aws route53 list-hosted-zones-by-name --dns-name ${DOMAIN_NAME_ZONE} --query "HostedZones[?Name == '${DOMAIN_NAME_ZONE}'].Id" --output text)" +INSTANCE_STACK_ARN="$(aws cloudformation describe-stack-resource --stack-name ${STACK_NAME} --logical-resource-id MozDefInstance --query 'StackResourceDetail.PhysicalResourceId' --output text)" +instance_stack_name_prefix="${INSTANCE_STACK_ARN##*:stack/}" +INSTANCE_STACK_NAME="${instance_stack_name_prefix%%/*}" +ELB_ARN="$(aws cloudformation describe-stack-resource --stack-name ${INSTANCE_STACK_NAME} --logical-resource-id MozDefElasticLoadBalancingV2LoadBalancer --query 'StackResourceDetail.PhysicalResourceId' --output text)" +#elb_name_prefix="${ELB_ARN##*:loadbalancer/app/}" +#ELB_NAME="${elb_name_prefix%%/*}" +ELB_DNS_NAME=$(aws elbv2 describe-load-balancers --load-balancer-arns ${ELB_ARN} --query 'LoadBalancers[0].DNSName' --output text) +ELB_HOSTED_ZONE_ID=$(aws elbv2 describe-load-balancers --load-balancer-arns ${ELB_ARN} --query 'LoadBalancers[0].CanonicalHostedZoneId' --output text) +CHANGE_BATCH=$(cat < Date: Mon, 27 May 2019 17:27:00 -0400 Subject: [PATCH 58/88] First take at implementing an alert plugin to enrich port scan alerts with info about recent connections --- alerts/plugins/port_scan_enrichment.json | 13 ++ alerts/plugins/port_scan_enrichment.py | 173 +++++++++++++++++++++++ 2 files changed, 186 insertions(+) create mode 100644 alerts/plugins/port_scan_enrichment.json create mode 100644 alerts/plugins/port_scan_enrichment.py diff --git a/alerts/plugins/port_scan_enrichment.json b/alerts/plugins/port_scan_enrichment.json new file mode 100644 index 00000000..b59f9220 --- /dev/null +++ b/alerts/plugins/port_scan_enrichment.json @@ -0,0 +1,13 @@ +{ + "elasticSearchAddress": "http://127.0.0.1:9200", + "indicesToSearch": [ + "events-*" + ], + "maxConnections": 32, + "matchTags": [ + "portscan" + ], + "searchWindow": { + "hours": 24 + } +} diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py new file mode 100644 index 00000000..7313b6a0 --- /dev/null +++ b/alerts/plugins/port_scan_enrichment.py @@ -0,0 +1,173 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+# Copyright (c) 2017 Mozilla Corporation + + +import os + +from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch +from mozdef_util.elasticsearch_client import ElasticsearchClient + + +CONFIG_FILE = os.path.join( + os.path.dirname(__file__), + 'port_scan_enrichment.json') + +MISSING_REQUIRED_KEY_ERR_MSG = 'invalid configuration; '\ + 'missing key "elasticSearchAddress" must be a URL '\ + 'pointing to the ElasticSearch instance used by MozDef' + + +class message(object): + '''Alert plugin that handles messages (alerts) tagged as containing + information about a port scan having been detected. This plugin + will add information to such alerts describing any connections + successfully established by the IP address from which the port + scan originates. + + The expected format of the configuration file, + `port_scan_enrichment.json`, is as such: + + ```json + { + "elasticSearchAddress": "http://127.0.0.1:9200", + "indicesToSearch": [ + "events-*" + ], + "maxConnections": 32, + "matchTags": [ + "portscan" + ], + "searchWindow": { + "hours": 12, + "minutes": 30, + "seconds": 59 + } + } + ``` + + `elasticSearchAddress` must be provided and must be a URL pointing + to the ElasticSearch instance containing MozDef alerts. + `indicesToSearch` is an array of names of indices to search in ES. + If not provided or else an empty array, it defaults to `["events-*"]`. + `maxConnections` is the maximum number of successful + connections to list. If set to 0, all will be listed. + `matchTags` is a list of tags to match against. This plugin will + run against any alert containing any of the specified tags. If + `matchTags` is not provided or is an empty array, it will default + to `["portscan"]` + The `searchWindow` option is an object containing keyword + arguments to be passed to Python's `datetime.timedelta` function + and can thus contain any keys corresponding to the keyword + arguments that would be passed to the `datetime.datetime` function. + If `searchWindow` is not present or is an empty object, the + default search window is 24 hours. + + The modified alert will have a `details.recentconnections` field + appended to it, formatted like so: + + ```json + { + "details": { + "recentconnections": [ + { + "destinationipaddress": "1.2.3.4", + "destinationport": 80, + "timestamp": "May 27th 2019, 20:29:27.516" + } + ] + } + } + ``` + + That is, each connection will be described in an array and be an + object containing the IP address and port over which the connection + was established and the time the connection was made. 
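As a small aside on the `searchWindow` option documented above: its keys are handed straight to `datetime.timedelta` as keyword arguments, so the window from the sample configuration maps onto a cut-off time as in the standalone sketch below (illustrative only, not code from the plugin).

```python
from datetime import datetime, timedelta

# A configured window such as {"hours": 12, "minutes": 30, "seconds": 59}
# becomes timedelta keyword arguments; events newer than `cutoff` are the
# candidates for the recentconnections enrichment.
search_window = {'hours': 12, 'minutes': 30, 'seconds': 59}
cutoff = datetime.utcnow() - timedelta(**search_window)
print(cutoff.isoformat())
```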
+ ''' + + def __init__(self): + config = _load_config(CONFIG_FILE) + + try: + self.es_address = config['elasticSearchAddress'] + except KeyError: + raise KeyError(MISSING_REQUIRED_KEY_ERR_MSG) + + self.search_indices = config.get('searchIndices', []) + self.max_connections = config.get('maxConnections', 0) + self.match_tags = config.get('matchTags', ['portscan']) + self.search_window = config.get('searchWindow', {}) + + if len(self.search_indices) == 0: + self.search_indices = ['alerts'] + + if self.max_connections == 0: + self.max_connections = None + + if len(self.search_window) == 0: + self.search_window = { 'hours': 24 } + + + def onMessage(self, message): + alert_tags = message.get('tags', []) + + should_enrich = any([ + tag in alert_tags + for tag in self.match_tags + ]) + + if should_enrich: + return enrich( + message, + self.search_window, + self.max_connections, + self.search_indices) + + return message + + +def take(ls, n_items=None): + '''Take only N items from a list.''' + + if n_items is None: + return ls + + return ls[:n_items] + + +def enrich(alert, search_window, max_connections, indices): + '''Enrich an alert with information about recent connections made by + the 'details.sourceipaddress'. + ''' + + search_query = SearchQuery(**search_window) + + search_query.add_must([ + TermMatch('category', 'bro'), + TermMatch('source', 'conn'), + PhraseMatch( + 'details.sourceipaddress', + alert['details']['sourceipaddress']) + ]) + + es_client = ElasticsearchClient(self.es_address) + + results = search_query.execute(es_client, indices=indices) + + events = [ + hit.get('_source', {}) + for hit in results.get('hits', []) + ] + + alert = alert.copy() + alert['details']['recentconnections'] = [] + + for event in take(events, max_connections): + alert['details']['recentconnections'].append({ + 'destinationipaddress': event['details']['destinationipaddress'], + 'destinationport': event['details']['destinationport'], + 'timestamp': event['timestamp'] + }) + + return alert From 427707986827846c9f3b45b876a7b5b4d792e4fe Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 27 May 2019 18:36:52 -0400 Subject: [PATCH 59/88] Implement the _load_config function --- alerts/plugins/port_scan_enrichment.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py index 7313b6a0..6879a3dc 100644 --- a/alerts/plugins/port_scan_enrichment.py +++ b/alerts/plugins/port_scan_enrichment.py @@ -4,6 +4,7 @@ # Copyright (c) 2017 Mozilla Corporation +import json import os from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch @@ -127,6 +128,16 @@ class message(object): return message +def _load_config(file_path): + '''Private + + Load the alert plugin configuration from a file. 
+ ''' + + with open(file_path) as config_file: + return json.load(config_file) + + def take(ls, n_items=None): '''Take only N items from a list.''' From 01de6d091165fda863f352e9ee3d90d8ae304938 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 27 May 2019 18:37:33 -0400 Subject: [PATCH 60/88] Abstract the ElasticSearchClient interface away to facilitate dependency injection in the enrich function --- alerts/plugins/port_scan_enrichment.py | 32 +++++++++++++++++++++----- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py index 6879a3dc..a799c49d 100644 --- a/alerts/plugins/port_scan_enrichment.py +++ b/alerts/plugins/port_scan_enrichment.py @@ -19,7 +19,6 @@ MISSING_REQUIRED_KEY_ERR_MSG = 'invalid configuration; '\ 'missing key "elasticSearchAddress" must be a URL '\ 'pointing to the ElasticSearch instance used by MozDef' - class message(object): '''Alert plugin that handles messages (alerts) tagged as containing information about a port scan having been detected. This plugin @@ -91,10 +90,12 @@ class message(object): config = _load_config(CONFIG_FILE) try: - self.es_address = config['elasticSearchAddress'] + es_address = config['elasticSearchAddress'] except KeyError: raise KeyError(MISSING_REQUIRED_KEY_ERR_MSG) + es_client = ElasticsearchClient(es_address) + self.search_indices = config.get('searchIndices', []) self.max_connections = config.get('maxConnections', 0) self.match_tags = config.get('matchTags', ['portscan']) @@ -109,6 +110,15 @@ class message(object): if len(self.search_window) == 0: self.search_window = { 'hours': 24 } + # Store our ES client in a closure bound to the plugin object. + # The intent behind this approach is to make the interface to + # the `enrich` function require dependency injection for testing. + def search_fn(query): + indices = indices if indices is not None else [] + return query.execute(es_client, indices=self.search_indices) + + self.search = search_fn + def onMessage(self, message): alert_tags = message.get('tags', []) @@ -121,6 +131,7 @@ class message(object): if should_enrich: return enrich( message, + self.search, self.search_window, self.max_connections, self.search_indices) @@ -147,9 +158,20 @@ def take(ls, n_items=None): return ls[:n_items] -def enrich(alert, search_window, max_connections, indices): +def enrich(alert, search_fn, search_window, max_connections): '''Enrich an alert with information about recent connections made by the 'details.sourceipaddress'. + + `search_fn` is expected to be a function that accepts a single argument, + a `SearchQuery` object, and returns a list of results from Elastic Search. + + `search_window` is expected to be a dictionary specifying the amount of + time into the past to query for events. + + `max_connections` is expected to be the maximum number of connections to + list in the modified alert or else `None` if no limit should be applied. + + Returns a modified alert based on a copy of the original. 
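Because `enrich` receives `search_fn` instead of building its own Elasticsearch client, production code and tests wire it up differently; the sketch below shows both call sites under the signature documented above. The URL, index name, and sample alert are placeholders, and it assumes the plugin module is importable.

```python
from mozdef_util.elasticsearch_client import ElasticsearchClient
from port_scan_enrichment import enrich

# Production-style wiring: a closure that captures a real client and simply
# executes whatever query it is handed.
es_client = ElasticsearchClient('http://127.0.0.1:9200')

def live_search(query):
    return query.execute(es_client, indices=['events'])

# Test-style wiring: no Elasticsearch required, just canned results in the
# same {'hits': [{'_source': ...}]} shape the function reads.
def stub_search(_query):
    return {'hits': []}

alert = {'details': {'sourceipaddress': '192.0.2.10'}}
enrich(alert, live_search, {'hours': 24}, max_connections=32)
enrich(alert, stub_search, {'hours': 24}, max_connections=None)
```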
''' search_query = SearchQuery(**search_window) @@ -162,9 +184,7 @@ def enrich(alert, search_window, max_connections, indices): alert['details']['sourceipaddress']) ]) - es_client = ElasticsearchClient(self.es_address) - - results = search_query.execute(es_client, indices=indices) + results = search_fn(search_query) events = [ hit.get('_source', {}) From e26a5a62a83b5df6547dc108a81a5785dd8ba1dd Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 27 May 2019 18:50:04 -0400 Subject: [PATCH 61/88] Wrote a simple test to ensure the enrich function behaves roughly as expected --- .../plugins/test_port_scan_enrichment.py | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 tests/alerts/plugins/test_port_scan_enrichment.py diff --git a/tests/alerts/plugins/test_port_scan_enrichment.py b/tests/alerts/plugins/test_port_scan_enrichment.py new file mode 100644 index 00000000..b3ac5c3f --- /dev/null +++ b/tests/alerts/plugins/test_port_scan_enrichment.py @@ -0,0 +1,67 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# Copyright (c) 2017 Mozilla Corporation + +import sys + +plugin_path = os.path.join(os.path.dirname(__file__), '../../../alerts/plugins') +sys.path.append(plugin_path) + +from port_scan_enrichment import enrich + + +def mock_search_fn(results): + def search_fn(_query): + return results + + return search_fn + + +class TestPortScanEnrichment(object): + def test_alert_enriched(self): + results = { + 'hits': [ + { + '_source': { + 'details': { + 'destinationipaddress': '1.2.3.4', + 'destinationport': 80 + }, + 'timestamp': '30 minutes ago' + } + }, + { + '_source': { + 'details': { + 'destinationipaddress': '4.3.2.1', + 'destinationport': 443 + }, + 'timestamp': 'an hour ago' + } + } + ] + } + + alert = { + 'details': { + 'sourceipaddress': '127.0.0.1' + } + } + + search_window = { + 'hours': 1 + } + + max_conns = 1 + + enriched = enrich( + alert, + mock_search_fn(results), + search_window, + max_conns) + + assert len(enriched['details']['recentconnections']) == 1 + assert enriched['details']['recentconnections'][0]['destinationipaddress'] in ['1.2.3.4', '4.3.2.1'] + assert enriched['details']['recentconnections'][0]['destinationport'] in [80, 443] + assert enriched['details']['recentconnections'][0]['timestamp'] in ['30 minutes ago'm 'an hour ago'] From bd80492c4d28d30b0fd5635f43254293d6de8cc6 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 27 May 2019 19:05:10 -0400 Subject: [PATCH 62/88] Syntax and formatting fixups --- alerts/plugins/port_scan_enrichment.py | 18 ++++++++---------- .../plugins/test_port_scan_enrichment.py | 3 ++- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py index a799c49d..9c950635 100644 --- a/alerts/plugins/port_scan_enrichment.py +++ b/alerts/plugins/port_scan_enrichment.py @@ -19,6 +19,7 @@ MISSING_REQUIRED_KEY_ERR_MSG = 'invalid configuration; '\ 'missing key "elasticSearchAddress" must be a URL '\ 'pointing to the ElasticSearch instance used by MozDef' + class message(object): '''Alert plugin that handles messages (alerts) tagged as containing information about a port scan having been detected. 
This plugin @@ -96,30 +97,28 @@ class message(object): es_client = ElasticsearchClient(es_address) - self.search_indices = config.get('searchIndices', []) + search_indices = config.get('searchIndices', []) self.max_connections = config.get('maxConnections', 0) self.match_tags = config.get('matchTags', ['portscan']) self.search_window = config.get('searchWindow', {}) - if len(self.search_indices) == 0: - self.search_indices = ['alerts'] + if len(search_indices) == 0: + search_indices = ['alerts'] if self.max_connections == 0: self.max_connections = None if len(self.search_window) == 0: - self.search_window = { 'hours': 24 } + self.search_window = {'hours': 24} # Store our ES client in a closure bound to the plugin object. # The intent behind this approach is to make the interface to # the `enrich` function require dependency injection for testing. def search_fn(query): - indices = indices if indices is not None else [] - return query.execute(es_client, indices=self.search_indices) + return query.execute(es_client, indices=search_indices) self.search = search_fn - def onMessage(self, message): alert_tags = message.get('tags', []) @@ -133,9 +132,8 @@ class message(object): message, self.search, self.search_window, - self.max_connections, - self.search_indices) - + self.max_connections) + return message diff --git a/tests/alerts/plugins/test_port_scan_enrichment.py b/tests/alerts/plugins/test_port_scan_enrichment.py index b3ac5c3f..2fbf6286 100644 --- a/tests/alerts/plugins/test_port_scan_enrichment.py +++ b/tests/alerts/plugins/test_port_scan_enrichment.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation +import os import sys plugin_path = os.path.join(os.path.dirname(__file__), '../../../alerts/plugins') @@ -64,4 +65,4 @@ class TestPortScanEnrichment(object): assert len(enriched['details']['recentconnections']) == 1 assert enriched['details']['recentconnections'][0]['destinationipaddress'] in ['1.2.3.4', '4.3.2.1'] assert enriched['details']['recentconnections'][0]['destinationport'] in [80, 443] - assert enriched['details']['recentconnections'][0]['timestamp'] in ['30 minutes ago'm 'an hour ago'] + assert enriched['details']['recentconnections'][0]['timestamp'] in ['30 minutes ago', 'an hour ago'] From cc9d76e5767ee352b86c8114e9587b097e713e00 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Mon, 27 May 2019 19:11:32 -0400 Subject: [PATCH 63/88] Handle the default values for the matchTags configuration option --- alerts/plugins/port_scan_enrichment.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py index 9c950635..f537e688 100644 --- a/alerts/plugins/port_scan_enrichment.py +++ b/alerts/plugins/port_scan_enrichment.py @@ -98,8 +98,9 @@ class message(object): es_client = ElasticsearchClient(es_address) search_indices = config.get('searchIndices', []) + self.max_connections = config.get('maxConnections', 0) - self.match_tags = config.get('matchTags', ['portscan']) + self.match_tags = config.get('matchTags', []) self.search_window = config.get('searchWindow', {}) if len(search_indices) == 0: @@ -108,6 +109,9 @@ class message(object): if self.max_connections == 0: self.max_connections = None + if len(self.match_tags) == 0: + self.match_tags = ['portscan'] + if len(self.search_window) == 0: self.search_window = {'hours': 24} From b258d7cf5e4a61b0a60424acdab94b225bd4ed2b Mon Sep 17 00:00:00 2001 From: Phrozyn Date: Mon, 27 May 2019 
19:56:09 -0500 Subject: [PATCH 64/88] Improving modal window text color, and button text color. --- meteor/imports/themes/classic/mozdef.css | 1 - meteor/imports/themes/dark/mozdef.css | 14 +++++++++++--- meteor/imports/themes/side_nav_dark/mozdef.css | 8 ++++++++ 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/meteor/imports/themes/classic/mozdef.css b/meteor/imports/themes/classic/mozdef.css index 656ac882..f2ae6237 100644 --- a/meteor/imports/themes/classic/mozdef.css +++ b/meteor/imports/themes/classic/mozdef.css @@ -262,7 +262,6 @@ h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 { .btn { border: 1px outset; border-radius: 4px; - color: #999; } diff --git a/meteor/imports/themes/dark/mozdef.css b/meteor/imports/themes/dark/mozdef.css index b148e14e..4a45c6da 100644 --- a/meteor/imports/themes/dark/mozdef.css +++ b/meteor/imports/themes/dark/mozdef.css @@ -227,9 +227,17 @@ caption, legend { color: white; } -.modal-body .row { - color: black; -} +.modal-header { + color: var(--font-focus); + } + + .modal-body { + color: var(--font-focus); + } + + .modal-body .row { + color: black; + } /*bootstrap overrides*/ .btn { diff --git a/meteor/imports/themes/side_nav_dark/mozdef.css b/meteor/imports/themes/side_nav_dark/mozdef.css index 39c48f27..735fd8a8 100644 --- a/meteor/imports/themes/side_nav_dark/mozdef.css +++ b/meteor/imports/themes/side_nav_dark/mozdef.css @@ -232,6 +232,14 @@ caption, legend { color: white; } +.modal-header { + color: var(--font-focus); +} + +.modal-body { + color: var(--font-focus); +} + .modal-body .row { color: black; } From 678feda639b40621ba36e7467f01502e96894898 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Tue, 28 May 2019 09:41:57 -0500 Subject: [PATCH 65/88] Fix side nav dark relative kibana url --- meteor/client/themes/side_nav_dark/menu.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meteor/client/themes/side_nav_dark/menu.html b/meteor/client/themes/side_nav_dark/menu.html index f7c75f1d..c7587093 100644 --- a/meteor/client/themes/side_nav_dark/menu.html +++ b/meteor/client/themes/side_nav_dark/menu.html @@ -31,7 +31,7 @@ Copyright (c) 2014 Mozilla Corporation {{#if isFeature "kibana"}}
  • - + From 6d39acbfbaa3f8592130f41d8a9a3cd3c1947931 Mon Sep 17 00:00:00 2001 From: Andrew Krug Date: Tue, 28 May 2019 10:31:25 -0700 Subject: [PATCH 66/88] address PR nits --- alerts/lib/config.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/alerts/lib/config.py b/alerts/lib/config.py index b99be01e..0a06b269 100644 --- a/alerts/lib/config.py +++ b/alerts/lib/config.py @@ -32,14 +32,14 @@ RABBITMQ = { 'alertqueue': 'mozdef.alert' } +es_server = "http://localhost:9200" + if os.getenv('OPTIONS_ESSERVERS'): - ES = { - 'servers': [os.getenv('OPTIONS_ESSERVERS')] - } -else: - ES = { - 'servers': ['http://localhost:9200'] - } + es_server = os.getenv('OPTIONS_ESSERVERS') + +ES = { + 'servers': [es_server] +} LOGGING = { 'version': 1, From 2d48be6cb92ada54f919ba695640a3b879d3b094 Mon Sep 17 00:00:00 2001 From: Jon Moroney Date: Mon, 6 May 2019 10:57:42 -0700 Subject: [PATCH 67/88] Add json array input decode with try catch to fall back to string splitting --- loginput/index.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/loginput/index.py b/loginput/index.py index c3cf7005..387dc78f 100644 --- a/loginput/index.py +++ b/loginput/index.py @@ -41,9 +41,21 @@ def bulkindex(): bulkpost=request.body.read() # bottlelog('request:{0}\n'.format(bulkpost)) request.body.close() - if len(bulkpost)>10: # TODO Check for bulk format. - # iterate on messages and post to event message queue + try: # Handles json array bulk format [{},{},...] + messages = json.loads(bulkpost) + for event in messages: + # don't post the items telling us where to post things.. + if 'index' not in event: + ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10) + ensurePublish(event,exchange=eventTaskExchange,routing_key=options.taskexchange) + return + except ValueError as e: + bottlelog('Decoded raw input failed with {0}'.format(e)) + pass + if len(bulkpost)>10: # Handles single element format {} + # TODO Check for other bulk formats. 
+ # iterate on messages and post to event message queue eventlist=[] for i in bulkpost.splitlines(): eventlist.append(i) From e7da241f0a20cf832344172ef839853d4d7dbf07 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 29 May 2019 12:45:14 -0500 Subject: [PATCH 68/88] Update changelog for 1.39.0 --- CHANGELOG | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index ecef385f..3224fa14 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -5,6 +5,22 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] +## [v1.39.0] - 2019-05-29 +### Added +- Pagination of Web UI tables +- Added support for SQS in replacement of Rabbitmq for alerts +- Support for no_auth for watchlist +- Cron script for closing indexes +- Documentation on AlertActions + +### Changed +- Removed dependency on '_type' field in Elasticsearch + +### Fixed +- Slackbot reconnects successfully during network errors +- Relative Kibana URLs now work correctly with protocol + + ## [v1.38.5] - 2019-04-09 ### Added - Support for CSS themes @@ -76,7 +92,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added checks on sending SQS messages to only accept intra-account messages - Improved docker performance and disk space requirements -[Unreleased]: https://github.com/mozilla/MozDef/compare/v1.38.5...HEAD +[Unreleased]: https://github.com/mozilla/MozDef/compare/v1.39.0...HEAD +[v1.39.0]: https://github.com/mozilla/MozDef/compare/v1.38.5...v1.39.0 [v1.38.5]: https://github.com/mozilla/MozDef/compare/v1.38.4...v1.38.5 [v1.38.4]: https://github.com/mozilla/MozDef/compare/v1.38.3...v1.38.4 [v1.38.3]: https://github.com/mozilla/MozDef/compare/v1.38.2...v1.38.3 From d4e7a946886498f654fbe915fee679875a766976 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 29 May 2019 15:09:44 -0500 Subject: [PATCH 69/88] Revert "Use wildcard in indices for searching" This reverts commit 3e93e5ea51efc3d0e272509ab719878ea4c1d0c8. 
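For context on what this revert restores: alert queries go back to naming the hot index aliases explicitly rather than using the `events-*` wildcard, so by default only the current and previous event indices are searched. A minimal sketch of that query style is below, using only the `mozdef_util` calls that already appear in this series; the Elasticsearch URL and the match criteria are placeholders.

```python
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import SearchQuery, TermMatch

es_client = ElasticsearchClient('http://127.0.0.1:9200')

# Search only the aliases that front the hot indices; alerts that need a
# longer lookback (such as the generic deadman alert below) override this
# with a wider alias instead of a wildcard.
query = SearchQuery(minutes=20)
query.add_must(TermMatch('category', 'bro'))
results = query.execute(es_client, indices=['events', 'events-previous'])
```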
--- alerts/lib/alerttask.py | 5 +---- cron/syncAlertsToMongo.py | 2 +- mozdef_util/mozdef_util/query_models/search_query.py | 2 +- rest/plugins/logincounts.py | 5 +---- 4 files changed, 4 insertions(+), 10 deletions(-) diff --git a/alerts/lib/alerttask.py b/alerts/lib/alerttask.py index 0940379b..1313228e 100644 --- a/alerts/lib/alerttask.py +++ b/alerts/lib/alerttask.py @@ -106,10 +106,7 @@ class AlertTask(Task): self._configureKombu() self._configureES() - # We want to select all event indices - # and filter out the window based on timestamp - # from the search query - self.event_indices = ["events-*"] + self.event_indices = ['events', 'events-previous'] def classname(self): return self.__class__.__name__ diff --git a/cron/syncAlertsToMongo.py b/cron/syncAlertsToMongo.py index c41779e2..22e70faa 100755 --- a/cron/syncAlertsToMongo.py +++ b/cron/syncAlertsToMongo.py @@ -26,7 +26,7 @@ def getESAlerts(es): # We use an ExistsMatch here just to satisfy the # requirements of a search query must have some "Matchers" search_query.add_must(ExistsMatch('summary')) - results = search_query.execute(es, indices=['alerts-*'], size=10000) + results = search_query.execute(es, indices=['alerts'], size=10000) return results diff --git a/mozdef_util/mozdef_util/query_models/search_query.py b/mozdef_util/mozdef_util/query_models/search_query.py index ba9df955..4bfc4ee8 100644 --- a/mozdef_util/mozdef_util/query_models/search_query.py +++ b/mozdef_util/mozdef_util/query_models/search_query.py @@ -46,7 +46,7 @@ class SearchQuery(object): def add_aggregation(self, input_obj): self.append_to_array(self.aggregation, input_obj) - def execute(self, elasticsearch_client, indices=['events-*'], size=1000, request_timeout=30): + def execute(self, elasticsearch_client, indices=['events', 'events-previous'], size=1000, request_timeout=30): if self.must == [] and self.must_not == [] and self.should == [] and self.aggregation == []: raise AttributeError('Must define a must, must_not, should query, or aggregation') diff --git a/rest/plugins/logincounts.py b/rest/plugins/logincounts.py index 0cc7d958..62be468e 100644 --- a/rest/plugins/logincounts.py +++ b/rest/plugins/logincounts.py @@ -80,10 +80,7 @@ class message(object): search_query.add_aggregation(Aggregation('details.success')) search_query.add_aggregation(Aggregation('details.username')) - # We want to select all event indices - # and filter out the window based on timestamp - # from the search query - results = search_query.execute(es_client, indices=['events-*']) + results = search_query.execute(es_client, indices=['events','events-previous']) # any usernames or words to ignore # especially useful if ES is analyzing the username field and breaking apart user@somewhere.com From e04e7a7fbd7d8f68b7db06e9e51a7af099f72c76 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 29 May 2019 15:10:23 -0500 Subject: [PATCH 70/88] Override event indices in generic deadman alert --- alerts/deadman_generic.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/alerts/deadman_generic.py b/alerts/deadman_generic.py index 8c26cd02..279472ac 100644 --- a/alerts/deadman_generic.py +++ b/alerts/deadman_generic.py @@ -14,6 +14,10 @@ from mozdef_util.utilities.logger import logger class AlertDeadmanGeneric(DeadmanAlertTask): def main(self): + # We override the event indices to search for + # because our deadman alerts might look past 48 hours + self.event_indices = ["events-*"] + self._config = self.parse_json_alert_config('deadman_generic.json') for alert_cfg in 
self._config['alerts']: try: From 749979280bcfc2fa6799c2d6a79a56ffc0a77b6e Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Wed, 29 May 2019 16:29:03 -0400 Subject: [PATCH 71/88] Add missing json import... again? --- alerts/plugins/ip_source_enrichment.py | 1 + 1 file changed, 1 insertion(+) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 5c4fc351..7a44753a 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2014 Mozilla Corporation +import json from operator import add import os import re From bd3d2ba510d91e7bebdb068341c38124e68e5b4f Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Wed, 29 May 2019 18:31:48 -0400 Subject: [PATCH 72/88] Default to searching the events-weekly index since this is more appropriate in most cases --- alerts/plugins/port_scan_enrichment.json | 2 +- alerts/plugins/port_scan_enrichment.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/alerts/plugins/port_scan_enrichment.json b/alerts/plugins/port_scan_enrichment.json index b59f9220..a5dc3fe4 100644 --- a/alerts/plugins/port_scan_enrichment.json +++ b/alerts/plugins/port_scan_enrichment.json @@ -1,7 +1,7 @@ { "elasticSearchAddress": "http://127.0.0.1:9200", "indicesToSearch": [ - "events-*" + "events-weekly" ], "maxConnections": 32, "matchTags": [ diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py index f537e688..71e35715 100644 --- a/alerts/plugins/port_scan_enrichment.py +++ b/alerts/plugins/port_scan_enrichment.py @@ -34,7 +34,7 @@ class message(object): { "elasticSearchAddress": "http://127.0.0.1:9200", "indicesToSearch": [ - "events-*" + "events-weekly" ], "maxConnections": 32, "matchTags": [ @@ -51,7 +51,7 @@ class message(object): `elasticSearchAddress` must be provided and must be a URL pointing to the ElasticSearch instance containing MozDef alerts. `indicesToSearch` is an array of names of indices to search in ES. - If not provided or else an empty array, it defaults to `["events-*"]`. + If not provided or else an empty array, it defaults to `["events-weekly"]`. `maxConnections` is the maximum number of successful connections to list. If set to 0, all will be listed. `matchTags` is a list of tags to match against. 
This plugin will From d46c6d01c923994d3267d75abd1bd7fb7639aabd Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Thu, 30 May 2019 10:07:56 -0500 Subject: [PATCH 73/88] Update deadman generic alert to use events-weekly as index --- alerts/deadman_generic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alerts/deadman_generic.py b/alerts/deadman_generic.py index 279472ac..0510d69c 100644 --- a/alerts/deadman_generic.py +++ b/alerts/deadman_generic.py @@ -16,7 +16,7 @@ class AlertDeadmanGeneric(DeadmanAlertTask): def main(self): # We override the event indices to search for # because our deadman alerts might look past 48 hours - self.event_indices = ["events-*"] + self.event_indices = ["events-weekly"] self._config = self.parse_json_alert_config('deadman_generic.json') for alert_cfg in self._config['alerts']: From e3c4ae124823c1af9fc9fb5a0354e01215325cab Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Thu, 30 May 2019 10:22:11 -0500 Subject: [PATCH 74/88] Add events-weekly in test suite --- tests/unit_test_suite.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit_test_suite.py b/tests/unit_test_suite.py index 14fd1e94..4d6b474c 100644 --- a/tests/unit_test_suite.py +++ b/tests/unit_test_suite.py @@ -60,6 +60,7 @@ class UnitTestSuite(object): self.es_client.create_alias('events', self.event_index_name) self.es_client.create_index(self.previous_event_index_name, index_config=self.mapping_options) self.es_client.create_alias('events-previous', self.previous_event_index_name) + self.es_client.create_alias_multiple_indices('events-weekly', ['events', 'events-previous']) self.es_client.create_index(self.alert_index_name, index_config=self.mapping_options) self.es_client.create_alias('alerts', self.alert_index_name) From abd2df15d675cc3d27e21b5f68661b0a2ab8f407 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Thu, 30 May 2019 17:19:41 -0500 Subject: [PATCH 75/88] Fix jquery highlights in web ui --- meteor/client/js/jquery.highlight.js | 108 --------------------------- meteor/client/mozdef.js | 31 ++++---- 2 files changed, 15 insertions(+), 124 deletions(-) delete mode 100644 meteor/client/js/jquery.highlight.js diff --git a/meteor/client/js/jquery.highlight.js b/meteor/client/js/jquery.highlight.js deleted file mode 100644 index 9dcf3c7a..00000000 --- a/meteor/client/js/jquery.highlight.js +++ /dev/null @@ -1,108 +0,0 @@ -/* - * jQuery Highlight plugin - * - * Based on highlight v3 by Johann Burkard - * http://johannburkard.de/blog/programming/javascript/highlight-javascript-text-higlighting-jquery-plugin.html - * - * Code a little bit refactored and cleaned (in my humble opinion). 
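The `events-weekly` changes above assume an alias layout in which `events-weekly` spans both the current and the previous event index. A rough sketch of that layout, using the same client calls the test suite uses above; the dated index names are illustrative and the mapping configuration the test suite passes to `create_index` is omitted for brevity.

```python
from mozdef_util.elasticsearch_client import ElasticsearchClient

es_client = ElasticsearchClient('http://127.0.0.1:9200')

# Two dated indices, each fronted by its own alias...
es_client.create_index('events-20190530')
es_client.create_alias('events', 'events-20190530')
es_client.create_index('events-20190529')
es_client.create_alias('events-previous', 'events-20190529')

# ...plus one alias spanning both, which deadman-style alerts query when
# their window reaches past the hot index.
es_client.create_alias_multiple_indices('events-weekly', ['events', 'events-previous'])
```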
- * Most important changes: - * - has an option to highlight only entire words (wordsOnly - false by default), - * - has an option to be case sensitive (caseSensitive - false by default) - * - highlight element tag and class names can be specified in options - * - * Usage: - * // wrap every occurrance of text 'lorem' in content - * // with (default options) - * $('#content').highlight('lorem'); - * - * // search for and highlight more terms at once - * // so you can save some time on traversing DOM - * $('#content').highlight(['lorem', 'ipsum']); - * $('#content').highlight('lorem ipsum'); - * - * // search only for entire word 'lorem' - * $('#content').highlight('lorem', { wordsOnly: true }); - * - * // don't ignore case during search of term 'lorem' - * $('#content').highlight('lorem', { caseSensitive: true }); - * - * // wrap every occurrance of term 'ipsum' in content - * // with - * $('#content').highlight('ipsum', { element: 'em', className: 'important' }); - * - * // remove default highlight - * $('#content').unhighlight(); - * - * // remove custom highlight - * $('#content').unhighlight({ element: 'em', className: 'important' }); - * - * - * Copyright (c) 2009 Bartek Szopka - * - * Licensed under MIT license. - * - */ - -jQuery.extend({ - highlight: function (node, re, nodeName, className) { - if (node.nodeType === 3) { - var match = node.data.match(re); - if (match) { - var highlight = document.createElement(nodeName || 'span'); - highlight.className = className || 'highlight'; - var wordNode = node.splitText(match.index); - wordNode.splitText(match[0].length); - var wordClone = wordNode.cloneNode(true); - highlight.appendChild(wordClone); - wordNode.parentNode.replaceChild(highlight, wordNode); - return 1; //skip added node in parent - } - } else if ((node.nodeType === 1 && node.childNodes) && // only element nodes that have children - !/(script|style)/i.test(node.tagName) && // ignore script and style nodes - !(node.tagName === nodeName.toUpperCase() && node.className === className)) { // skip if already highlighted - for (var i = 0; i < node.childNodes.length; i++) { - i += jQuery.highlight(node.childNodes[i], re, nodeName, className); - } - } - return 0; - } -}); - -jQuery.fn.unhighlight = function (options) { - var settings = { className: 'highlight', element: 'span' }; - jQuery.extend(settings, options); - - return this.find(settings.element + "." + settings.className).each(function () { - var parent = this.parentNode; - parent.replaceChild(this.firstChild, this); - parent.normalize(); - }).end(); -}; - -jQuery.fn.highlight = function (words, options) { - var settings = { className: 'highlight', element: 'span', caseSensitive: false, wordsOnly: false }; - jQuery.extend(settings, options); - - if (words.constructor === String) { - words = [words]; - } - words = jQuery.grep(words, function(word, i){ - return word != ''; - }); - words = jQuery.map(words, function(word, i) { - return word.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); - }); - if (words.length == 0) { return this; }; - - var flag = settings.caseSensitive ? 
"" : "i"; - var pattern = "(" + words.join("|") + ")"; - if (settings.wordsOnly) { - pattern = "\\b" + pattern + "\\b"; - } - var re = new RegExp(pattern, flag); - - return this.each(function () { - jQuery.highlight(this, re, settings.element, settings.className); - }); -}; - diff --git a/meteor/client/mozdef.js b/meteor/client/mozdef.js index e5257c07..53e9f79e 100644 --- a/meteor/client/mozdef.js +++ b/meteor/client/mozdef.js @@ -269,6 +269,19 @@ if ( Meteor.isClient ) { return pluginsForEndPoint( endpoint ); } ); + jQuery.fn.highlight = function (str, className) { + var regex = new RegExp(str, "gi"); + return this.each(function () { + $(this).contents().filter(function() { + return this.nodeType == 3 && regex.test(this.nodeValue); + }).replaceWith(function() { + return (this.nodeValue || "").replace(regex, function(match) { + return "" + match + ""; + }); + }); + }); + }; + UI.registerHelper( 'ipDecorate', function( elementText ) { //decorate text containing an ipv4 address var anelement = $( $.parseHTML( '' + elementText + '' ) ) @@ -277,23 +290,9 @@ if ( Meteor.isClient ) { //clean up potential interference chars w = w.replace( /,|:|;|\[|\]/g, '' ) if ( isIPv4( w ) ) { - //console.log(w); - anelement. - highlight( w, - { - wordsOnly: false, - element: "em", - className: "ipaddress" - } ); + anelement.highlight(w, 'ipaddress'); } else if ( isHostname( w ) ) { - //console.log(w); - anelement. - highlight( w, - { - wordsOnly: false, - element: "em", - className: "hostname" - } ); + anelement.highlight(w, 'hostname'); } } ); //add a drop down menu to any .ipaddress From d5b86e099efdeff1c62e1a11cf8eb0623d0d465c Mon Sep 17 00:00:00 2001 From: Phrozyn Date: Fri, 31 May 2019 14:39:24 -0500 Subject: [PATCH 76/88] Adjusting acked colors. --- meteor/imports/themes/classic/mozdef.css | 29 ++++++++++-- meteor/imports/themes/dark/mozdef.css | 46 +++++++++++++------ meteor/imports/themes/light/mozdef.css | 28 +++++++++-- .../imports/themes/side_nav_dark/mozdef.css | 12 ++++- 4 files changed, 92 insertions(+), 23 deletions(-) diff --git a/meteor/imports/themes/classic/mozdef.css b/meteor/imports/themes/classic/mozdef.css index f2ae6237..315528f3 100644 --- a/meteor/imports/themes/classic/mozdef.css +++ b/meteor/imports/themes/classic/mozdef.css @@ -13,6 +13,9 @@ Copyright (c) 2014 Mozilla Corporation --txt-secondary-color: #000; --txt-shadow-color: #000; --txt-highlight-color: rgba(165, 170, 172, 0.904); + --arm-color: #d1b61e; + --arm-focus-color: #e7c714a9; + --txt-disabled-color: #576d54; --a-link-color: rgb(245, 222, 179); --row-color-odd: rgba(30,87,153,.7); --row-color-even: #636c85; @@ -260,9 +263,29 @@ h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 { } .btn { - border: 1px outset; - border-radius: 4px; -} + border: 1px outset; + border-radius: 4px; + color: var(--txt-primary-color); + background-color: var(--arm-color); + } + + .btn-warning.active, + .btn-warning:active, + .btn-warning:hover, + .open > .dropdown-toggle.btn-warning { + color: var(--txt-secondary-color); + background-color: var(--arm-focus-color); + border-color: var(--arm-color); + } + + .btnAlertAcked, + .btnAlertAcked.active, + .btnAlertAcked:active, + .btnAlertAcked:hover > .btn { + color: var(--txt-disabled-color); + background-color: var(--arm-focus-color); + border-color: var(--arm-color); + } input[type="search"] { diff --git a/meteor/imports/themes/dark/mozdef.css b/meteor/imports/themes/dark/mozdef.css index 4a45c6da..32c1acb8 100644 --- a/meteor/imports/themes/dark/mozdef.css +++ 
b/meteor/imports/themes/dark/mozdef.css @@ -11,14 +11,11 @@ Copyright (c) 2014 Mozilla Corporation --bg-secondary-color: #2d5fa0; --row-color-odd: #2a2f35; --row-color-even: #636c85; - --ack-edit-color: #a2a9b2; - --ack-edit-border-color: #adadad; - --ack-edit-focus-color: #557750; - --ack-edit-disabled-color: #557750; --arm-color: #e69006; --arm-focus-color: #d58512; --txt-primary-color: #fff; --txt-secondary-color: #000; + --txt-disabled-color: #576d54; --a-link-color: #a2a9b2; } @@ -193,23 +190,23 @@ caption, legend { .alert.alert-NOTICE { --alert-bg-color: #4a6785; --alert-color: white; - } +} .alert.alert-WARNING { --alert-bg-color: #ffd351; --alert-color: black; - } +} .alert.alert-CRITICAL { --alert-bg-color: #d04437; --alert-color: white; - } +} .alert.alert-INFO { --alert-bg-color: #cccccc; --alert-color: black; - } +} .alert.alert-ERROR { --alert-bg-color: #d04437; --alert-color: white; - } +} .alert { color: var(--alert-color); @@ -217,7 +214,7 @@ caption, legend { text-transform: uppercase; display: table-cell; font-weight: bold; - } +} .alert-row a { color: wheat; @@ -229,22 +226,41 @@ caption, legend { .modal-header { color: var(--font-focus); - } +} .modal-body { color: var(--font-focus); - } +} .modal-body .row { color: black; - } +} /*bootstrap overrides*/ .btn { border: 1px outset; border-radius: 4px; - color: #999; - } + color: var(--txt-primary-color); + background-color: var(--arm-color); +} + + .btn-warning.active, + .btn-warning:active, + .btn-warning:hover, + .open > .dropdown-toggle.btn-warning { + color: var(--txt-secondary-color); + background-color: var(--arm-focus-color); + border-color: var(--arm-color); +} + + .btnAlertAcked, + .btnAlertAcked.active, + .btnAlertAcked:active, + .btnAlertAcked:hover > .btn { + color: var(--txt-disabled-color); + background-color: var(--arm-focus-color); + border-color: var(--arm-color); +} input[type="search"] { diff --git a/meteor/imports/themes/light/mozdef.css b/meteor/imports/themes/light/mozdef.css index a75cc751..5a215f64 100644 --- a/meteor/imports/themes/light/mozdef.css +++ b/meteor/imports/themes/light/mozdef.css @@ -13,6 +13,8 @@ Copyright (c) 2014 Mozilla Corporation --txt-secondary-color: #fff; --txt-shadow-color: #aaa; --txt-highlight-color: rgba(165, 170, 172, 0.904); + --arm-color: #d1b61e; + --arm-focus-color: #e7c714a9; --a-link-color: rgb(49, 130, 189); --row-color-odd: rgba(30,87,153,.1); --row-color-even: #636c85; @@ -268,9 +270,29 @@ h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 { } .btn { - border: 1px outset; - border-radius: 4px; -} + border: 1px outset; + border-radius: 4px; + color: var(--txt-primary-color); + background-color: var(--arm-color); + } + + .btn-warning.active, + .btn-warning:active, + .btn-warning:hover, + .open > .dropdown-toggle.btn-warning { + color: var(--txt-secondary-color); + background-color: var(--arm-focus-color); + border-color: var(--arm-color); + } + + .btnAlertAcked, + .btnAlertAcked.active, + .btnAlertAcked:active, + .btnAlertAcked:hover > .btn { + color: var(--txt-shadow-color); + background-color: var(--arm-focus-color); + border-color: var(--arm-color); + } input[type="search"] { diff --git a/meteor/imports/themes/side_nav_dark/mozdef.css b/meteor/imports/themes/side_nav_dark/mozdef.css index 735fd8a8..c498f6bb 100644 --- a/meteor/imports/themes/side_nav_dark/mozdef.css +++ b/meteor/imports/themes/side_nav_dark/mozdef.css @@ -18,8 +18,7 @@ Copyright (c) 2014 Mozilla Corporation --row-color-even: #636c85; --ack-edit-color: #a2a9b2; --ack-edit-border-color: 
#adadad; - --ack-edit-focus-color: #557750; - --ack-edit-disabled-color: #557750; + --txt-shadow-color: #576d54; --arm-color: #e69006; --arm-focus-color: #d58512; --font-main: #fff; @@ -261,6 +260,15 @@ caption, legend { border-color: var(--arm-color); } +.btnAlertAcked, +.btnAlertAcked.active, +.btnAlertAcked:active, +.btnAlertAcked:hover > .btn { + color: var(--txt-shadow-color); + background-color: var(--arm-focus-color); + border-color: var(--arm-color); +} + .btn-notice { border: 1px outset; border-radius: 4px; From 17fa966521950d916150358b2a13a739ad110d5d Mon Sep 17 00:00:00 2001 From: Phrozyn Date: Fri, 31 May 2019 14:47:08 -0500 Subject: [PATCH 77/88] adjusting format. --- meteor/imports/themes/classic/mozdef.css | 4 +- meteor/imports/themes/dark/mozdef.css | 168 +++++++++++------------ meteor/imports/themes/light/mozdef.css | 18 +-- 3 files changed, 95 insertions(+), 95 deletions(-) diff --git a/meteor/imports/themes/classic/mozdef.css b/meteor/imports/themes/classic/mozdef.css index 315528f3..8becb163 100644 --- a/meteor/imports/themes/classic/mozdef.css +++ b/meteor/imports/themes/classic/mozdef.css @@ -19,7 +19,7 @@ Copyright (c) 2014 Mozilla Corporation --a-link-color: rgb(245, 222, 179); --row-color-odd: rgba(30,87,153,.7); --row-color-even: #636c85; - } +} html{ background: none; @@ -313,7 +313,7 @@ input[type="search"] { .table-striped > tbody > tr:nth-of-type(2n+1) { background-color: var(--row-color-even) - } +} .table-hover tbody tr:hover > td, .table-hover tbody tr:hover > th, diff --git a/meteor/imports/themes/dark/mozdef.css b/meteor/imports/themes/dark/mozdef.css index 32c1acb8..cee6db3c 100644 --- a/meteor/imports/themes/dark/mozdef.css +++ b/meteor/imports/themes/dark/mozdef.css @@ -17,7 +17,7 @@ Copyright (c) 2014 Mozilla Corporation --txt-secondary-color: #000; --txt-disabled-color: #576d54; --a-link-color: #a2a9b2; - } +} /*base css */ html{ @@ -428,104 +428,104 @@ sidenav { -ms-transition: all 400ms ease; -o-transition: all 400ms ease; transition: all 400ms ease; - } +} /*pull out triangle*/ +sidenav:after { + position: absolute; + content: ' '; + width: 0; + height: 0; + right: -75px; + top: 50%; + border-width: 30px 30px; + border-style: solid; + border-color: transparent transparent transparent var(--bg-secondary-color); +} +sidenav ul { + width: 14em; + list-style-type: none; + margin: auto; + padding: 1em; +} +sidenav div{ + margin:auto; +} +sidenav:hover { + left: 0; +} +sidenav .filters-col .row { + margin-top: 45px; + padding: 0 0.5em; +} +sidenav .reset-filter { + text-align: center; + margin-top: 20px; +} +.form-horizontal .form-group { + margin-left: 5px; + margin-right: 5px; + padding-top: 5px; +} + +@media screen and (max-width: 1000px) { + sidenav { + background: var(--bg-primary-color); + border-left: 15px solid var(--bg-secondary-color); + text-align: left; + font-weight: bolder; + position: fixed; + top: 0; + bottom: 0; + height: 100%; + right: -16em; + margin: 0em; + padding-top: 1em; + display: inline-block; + line-height: normal; + -webkit-transform: translateZ(0) scale(1, 1); + z-index: 3; + -webkit-transition: all 400ms ease; + -moz-transition: all 400ms ease; + -ms-transition: all 400ms ease; + -o-transition: all 400ms ease; + transition: all 400ms ease; + } sidenav:after { - position: absolute; - content: ' '; - width: 0; - height: 0; - right: -75px; - top: 50%; - border-width: 30px 30px; - border-style: solid; - border-color: transparent transparent transparent var(--bg-secondary-color); + right: 230px; + border-top: 0; + border-bottom: 
0; + border-right: 0; + content: none; } sidenav ul { - width: 14em; - list-style-type: none; - margin: auto; - padding: 1em; + width: 14em; + list-style-type: none; + margin: auto; + padding: 1em; } sidenav div{ - margin:auto; + margin:auto; } sidenav:hover { - left: 0; + right: 0; + width: 230px; + overflow-y: scroll; + scrollbar-width: inherit; + scrollbar-color: var(--bg-secondary-color) black; } sidenav .filters-col .row { - margin-top: 45px; - padding: 0 0.5em; + margin-top: 25px; + padding: 0 1.5em; } sidenav .reset-filter { - text-align: center; - margin-top: 20px; + text-align: center; + margin-top: 20px; } - .form-horizontal .form-group { - margin-left: 5px; - margin-right: 5px; - padding-top: 5px; - } - - @media screen and (max-width: 1000px) { - sidenav { - background: var(--bg-primary-color); - border-left: 15px solid var(--bg-secondary-color); - text-align: left; - font-weight: bolder; - position: fixed; - top: 0; - bottom: 0; - height: 100%; - right: -16em; - margin: 0em; - padding-top: 1em; - display: inline-block; - line-height: normal; - -webkit-transform: translateZ(0) scale(1, 1); - z-index: 3; - -webkit-transition: all 400ms ease; - -moz-transition: all 400ms ease; - -ms-transition: all 400ms ease; - -o-transition: all 400ms ease; - transition: all 400ms ease; - } - sidenav:after { - right: 230px; - border-top: 0; - border-bottom: 0; - border-right: 0; - content: none; - } - sidenav ul { - width: 14em; - list-style-type: none; - margin: auto; - padding: 1em; - } - sidenav div{ - margin:auto; - } - sidenav:hover { - right: 0; - width: 230px; - overflow-y: scroll; - scrollbar-width: inherit; - scrollbar-color: var(--bg-secondary-color) black; - } - sidenav .filters-col .row { - margin-top: 25px; - padding: 0 1.5em; - } - sidenav .reset-filter { - text-align: center; - margin-top: 20px; - } - div.dc-chart { - float: none; - } + div.dc-chart { + float: none; } +} /* globe styling */ .globe-container { diff --git a/meteor/imports/themes/light/mozdef.css b/meteor/imports/themes/light/mozdef.css index 5a215f64..d352ce24 100644 --- a/meteor/imports/themes/light/mozdef.css +++ b/meteor/imports/themes/light/mozdef.css @@ -195,23 +195,23 @@ caption, legend { .alert.alert-NOTICE { --alert-bg-color: #4a6785; --alert-color: black; - } +} .alert.alert-WARNING { --alert-bg-color: #ffd351; --alert-color: black; - } +} .alert.alert-CRITICAL { --alert-bg-color: #d04437; --alert-color: black; - } +} .alert.alert-INFO { --alert-bg-color: #cccccc; --alert-color: black; - } +} .alert.alert-ERROR { --alert-bg-color: #d04437; --alert-color: black; - } +} .alert { color: var(--alert-color); @@ -219,7 +219,7 @@ caption, legend { text-transform: uppercase; display: table-cell; font-weight: bold; - } +} .alert-row a { color: var(--a-link-color); @@ -274,7 +274,7 @@ h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 { border-radius: 4px; color: var(--txt-primary-color); background-color: var(--arm-color); - } +} .btn-warning.active, .btn-warning:active, @@ -283,7 +283,7 @@ h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 { color: var(--txt-secondary-color); background-color: var(--arm-focus-color); border-color: var(--arm-color); - } +} .btnAlertAcked, .btnAlertAcked.active, @@ -292,7 +292,7 @@ h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 { color: var(--txt-shadow-color); background-color: var(--arm-focus-color); border-color: var(--arm-color); - } +} input[type="search"] { From 7904b32b442777e0f8af7895cd2c1952519876fe Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Fri, 31 May 2019 
17:02:59 -0400 Subject: [PATCH 78/88] Use a timestamp example consistent with the format we actually use --- alerts/plugins/port_scan_enrichment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py index 71e35715..b66971a5 100644 --- a/alerts/plugins/port_scan_enrichment.py +++ b/alerts/plugins/port_scan_enrichment.py @@ -75,7 +75,7 @@ class message(object): { "destinationipaddress": "1.2.3.4", "destinationport": 80, - "timestamp": "May 27th 2019, 20:29:27.516" + "timestamp": "2016-07-13 22:33:31.625443+00:00" } ] } From 0a1783e8fcbc66f14aa329b6a6816e1942fe7c93 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Fri, 31 May 2019 17:03:18 -0400 Subject: [PATCH 79/88] Don't need to copy the alert before modifying; mutation is part of the interface expectations --- alerts/plugins/port_scan_enrichment.py | 1 - 1 file changed, 1 deletion(-) diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py index b66971a5..c649fbbf 100644 --- a/alerts/plugins/port_scan_enrichment.py +++ b/alerts/plugins/port_scan_enrichment.py @@ -193,7 +193,6 @@ def enrich(alert, search_fn, search_window, max_connections): for hit in results.get('hits', []) ] - alert = alert.copy() alert['details']['recentconnections'] = [] for event in take(events, max_connections): From 53b623e77de20c860cb7712f86e5ad0ac5ec959c Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Fri, 31 May 2019 17:09:58 -0400 Subject: [PATCH 80/88] Use a timestamp example consistent with the format we actually use --- tests/alerts/plugins/test_port_scan_enrichment.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/alerts/plugins/test_port_scan_enrichment.py b/tests/alerts/plugins/test_port_scan_enrichment.py index 2fbf6286..a7567e0a 100644 --- a/tests/alerts/plugins/test_port_scan_enrichment.py +++ b/tests/alerts/plugins/test_port_scan_enrichment.py @@ -12,6 +12,9 @@ sys.path.append(plugin_path) from port_scan_enrichment import enrich +EXAMPLE_TIMESTAMP = '2016-07-13 22:33:31.625443+00:00' + + def mock_search_fn(results): def search_fn(_query): return results @@ -29,7 +32,7 @@ class TestPortScanEnrichment(object): 'destinationipaddress': '1.2.3.4', 'destinationport': 80 }, - 'timestamp': '30 minutes ago' + 'timestamp': EXAMPLE_TIMESTAMP } }, { @@ -38,7 +41,7 @@ class TestPortScanEnrichment(object): 'destinationipaddress': '4.3.2.1', 'destinationport': 443 }, - 'timestamp': 'an hour ago' + 'timestamp': EXAMPLE_TIMESTAMP } } ] @@ -65,4 +68,4 @@ class TestPortScanEnrichment(object): assert len(enriched['details']['recentconnections']) == 1 assert enriched['details']['recentconnections'][0]['destinationipaddress'] in ['1.2.3.4', '4.3.2.1'] assert enriched['details']['recentconnections'][0]['destinationport'] in [80, 443] - assert enriched['details']['recentconnections'][0]['timestamp'] in ['30 minutes ago', 'an hour ago'] + assert enriched['details']['recentconnections'][0]['timestamp'] == EXAMPLE_TIMESTAMP From d9a0c44c53a1623baef89248c24c910494b33566 Mon Sep 17 00:00:00 2001 From: Emma Rose Date: Fri, 31 May 2019 17:12:21 -0400 Subject: [PATCH 81/88] Use a TermMatch instead of a PhraseMatch --- alerts/plugins/port_scan_enrichment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/alerts/plugins/port_scan_enrichment.py b/alerts/plugins/port_scan_enrichment.py index c649fbbf..a7f97772 100644 --- a/alerts/plugins/port_scan_enrichment.py +++ b/alerts/plugins/port_scan_enrichment.py @@ 
-7,7 +7,7 @@ import json import os -from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch +from mozdef_util.query_models import SearchQuery, TermMatch from mozdef_util.elasticsearch_client import ElasticsearchClient @@ -181,7 +181,7 @@ def enrich(alert, search_fn, search_window, max_connections): search_query.add_must([ TermMatch('category', 'bro'), TermMatch('source', 'conn'), - PhraseMatch( + TermMatch( 'details.sourceipaddress', alert['details']['sourceipaddress']) ]) From 3bd574571cb8f8786825056f5954b04c558c10ab Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 3 Jun 2019 13:14:40 -0500 Subject: [PATCH 82/88] Move vidyo cron script and dependencies into mozdef-deprecated --- cloudy_mozdef/packer/packer.json | 2 +- cron/vidyo2MozDef.conf | 6 - cron/vidyo2MozDef.py | 198 -------------------------- cron/vidyo2MozDef.sh | 10 -- docker/compose/mozdef_base/Dockerfile | 5 +- docs/source/installation.rst | 4 +- mq/plugins/vidyoCallID.py | 25 ---- requirements.txt | 1 - 8 files changed, 5 insertions(+), 246 deletions(-) delete mode 100644 cron/vidyo2MozDef.conf delete mode 100644 cron/vidyo2MozDef.py delete mode 100755 cron/vidyo2MozDef.sh delete mode 100644 mq/plugins/vidyoCallID.py diff --git a/cloudy_mozdef/packer/packer.json b/cloudy_mozdef/packer/packer.json index b4771c72..41e345c5 100644 --- a/cloudy_mozdef/packer/packer.json +++ b/cloudy_mozdef/packer/packer.json @@ -50,7 +50,7 @@ "set -e", "sudo yum update -y", "sudo yum makecache fast", - "sudo yum install -y glibc-devel gcc libstdc++ libffi-devel zlib-devel make mysql-devel python python-devel python-pip git docker python3", + "sudo yum install -y glibc-devel gcc libstdc++ libffi-devel zlib-devel make python python-devel python-pip git docker python3", "sudo pip install virtualenv docker-compose", "sudo systemctl enable docker", "sudo systemctl start docker", diff --git a/cron/vidyo2MozDef.conf b/cron/vidyo2MozDef.conf deleted file mode 100644 index 3c41bb6c..00000000 --- a/cron/vidyo2MozDef.conf +++ /dev/null @@ -1,6 +0,0 @@ -[options] -hostname= -username= -password= -database= -url=http://localhost:8080/events diff --git a/cron/vidyo2MozDef.py b/cron/vidyo2MozDef.py deleted file mode 100644 index 6258e740..00000000 --- a/cron/vidyo2MozDef.py +++ /dev/null @@ -1,198 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-# Copyright (c) 2014 Mozilla Corporation - -import copy -import os -import sys -import json -import ConfigParser -import socket -import MySQLdb -from requests import Session -from optparse import OptionParser -from datetime import datetime - - -class MozDefError(Exception): - def __init__(self, msg): - self.msg = msg - - def __str__(self): - return repr(self.msg) - - -class MozDefEvent(): - # create requests session to allow for keep alives - httpsession = Session() - # Turns off needless and repetitive .netrc check for creds - httpsession.trust_env = False - debug = False - verify_certificate = False - # Never fail (ie no unexcepted exceptions sent to user, such as server/network not responding) - fire_and_forget_mode = True - log = {} - log['timestamp'] = datetime.isoformat(datetime.now()) - log['hostname'] = socket.getfqdn() - log['processid'] = os.getpid() - log['processname'] = sys.argv[0] - log['severity'] = 'INFO' - log['summary'] = None - log['category'] = 'event' - log['tags'] = list() - log['details'] = dict() - - def __init__(self, url='http://localhost/events', summary=None, category='event', severity='INFO', tags=[], details={}): - self.summary = summary - self.category = category - self.severity = severity - self.tags = tags - self.details = details - self.url = url - - def send(self, timestamp=None, summary=None, category=None, severity=None, tags=None, details=None, hostname=None): - log_msg = copy.copy(self.log) - - if timestamp is None: - log_msg['timestamp'] = self.timestamp - - else: - log_msg['timestamp'] = timestamp - - if summary is None: - log_msg['summary'] = self.summary - else: - log_msg['summary'] = summary - - if category is None: - log_msg['category'] = self.category - else: - log_msg['category'] = category - - if severity is None: - log_msg['severity'] = self.severity - else: - log_msg['severity'] = severity - - if tags is None: - log_msg['tags'] = self.tags - else: - log_msg['tags'] = tags - - if details is None: - log_msg['details'] = self.details - else: - log_msg['details'] = details - - if hostname is None: - log_msg['hostname'] = self.hostname - else: - log_msg['hostname'] = hostname - - if type(log_msg['details']) != dict: - raise MozDefError('details must be a dict') - elif type(log_msg['tags']) != list: - raise MozDefError('tags must be a list') - elif summary is None: - raise MozDefError('Summary is a required field') - - try: - self.httpsession.post(self.url, json.dumps(log_msg, encoding='utf-8'), verify=self.verify_certificate) - - except Exception as e: - if not self.fire_and_forget_mode: - raise e - - -def main(): - ''' - connect to vidyo's mysql, read in calls and write to mozdef - ''' - mdEvent = MozDefEvent(options.url) - mdEvent.debug = True - mdEvent.fire_and_forget_mode = False - - # connect to mysql - db=MySQLdb.connect(host=options.hostname, user=options.username,passwd=options.password,db=options.database) - c=db.cursor(MySQLdb.cursors.DictCursor) - - c.execute("select * from ConferenceCall2 where JoinTime between NOW() - INTERVAL 30 MINUTE and NOW() or LeaveTime between NOW() - INTERVAL 30 MINUTE and NOW()") - rows=c.fetchall() - c.close() - - # Build dictionary of calls in order to consolidate multiple rows for a single call - calls = {} - for row in rows: - id = row['UniqueCallID'] - # Copy the row's info if we don't already have the final completed call state - if id not in calls or (id in calls and calls[id]['CallState'] != 'COMPLETED'): - calls[id] = row - - # Massage call data and send to MozDef - for key in calls.keys(): - call 
= calls[key] - if call['LeaveTime'] is not None: - duration = call['LeaveTime'] - call['JoinTime'] - call['CallDuration'] = duration.seconds - - # fix up the data for json - for k in call.keys(): - # convert datetime objects to isoformat for json serialization - if isinstance(call[k], datetime): - call[k] = call[k].isoformat() - # make sure it's a string, not unicode forced into a string - if isinstance(call[k],str): - # db has unicode stored as string, so decode, then encode - call[k] = call[k].decode('utf-8','ignore').encode('ascii','ignore') - - mdEvent.send(timestamp=call['JoinTime'], - summary='Vidyo call status for ' + call['UniqueCallID'].encode('ascii', 'ignore'), - tags=['vidyo'], - details=call, - category='vidyo', - hostname=socket.gethostname() - ) - - -def getConfig(optionname, thedefault, configfile): - """read an option from a config file or set a default - send 'thedefault' as the data class you want to get a string back - i.e. 'True' will return a string - True will return a bool - 1 will return an int - """ - retvalue = thedefault - opttype = type(thedefault) - if os.path.isfile(configfile): - config = ConfigParser.ConfigParser() - config.readfp(open(configfile)) - if config.has_option('options', optionname): - if opttype == bool: - retvalue = config.getboolean('options', optionname) - elif opttype == int: - retvalue = config.getint('options', optionname) - elif opttype == float: - retvalue = config.getfloat('options', optionname) - else: - retvalue = config.get('options', optionname) - return retvalue - - -def initConfig(configfile): - # default options - options.url = getConfig('url', 'http://localhost:8080/events', configfile) - options.username = getConfig('username', '', configfile) - options.password = getConfig('password', '', configfile) - options.database = getConfig('database', '', configfile) - options.hostname = getConfig('hostname', '', configfile) - - -if __name__ == '__main__': - parser = OptionParser() - parser.add_option("-c", dest='configfile', default=sys.argv[0].replace('.py', '.conf'), help="configuration file to use") - (options, args) = parser.parse_args() - initConfig(options.configfile) - main() diff --git a/cron/vidyo2MozDef.sh b/cron/vidyo2MozDef.sh deleted file mode 100755 index 7de0c15a..00000000 --- a/cron/vidyo2MozDef.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# Copyright (c) 2014 Mozilla Corporation - -source /opt/mozdef/envs/python/bin/activate -/opt/mozdef/envs/mozdef/cron/vidyo2MozDef.py -c /opt/mozdef/envs/mozdef/cron/vidyo2MozDef.conf - diff --git a/docker/compose/mozdef_base/Dockerfile b/docker/compose/mozdef_base/Dockerfile index 0701a8d9..7df5c549 100644 --- a/docker/compose/mozdef_base/Dockerfile +++ b/docker/compose/mozdef_base/Dockerfile @@ -14,15 +14,14 @@ RUN \ libffi-devel \ zlib-devel \ libcurl-devel \ - openssl \ + openssl \ openssl-devel \ git \ make && \ useradd -ms /bin/bash -d /opt/mozdef -m mozdef && \ mkdir /opt/mozdef/envs && \ cd /opt/mozdef && \ - yum install -y mysql-devel \ - python \ + yum install -y python \ python-devel \ python-pip && \ yum clean all && \ diff --git a/docs/source/installation.rst b/docs/source/installation.rst index 6f8d9057..ed202d6b 100644 --- a/docs/source/installation.rst +++ b/docs/source/installation.rst @@ -70,11 +70,11 @@ We need to install a python2.7 virtualenv. 
On Yum-based systems:: - sudo yum install make zlib-devel bzip2-devel openssl-devel ncurses-devel sqlite-devel readline-devel tk-devel pcre-devel gcc gcc-c++ mysql-devel + sudo yum install make zlib-devel bzip2-devel openssl-devel ncurses-devel sqlite-devel readline-devel tk-devel pcre-devel gcc gcc-c++ On APT-based systems:: - sudo apt-get install make zlib1g-dev libbz2-dev libssl-dev libncurses5-dev libsqlite3-dev libreadline-dev tk-dev libpcre3-dev libpcre++-dev build-essential g++ libmysqlclient-dev + sudo apt-get install make zlib1g-dev libbz2-dev libssl-dev libncurses5-dev libsqlite3-dev libreadline-dev tk-dev libpcre3-dev libpcre++-dev build-essential g++ Then:: diff --git a/mq/plugins/vidyoCallID.py b/mq/plugins/vidyoCallID.py deleted file mode 100644 index dac34e7a..00000000 --- a/mq/plugins/vidyoCallID.py +++ /dev/null @@ -1,25 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -# Copyright (c) 2014 Mozilla Corporation - -import hashlib - - -class message(object): - def __init__(self): - ''' - takes an incoming vidyo call record and assigns a static ID - so we always update the same doc for current status. - ''' - - # this plugin - # sets the type field - self.registration = ['uniquecallid'] - self.priority = 5 - - def onMessage(self, message, metadata): - docid = hashlib.md5('vidyouniquecallid' + message['details']['uniquecallid']).hexdigest() - metadata['id'] = docid - message['type'] = 'vidyo' - return (message, metadata) diff --git a/requirements.txt b/requirements.txt index 4c620ec1..7d752437 100644 --- a/requirements.txt +++ b/requirements.txt @@ -33,7 +33,6 @@ kombu==4.1.0 meld3==1.0.2 mozdef-client==1.0.11 mozdef-util==1.0.8 -MySQL-python==1.2.5 netaddr==0.7.1 nose==1.3.7 oauth2client==1.4.12 From f15b04807ddff9b622e7ea7e1a26da9fef45c006 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 3 Jun 2019 15:54:50 -0500 Subject: [PATCH 83/88] Modify health and status cron to save object instead of event --- cron/healthAndStatus.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cron/healthAndStatus.py b/cron/healthAndStatus.py index f2119f7c..e0198f10 100755 --- a/cron/healthAndStatus.py +++ b/cron/healthAndStatus.py @@ -109,11 +109,11 @@ def main(): # post to elastic search servers directly without going through # message queues in case there is an availability issue - es.save_event(index=index, body=json.dumps(healthlog)) + es.save_object(index=index, body=json.dumps(healthlog)) # post another doc with a static docid and tag # for use when querying for the latest status healthlog['tags'] = ['mozdef', 'status', 'latest'] - es.save_event(index=index, doc_id=getDocID(server), body=json.dumps(healthlog)) + es.save_object(index=index, doc_id=getDocID(server), body=json.dumps(healthlog)) def initConfig(): From 4d28c6d2736472ecdb53d56a415e42e2baa01e8e Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 3 Jun 2019 16:43:53 -0500 Subject: [PATCH 84/88] Update cloudtrail mapping --- mq/plugins/cloudtrail.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mq/plugins/cloudtrail.py b/mq/plugins/cloudtrail.py index 8b3750ce..fec89527 100644 --- a/mq/plugins/cloudtrail.py +++ b/mq/plugins/cloudtrail.py @@ -36,7 +36,9 @@ class message(object): 'details.requestparameters.logstreamname', 'details.requestparameters.source', 'details.requestparameters.tagging', + 'details.requestparameters.logging', 
'details.responseelements.role', + 'details.responseelements.policy', 'details.requestparameters.rule', 'details.responseelements.subnets', 'details.responseelements.endpoint', From 88a451f4ef3582c589d26588e62ab23da6f28fc1 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Thu, 6 Jun 2019 12:52:27 -0500 Subject: [PATCH 85/88] Update urllib version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 7d752437..12e7f765 100644 --- a/requirements.txt +++ b/requirements.txt @@ -59,7 +59,7 @@ slackclient==1.0.9 supervisor==3.3.1 tzlocal==1.4 uritemplate==0.6 -urllib3==1.23 +urllib3==1.25.3 uwsgi==2.0.17.1 virtualenv==1.11.4 tldextract==2.2.0 From 215ff0b88e5a9c0964c47cf01b35a468d16c0c66 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Thu, 6 Jun 2019 12:52:37 -0500 Subject: [PATCH 86/88] Remove urllib requirement from auth0 script --- cron/auth02mozdef.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/cron/auth02mozdef.py b/cron/auth02mozdef.py index aea1b23e..a1a86f7e 100644 --- a/cron/auth02mozdef.py +++ b/cron/auth02mozdef.py @@ -11,20 +11,12 @@ import hjson import sys import os import requests -import mozdef_client as mozdef -from mozdef_util.utilities.dot_dict import DotDict - -try: - import urllib.parse - - quote_url = urllib.parse.quote -except ImportError: - # Well hello there python2 user! - import urllib - - quote_url = urllib.quote import traceback +import mozdef_client as mozdef + +from mozdef_util.utilities.dot_dict import DotDict + def fatal(msg): print(msg) From 3ff902a7e2acba1d2127a826ef8e7c865eb95da8 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 7 Jun 2019 13:09:16 -0500 Subject: [PATCH 87/88] Downgrade urllib version to 1.24.3 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 12e7f765..53479ffc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -59,7 +59,7 @@ slackclient==1.0.9 supervisor==3.3.1 tzlocal==1.4 uritemplate==0.6 -urllib3==1.25.3 +urllib3==1.24.3 uwsgi==2.0.17.1 virtualenv==1.11.4 tldextract==2.2.0 From b7b9b5613c97f9596cb5c229b3feb32928018faf Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 10 Jun 2019 18:03:27 -0500 Subject: [PATCH 88/88] Add alerts-* index mapping for docker environment --- .../mozdef_bootstrap/files/index_mappings/alerts-star.json | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 docker/compose/mozdef_bootstrap/files/index_mappings/alerts-star.json diff --git a/docker/compose/mozdef_bootstrap/files/index_mappings/alerts-star.json b/docker/compose/mozdef_bootstrap/files/index_mappings/alerts-star.json new file mode 100644 index 00000000..410286e4 --- /dev/null +++ b/docker/compose/mozdef_bootstrap/files/index_mappings/alerts-star.json @@ -0,0 +1,6 @@ +{ + "title": "alerts-*", + "timeFieldName": "utctimestamp", + "notExpandable": true, + "fields": 
"[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"notify_mozdefbot\",\"type\":\"boolean\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"tags\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]" +} \ No newline at end of file