Merge branch 'master' into geomodel-v-0-1

This commit is contained in:
Emma Rose 2019-08-02 18:33:45 -04:00
Родитель 74c2092374 e46d4de509
Коммит c4e222133e
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 1486642516ED3535
58 изменённых файлов: 742 добавлений и 294 удалений

4
.gitignore поставляемый
Просмотреть файл

@ -13,8 +13,8 @@ alerts/generic_alerts
/.project
/data
.vscode
cloudy_mozdef/aws_parameters.json
cloudy_mozdef/aws_parameters.sh
cloudy_mozdef/aws_parameters.*.json
cloudy_mozdef/aws_parameters.*.sh
docs/source/_build
docs/source/_static
*.swp

Просмотреть файл

@ -5,6 +5,58 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
## [Unreleased]
## [v3.1.1] - 2019-07-25
### Added
- Ability to get open indices in ElasticsearchClient
- Documentation on installing dependencies on Mac OS X
### Changed
- AWS Managed Elasticsearch/Kibana version to 6.7
### Fixed
- Disk free/total in /about page shows at most 2 decimal places
- Connections to SQS and S3 without access key and secret
- Ability to block IPs and add to Watchlist
## [v3.1.0] - 2019-07-18
### Added
- Captured the AWS CodeBuild CI/CD configuration in code with documentation
- Support for HTTP Basic Auth in AWS deployment
- Docker healthchecks to docker containers
- Descriptions to all AWS Lambda functions
- Support for alerts-* index in docker environment
- Alert that detects excessive numbers of AWS API describe calls
- Additional AWS infrastructure to support AWS re:Inforce 2019 workshop
- Documentation specific to MozDef installation now that MozDef uses Python 3
- Config setting for CloudTrail notification SQS queue polling time
- Config setting for Slack bot welcome message
### Changed
- Kibana port from 9443 to 9090
- AWS CloudFormation default values from "unset" to empty string
- Simplify mozdef-mq logic determining AMQP endpoint URI
- SQS to always use secure transport
- CloudTrail alert unit tests
- Incident summary placeholder text for greater clarity
- Display of Veris data for easier viewing
- All Dockerfiles to reduce image size, pin package signing keys and improve
clarity
### Fixed
- Workers starting before GeoIP data is available
- Mismatched MozDefACMCertArn parameter name in CloudFormation template
- Duplicate mozdefvpcflowlogs object
- Hard coded AWS Availability Zone
- httplib2 by updating to version to 0.13.0 for python3
- mozdef_util by modifying bulk queue to acquire lock before saving events
- Dashboard Kibana URL
- Unnecessary and conflicting package dependencies from MozDef and mozdef_util
- get_indices to include closed indices
## [v3.0.0] - 2019-07-08
### Added
- Support for Python3
@ -132,7 +184,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Added checks on sending SQS messages to only accept intra-account messages
- Improved docker performance and disk space requirements
[Unreleased]: https://github.com/mozilla/MozDef/compare/v3.0.0...HEAD
[Unreleased]: https://github.com/mozilla/MozDef/compare/v3.1.1...HEAD
[v3.1.1]: https://github.com/mozilla/MozDef/compare/v3.1.0...v3.1.1
[v3.1.0]: https://github.com/mozilla/MozDef/compare/v3.0.0...v3.1.0
[v3.0.0]: https://github.com/mozilla/MozDef/compare/v2.0.1...v3.0.0
[v2.0.1]: https://github.com/mozilla/MozDef/compare/v2.0.0...v2.0.1
[v2.0.0]: https://github.com/mozilla/MozDef/compare/v1.40.0...v2.0.0

Просмотреть файл

@ -8,5 +8,5 @@
# Entire set can review certain documentation files
/README.md @pwnbus @mpurzynski @Phrozyn @tristanweir @gene1wood @andrewkrug
/CHANGELOG @pwnbus @mpurzynski @Phrozyn @tristanweir @gene1wood @andrewkrug
/CHANGELOG.md @pwnbus @mpurzynski @Phrozyn @tristanweir @gene1wood @andrewkrug
/docs/ @pwnbus @mpurzynski @Phrozyn @tristanweir @gene1wood @andrewkrug

Просмотреть файл

@ -25,7 +25,7 @@ The Mozilla Enterprise Defense Platform (MozDef) seeks to automate the security
## Goals:
* Provide a platform for use by defenders to rapidly discover and respond to security incidents.
* Provide a platform for use by defenders to rapidly discover and respond to security incidents
* Automate interfaces to other systems like bunker, cymon, mig
* Provide metrics for security events and incidents
* Facilitate real-time collaboration amongst incident handlers
@ -36,7 +36,7 @@ The Mozilla Enterprise Defense Platform (MozDef) seeks to automate the security
MozDef is in production at Mozilla where we are using it to process over 300 million events per day.
[1]: https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/v1.38.5/mozdef-parent.yml
[1]: https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/v3.1.1/mozdef-parent.yml
## Survey & Contacting us

Просмотреть файл

@ -0,0 +1,3 @@
[options]
threshold_count = 1
search_depth_min = 60

Просмотреть файл

@ -0,0 +1,48 @@
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from lib.alerttask import AlertTask
from mozdef_util.query_models import SearchQuery, TermMatch
import re
class AlertLdapPasswordSpray(AlertTask):
    """Alert on repeated failed LDAP binds from one client (password spraying).

    Aggregates LDAP_INVALID_CREDENTIALS events by client address and fires
    when a single client exceeds the configured threshold.
    """

    def main(self):
        # Thresholds and lookback window come from the plugin config file.
        self.parse_config('ldap_password_spray.conf', ['threshold_count', 'search_depth_min'])
        query = SearchQuery(minutes=int(self.config.search_depth_min))
        query.add_must([
            TermMatch('category', 'ldap'),
            TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS')
        ])
        self.filtersManual(query)
        # Bucket failures per client; alert when a bucket crosses the threshold.
        self.searchEventsAggregated('details.client', samplesLimit=10)
        self.walkAggregations(threshold=int(self.config.threshold_count))

    def onAggregation(self, aggreg):
        # aggreg['value'] is the offending client; aggreg['allevents'] holds
        # the sampled events for that client.
        account_regex = r'.*mail=([a-zA-Z0-9._-]+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9_-]+)'
        targeted_accounts = set()
        for event in aggreg['allevents']:
            for request in event['_source']['details']['requests']:
                matched = re.match(account_regex, request['details'][0])
                if matched:
                    targeted_accounts.add(matched.group(1))
        # NOTE: the alert fires even when no account emails could be extracted;
        # an "If no emails, don't throw alert" check was deliberately left
        # disabled upstream.
        summary = 'LDAP Password Spray Attack in Progress from {0} targeting the following account(s): {1}'.format(
            aggreg['value'],
            ",".join(sorted(targeted_accounts))
        )
        return self.createAlertDict(summary, 'ldap', ['ldap'], aggreg['events'], 'WARNING')

Просмотреть файл

@ -20,6 +20,7 @@ from celery import Task
from celery.utils.log import get_task_logger
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.utilities.logger import logger
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import TermMatch, ExistsMatch
@ -545,6 +546,6 @@ class AlertTask(Task):
try:
json_obj = json.load(fd)
except ValueError:
sys.stderr.write("FAILED to open the configuration file\n")
logger.error("FAILED to open the configuration file\n")
return json_obj

Просмотреть файл

@ -20,8 +20,8 @@ CONFIG_FILE = os.path.join(
def _find_ip_addresses(string):
'''List all of the IPv4 and IPv6 addresses found in a string.'''
ipv4_rx = '(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
ipv6_rx = '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'
ipv4_rx = r'(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
ipv6_rx = r'(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'
ipv4 = re.findall(ipv4_rx, string)
ipv6_map = map(
@ -59,6 +59,8 @@ def enrich(alert, known_ips):
alert = alert.copy()
if 'details' not in alert:
alert['details'] = {}
alert['details']['sites'] = []
for ip in set(ips):
@ -140,6 +142,8 @@ class message(object):
'''
def __init__(self):
# Run plugin on all alerts
self.registration = '*'
self._config = _load_config(CONFIG_FILE)
def onMessage(self, message):

Просмотреть файл

@ -0,0 +1,2 @@
[options]
keywords =

130
alerts/plugins/ipaddr.py Normal file
Просмотреть файл

@ -0,0 +1,130 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from configlib import getConfig, OptionParser
import netaddr
import os
def isIPv4(ip):
    """Return True if *ip* is a valid IPv4 address string, else False."""
    try:
        return netaddr.valid_ipv4(ip)
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are not
    # swallowed; any parse/type error still just means "not an IPv4 address".
    except Exception:
        return False
def isIPv6(ip):
    """Return True if *ip* is a valid IPv6 address string, else False."""
    try:
        return netaddr.valid_ipv6(ip)
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are not
    # swallowed; any parse/type error still just means "not an IPv6 address".
    except Exception:
        return False
def addError(message, error):
    '''add an error note to a message'''
    # Create the 'errors' list on first use; if a non-list value is already
    # present under 'errors', leave it untouched.
    errors = message.setdefault('errors', list())
    if isinstance(errors, list):
        errors.append(error)
class message(object):
    '''
    Alert plugin that uses heuristics to find and attach the source IP
    address of an alert.

    Registration keywords are read from ipaddr.conf (space-delimited list
    in the 'keywords' option); priority 1 runs this plugin early.
    '''

    def __init__(self):
        '''
        uses heuristic to find and attach the source IP address of the alert
        '''
        # set my own conf file
        # relative path to the rest index.py file
        self.configfile = os.path.join(os.path.dirname(__file__), 'ipaddr.conf')
        self.options = None
        if os.path.exists(self.configfile):
            self.initConfiguration()
        if self.options is not None:
            self.registration = self.options.keywords.split(" ")
        else:
            # Fix: the original dereferenced self.options unconditionally and
            # raised AttributeError on None whenever ipaddr.conf was missing.
            # With no config available, register for no keywords instead.
            self.registration = []
        self.priority = 1

    def initConfiguration(self):
        # Build an empty options namespace, then fill it from the conf file.
        myparser = OptionParser()
        # setup self.options by sending empty list [] to parse_args
        (self.options, args) = myparser.parse_args([])

        # fill self.options with plugin-specific options
        self.options.keywords = getConfig('keywords', 'localhost', self.configfile)

    def onMessage(self, message):
        '''
        Examine possible ip addresses for the following:
            ipv6 in an ipv4 field
            ipv4 in another field
            '-' or other invalid ip in the ip field
        Also sets ipv4 in two fields:
            ipaddress (decimal mapping IP)
            ipv4address (string mapping)
        Elastic search is inconsistent about returning IPs as
        decimal or IPs.
        In a query an IP field is returned as string.
        In a facets an IP field is returned as decimal.
        No ES field type exists for ipv6, so always having
        a string version is the most flexible option.
        '''
        # here is where you do something with the incoming alert message
        if 'events' in message:
            if 'documentsource' in message['events'][0]:
                if 'details' in message['events'][0]['documentsource']:
                    # Only the first event's details are inspected.
                    event = message['events'][0]['documentsource']['details']
                    if 'details' not in message:
                        message['details'] = {}
                    # forwarded header can be spoofed, so try it first,
                    # but override later if we've a better field.
                    if 'http_x_forwarded_for' in event:
                        # should be a comma delimited list of ips with the original client listed first
                        ipText = event['http_x_forwarded_for'].split(',')[0]
                        if isIPv4(ipText) and 'sourceipaddress' not in event:
                            message['details']['sourceipaddress'] = ipText
                        if isIPv4(ipText) and 'sourceipv4address' not in event:
                            message['details']['sourceipv4address'] = ipText
                        if isIPv6(ipText) and 'sourceipv6address' not in event:
                            message['details']['sourceipv6address'] = ipText
                    if 'sourceipaddress' in event:
                        ipText = event['sourceipaddress']
                        if isIPv6(ipText):
                            # NOTE(review): this writes the v6 address back onto the
                            # event, while the destination branch below writes to
                            # message['details'] — confirm which target is intended.
                            event['sourceipv6address'] = ipText
                            message['details']['sourceipaddress'] = '0.0.0.0'
                            addError(message, 'plugin: {0} error: {1}'.format('ipFixUp.py', 'sourceipaddress is ipv6, moved'))
                        elif isIPv4(ipText):
                            message['details']['sourceipv4address'] = ipText
                            message['details']['sourceipaddress'] = ipText
                        else:
                            # Smells like a hostname, let's save it as source field
                            message['details']['source'] = event['sourceipaddress']
                            message['details']['sourceipaddress'] = None

                    if 'destinationipaddress' in event:
                        ipText = event['destinationipaddress']
                        if isIPv6(ipText):
                            message['details']['destinationipv6address'] = ipText
                            message['details']['destinationipaddress'] = '0.0.0.0'
                            addError(message, 'plugin: {0} error: {1}'.format('ipFixUp.py', 'destinationipaddress is ipv6, moved'))
                        elif isIPv4(ipText):
                            message['details']['destinationipv4address'] = ipText
                            message['details']['destinationipaddress'] = ipText
                        else:
                            # Smells like a hostname, let's save it as destination field
                            message['details']['destination'] = event['destinationipaddress']
                            message['details']['destinationipaddress'] = None

                    if 'cluster_client_ip' in event:
                        # cluster_client_ip is trusted last and overrides any
                        # previously derived sourceipaddress when it is IPv4.
                        ipText = event['cluster_client_ip']
                        if isIPv4(ipText):
                            message['details']['sourceipaddress'] = ipText

        # you can modify the message if needed
        # plugins registered with lower (>2) priority
        # will receive the message and can also act on it
        # but even if not modified, you must return it
        return message

Просмотреть файл

@ -88,6 +88,9 @@ class message(object):
'''
def __init__(self):
# Run plugin on portscan alerts
self.registration = 'portscan'
config = _load_config(CONFIG_FILE)
try:

Просмотреть файл

@ -24,7 +24,7 @@ class AlertProxyDropExecutable(AlertTask):
)
# Only notify on certain file extensions from config
filename_regex = "/.*\.({0})/".format(self.config.extensions.replace(",", "|"))
filename_regex = r"/.*\.({0})/".format(self.config.extensions.replace(",", "|"))
search_query.add_must(
[QueryStringMatch("details.destination: {}".format(filename_regex))]
)

Просмотреть файл

@ -57,7 +57,7 @@ class AlertAuthSignRelengSSH(AlertTask):
sourceipaddress = x['details']['sourceipaddress']
targetuser = 'unknown'
expr = re.compile('Accepted publickey for ([A-Za-z0-9]+) from')
expr = re.compile(r'Accepted publickey for ([A-Za-z0-9]+) from')
m = expr.match(event['_source']['summary'])
groups = m.groups()
if len(groups) > 0:

Просмотреть файл

@ -124,7 +124,7 @@ class SshLateral(AlertTask):
source_ips = []
users = []
for x in aggreg['events']:
m = re.match('Accepted publickey for (\S+) from (\S+).*', x['_source']['summary'])
m = re.match(r'Accepted publickey for (\S+) from (\S+).*', x['_source']['summary'])
if m is not None and len(m.groups()) == 2:
ipaddr = netaddr.IPAddress(m.group(2))
for y in self._config['alertifsource']:

Просмотреть файл

@ -65,7 +65,7 @@ class AlertUnauthSSH(AlertTask):
sourceipaddress = x['details']['sourceipaddress']
targetuser = 'unknown'
expr = re.compile('Accepted publickey for ([A-Za-z0-9@.\-]+) from')
expr = re.compile(r'Accepted publickey for ([A-Za-z0-9@.\-]+) from')
m = expr.match(event['_source']['summary'])
groups = m.groups()
if len(groups) > 0:

Просмотреть файл

@ -63,7 +63,7 @@ Resources:
EBSEnabled: true
VolumeType: gp2
VolumeSize: !Ref BlockStoreSizeGB
ElasticsearchVersion: '5.6'
ElasticsearchVersion: '6.7'
ElasticsearchClusterConfig:
InstanceCount: !Ref ESInstanceCount
AccessPolicies:

Просмотреть файл

@ -5,5 +5,5 @@ if $programname == 'eventtask-worker' then /var/log/mozdef/eventtask.log
if $programname == 'alertactions-worker' then /var/log/mozdef/alertactions.log
if $programname == 'mongod.3002' then /var/log/mozdef/mongo/meteor-mongo.log
if $programname == 'mongod' then /var/log/mozdef/mongo/mongo.log
if $programname == 'kibana5' then /var/log/mozdef/kibana.log
if $programname == 'kibana' then /var/log/mozdef/kibana.log
& stop

Просмотреть файл

@ -16,6 +16,7 @@ import traceback
import mozdef_client as mozdef
from mozdef_util.utilities.dot_dict import DotDict
from mozdef_util.utilities.logger import logger
def fatal(msg):
@ -23,10 +24,6 @@ def fatal(msg):
sys.exit(1)
def debug(msg):
sys.stderr.write("+++ {}\n".format(msg))
# This is from https://auth0.com/docs/api/management/v2#!/Logs/get_logs
# auth0 calls these events with an acronym and description
# The logs have the acronym, but not the description
@ -163,7 +160,7 @@ def process_msg(mozmsg, msg):
details.success = True
except KeyError:
# New message type, check https://manage-dev.mozilla.auth0.com/docs/api/management/v2#!/Logs/get_logs for ex.
debug("New auth0 message type, please add support: {}".format(msg.type))
logger.error("New auth0 message type, please add support: {}".format(msg.type))
details["eventname"] = msg.type
# determine severity level
@ -323,7 +320,7 @@ def main():
config = DotDict(hjson.load(fd))
if config is None:
print("No configuration file 'auth02mozdef.json' found.")
logger.error("No configuration file 'auth02mozdef.json' found.")
sys.exit(1)
headers = {"Authorization": "Bearer {}".format(config.auth0.token), "Accept": "application/json"}

Просмотреть файл

@ -49,7 +49,7 @@ def isFQDN(fqdn):
# We could resolve FQDNs here, but that could tip our hand and it's
# possible us investigating could trigger other alerts.
# validate using the regex from https://github.com/yolothreat/utilitybelt
fqdn_re = re.compile('(?=^.{4,255}$)(^((?!-)[a-zA-Z0-9-]{1,63}(?<!-)\.)+[a-zA-Z]{2,63}$)', re.I | re.S | re.M)
fqdn_re = re.compile(r'(?=^.{4,255}$)(^((?!-)[a-zA-Z0-9-]{1,63}(?<!-)\.)+[a-zA-Z]{2,63}$)', re.I | re.S | re.M)
return bool(re.match(fqdn_re,fqdn))
except:
return False

Просмотреть файл

@ -77,11 +77,14 @@ def getEsNodesStats():
load_str = "{0},{1},{2}".format(load_average['1m'], load_average['5m'], load_average['15m'])
hostname = nodeid
if 'host' in jsonobj['nodes'][nodeid]:
hostname=jsonobj['nodes'][nodeid]['host']
hostname = jsonobj['nodes'][nodeid]['host']
disk_free = "{0:.2f}".format(jsonobj['nodes'][nodeid]['fs']['total']['free_in_bytes'] / (1024 * 1024 * 1024))
disk_total = "{0:.2f}".format(jsonobj['nodes'][nodeid]['fs']['total']['total_in_bytes'] / (1024 * 1024 * 1024))
results.append({
'hostname': hostname,
'disk_free': jsonobj['nodes'][nodeid]['fs']['total']['free_in_bytes'] / (1024 * 1024 * 1024),
'disk_total': jsonobj['nodes'][nodeid]['fs']['total']['total_in_bytes'] / (1024 * 1024 * 1024),
'disk_free': disk_free,
'disk_total': disk_total,
'mem_heap_per': jsonobj['nodes'][nodeid]['jvm']['mem']['heap_used_percent'],
'gc_old': jsonobj['nodes'][nodeid]['jvm']['gc']['collectors']['old']['collection_time_in_millis'] / 1000,
'cpu_usage': jsonobj['nodes'][nodeid]['os']['cpu']['percent'],

Просмотреть файл

@ -5,7 +5,7 @@ discovery.type: single-node
action.destructive_requires_name: true
# Disable auto creation unless these indexes
action.auto_create_index: .watches,.triggered_watches,.watcher-history-*
action.auto_create_index: .watches,.triggered_watches,.watcher-history-*,.kibana_*
# Add these to prevent requiring a user/pass and termination of ES when looking for "ingest" assignments.
# The watcher directive allows for the deletion of failed watcher indices as they sometimes get created with glitches.

Просмотреть файл

@ -125,14 +125,14 @@ if state_index_name not in all_indices:
client.create_index(state_index_name, index_config=state_index_settings)
# Wait for kibana service to get ready
total_num_tries = 10
total_num_tries = 20
for attempt in range(total_num_tries):
try:
if requests.get(kibana_url).ok:
if requests.get(kibana_url, allow_redirects=True):
break
except Exception:
pass
print('Unable to connect to Elasticsearch...retrying')
print('Unable to connect to Kibana ({0})...retrying'.format(kibana_url))
sleep(5)
else:
print('Cannot connect to Kibana after ' + str(total_num_tries) + ' tries, exiting script.')

Просмотреть файл

@ -0,0 +1,13 @@
{
"visualization": {
"title": "Cloudtrail Eventname Pie-Graph",
"visState": "{\"title\":\"Cloudtrail Eventname Pie-Graph\",\"type\":\"pie\",\"params\":{\"type\":\"pie\",\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":true,\"labels\":{\"show\":false,\"values\":true,\"last_level\":true,\"truncate\":100}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.sourceipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"customLabel\":\"Source IP Address\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.eventname\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"customLabel\":\"AWS Api Call\"}}]}",
"uiStateJSON": "{}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
}
},
"type": "visualization"
}

Просмотреть файл

@ -0,0 +1,13 @@
{
"visualization": {
"title": "Cloudtrail Eventname Table",
"visState": "{\"title\":\"Cloudtrail Eventname Table\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMetricsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"details.eventname\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\"}}]}",
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
}
},
"type": "visualization"
}

Просмотреть файл

@ -0,0 +1,15 @@
{
"dashboard": {
"title": "Cloudtrail Events",
"hits": 0,
"description": "",
"panelsJSON": "[{\"embeddableConfig\":{\"vis\":{\"legendOpen\":false}},\"gridData\":{\"x\":12,\"y\":16,\"w\":12,\"h\":18,\"i\":\"1\"},\"id\":\"cloudtrail_eventname_pie_graph\",\"panelIndex\":\"1\",\"title\":\"Event Names\",\"type\":\"visualization\",\"version\":\"6.8.0\"},{\"embeddableConfig\":{\"vis\":{\"legendOpen\":true}},\"gridData\":{\"x\":12,\"y\":0,\"w\":36,\"h\":16,\"i\":\"2\"},\"id\":\"cloudtrail_events_line_graph\",\"panelIndex\":\"2\",\"type\":\"visualization\",\"version\":\"6.8.0\"},{\"embeddableConfig\":{},\"gridData\":{\"x\":0,\"y\":0,\"w\":12,\"h\":7,\"i\":\"3\"},\"id\":\"cloudtrail_total_event_count\",\"panelIndex\":\"3\",\"title\":\"# Events\",\"type\":\"visualization\",\"version\":\"6.8.0\"},{\"embeddableConfig\":{},\"gridData\":{\"x\":24,\"y\":16,\"w\":24,\"h\":18,\"i\":\"4\"},\"id\":\"cloudtrail_events_map\",\"panelIndex\":\"4\",\"type\":\"visualization\",\"version\":\"6.8.0\"},{\"embeddableConfig\":{},\"gridData\":{\"x\":0,\"y\":7,\"w\":12,\"h\":15,\"i\":\"5\"},\"id\":\"cloudtrail_user_identity_table\",\"panelIndex\":\"5\",\"type\":\"visualization\",\"version\":\"6.8.0\"},{\"embeddableConfig\":{},\"gridData\":{\"x\":0,\"y\":22,\"w\":12,\"h\":12,\"i\":\"8\"},\"id\":\"cloudtrail_eventname_table\",\"panelIndex\":\"8\",\"type\":\"visualization\",\"version\":\"6.8.0\"}]",
"optionsJSON": "{\"darkTheme\":false,\"hidePanelTitles\":false,\"useMargins\":true}",
"version": 1,
"timeRestore": false,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"query\":{\"language\":\"lucene\",\"query\":\"\"},\"filter\":[{\"$state\":{\"store\":\"appState\"},\"meta\":{\"alias\":null,\"disabled\":false,\"index\":\"e3d06450-9b8d-11e9-9b0e-b35568cb01e3\",\"key\":\"source\",\"negate\":false,\"params\":{\"query\":\"cloudtrail\",\"type\":\"phrase\"},\"type\":\"phrase\",\"value\":\"cloudtrail\"},\"query\":{\"match\":{\"source\":{\"query\":\"cloudtrail\",\"type\":\"phrase\"}}}}]}"
}
},
"type": "dashboard"
}

Просмотреть файл

@ -0,0 +1,13 @@
{
"visualization": {
"title": "Cloudtrail Events Line Graph",
"visState": "{\"title\":\"Cloudtrail Events Line Graph\",\"type\":\"line\",\"params\":{\"addLegend\":true,\"addTimeMarker\":true,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{},\"type\":\"category\"}],\"grid\":{\"categoryLines\":true,\"style\":{\"color\":\"#eee\"},\"valueAxis\":\"ValueAxis-1\"},\"legendPosition\":\"right\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"Count\"},\"drawLinesBetweenPoints\":true,\"mode\":\"normal\",\"show\":\"true\",\"showCircles\":true,\"type\":\"line\",\"valueAxis\":\"ValueAxis-1\"}],\"times\":[],\"type\":\"line\",\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Count\"},\"type\":\"value\"}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"receivedtimestamp\",\"timeRange\":{\"from\":\"now-24h\",\"to\":\"now\",\"mode\":\"quick\"},\"useNormalizedEsInterval\":true,\"interval\":\"auto\",\"drop_partials\":false,\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"details.awsregion\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"customLabel\":\"User\"}}]}",
"uiStateJSON": "{\"vis\":{\"legendOpen\":false}}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"language\":\"lucene\",\"query\":\"\"},\"filter\":[]}"
}
},
"type": "visualization"
}

Просмотреть файл

@ -0,0 +1,13 @@
{
"visualization": {
"title": "Cloudtrail Events Map",
"visState": "{\"title\":\"Cloudtrail Events Map\",\"type\":\"tile_map\",\"params\":{\"colorSchema\":\"Yellow to Red\",\"mapType\":\"Shaded Circle Markers\",\"isDesaturated\":true,\"addTooltip\":true,\"heatClusterSize\":1.5,\"legendPosition\":\"bottomright\",\"mapZoom\":2,\"mapCenter\":[0,0],\"wms\":{\"enabled\":false,\"options\":{\"format\":\"image/png\",\"transparent\":true},\"selectedTmsLayer\":{\"origin\":\"elastic_maps_service\",\"id\":\"road_map\",\"minZoom\":0,\"maxZoom\":18,\"attribution\":\"<p>&#169; <a href=\\\"https://www.openstreetmap.org/copyright\\\">OpenStreetMap contributors</a>|<a href=\\\"https://openmaptiles.org\\\">OpenMapTiles</a>|<a href=\\\"https://www.maptiler.com\\\">MapTiler</a>|<a href=\\\"https://www.elastic.co/elastic-maps-service\\\">Elastic Maps Service</a></p>&#10;\"}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"geohash_grid\",\"schema\":\"segment\",\"params\":{\"field\":\"details.sourceipgeopoint\",\"autoPrecision\":true,\"isFilteredByCollar\":true,\"useGeocentroid\":true,\"mapZoom\":2,\"mapCenter\":{\"lon\":0,\"lat\":-0.17578097424708533},\"mapBounds\":{\"bottom_right\":{\"lat\":-83.94227191521858,\"lon\":282.30468750000006},\"top_left\":{\"lat\":83.9050579559856,\"lon\":-282.30468750000006}},\"precision\":2}}]}",
"uiStateJSON": "{}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
}
},
"type": "visualization"
}

Просмотреть файл

@ -0,0 +1,13 @@
{
"visualization": {
"title": "Cloudtrail Total Event Count",
"visState": "{\"title\":\"Cloudtrail Total Event Count\",\"type\":\"metric\",\"params\":{\"addTooltip\":true,\"addLegend\":false,\"type\":\"metric\",\"metric\":{\"percentageMode\":false,\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"metricColorMode\":\"None\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"labels\":{\"show\":true},\"invertColors\":false,\"style\":{\"bgFill\":\"#000\",\"bgColor\":false,\"labelColor\":false,\"subText\":\"\",\"fontSize\":60}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Total\"}}]}",
"uiStateJSON": "{}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
}
},
"type": "visualization"
}

Просмотреть файл

@ -0,0 +1,13 @@
{
"visualization": {
"title": "Cloudtrail User Identity Table",
"visState": "{\"title\":\"Cloudtrail User Identity Table\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMetricsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"details.useridentity.arn\",\"size\":2,\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":true,\"missingBucketLabel\":\"Missing\"}}]}",
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"language\":\"lucene\",\"query\":\"\"},\"filter\":[]}"
}
},
"type": "visualization"
}

Просмотреть файл

@ -53,7 +53,7 @@ At this point, begin development and periodically run your unit-tests locally wi
Background on concepts
----------------------
- Logs - These are individual log entries that are typically emitted from systems, like an Apache log
- Logs - These are individual log entries that are typically emitted from systems, like an Apache log.
- Events - The entry point into MozDef, a log parsed into JSON by some log shipper (syslog-ng, nxlog) or a native JSON data source like GuardDuty, CloudTrail, most SaaS systems, etc.
- Alerts - These are either a 1:1 events to alerts (this thing happens and alert) or a M:1 events to alerts (N of these things happen and alert).

Просмотреть файл

@ -27,7 +27,8 @@ insert_simple.js
Usage: `node ./insert_simple.js <processes> <totalInserts> <host1> [host2] [host3] [...]`
* `processes`: Number of processes to spawn
* `totalInserts`: Number of inserts to perform, please note after a certain number node will slow down. You want to have a lower number if you are in this case.
* `totalInserts`: Number of inserts to perform
* Please note after a certain number node will slow down. You want to have a lower number if you are in this case.
* `host1`, `host2`, `host3`, etc: Elasticsearch hosts to which you want to send the HTTP requests
insert_bulk.js
@ -39,7 +40,8 @@ Usage: `node ./insert_bulk.js <processes> <insertsPerQuery> <totalInserts> <host
* `processes`: Number of processes to spawn
* `insertsPerQuery`: Number of logs per request
* `totalInserts`: Number of inserts to perform, please note after a certain number node will slow down. You want to have a lower number if you are in this case.
* `totalInserts`: Number of inserts to perform
* Please note after a certain number node will slow down. You want to have a lower number if you are in this case.
* `host1`, `host2`, `host3`, etc: Elasticsearch hosts to which you want to send the HTTP requests
search_all_fulltext.js
@ -50,7 +52,8 @@ search_all_fulltext.js
Usage: `node ./search_all_fulltext.js <processes> <totalSearches> <host1> [host2] [host3] [...]`
* `processes`: Number of processes to spawn
* `totalSearches`: Number of search requests to perform, please note after a certain number node will slow down. You want to have a lower number if you are in this case.
* `totalSearches`: Number of search requests to perform
* Please note after a certain number node will slow down. You want to have a lower number if you are in this case.
* `host1`, `host2`, `host3`, etc: Elasticsearch hosts to which you want to send the HTTP requests

Просмотреть файл

@ -19,34 +19,34 @@ The Test Sequence
_________________
* Travis CI creates webhooks when first setup which allow commits to the MozDef
GitHub repo to trigger Travis
GitHub repo to trigger Travis.
* When a commit is made to MozDef, Travis CI follows the instructions in the
`.travis.yml <https://github.com/mozilla/MozDef/blob/master/.travis.yml>`_
file
* `.travis.yml` installs `docker-compose` in the `before_install` phase
* in the `install` phase, Travis runs the
file.
* `.travis.yml` installs `docker-compose` in the `before_install` phase.
* In the `install` phase, Travis runs the
`build-tests <https://github.com/mozilla/MozDef/blob/cfeafb77f9d4d4d8df02117a0ffca0ec9379a7d5/Makefile#L88-L89>`_
make target which calls `docker-compose build` on the
`docker/compose/docker-compose-tests.yml`_ file which builds a few docker
containers to use for testing
* in the `script` phase, Travis runs the
containers to use for testing.
* In the `script` phase, Travis runs the
`tests <https://github.com/mozilla/MozDef/blob/cfeafb77f9d4d4d8df02117a0ffca0ec9379a7d5/Makefile#L52>`_
make target which
* calls the `build-tests` make target which again runs `docker-compose build`
on the `docker/compose/docker-compose-tests.yml`_ file
on the `docker/compose/docker-compose-tests.yml`_ file.
* calls the
`run-tests <https://github.com/mozilla/MozDef/blob/cfeafb77f9d4d4d8df02117a0ffca0ec9379a7d5/Makefile#L67-L69>`_
make target which
make target which.
* calls the
`run-tests-resources <https://github.com/mozilla/MozDef/blob/cfeafb77f9d4d4d8df02117a0ffca0ec9379a7d5/Makefile#L60-L61>`_
make target which starts the docker
containers listed in `docker/compose/docker-compose-tests.yml`_
containers listed in `docker/compose/docker-compose-tests.yml`_.
* runs `flake8` with the
`.flake8 <https://github.com/mozilla/MozDef/blob/master/.flake8>`_
config file to check code style
* runs `py.test tests` which runs all the test cases
config file to check code style.
* runs `py.test tests` which runs all the test cases.
AWS CodeBuild
-------------
@ -111,24 +111,24 @@ The Build Sequence
__________________
* A branch is merged into `master` in the GitHub repo or a version git tag is
applied to a commit
* GitHub emits a webhook event to AWS CodeBuild indicating this
applied to a commit.
* GitHub emits a webhook event to AWS CodeBuild indicating this.
* AWS CodeBuild considers the Filter Groups configured to decide if the tag
or branch warrants triggering a build. These Filter Groups are defined in
the ``mozdef-cicd-codebuild.yml`` CloudFormation template. Assuming the tag
or branch are acceptable, CodeBuild continues.
* AWS CodeBuild reads the
`buildspec.yml <https://github.com/mozilla/MozDef/blob/master/cloudy_mozdef/buildspec.yml>`_
file to know what to do
file to know what to do.
* The `install` phase of the `buildspec.yml` fetches
`packer <https://www.packer.io/>`_ and unzips it
`packer <https://www.packer.io/>`_ and unzips it.
* `packer` is a tool that spawns an ec2 instance, provisions it, and renders
an AWS Machine Image (AMI) from it.
* The `build` phase of the `buildspec.yml` runs the
`cloudy_mozdef/ci/deploy <https://github.com/mozilla/MozDef/blob/master/cloudy_mozdef/ci/deploy>`_
script in the AWS CodeBuild Ubuntu 14.04 environment
script in the AWS CodeBuild Ubuntu 14.04 environment.
* The `deploy` script calls the
`build-from-cwd <https://github.com/mozilla/MozDef/blob/cfeafb77f9d4d4d8df02117a0ffca0ec9379a7d5/Makefile#L78-L79>`_
target of the `Makefile` which calls `docker-compose build` on the
@ -153,16 +153,16 @@ __________________
* Uploads the local image that was just built by AWS CodeBuild to DockerHub.
If the branch being built is `master` then the image is uploaded both with
a tag of `master` as well as with a tag of `latest`
a tag of `master` as well as with a tag of `latest`.
* If the branch being built is from a version tag (e.g. `v1.2.3`) then the
image is uploaded with only that version tag applied
image is uploaded with only that version tag applied.
* The `deploy` script next calls the
`packer-build-github <https://github.com/mozilla/MozDef/blob/cfeafb77f9d4d4d8df02117a0ffca0ec9379a7d5/cloudy_mozdef/Makefile#L34-L36>`_
make target in the
`cloudy_mozdef/Makefile <https://github.com/mozilla/MozDef/blob/master/cloudy_mozdef/Makefile>`_
which calls the
`ci/pack_and_copy <https://github.com/mozilla/MozDef/blob/master/cloudy_mozdef/ci/pack_and_copy>`_
script which does the following steps
script which does the following steps.
* Calls packer which launches an ec2 instance, executing a bunch of steps and
and producing an AMI
@ -179,19 +179,19 @@ __________________
* Within this ec2 instance, packer `clones the MozDef GitHub repo and checks
out the branch that triggered this build
<https://github.com/mozilla/MozDef/blob/c7a166f2e29dde8e5d71853a279fb0c47a48e1b2/cloudy_mozdef/packer/packer.json#L58-L60>`_
* packer replaces all instances of the word `latest` in the
<https://github.com/mozilla/MozDef/blob/c7a166f2e29dde8e5d71853a279fb0c47a48e1b2/cloudy_mozdef/packer/packer.json#L58-L60>`_.
* Packer replaces all instances of the word `latest` in the
`docker-compose-cloudy-mozdef.yml <https://github.com/mozilla/MozDef/blob/master/docker/compose/docker-compose-cloudy-mozdef.yml>`_
file with either the branch `master` or the version tag (e.g. `v1.2.3`)
* packer runs `docker-compose pull` on the
file with either the branch `master` or the version tag (e.g. `v1.2.3`).
* Packer runs `docker-compose pull` on the
`docker-compose-cloudy-mozdef.yml <https://github.com/mozilla/MozDef/blob/master/docker/compose/docker-compose-cloudy-mozdef.yml>`_
file to pull down both the docker images that were just built by AWS
CodeBuild and uploaded to Dockerhub as well as other non MozDef docker
images
images.
* After packer completes executing the steps laid out in `packer.json` inside
the ec2 instance, it generates an AMI from that instance and continues with
the copying, tagging and sharing steps described above
the copying, tagging and sharing steps described above.
* Now back in the AWS CodeBuild environment, the `deploy` script continues by
calling the
`publish-versioned-templates <https://github.com/mozilla/MozDef/blob/cfeafb77f9d4d4d8df02117a0ffca0ec9379a7d5/cloudy_mozdef/Makefile#L85-L87>`_
@ -205,7 +205,7 @@ __________________
CloudFormation template so that the template knows the AMI IDs of that
specific branch of code.
* uploads the CloudFormation templates to S3 in a directory either called
`master` or the tag version that was built (e.g. `v1.2.3`)
`master` or the tag version that was built (e.g. `v1.2.3`).
.. _docker/compose/docker-compose-tests.yml: https://github.com/mozilla/MozDef/blob/master/docker/compose/docker-compose-tests.yml
.. _tag-images: https://github.com/mozilla/MozDef/blob/cfeafb77f9d4d4d8df02117a0ffca0ec9379a7d5/Makefile#L109-L110

Просмотреть файл

@ -7,7 +7,7 @@ Cloud based MozDef is an opinionated deployment of the MozDef services created i
ingest CloudTrail, GuardDuty, and provide security services.
.. image:: images/cloudformation-launch-stack.png
:target: https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/v1.38.5/mozdef-parent.yml
:target: https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/v3.1.0/mozdef-parent.yml
Feedback
@ -32,18 +32,19 @@ MozDef requires the following:
- An OIDC Provider with ClientID, ClientSecret, and Discovery URL
- Mozilla uses Auth0 but you can use any OIDC provider you like: Shibboleth,
KeyCloak, AWS Cognito, Okta, Ping (etc.)
KeyCloak, AWS Cognito, Okta, Ping (etc.).
- You will need to configure the redirect URI of ``/redirect_uri`` as allowed in
your OIDC provider configuration
your OIDC provider configuration.
- An ACM Certificate in the deployment region for your DNS name
- A VPC with three public subnets available.
- A VPC with three public subnets available
- It is advised that this VPC be dedicated to MozDef or used solely for security automation.
- The three public subnets must all be in different `availability zones <https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#using-regions-availability-zones-describe>`_
and have a large enough number of IP addresses to accommodate the infrastructure
and have a large enough number of IP addresses to accommodate the infrastructure.
- The VPC must have an `internet gateway <https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Internet_Gateway.html>`_
enabled on it so that MozDef can reach the internet
- An SQS queue receiving GuardDuty events. At the time of writing this is not required but may be required in future.
enabled on it so that MozDef can reach the internet.
- An SQS queue receiving GuardDuty events
- At the time of writing this is not required but may be required in future.
Supported Regions

Просмотреть файл

@ -113,6 +113,13 @@ Then::
PYCURL_SSL_LIBRARY=nss pip install -r requirements.txt
If you're using Mac OS X::
export PYCURL_SSL_LIBRARY=openssl
export LDFLAGS=-L/usr/local/opt/openssl/lib;export CPPFLAGS=-I/usr/local/opt/openssl/include
pip install -r requirements.txt
Copy the following into a file called .bash_profile for the mozdef user within /opt/mozdef::
[mozdef@server ~]$ vim /opt/mozdef/.bash_profile

Просмотреть файл

@ -20,7 +20,7 @@ Goals
High level
**********
* Provide a platform for use by defenders to rapidly discover and respond to security incidents.
* Provide a platform for use by defenders to rapidly discover and respond to security incidents
* Automate interfaces to other systems like firewalls, cloud protections and anything that has an API
* Provide metrics for security events and incidents
* Facilitate real-time collaboration amongst incident handlers
@ -31,25 +31,25 @@ Technical
*********
* Offer micro services that make up an Open Source Security Information and Event Management (SIEM)
* Scalable, should be able to handle thousands of events per second, provide fast searching, alerting, correlation and handle interactions between teams of incident handlers.
* Scalable, should be able to handle thousands of events per second, provide fast searching, alerting, correlation and handle interactions between teams of incident handlers
MozDef aims to provide traditional SIEM functionality including:
* Accepting events/logs from a variety of systems
* Storing events/logs
* Facilitating searches
* Facilitating alerting
* Facilitating log management (archiving,restoration)
* Accepting events/logs from a variety of systems.
* Storing events/logs.
* Facilitating searches.
* Facilitating alerting.
* Facilitating log management (archiving,restoration).
It is non-traditional in that it:
* Accepts only JSON input
* Provides you open access to your data
* Accepts only JSON input.
* Provides you open access to your data.
* Integrates with a variety of log shippers including logstash, beaver, nxlog, syslog-ng and any shipper that can send JSON to either rabbit-mq or an HTTP(s) endpoint.
* Provides easy integration to Cloud-based data sources such as cloudtrail or guard duty
* Provides easy python plugins to manipulate your data in transit
* Provides extensive plug-in opportunities to customize your event enrichment stream, your alert workflow, etc
* Provides realtime access to teams of incident responders to allow each other to see their work simultaneously
* Provides easy integration to Cloud-based data sources such as CloudTrail or GuardDuty.
* Provides easy python plugins to manipulate your data in transit.
* Provides extensive plug-in opportunities to customize your event enrichment stream, your alert workflow, etc.
* Provides realtime access to teams of incident responders to allow each other to see their work simultaneously.
Architecture
@ -60,7 +60,7 @@ MozDef is based on open source technologies including:
* RabbitMQ (message queue and amqp(s)-based log input)
* uWSGI (supervisory control of python-based workers)
* bottle.py (simple python interface for web request handling)
* elasticsearch (scalable indexing and searching of JSON documents)
* Elasticsearch (scalable indexing and searching of JSON documents)
* Meteor (responsive framework for Node.js enabling real-time data sharing)
* MongoDB (scalable data store, tightly integrated to Meteor)
* VERIS from verizon (open source taxonomy of security incident categorizations)
@ -74,11 +74,11 @@ Frontend processing
Frontend processing for MozDef consists of receiving an event/log (in json) over HTTP(S), AMQP(S), or SQS
doing data transformation including normalization, adding metadata, etc. and pushing
the data to elasticsearch.
the data to Elasticsearch.
Internally MozDef uses RabbitMQ to queue events that are still to be processed.
The diagram below shows the interactions between the python scripts (controlled by uWSGI),
the RabbitMQ exchanges and elasticsearch indices.
the RabbitMQ exchanges and Elasticsearch indices.
.. image:: images/frontend_processing.png
@ -95,7 +95,7 @@ Initial Release:
* Facilitate replacing base SIEM functionality including log input, event management, search, alerts, basic correlations
* Enhance the incident workflow UI to enable realtime collaboration
* Enable basic plug-ins to the event input stream for meta data, additional parsing, categorization and basic machine learning
* Support as many common event/log shippers as possible with repeatable recipies
* Support as many common event/log shippers as possible with repeatable recipes
* Base integration into Mozilla's defense mechanisms for automation
* 3D visualizations of threat actors
* Fine tuning of interactions between meteor, mongo, dc.js
@ -106,7 +106,7 @@ Recently implemented:
* Docker containers for each service
* Updates to support recent (breaking) versions of Elasticsearch
Future (join us!):
Future (join us!):
* Correlation through machine learning, AI
* Enhanced search for alerts, events, attackers within the MozDef UI

Просмотреть файл

@ -131,11 +131,11 @@ Background
Mozilla used CEF as a logging standard for compatibility with Arcsight and for standardization across systems. While CEF is an admirable standard, MozDef prefers JSON logging for the following reasons:
* Every development language can create a JSON structure
* JSON is easily parsed by computers/programs which are the primary consumer of logs
* CEF is primarily used by Arcsight and rarely seen outside that platform and doesn't offer the extensibility of JSON
* Every development language can create a JSON structure.
* JSON is easily parsed by computers/programs which are the primary consumer of logs.
* CEF is primarily used by Arcsight and rarely seen outside that platform and doesn't offer the extensibility of JSON.
* A wide variety of log shippers (heka, logstash, fluentd, nxlog, beaver) are readily available to meet almost any need to transport logs as JSON.
* JSON is already the standard for cloud platforms like amazon's cloudtrail logging
* JSON is already the standard for cloud platforms like amazon's cloudtrail logging.
Description
***********
@ -288,8 +288,10 @@ Alerts are stored in the `alerts`_ folder.
There are two types of alerts:
* simple alerts that consider events on at a time. For example you may want to get an alert everytime a single LDAP modification is detected.
* aggregation alerts allow you to aggregate events on the field of your choice. For example you may want to alert when more than 3 login attempts failed for the same username.
* simple alerts that consider events on at a time
* For example you may want to get an alert everytime a single LDAP modification is detected.
* aggregation alerts that allow you to aggregate events on the field of your choice
* For example you may want to alert when more than 3 login attempts failed for the same username.
You'll find documented examples in the `alerts`_ folder.

Просмотреть файл

@ -7,7 +7,7 @@ Copyright (c) 2014 Mozilla Corporation
<template name="side_nav_menu">
<div class="container itemcontainer">
<div class="container headercontainer">
<nav class="main-menu">
{{#if true }}
<ul>

Просмотреть файл

@ -287,19 +287,6 @@ caption, legend {
color: var(--txt-secondary-color);
}
/* incident/investigation styles */
.daterangepicker, .daterangepicker td {
color: var(--txt-secondary-color);
}
.tabcontent {
display: none;
}
.tabcontent.active {
display: block;
}
textarea {
overflow: auto;
vertical-align: top;
@ -389,9 +376,14 @@ td {
}
.tabcontent {
display: none;
margin-top: 20px;
}
.tabcontent.active {
display: block;
}
.tabnav a {
color: rgb(173, 216, 230);
}

Просмотреть файл

@ -274,11 +274,11 @@ caption, legend {
}
.modal-header {
color: var(--font-focus);
color: var(--txt-secondary-color);
}
.modal-body {
color: var(--font-focus);
color: var(--txt-secondary-color);
}
.modal-body .row {
@ -343,12 +343,12 @@ td {
.welcome {
height: 180px;
width: 600px;
margin-left: 25%;
text-align: center;
color: var(--txt-primary-color);
vertical-align: middle;
height: 180px;
width: 600px;
margin-left: 25%;
text-align: center;
color: var(--txt-primary-color);
vertical-align: middle;
}
.mozdeflogo{
@ -357,11 +357,16 @@ td {
}
.tabcontent {
margin-top: 20px;
display: none;
margin-top: 20px;
}
.tabcontent.active {
display: block;
}
.tabnav a {
color: lightblue;
color: lightblue;
}
/* uncomment this login ui css to hide the local account/password signup options

Просмотреть файл

@ -260,14 +260,6 @@ caption, legend {
color: var(--txt-primary-color);
}
.tabcontent {
display: none;
}
.tabcontent.active {
display: block;
}
textarea {
overflow: auto;
vertical-align: top;
@ -343,12 +335,12 @@ td {
}
.welcome {
height: 180px;
width: 600px;
margin-left: 25%;
text-align: center;
color: var(--txt-primary-color);
vertical-align: middle;
height: 180px;
width: 600px;
margin-left: 25%;
text-align: center;
color: var(--txt-primary-color);
vertical-align: middle;
}
.mozdeflogo{
@ -357,11 +349,16 @@ td {
}
.tabcontent {
margin-top: 20px;
display: none;
margin-top: 20px;
}
.tabcontent.active {
display: block;
}
.tabnav a {
color: lightblue;
color: lightblue;
}
/* don't float the 'create account' link*/

Просмотреть файл

@ -21,8 +21,8 @@ Copyright (c) 2014 Mozilla Corporation
--txt-shadow-color: #576d54;
--arm-color: #e69006;
--arm-focus-color: #d58512;
--font-main: #fff;
--font-focus: #000;
--txt-primary-color: #fff;
--txt-secondary-color: #000;
--a-link-color: #a2a9b2;
}
@ -45,7 +45,7 @@ body{
/*margin: 0;*/
/*min-width: 990px;*/
padding: 0;
color: var(--font-main);
color: var(--txt-primary-color);
line-height: normal;
text-align: left;
}
@ -56,12 +56,12 @@ body{
/*mozdef custom */
.upperwhite {
color: var(--font-main);
color: var(--txt-primary-color);
text-transform: uppercase;
}
caption, legend {
color: var(--font-main);
color: var(--txt-primary-color);
}
.shadow {
@ -69,7 +69,7 @@ caption, legend {
}
.ipaddress {
color: var(--font-main);
color: var(--txt-primary-color);
font-style: normal;
font-weight: normal;
@ -101,7 +101,7 @@ caption, legend {
opacity: .3;
z-index: 2;
font-size: 13px;
color: var(--font-main);
color: var(--txt-primary-color);
}
#bottom-toolbar:hover {
@ -282,11 +282,11 @@ caption, legend {
}
.modal-header {
color: var(--font-focus);
color: var(--txt-secondary-color);
}
.modal-body {
color: var(--font-focus);
color: var(--txt-secondary-color);
}
.modal-body .row {
@ -297,7 +297,7 @@ caption, legend {
.btn {
border: 1px outset;
border-radius: 4px;
color: var(--font-main);
color: var(--txt-primary-color);
background-color: var(--arm-color);
}
@ -305,7 +305,7 @@ caption, legend {
.btn-warning:active,
.btn-warning:hover,
.open > .dropdown-toggle.btn-warning {
color: var(--font-focus);
color: var(--txt-secondary-color);
background-color: var(--arm-focus-color);
border-color: var(--arm-color);
}
@ -322,7 +322,7 @@ caption, legend {
.btn-notice {
border: 1px outset;
border-radius: 4px;
color: var(--font-focus);
color: var(--txt-secondary-color);
background-color: var(--ack-edit-color);
}
@ -330,13 +330,13 @@ caption, legend {
.btn-notice:active,
.btn-notice:hover,
.open > .dropdown-toggle.btn-notice {
color: var(--font-main);
color: var(--txt-primary-color);
background-color: var(--ack-edit-focus-color);
border-color: var(--ack-edit-border-color);
}
.btn-notice:disabled, button[disabled] {
color: var(--font-main);
color: var(--txt-primary-color);
background-color: var(--ack-edit-disabled-color);
border-color: var(--ack-edit-border-color);
}
@ -344,12 +344,12 @@ caption, legend {
.btn-generic {
border: 1px outset;
border-radius: 4px;
color: var(--font-focus);
color: var(--txt-secondary-color);
background-color: var(--ack-edit-color);
}
.btn-generic:focus {
color: var(--font-main);
color: var(--txt-primary-color);
background-color: #286090;
border-color: #204d74;
}
@ -358,7 +358,7 @@ caption, legend {
.btn-generic:active,
.btn-genric:hover,
.open > .dropdown-toggle.btn-generic {
color: var(--font-focus);
color: var(--txt-secondary-color);
background-color: var(--ack-edit-focus-color);
border-color: var(--ack-edit-border-color);
}
@ -394,11 +394,11 @@ input[type="search"] {
.table-hover tbody tr:hover > th,
.table-hover > tbody > tr:hover {
background-color: #9a9ea5;
color: var(--font-focus);
color: var(--txt-secondary-color);
}
td{
color: var(--font-main);
color: var(--txt-primary-color);
}
.welcome {
@ -407,7 +407,7 @@ td{
width: 600px;
margin-left: 25%;
text-align: center;
color: var(--font-focus);
color: var(--txt-secondary-color);
border: none;
vertical-align: middle;
}
@ -416,8 +416,13 @@ td{
width: 500px;
}
.tabcontent{
margin-top: 20px;
.tabcontent {
display: none;
margin-top: 20px;
}
.tabcontent.active {
display: block;
}
.tabnav a{
@ -468,17 +473,17 @@ td{
}
circle:hover{
fill: var(--font-main);
fill: var(--txt-primary-color);
}
.node {
stroke: var(--font-focus);
stroke: var(--txt-secondary-color);
stroke-width: 1.5px;
}
.textlabel{
stroke-width: .2px;
stroke: var(--font-focus);
stroke: var(--txt-secondary-color);
}
.vtagholders {

Просмотреть файл

@ -411,7 +411,6 @@ def initConfig():
# rabbit message queue options
options.mqserver = getConfig('mqserver', 'localhost', options.configfile)
options.taskexchange = getConfig('taskexchange', 'eventtask', options.configfile)
options.eventexchange = getConfig('eventexchange', 'events', options.configfile)
# rabbit: how many messages to ask for at once from the message queue
options.prefetch = getConfig('prefetch', 10, options.configfile)
# rabbit: user creds

Просмотреть файл

@ -354,7 +354,6 @@ def initConfig():
# rabbit message queue options
options.mqserver = getConfig('mqserver', 'localhost', options.configfile)
options.taskexchange = getConfig('taskexchange', 'eventtask', options.configfile)
options.eventexchange = getConfig('eventexchange', 'events', options.configfile)
# rabbit: how many messages to ask for at once from the message queue
options.prefetch = getConfig('prefetch', 10, options.configfile)
# rabbit: user creds

Просмотреть файл

@ -228,9 +228,10 @@ class message(object):
if logtype == 'dhcp':
if 'assigned_addr' not in newmessage['details']:
newmessage['details']['assigned_addr'] = '0.0.0.0'
newmessage['details']['assigned_addr'] = "0.0.0.0"
if 'mac' not in newmessage['details']:
newmessage['details']['mac'] = '00:00:00:00:00:00'
newmessage['details']['mac'] = "000000000000"
newmessage['details']['mac'] = newmessage['details']['mac'].replace(':', '')
newmessage['summary'] = (
'{assigned_addr} assigned to '
'{mac}'

Просмотреть файл

@ -23,11 +23,13 @@ class message(object):
'details.apiversion',
'details.serviceeventdetails',
'details.requestparameters.attribute',
'details.requestparameters.bucketpolicy.statement.principal',
'details.requestparameters.bucketpolicy.statement.principal.service',
'details.requestparameters.bucketpolicy.statement.principal.aws',
'details.requestparameters.callerreference',
'details.requestparameters.description',
'details.requestparameters.describeflowlogsrequest.filter.value',
'details.requestparameters.disableapitermination',
'details.requestparameters.distributionconfig.callerreference',
'details.requestparameters.domainname',
'details.requestparameters.domainnames',
'details.requestparameters.ebsoptimized',

Просмотреть файл

@ -19,13 +19,13 @@ class message(object):
def onMessage(self, message, metadata):
self.accepted_regex = re.compile('^(?P<authstatus>\w+) (?P<authmethod>\w+) for (?P<username>[a-zA-Z0-9\@._-]+) from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) port (?P<sourceport>\d{1,5}) ssh2(\:\sRSA\s)?(?:(?P<rsakeyfingerprint>(\S+)))?$')
self.session_opened_regex = re.compile('^pam_unix\(sshd\:session\)\: session (opened|closed) for user (?P<username>[a-zA-Z0-9\@._-]+)(?: by \(uid\=\d*\))?$')
self.postponed_regex = re.compile('^Postponed (?P<authmethod>\w+) for (?P<username>[a-zA-Z0-9\@._-]+) from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) port (?P<sourceport>\d{1,5}) ssh2(?: \[preauth\])?$')
self.starting_session_regex = re.compile('^Starting session: (?P<sessiontype>\w+)(?: on )?(?P<device>pts/0)? for (?P<username>[a-zA-Z0-9\@._-]+) from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) port (?P<sourceport>\d{1,5})$')
self.unauthorized_user_regex = re.compile('^Invalid user (?P<username>[a-zA-Z0-9\@._-]+) from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})?$')
self.userauth_request_regex = re.compile('^input_userauth_request: invalid user (?P<username>[a-zA-Z0-9\@._-]+) \[preauth\]')
self.disconnect_regex = re.compile('^Received disconnect from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}): (?P<sourceport>\d{1,5}): (|Bye Bye|Normal Shutdown, Thank you for playing) \[preauth\]')
self.accepted_regex = re.compile(r'^(?P<authstatus>\w+) (?P<authmethod>\w+) for (?P<username>[a-zA-Z0-9\@._-]+) from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) port (?P<sourceport>\d{1,5}) ssh2(\:\sRSA\s)?(?:(?P<rsakeyfingerprint>(\S+)))?$')
self.session_opened_regex = re.compile(r'^pam_unix\(sshd\:session\)\: session (opened|closed) for user (?P<username>[a-zA-Z0-9\@._-]+)(?: by \(uid\=\d*\))?$')
self.postponed_regex = re.compile(r'^Postponed (?P<authmethod>\w+) for (?P<username>[a-zA-Z0-9\@._-]+) from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) port (?P<sourceport>\d{1,5}) ssh2(?: \[preauth\])?$')
self.starting_session_regex = re.compile(r'^Starting session: (?P<sessiontype>\w+)(?: on )?(?P<device>pts/0)? for (?P<username>[a-zA-Z0-9\@._-]+) from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) port (?P<sourceport>\d{1,5})$')
self.unauthorized_user_regex = re.compile(r'^Invalid user (?P<username>[a-zA-Z0-9\@._-]+) from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})?$')
self.userauth_request_regex = re.compile(r'^input_userauth_request: invalid user (?P<username>[a-zA-Z0-9\@._-]+) \[preauth\]')
self.disconnect_regex = re.compile(r'^Received disconnect from (?P<sourceipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}): (?P<sourceport>\d{1,5}): (|Bye Bye|Normal Shutdown, Thank you for playing) \[preauth\]')
if 'processname' in message and message['processname'] == 'sshd':
msg_unparsed = message['summary']

Просмотреть файл

@ -22,7 +22,7 @@ class message(object):
def onMessage(self, message, metadata):
self.session_regexp = re.compile('^pam_unix\(su(?:-l)?\:session\)\: session (?P<status>\w+) for user (?P<username>\w+)(?: (?:by (?:(?P<originuser>\w+))?\(uid\=(?P<uid>[0-9]+)\)?)?)?$')
self.session_regexp = re.compile(r'^pam_unix\(su(?:-l)?\:session\)\: session (?P<status>\w+) for user (?P<username>\w+)(?: (?:by (?:(?P<originuser>\w+))?\(uid\=(?P<uid>[0-9]+)\)?)?)?$')
if 'details' in message:
if 'program' in message['details']:

Просмотреть файл

@ -11,7 +11,6 @@ import pynsive
import random
import re
import requests
import sys
import socket
import importlib
from bottle import route, run, response, request, default_app, post
@ -536,10 +535,10 @@ def kibanaDashboards():
})
except ElasticsearchInvalidIndex as e:
sys.stderr.write('Kibana dashboard index not found: {0}\n'.format(e))
logger.error('Kibana dashboard index not found: {0}\n'.format(e))
except Exception as e:
sys.stderr.write('Kibana dashboard received error: {0}\n'.format(e))
logger.error('Kibana dashboard received error: {0}\n'.format(e))
return json.dumps(resultsList)
@ -555,7 +554,7 @@ def getWatchlist():
# Log the entries we are removing to maintain an audit log
expired = watchlistentries.find({'dateExpiring': {"$lte": datetime.utcnow() - timedelta(hours=1)}})
for entry in expired:
sys.stdout.write('Deleting entry {0} from watchlist /n'.format(entry))
logger.debug('Deleting entry {0} from watchlist /n'.format(entry))
# delete any that expired
watchlistentries.delete_many({'dateExpiring': {"$lte": datetime.utcnow() - timedelta(hours=1)}})
@ -578,7 +577,7 @@ def getWatchlist():
)
return json.dumps(WatchList)
except ValueError as e:
sys.stderr.write('Exception {0} collecting watch list\n'.format(e))
logger.error('Exception {0} collecting watch list\n'.format(e))
def getWhois(ipaddress):
@ -591,7 +590,7 @@ def getWhois(ipaddress):
whois['fqdn']=socket.getfqdn(str(netaddr.IPNetwork(ipaddress)[0]))
return (json.dumps(whois))
except Exception as e:
sys.stderr.write('Error looking up whois for {0}: {1}\n'.format(ipaddress, e))
logger.error('Error looking up whois for {0}: {1}\n'.format(ipaddress, e))
def verisSummary(verisRegex=None):
@ -617,7 +616,7 @@ def verisSummary(verisRegex=None):
else:
return json.dumps(list())
except Exception as e:
sys.stderr.write('Exception while aggregating veris summary: {0}\n'.format(e))
logger.error('Exception while aggregating veris summary: {0}\n'.format(e))
def initConfig():

Просмотреть файл

@ -6,9 +6,10 @@
import requests
import json
import os
import sys
from configlib import getConfig, OptionParser
from mozdef_util.utilities.logger import logger
class message(object):
def __init__(self):
@ -41,7 +42,7 @@ class message(object):
self.configfile = './plugins/cymon.conf'
self.options = None
if os.path.exists(self.configfile):
sys.stdout.write('found conf file {0}\n'.format(self.configfile))
logger.debug('found conf file {0}\n'.format(self.configfile))
self.initConfiguration()
def onMessage(self, request, response):
@ -58,9 +59,8 @@ class message(object):
except ValueError:
response.status = 500
print(requestDict, requestDict.keys())
if 'ipaddress' in requestDict:
url="https://cymon.io/api/nexus/v1/ip/{0}/events?combined=true&format=json".format(requestDict['ipaddress'])
url = "https://cymon.io/api/nexus/v1/ip/{0}/events?combined=true&format=json".format(requestDict['ipaddress'])
# add the cymon api key?
if self.options is not None:

Просмотреть файл

@ -7,18 +7,19 @@ import os
import random
import requests
import re
import sys
from configlib import getConfig, OptionParser
from datetime import datetime, timedelta
from pymongo import MongoClient
from mozdef_util.utilities.logger import logger
def isFQDN(fqdn):
try:
# We could resolve FQDNs here, but that could tip our hand and it's
# possible us investigating could trigger other alerts.
# validate using the regex from https://github.com/yolothreat/utilitybelt
fqdn_re = re.compile('(?=^.{4,255}$)(^((?!-)[a-zA-Z0-9-]{1,63}(?<!-)\.)+[a-zA-Z]{2,63}$)', re.I | re.S | re.M)
fqdn_re = re.compile(r'(?=^.{4,255}$)(^((?!-)[a-zA-Z0-9-]{1,63}(?<!-)\.)+[a-zA-Z]{2,63}$)', re.I | re.S | re.M)
return bool(re.match(fqdn_re,fqdn))
except:
return False
@ -59,7 +60,7 @@ class message(object):
self.configfile = './plugins/fqdnblocklist.conf'
self.options = None
if os.path.exists(self.configfile):
sys.stdout.write('found conf file {0}\n'.format(self.configfile))
logger.debug('found conf file {0}\n'.format(self.configfile))
self.initConfiguration()
def parse_fqdn_whitelist(self, fqdn_whitelist_location):
@ -146,8 +147,8 @@ class message(object):
fqdnblock['creator'] = userID
fqdnblock['reference'] = referenceID
ref = fqdnblocklist.insert(fqdnblock)
sys.stdout.write('{0} written to db\n'.format(ref))
sys.stdout.write('%s: added to the fqdnblocklist table\n' % (fqdn))
logger.debug('{0} written to db\n'.format(ref))
logger.debug('%s: added to the fqdnblocklist table\n' % (fqdn))
# send to statuspage.io?
if len(self.options.statuspage_api_key) > 1:
@ -170,17 +171,17 @@ class message(object):
headers=headers,
data=post_data)
if response.ok:
sys.stdout.write('%s: notification sent to statuspage.io\n' % (fqdn))
logger.info('%s: notification sent to statuspage.io\n' % (fqdn))
else:
sys.stderr.write('%s: statuspage.io notification failed %s\n' % (fqdn, response.json()))
logger.error('%s: statuspage.io notification failed %s\n' % (fqdn, response.json()))
except Exception as e:
sys.stderr.write('Error while notifying statuspage.io for %s: %s\n' %(fqdn, e))
logger.error('Error while notifying statuspage.io for %s: %s\n' % (fqdn, e))
else:
sys.stderr.write('%s: is already present in the fqdnblocklist table\n' % (fqdn))
logger.error('%s: is already present in the fqdnblocklist table\n' % (fqdn))
else:
sys.stderr.write('%s: is not a valid fqdn\n' % (fqdn))
logger.error('%s: is not a valid fqdn\n' % (fqdn))
except Exception as e:
sys.stderr.write('Error while blocking %s: %s\n' % (fqdn, e))
logger.error('Error while blocking %s: %s\n' % (fqdn, e))
def onMessage(self, request, response):
'''
@ -203,28 +204,27 @@ class message(object):
# loop through the fields of the form
# and fill in our values
try:
for i in request.json:
for field in request.json:
# were we checked?
if self.name in i:
blockfqdn = i.values()[0]
if 'fqdn' in i:
fqdn = i.values()[0]
if 'duration' in i:
duration = i.values()[0]
if 'comment' in i:
comment = i.values()[0]
if 'referenceid' in i:
referenceID = i.values()[0]
if 'userid' in i:
userid = i.values()[0]
if self.name in field:
blockfqdn = field[self.name]
if 'fqdn' in field:
fqdn = field['fqdn']
if 'duration' in field:
duration = field['duration']
if 'comment' in field:
comment = field['comment']
if 'referenceid' in field:
referenceID = field['referenceid']
if 'userid' in field:
userid = field['userid']
if blockfqdn and fqdn is not None:
if isFQDN(fqdn):
whitelisted = False
for whitelist_fqdn in self.options.fqdnwhitelist:
if fqdn == whitelist_fqdn:
whitelisted = True
sys.stdout.write('{0} is whitelisted as part of {1}\n'.format(fqdn, whitelist_fqdn))
logger.debug('{0} is whitelisted as part of {1}\n'.format(fqdn, whitelist_fqdn))
if not whitelisted:
self.blockFQDN(
@ -234,15 +234,15 @@ class message(object):
referenceID,
userid
)
sys.stdout.write('added {0} to blocklist\n'.format(fqdn))
logger.debug('added {0} to blocklist\n'.format(fqdn))
else:
sys.stdout.write('not adding {0} to blocklist, it was found in whitelist\n'.format(fqdn))
logger.debug('not adding {0} to blocklist, it was found in whitelist\n'.format(fqdn))
else:
sys.stdout.write('not adding {0} to blocklist, invalid fqdn\n'.format(fqdn))
logger.error('not adding {0} to blocklist, invalid fqdn\n'.format(fqdn))
response.status = "400 invalid FQDN"
response.body = "invalid FQDN"
except Exception as e:
sys.stderr.write('Error handling request.json %r \n' % (e))
logger.error('Error handling request.json %r \n' % (e))
response.status = "500"
return (request, response)

Просмотреть файл

@ -7,10 +7,10 @@ import netaddr
import os
import random
import requests
import sys
from configlib import getConfig, OptionParser
from datetime import datetime, timedelta
from pymongo import MongoClient
from mozdef_util.utilities.logger import logger
def isIPv4(ip):
@ -70,14 +70,14 @@ class message(object):
self.configfile = './plugins/ipblocklist.conf'
self.options = None
if os.path.exists(self.configfile):
sys.stdout.write('found conf file {0}\n'.format(self.configfile))
logger.debug('found conf file {0}\n'.format(self.configfile))
self.initConfiguration()
def parse_network_whitelist(self, network_whitelist_location):
networks = []
with open(network_whitelist_location, "r") as text_file:
for line in text_file:
line=line.strip().strip("'").strip('"')
line = line.strip().strip("'").strip('"')
if isIPv4(line) or isIPv6(line):
networks.append(line)
return networks
@ -140,11 +140,11 @@ class message(object):
ipblock = ipblocklist.find_one({'ipaddress': str(ipcidr)})
if ipblock is None:
# insert
ipblock= dict()
ipblock = dict()
ipblock['_id'] = genMeteorID()
# str to get the ip/cidr rather than netblock cidr.
# i.e. '1.2.3.4/24' not '1.2.3.0/24'
ipblock['address']= str(ipcidr)
ipblock['address'] = str(ipcidr)
ipblock['dateAdded'] = datetime.utcnow()
# Compute start and end dates
# default
@ -166,8 +166,8 @@ class message(object):
ipblock['creator'] = userID
ipblock['reference'] = referenceID
ref = ipblocklist.insert(ipblock)
sys.stdout.write('{0} written to db\n'.format(ref))
sys.stdout.write('%s: added to the ipblocklist table\n' % (ipaddress))
logger.debug('{0} written to db\n'.format(ref))
logger.debug('%s: added to the ipblocklist table\n' % (ipaddress))
# send to statuspage.io?
if len(self.options.statuspage_api_key) > 1:
@ -190,17 +190,17 @@ class message(object):
headers=headers,
data=post_data)
if response.ok:
sys.stdout.write('%s: notification sent to statuspage.io\n' % (str(ipcidr)))
logger.debug('%s: notification sent to statuspage.io\n' % (str(ipcidr)))
else:
sys.stderr.write('%s: statuspage.io notification failed %s\n' % (str(ipcidr),response.json()))
logger.error('%s: statuspage.io notification failed %s\n' % (str(ipcidr), response.json()))
except Exception as e:
sys.stderr.write('Error while notifying statuspage.io for %s: %s\n' %(str(ipcidr),e))
logger.error('Error while notifying statuspage.io for %s: %s\n' % (str(ipcidr), e))
else:
sys.stderr.write('%s: is already present in the ipblocklist table\n' % (str(ipcidr)))
logger.error('%s: is already present in the ipblocklist table\n' % (str(ipcidr)))
else:
sys.stderr.write('%s: is not a valid ip address\n' % (ipaddress))
logger.error('%s: is not a valid ip address\n' % (ipaddress))
except Exception as e:
sys.stderr.write('Error while blocking %s: %s\n' % (ipaddress, e))
logger.exception('Error while blocking %s: %s\n' % (ipaddress, e))
def onMessage(self, request, response):
'''
@ -220,23 +220,23 @@ class message(object):
userid = None
blockip = False
# loop through the fields of the form
# and fill in our values
try:
for i in request.json:
# loop through the fields of the form
# and fill in our values
for field in request.json:
# were we checked?
if self.name in i:
blockip = i.values()[0]
if 'ipaddress' in i:
ipaddress = i.values()[0]
if 'duration' in i:
duration = i.values()[0]
if 'comment' in i:
comment = i.values()[0]
if 'referenceid' in i:
referenceID = i.values()[0]
if 'userid' in i:
userid = i.values()[0]
if self.name in field:
blockip = field[self.name]
if 'ipaddress' in field:
ipaddress = field['ipaddress']
if 'duration' in field:
duration = field['duration']
if 'comment' in field:
comment = field['comment']
if 'referenceid' in field:
referenceID = field['referenceid']
if 'userid' in field:
userid = field['userid']
if blockip and ipaddress is not None:
# figure out the CIDR mask
@ -251,7 +251,7 @@ class message(object):
whitelist_network = netaddr.IPNetwork(whitelist_range)
if ipcidr in whitelist_network:
whitelisted = True
sys.stdout.write('{0} is whitelisted as part of {1}\n'.format(ipcidr, whitelist_network))
logger.debug('{0} is whitelisted as part of {1}\n'.format(ipcidr, whitelist_network))
if not whitelisted:
self.blockIP(str(ipcidr),
@ -259,10 +259,10 @@ class message(object):
duration,
referenceID,
userid)
sys.stdout.write('added {0} to blocklist\n'.format(ipaddress))
logger.info('added {0} to blocklist\n'.format(ipaddress))
else:
sys.stdout.write('not adding {0} to blocklist, it was found in whitelist\n'.format(ipaddress))
logger.info('not adding {0} to blocklist, it was found in whitelist\n'.format(ipaddress))
except Exception as e:
sys.stderr.write('Error handling request.json %r \n'% (e))
logger.error('Error handling request.json %r \n' % (e))
return (request, response)

Просмотреть файл

@ -5,12 +5,12 @@
import json
import os
import sys
from configlib import getConfig, OptionParser
from datetime import datetime, timedelta
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import SearchQuery, RangeMatch, Aggregation, ExistsMatch, PhraseMatch
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.utilities.logger import logger
class message(object):
@ -44,7 +44,7 @@ class message(object):
self.configfile = './plugins/logincounts.conf'
self.options = None
if os.path.exists(self.configfile):
sys.stdout.write('found conf file {0}\n'.format(self.configfile))
logger.debug('found conf file {0}\n'.format(self.configfile))
self.initConfiguration()
def onMessage(self, request, response):

Просмотреть файл

@ -4,11 +4,12 @@
# Copyright (c) 2014 Mozilla Corporation
import os
import sys
import configparser
import netaddr
from boto3.session import Session
from mozdef_util.utilities.logger import logger
def isIPv4(ip):
try:
@ -63,7 +64,7 @@ class message(object):
self.options = None
self.multioptions = []
if os.path.exists(self.configfile):
sys.stdout.write('found conf file {0}\n'.format(self.configfile))
logger.debug('found conf file {0}\n'.format(self.configfile))
self.initConfiguration()
def initConfiguration(self):
@ -100,7 +101,7 @@ class message(object):
if len(routetable['Associations']) > 0:
if 'SubnetId' in routetable['Associations'][0]:
subnet_id = routetable['Associations'][0]['SubnetId']
sys.stdout.write('{0} {1}\n'.format(rt_id, vpc_id))
logger.debug('{0} {1}\n'.format(rt_id, vpc_id))
response = client.describe_network_interfaces(
Filters=[
@ -131,10 +132,10 @@ class message(object):
]
)
sys.stdout.write('{0}\n'.format(response))
logger.debug('{0}\n'.format(response))
if len(response['NetworkInterfaces']) > 0:
bheni_id = response['NetworkInterfaces'][0]['NetworkInterfaceId']
sys.stdout.write('{0} {1} {2}\n'.format(rt_id, vpc_id, bheni_id))
logger.debug('{0} {1} {2}\n'.format(rt_id, vpc_id, bheni_id))
# get a handle to a route table associated with a netsec-private subnet
route_table = ec2.RouteTable(rt_id)
@ -144,11 +145,11 @@ class message(object):
NetworkInterfaceId=bheni_id,
)
else:
sys.stdout.write('Skipping route table {0} in the VPC {1} - blackhole ENI could not be found\n'.format(rt_id, vpc_id))
logger.debug('Skipping route table {0} in the VPC {1} - blackhole ENI could not be found\n'.format(rt_id, vpc_id))
continue
except Exception as e:
sys.stderr.write('Error while creating a blackhole entry %s: %r\n' % (ipaddress, e))
logger.error('Error while creating a blackhole entry %s: %r\n' % (ipaddress, e))
def onMessage(self, request, response):
'''
@ -163,29 +164,28 @@ class message(object):
# loop through the fields of the form
# and fill in our values
try:
for i in request.json:
for field in request.json:
# were we checked?
if self.name in i:
sendToBHVPC = i.values()[0]
if 'ipaddress' in i:
ipaddress = i.values()[0]
if self.name in field:
sendToBHVPC = field[self.name]
if 'ipaddress' in field:
ipaddress = field['ipaddress']
# are we configured?
if self.multioptions is None:
sys.stderr.write("Customs server blockip requested but not configured\n")
logger.error("Customs server blockip requested but not configured\n")
sendToBHVPC = False
if sendToBHVPC and ipaddress is not None:
# figure out the CIDR mask
if isIPv4(ipaddress) or isIPv6(ipaddress):
ipcidr=netaddr.IPNetwork(ipaddress)
ipcidr = netaddr.IPNetwork(ipaddress)
if not ipcidr.ip.is_loopback() \
and not ipcidr.ip.is_private() \
and not ipcidr.ip.is_reserved():
ipaddress = str(ipcidr.cidr)
self.addBlackholeEntry(ipaddress)
sys.stdout.write('Blackholed {0}\n'.format(ipaddress))
logger.info('Blackholed {0}\n'.format(ipaddress))
except Exception as e:
sys.stderr.write('Error handling request.json %r \n'% (e))
logger.error('Error handling request.json %r \n' % (e))
return (request, response)

Просмотреть файл

@ -5,11 +5,12 @@
import os
import random
import sys
from configlib import getConfig, OptionParser
from datetime import datetime, timedelta
from pymongo import MongoClient
from mozdef_util.utilities.logger import logger
def genMeteorID():
    # Build a Meteor-style object id: a random value below 16**24,
    # rendered as a zero-padded 24-character lowercase hex string.
    return '{0:024x}'.format(random.randrange(16**24))
@ -43,7 +44,7 @@ class message(object):
self.configfile = './plugins/watchlist.conf'
self.options = None
if os.path.exists(self.configfile):
sys.stdout.write('found conf file {0}\n'.format(self.configfile))
logger.debug('found conf file {0}\n'.format(self.configfile))
self.initConfiguration()
def initConfiguration(self):
@ -100,13 +101,13 @@ class message(object):
watched['creator']=userID
watched['reference']=referenceID
ref=watchlist.insert(watched)
sys.stdout.write('{0} written to db.\n'.format(ref))
sys.stdout.write('%s added to the watchlist table.\n' % (watchcontent))
logger.debug('{0} written to db.\n'.format(ref))
logger.debug('%s added to the watchlist table.\n' % (watchcontent))
else:
sys.stderr.write('%s is already present in the watchlist table\n' % (str(watchcontent)))
logger.error('%s is already present in the watchlist table\n' % (str(watchcontent)))
except Exception as e:
sys.stderr.write('Error while watching %s: %s\n' % (watchcontent, e))
logger.error('Error while watching %s: %s\n' % (watchcontent, e))
def onMessage(self, request, response):
'''
@ -125,24 +126,22 @@ class message(object):
# loop through the fields of the form
# and fill in our values
try:
for i in request.json:
# were we checked?
if self.name in i.keys():
watchitem = i.values()[0]
if 'watchcontent' in i.keys():
watchcontent = i.values()[0]
if 'duration' in i.keys():
duration = i.values()[0]
if 'comment' in i.keys():
comment = i.values()[0]
if 'referenceid' in i.keys():
referenceID = i.values()[0]
if 'userid' in i.keys():
userid = i.values()[0]
for field in request.json:
if self.name in field:
watchitem = field[self.name]
if 'watchcontent' in field:
watchcontent = field['watchcontent']
if 'duration' in field:
duration = field['duration']
if 'comment' in field:
comment = field['comment']
if 'referenceid' in field:
referenceID = field['referenceid']
if 'userid' in field:
userid = field['userid']
if watchitem and watchcontent is not None:
if len(watchcontent) < 2:
sys.stderr.write('{0} does not meet requirements. Not added. \n'.format(watchcontent))
logger.error('{0} does not meet requirements. Not added. \n'.format(watchcontent))
else:
self.watchItem(str(watchcontent),
@ -152,6 +151,6 @@ class message(object):
userid)
except Exception as e:
sys.stderr.write('Error handling request.json %r \n'% (e))
logger.error('Error handling request.json %r \n' % (e))
return (request, response)

Просмотреть файл

@ -0,0 +1,99 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from .positive_alert_test_case import PositiveAlertTestCase
from .negative_alert_test_case import NegativeAlertTestCase
from .alert_test_suite import AlertTestSuite
class TestAlertLdapPasswordSpray(AlertTestSuite):
    """Test suite for the ldap_password_spray alert.

    Covers positive cases (one event, and two duplicate events for dedup)
    and negative cases (successful binds, a non-ldap category, and events
    timestamped 241 minutes in the past).
    """
    alert_filename = "ldap_password_spray"

    # This event is the default positive event that will cause the
    # alert to trigger
    default_event = {
        "_source": {
            "category": "ldap",
            "details": {
                "client": "1.2.3.4",
                "requests": [
                    {
                        'verb': 'BIND',
                        'details': [
                            'dn="mail=jsmith@example.com,o=com,dc=example"',
                            'method=128'
                        ]
                    }
                ],
                "response": {
                    "error": 'LDAP_INVALID_CREDENTIALS',
                }
            }
        }
    }

    # This alert is the expected result from running this task
    default_alert = {
        "category": "ldap",
        "tags": ["ldap"],
        "severity": "WARNING",
        "summary": "LDAP Password Spray Attack in Progress from 1.2.3.4 targeting the following account(s): jsmith@example.com",
    }

    # This alert is the expected result from this task against multiple matching events
    # (same summary as the single-event alert — duplicates are deduplicated)
    default_alert_aggregated = AlertTestSuite.copy(default_alert)
    default_alert_aggregated[
        "summary"
    ] = "LDAP Password Spray Attack in Progress from 1.2.3.4 targeting the following account(s): jsmith@example.com"

    test_cases = []

    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default events and default alert expected",
            events=AlertTestSuite.create_events(default_event, 1),
            expected_alert=default_alert,
        )
    )

    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default events and default alert expected - dedup",
            events=AlertTestSuite.create_events(default_event, 2),
            expected_alert=default_alert,
        )
    )

    # Successful binds (LDAP_SUCCESS) must not trigger the alert.
    events = AlertTestSuite.create_events(default_event, 10)
    for event in events:
        event["_source"]["details"]["response"]["error"] = "LDAP_SUCCESS"
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test with default negative event", events=events
        )
    )

    # Events whose category is not "ldap" must not trigger the alert.
    events = AlertTestSuite.create_events(default_event, 10)
    for event in events:
        event["_source"]["category"] = "bad"
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with events with incorrect category",
            events=events,
        )
    )

    # Events timestamped 241 minutes in the past must not trigger the alert
    # (presumably just outside the alert's search window — TODO confirm
    # against the ldap_password_spray alert's configured timeframe).
    events = AlertTestSuite.create_events(default_event, 10)
    for event in events:
        event["_source"][
            "utctimestamp"
        ] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
        event["_source"][
            "receivedtimestamp"
        ] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with old timestamp", events=events
        )
    )

Просмотреть файл

@ -4,11 +4,11 @@ from .negative_test_suite import NegativeTestSuite
from mozdef_util.query_models import QueryStringMatch
hostname_test_regex = 'hostname: /(.*\.)*(groupa|groupb)\.(.*\.)*subdomain\.(.*\.)*.*/'
filename_matcher = 'summary: /.*\.(exe|sh)/'
hostname_test_regex = r'hostname: /(.*\.)*(groupa|groupb)\.(.*\.)*subdomain\.(.*\.)*.*/'
filename_matcher = r'summary: /.*\.(exe|sh)/'
# Note that this has potential for over-matching on foo.bar.baz.com, which needs further validation in alerts
ip_matcher = 'destination: /.*\..{1,3}\..{1,3}\..{1,3}(:.*|\/.*)/'
ip_matcher = r'destination: /.*\..{1,3}\..{1,3}\..{1,3}(:.*|\/.*)/'
class TestQueryStringMatchPositiveTestSuite(PositiveTestSuite):

Просмотреть файл

@ -550,7 +550,7 @@ class TestBroFixup(object):
"uids": ["C6uJBE1z3CKfrA9FE4", "CdCBtl1fKEIMNvebrb", "CNJJ9g1HgefKR09ied", "CuXKNM1R5MEJ9GsMIi", "CMIYsm2weaHvzBRJIi", "C0vslbmXr3Psyy5Ff", "Ct0BRQ2Y84MWhag1Ik", "C5BNK71HlfhlXf8Pq", "C5ZrPG3DfQNzsiUMi2", "CMJHze3BH9o7yg9yM6", "CMSyg03ZZcdic8pTMc"],
"client_addr": "10.251.255.10",
"server_addr": "10.251.24.1",
"mac": "f0:18:98:55:0e:0e",
"mac": "f01898550e0e",
"host_name": "aliczekkroliczek",
"domain": "ala.ma.kota",
"assigned_addr": "10.251.30.202",
@ -569,7 +569,7 @@ class TestBroFixup(object):
if not key.startswith('id.'):
assert key in result['details']
assert MESSAGE[key] == result['details'][key]
assert result['summary'] == '10.251.30.202 assigned to f0:18:98:55:0e:0e'
assert result['summary'] == '10.251.30.202 assigned to f01898550e0e'
def test_dhcp_log2(self):
event = {
@ -595,7 +595,7 @@ class TestBroFixup(object):
if not key.startswith('id.'):
assert key in result['details']
assert MESSAGE[key] == result['details'][key]
assert result['summary'] == '0.0.0.0 assigned to 00:00:00:00:00:00'
assert result['summary'] == '0.0.0.0 assigned to 000000000000'
def test_ftp_log(self):
event = {

Просмотреть файл

@ -352,18 +352,18 @@ class TestSuricataFixup(object):
"start":"2018-10-12T22:24:09.546736+0000"
},
"payload":"Q09OTkVDVCBzZWN1cml0eS10cmFja2VyLmRlYmlhbi5vcmc6NDQzIEhUVFAvMS4xDQpIb3N0OiBzZWN1cml0eS10cmFja2VyLmRlYmlhbi5vcmc6NDQzDQpVc2VyLUFnZW50OiBHby1odHRwLWNsaWVudC8xLjENCg0K",
"payload_printable":"CONNECT security-tracker.debian.org:443 HTTP\/1.1\r\nHost: security-tracker.debian.org:443\r\nUser-Agent: Go-http-client\/1.1\r\n\r\n",
"payload_printable":"CONNECT security-tracker.debian.org:443 HTTP/1.1\r\nHost: security-tracker.debian.org:443\r\nUser-Agent: Go-http-client/1.1\r\n\r\n",
"stream":0,
"packet":"RQAAKAAAAABABgAACjBLMAowShHR6Aw4ClEmlrx\/mcdQEgoAAAAAAA==",
"packet":"RQAAKAAAAABABgAACjBLMAowShHR6Aw4ClEmlrx/mcdQEgoAAAAAAA==",
"packet_info":{
"linktype":12
},
"http": {
"hostname":"security-tracker.debian.org",
"url":"security-tracker.debian.org:443",
"http_user_agent":"Go-http-client\/1.1",
"http_user_agent":"Go-http-client/1.1",
"http_method":"CONNECT",
"protocol":"HTTP\/1.1",
"protocol":"HTTP/1.1",
"status":200,
"length":0,
"redirect":"afakedestination"
@ -428,7 +428,7 @@ class TestSuricataFixup(object):
"url":"/allyourfiles",
"http_user_agent":"FirefoxRulez",
"http_method":"GET",
"protocol":"HTTP\/1.2",
"protocol":"HTTP/1.2",
"status":200,
"length":5000,
"redirect":"afakedestination"