Merge remote-tracking branch 'origin/master' into update_slack_bot

Brandon Myers 2019-01-16 15:41:01 -06:00
Parents: 52f0a9b8bb c6866d5f5a
Commit: 905853675b
No key found matching this signature
GPG key ID: 8AA79AD83045BBC7
71 changed files with 1156 additions and 293 deletions

View file

@@ -24,7 +24,7 @@ MozDef is in production at Mozilla where we are using it to process over 300 mil
 ## Give MozDef a Try in AWS:
-[![Launch MozDef](docs/source/images/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/mozdef.infosec.allizom.org/cf/mozdef-parent.yml)
+[![Launch MozDef](docs/source/images/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/mozdef-parent.yml)
 ## Documentation:

View file

@@ -15,7 +15,7 @@ class AlertAuditdCommands(AlertTask):
         search_query = SearchQuery(minutes=30)
         auditd_match = TermMatch('category', 'auditd')
-        auditd_match |= TermMatch('tags', 'audit')
+        auditd_match |= TermMatch('category', 'execve')
         search_query.add_must(auditd_match)
         command_names_matcher = None

View file

@@ -18,7 +18,7 @@ class AlertBruteforceSsh(AlertTask):
         search_query.add_must([
             PhraseMatch('summary', 'failed'),
             TermMatch('details.program', 'sshd'),
-            TermsMatch('summary', ['login', 'invalid', 'ldap_count_entries', 'publickey'])
+            TermsMatch('summary', ['login', 'invalid', 'ldap_count_entries', 'publickey', 'keyboard'])
         ])
         for ip_address in self.config.skiphosts.split():

View file

@@ -59,12 +59,16 @@ app.config_from_object('celeryconfig', force=True)
 # As a result of celery 3 to celery 4, we need to dynamically
 # register all of the alert tasks specifically
 for alert_namespace in CELERYBEAT_SCHEDULE:
-    alert_tokens = alert_namespace.split('.')
-    alert_module_name = alert_tokens[0]
-    alert_classname = alert_tokens[1]
-    alert_module = import_module(alert_module_name)
-    alert_class = getattr(alert_module, alert_classname)
-    app.register_task(alert_class())
+    try:
+        alert_tokens = alert_namespace.split('.')
+        alert_module_name = alert_tokens[0]
+        alert_classname = alert_tokens[1]
+        alert_module = import_module(alert_module_name)
+        alert_class = getattr(alert_module, alert_classname)
+        app.register_task(alert_class())
+    except Exception as e:
+        print "Error addding alert"
+        print e
 
 if __name__ == '__main__':
     app.start()
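
For readers unfamiliar with the registration trick above: each key in CELERYBEAT_SCHEDULE is a 'module.ClassName' string, and the loop imports the module and instantiates the class so Celery 4 can register it as a task. A minimal, standalone sketch of that lookup, using a hypothetical schedule key (not taken verbatim from the repo):

    from importlib import import_module

    # Hypothetical schedule entry; real keys live in celeryconfig's CELERYBEAT_SCHEDULE.
    CELERYBEAT_SCHEDULE = {'bruteforce_ssh.AlertBruteforceSsh': {'schedule': 60}}

    for alert_namespace in CELERYBEAT_SCHEDULE:
        try:
            module_name, class_name = alert_namespace.split('.')
            alert_class = getattr(import_module(module_name), class_name)
            # the real code calls app.register_task(alert_class()) here
            print('registered {0}'.format(class_name))
        except Exception as e:
            # a broken entry is reported and skipped instead of aborting worker start-up
            print('Error adding alert: {0}'.format(e))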

View file

@@ -15,10 +15,10 @@ class AlertCloudtrailLoggingDisabled(AlertTask):
         search_query.add_must([
             TermMatch('source', 'cloudtrail'),
-            TermMatch('eventName', 'StopLogging')
+            TermMatch('eventname', 'StopLogging')
         ])
-        search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
+        search_query.add_must_not(TermMatch('errorcode', 'AccessDenied'))
         self.filtersManual(search_query)
         self.searchEventsSimple()
@@ -29,6 +29,6 @@ class AlertCloudtrailLoggingDisabled(AlertTask):
         tags = ['cloudtrail', 'aws', 'cloudtrailpagerduty']
         severity = 'CRITICAL'
-        summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
+        summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestparameters']['name']
         return self.createAlertDict(summary, category, tags, [event], severity)

View file

@@ -13,7 +13,7 @@ class AlertDuoAuthFail(AlertTask):
         search_query = SearchQuery(minutes=15)
         search_query.add_must([
-            TermMatch('category', 'event'),
+            TermMatch('category', 'authentication'),
             ExistsMatch('details.sourceipaddress'),
             ExistsMatch('details.username'),
             PhraseMatch('details.result', 'FRAUD')

View file

@@ -0,0 +1,4 @@
+{
+    "sourcemustmatch":"[10.0.0.0 TO 10.255.255.255]",
+    "sourcemustnotmatch":"10.33.44.54 OR 10.88.77.54 OR 10.76.54.54 OR 10.251.30.138 OR 10.54.65.234"
+}

View file

@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# Copyright (c) 2018 Mozilla Corporation
+
+from lib.alerttask import AlertTask, add_hostname_to_ip
+from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch
+
+
+class NSMScanAddress(AlertTask):
+    def __init__(self):
+        AlertTask.__init__(self)
+        self._config = self.parse_json_alert_config('nsm_scan_address.json')
+
+    def main(self):
+        search_query = SearchQuery(minutes=1)
+        search_query.add_must([
+            TermMatch('category', 'bro'),
+            TermMatch('details.source', 'notice'),
+            PhraseMatch('details.note', 'Scan::Address_Scan'),
+            QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustmatch']))
+        ])
+        search_query.add_must_not([
+            QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustnotmatch']))
+        ])
+        self.filtersManual(search_query)
+        self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
+        self.walkAggregations(threshold=1)
+
+    def onAggregation(self, aggreg):
+        category = 'nsm'
+        severity = 'NOTICE'
+        tags = ['nsm', "bro", 'addressscan']
+        indicators = 'unknown'
+        x = aggreg['events'][0]['_source']
+        if 'details' in x:
+            if 'indicators' in x['details']:
+                indicators = x['details']['sourceipaddress']
+        indicators_info = add_hostname_to_ip(indicators, '{0} ({1})', require_internal=False)
+        summary = 'Address scan from {}'.format(indicators_info)
+        return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
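
The two values in nsm_scan_address.json are Lucene query-string fragments, so the alert ends up with an include filter over the 10.0.0.0/8 range and an exclude filter listing known scanners. A small sketch of the string substitution the alert performs (the query objects are shown only as the strings they wrap):

    import json

    # Values copied from nsm_scan_address.json above.
    config = json.loads("""
    {
        "sourcemustmatch": "[10.0.0.0 TO 10.255.255.255]",
        "sourcemustnotmatch": "10.33.44.54 OR 10.88.77.54 OR 10.76.54.54 OR 10.251.30.138 OR 10.54.65.234"
    }
    """)

    # The alert interpolates these into Lucene query strings for QueryStringMatch.
    must = 'details.sourceipaddress: {}'.format(config['sourcemustmatch'])
    must_not = 'details.sourceipaddress: {}'.format(config['sourcemustnotmatch'])
    print(must)      # details.sourceipaddress: [10.0.0.0 TO 10.255.255.255]
    print(must_not)  # details.sourceipaddress: 10.33.44.54 OR 10.88.77.54 OR ...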

View file

@@ -0,0 +1,4 @@
+{
+    "sourcemustmatch":"[10.0.0.0 TO 10.255.255.255]",
+    "sourcemustnotmatch":"10.33.44.54 OR 10.88.77.54 OR 10.76.54.54 OR 10.251.30.138 OR 10.54.65.234"
+}

alerts/nsm_scan_random.py (new file, 46 lines)

View file

@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# Copyright (c) 2018 Mozilla Corporation
+
+from lib.alerttask import AlertTask
+from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch
+
+
+class NSMScanRandom(AlertTask):
+    def __init__(self):
+        AlertTask.__init__(self)
+        self._config = self.parse_json_alert_config('nsm_scan_random.json')
+
+    def main(self):
+        search_query = SearchQuery(minutes=1)
+        search_query.add_must([
+            TermMatch('category', 'bro'),
+            TermMatch('details.source', 'notice'),
+            PhraseMatch('details.note', 'Scan::Random_Scan'),
+            QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustmatch']))
+        ])
+        search_query.add_must_not([
+            QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustnotmatch']))
+        ])
+        self.filtersManual(search_query)
+        self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
+        self.walkAggregations(threshold=1)
+
+    def onAggregation(self, aggreg):
+        category = 'nsm'
+        severity = 'WARNING'
+        tags = ['nsm', "bro", 'randomscan']
+        indicators = 'unknown'
+        x = aggreg['events'][0]['_source']
+        if 'details' in x:
+            if 'indicators' in x['details']:
+                indicators = x['details']['sourceipaddress']
+        summary = 'Random scan from {}'.format(indicators)
+        return self.createAlertDict(summary, category, tags, aggreg['events'], severity)

View file

@@ -32,12 +32,12 @@ class TraceAudit(AlertTask):
         severity = 'WARNING'
         tags = ['audit']
-        summary = ('{0} instances of Strace or Ptrace executed on a system by {1}'.format(aggreg['count'], aggreg['value'], ))
-        hostnames = self.mostCommon(aggreg['allevents'], '_source.hostname')
-        # did they modify more than one host?
-        # or just modify an existing configuration more than once?
-        if len(hostnames) > 1:
-            for i in hostnames[:5]:
-                summary += ' on {0} ({1} hosts)'.format(i[0], i[1])
+        hosts = set([event['_source']['hostname'] for event in aggreg['events']])
+        summary = '{0} instances of Strace or Ptrace executed by {1} on {2}'.format(
+            aggreg['count'],
+            aggreg['value'],
+            ','.join(hosts)
+        )
         return self.createAlertDict(summary, category, tags, aggreg['events'], severity)

View file

@@ -1,2 +1,3 @@
 [options]
 skipprocess = process1 process2
+expectedusers = user1 user2

View file

@@ -13,7 +13,7 @@ from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
 class WriteAudit(AlertTask):
     def main(self):
-        self.parse_config('write_audit.conf', ['skipprocess'])
+        self.parse_config('write_audit.conf', ['skipprocess', 'expectedusers'])
         search_query = SearchQuery(minutes=15)
         search_query.add_must([
@@ -33,7 +33,22 @@ class WriteAudit(AlertTask):
         severity = 'WARNING'
         tags = ['audit']
-        summary = ('{0} Filesystem write(s) to an auditd path by {1}'.format(aggreg['count'], aggreg['value'], ))
+        users = set()
+        paths = set()
+        for event in aggreg['events']:
+            users.add(event['_source']['details']['user'])
+            paths.add(event['_source']['summary'].split(' ')[1])
+
+        summary = '{0} Filesystem write(s) to an auditd path ({1}) by {2} ({3})'.format(
+            aggreg['count'],
+            ', '.join(paths),
+            ', '.join(users),
+            aggreg['value']
+        )
+        if aggreg['value'] in self.config.expectedusers.split(' '):
+            severity = 'NOTICE'
+
         hostnames = self.mostCommon(aggreg['allevents'], '_source.hostname')
         # did they modify more than one host?
         # or just modify an existing configuration more than once?
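
Put differently, the new onAggregation code collects the distinct users and written paths out of the aggregated events and downgrades the severity to NOTICE when the aggregation key is listed in expectedusers. A self-contained sketch of that decision with made-up event data (field names follow the diff; the sample values are invented):

    # Aggregation shaped like the fields used above; values are invented.
    aggreg = {
        'count': 2,
        'value': 'user1',
        'events': [
            {'_source': {'details': {'user': 'user1'}, 'summary': 'write /etc/audit/audit.rules denied'}},
            {'_source': {'details': {'user': 'user1'}, 'summary': 'write /etc/passwd allowed'}},
        ],
    }
    expectedusers = 'user1 user2'  # as configured in write_audit.conf

    users = set(e['_source']['details']['user'] for e in aggreg['events'])
    paths = set(e['_source']['summary'].split(' ')[1] for e in aggreg['events'])

    severity = 'WARNING'
    if aggreg['value'] in expectedusers.split(' '):
        severity = 'NOTICE'  # writes by an expected user are informational

    print('{0}: {1} write(s) to {2} by {3}'.format(severity, aggreg['count'], ', '.join(paths), ', '.join(users)))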

View file

@@ -7,9 +7,14 @@ STACK_PARAMS_FILENAME := aws_parameters.json
 STACK_PARAMS := $(shell test -e $(STACK_PARAMS_FILENAME) && python -c 'import json,sys;f=open(sys.argv[1]);print(" ".join([",".join(["%s=\\\"%s\\\""%(k,v) for k,v in x.items()]) for x in json.load(f)]));f.close()' $(STACK_PARAMS_FILENAME))
 # MozDef uses a nested CF stack, the mozdef-parent.yml will tie all child stacks together and load them from S3
 # See also mozdef.infosec.mozilla.org bucket
 S3_BUCKET_NAME := mozdef.infosec.allizom.org
 S3_BUCKET_PATH := cf
 S3_BUCKET_URI := s3://$(S3_BUCKET_NAME)/$(S3_BUCKET_PATH)
+# Location to publish templates for public consumption
+S3_PUBLISHED_BUCKET_NAME := public.us-west-2.infosec.mozilla.org
+S3_PUBLISHED_BUCKET_PATH := mozdef/cf
+S3_PUBLISHED_BUCKET_URI := s3://$(S3_PUBLISHED_BUCKET_NAME)/$(S3_PUBLISHED_BUCKET_PATH)
 S3_STACK_URI := https://s3-$(AWS_REGION).amazonaws.com/$(S3_BUCKET_NAME)/$(S3_BUCKET_PATH)/
 # OIDC_CLIENT_SECRET is set in an environment variable by running "source aws_parameters.sh"
 OIDC_CLIENT_SECRET_PARAM_ARG := $(shell test -n "$(OIDC_CLIENT_SECRET)" && echo "ParameterKey=OIDCClientSecret,ParameterValue=$(OIDC_CLIENT_SECRET)")
@@ -63,4 +68,9 @@ stack-status: ## Output current CloudFormation stack status
 .PHONY: upload-templates
 upload-templates:
 	@export AWS_REGION=$(AWS_REGION)
-	aws s3 sync cloudformation/ $(S3_BUCKET_URI) --acl public-read
+	aws s3 sync cloudformation/ $(S3_BUCKET_URI) --acl public-read --exclude="*" --include="*.yml"
+
+.PHONY: publish-templates
+publish-templates:
+	@export AWS_REGION=$(AWS_REGION)
+	aws s3 sync cloudformation/ $(S3_PUBLISHED_BUCKET_URI) --exclude="*" --include="*.yml"

View file

@@ -90,9 +90,10 @@ Parameters:
     NoEcho: true
   S3TemplateLocation:
     Type: String
-    AllowedPattern: '^https?:\/\/.*\.amazonaws\.com\/.*'
+    AllowedPattern: '^https?:\/\/.*\.amazonaws\.com\/.*\/'
     ConstraintDescription: A valid amazonaws.com S3 URL
     Description: "The URL to the S3 bucket used to fetch the nested stack templates (Example: https://s3-us-west-2.amazonaws.com/example-bucket-name/cloudformation/path/)"
+    Default: https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/
 Resources:
   MozDefSecurityGroups:
     Type: AWS::CloudFormation::Stack
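
The tightened AllowedPattern now requires the template location to end with a slash, matching the new Default; nested-stack URLs are built by appending file names to this prefix. A quick check of the regex against two candidate URLs (the pattern is copied from the diff; CloudFormation applies AllowedPattern to the whole value, approximated here by anchoring with '$'):

    import re

    # AllowedPattern copied from the updated S3TemplateLocation parameter.
    allowed = r'^https?:\/\/.*\.amazonaws\.com\/.*\/'

    urls = [
        'https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/',  # new default
        'https://s3-us-west-2.amazonaws.com/example-bucket-name/cloudformation/path',          # no trailing slash
    ]
    for url in urls:
        print('{0} -> {1}'.format(url, bool(re.match(allowed + r'$', url))))
    # The first URL is accepted and the second rejected, because the pattern now ends in \/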

View file

@@ -263,6 +263,9 @@ def process_msg(mozmsg, msg):
     See also https://auth0.com/docs/api/management/v2#!/Logs/get_logs
     """
     details = DotDict({})
+    # defaults
+    details.username = "UNKNOWN"
+    details.userid = "UNKNNOWN"
 
     # key words used to set category and success/failure markers
     authentication_words = ['Login', 'Logout', 'Auth']
@@ -298,7 +301,7 @@ def process_msg(mozmsg, msg):
         pass
 
     try:
-        mozmsg.useragent = msg.user_agent
+        details['useragent'] = msg.user_agent
     except KeyError:
         pass

View file

@@ -120,7 +120,7 @@
         },
         "requestparameters" : {
           "properties" : {
-            "logStreamName": {
+            "logstreamname": {
               "properties": {
                 "raw_value": {
                   "type" : "keyword"

View file

@@ -39,15 +39,32 @@ def normalize(details):
             normalized["sourceipaddress"] = details[f]
             continue
         if f == "result":
-            if details[f] == "SUCCESS":
+            if details[f].lower() == "success":
                 normalized["success"] = True
             else:
                 normalized["success"] = False
         normalized[f] = details[f]
 
+    if 'user' in normalized and type(normalized['user']) is dict:
+        if 'name' in normalized['user']:
+            normalized['username'] = normalized['user']['name']
+        if 'key' in normalized['user']:
+            normalized['userkey'] = normalized['user']['key']
+        del(normalized['user'])
+
     return normalized
 
 
 def process_events(mozmsg, duo_events, etype, state):
+    """
+    Data format of duo_events in api_version == 2 (str):
+    duo_events.metadata = {u'total_objects': 49198, u'next_offset': [u'1547244648000', u'4da7180c-b1e5-47b4-9f4d-ee10dc3b5ac8']}
+    duo_events.authlogs = [{...}, {...}, ...]
+
+    authlogs entry = {u'access_device': {u'ip': u'a.b.c.d', u'location': {u'city': None, u'state': u'Anhui', u'country':
+    u'China'}}, u'event_type': u'authentication', u'timestamp': 1547244800, u'factor': u'not_available', u'reason':
+    u'deny_unenrolled_user', u'txid': u'68b33dd3-d341-46c6-a985-0640592fb7b0', u'application': {u'name': u'Integration
+    Name Here', u'key': u'SOME KEY HERE'}, u'host': u'api-blah.duosecurity.com', u'result': u'denied', u'eventtype': u'authentication', u'auth_device': {u'ip': None, u'location': {u'city': None, u'state': None, u'country': None}, u'name': None}, u'user': {u'name': u'root', u'key': None}}
+    """
     # There are some key fields that we use as MozDef fields, those are set to "noconsume"
     # After processing these fields, we just pour everything into the "details" fields of Mozdef, except for the
     # noconsume fields.
@@ -61,6 +78,13 @@ def process_events(mozmsg, duo_events, etype, state):
     else:
         return
 
+    # Care for API v2
+    if isinstance(duo_events, dict) and 'authlogs' in duo_events.keys():
+        duo_events = duo_events['authlogs']
+        api_version = 2
+    else:
+        api_version = 1
+
     for e in duo_events:
         details = {}
         # Timestamp format: http://mozdef.readthedocs.io/en/latest/usage.html#mandatory-fields
@@ -87,7 +111,10 @@ def process_events(mozmsg, duo_events, etype, state):
         elif etype == 'telephony':
             mozmsg.summary = e['context']
         elif etype == 'authentication':
-            mozmsg.summary = e['eventtype'] + ' ' + e['result'] + ' for ' + e['username']
+            if (api_version == 1):
+                mozmsg.summary = e['eventtype'] + ' ' + e['result'] + ' for ' + e['username']
+            else:
+                mozmsg.summary = e['eventtype'] + ' ' + e['result'] + ' for ' + e['user']['name']
 
         mozmsg.send()
@@ -105,7 +132,13 @@ def main():
         state = pickle.load(open(options.statepath, 'rb'))
     except IOError:
         # Oh, you're new.
-        state = {'administration': 0, 'authentication': 0, 'telephony': 0}
+        # Note API v2 expect full, correct and within range timestamps in millisec so we start recently
+        # API v1 uses normal timestamps in seconds instead
+        state = {'administration': 0, 'authentication': 1547000000000, 'telephony': 0}
+
+    # Convert v1 (sec) timestamp to v2 (ms)...
+    if state['authentication'] < 1547000000000:
+        state['authentication'] = int(str(state['authentication']) + '000')
 
     duo = duo_client.Admin(ikey=options.IKEY, skey=options.SKEY, host=options.URL)
     mozmsg = mozdef.MozDefEvent(options.MOZDEF_URL)
@@ -121,7 +154,8 @@ def main():
     # This will process events for all 3 log types and send them to MozDef. the state stores the last position in the
     # log when this script was last called.
     state = process_events(mozmsg, duo.get_administrator_log(mintime=state['administration'] + 1), 'administration', state)
-    state = process_events(mozmsg, duo.get_authentication_log(mintime=state['authentication'] + 1), 'authentication', state)
+    # TODO Should use `next_offset` instead of mintime in the future (for api v2) as its more efficient
+    state = process_events(mozmsg, duo.get_authentication_log(api_version=2, mintime=state['authentication'] + 1), 'authentication', state)
     state = process_events(mozmsg, duo.get_telephony_log(mintime=state['telephony'] + 1), 'telephony', state)
 
     pickle.dump(state, open(options.statepath, 'wb'))
@@ -132,7 +166,6 @@ def initConfig():
     options.SKEY = getConfig('SKEY', '', options.configfile)
     options.URL = getConfig('URL', '', options.configfile)
     options.MOZDEF_URL = getConfig('MOZDEF_URL', '', options.configfile)
-    options.MOZDEF_URL = getConfig('MOZDEF_URL', '', options.configfile)
     options.DEBUG = getConfig('DEBUG', True, options.configfile)
     options.statepath = getConfig('statepath', '', options.configfile)
    options.update_tags = getConfig('addtag', '', options.configfile)
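
Two details in the duo_logpull changes are worth calling out: the Duo Admin API v2 authentication log returns a dict carrying an 'authlogs' list (v1 returned a bare list), and its mintime is expressed in milliseconds rather than seconds, hence the state conversion. A standalone sketch of both adjustments, using fabricated response data shaped like the docstring above:

    # Fabricated v2-style response; see the docstring added in the diff for the real shape.
    duo_events = {
        'metadata': {'total_objects': 1, 'next_offset': ['1547244648000', 'some-txid']},
        'authlogs': [{'eventtype': 'authentication', 'result': 'denied',
                      'user': {'name': 'root', 'key': None}}],
    }

    # v2 responses are dicts with an 'authlogs' list; v1 responses are plain lists.
    if isinstance(duo_events, dict) and 'authlogs' in duo_events:
        duo_events = duo_events['authlogs']
        api_version = 2
    else:
        api_version = 1

    # v1 state timestamps are in seconds; v2 wants milliseconds, hence the string-append trick.
    state_authentication = 1513000000  # example v1-era value (seconds)
    if state_authentication < 1547000000000:
        state_authentication = int(str(state_authentication) + '000')

    print('api v{0}, {1} event(s), mintime={2}'.format(api_version, len(duo_events), state_authentication))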

View file

@@ -0,0 +1,24 @@
+---
+version: '3.7'
+services:
+  alerts:
+    image: mozdef/mozdef_alerts
+    build:
+      context: ../../
+      dockerfile: docker/compose/mozdef_alerts/Dockerfile
+      cache_from:
+        - mozdef/mozdef_alerts
+        - mozdef_alerts:latest
+    restart: always
+    command: bash -c 'python -i'
+    stdin_open: true
+    tty: true
+    depends_on:
+      - base
+      - elasticsearch
+      - rabbitmq
+      - bootstrap
+    networks:
+      - default
+    volumes:
+      - ../../alerts:/opt/mozdef/envs/mozdef/alerts

View file

@@ -19,4 +19,4 @@ services:
     networks:
       - default
     stdin_open: true
-    tty: true
+    tty: true

View file

@@ -0,0 +1,20 @@
+---
+version: '3.7'
+services:
+  meteor:
+    build:
+      context: ../../
+      dockerfile: docker/compose/mozdef_meteor/Dockerfile
+      args:
+        METEOR_BUILD: 'NO'
+    restart: always
+    command: bash -c 'node -i'
+    depends_on:
+      - mongodb
+      - rest
+    networks:
+      - default
+    volumes:
+      - ../../meteor:/opt/mozdef/envs/mozdef/meteor
+    stdin_open: true
+    tty: true

View file

@@ -6,7 +6,7 @@ services:
       context: ../../
       dockerfile: docker/compose/mozdef_rest/Dockerfile
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && python index.py -c index.conf'
+    command: bash -c 'python index.py -c index.conf'
     depends_on:
       - base
       - elasticsearch
@@ -17,4 +17,4 @@ services:
     volumes:
       - ../../rest:/opt/mozdef/envs/mozdef/rest
     stdin_open: true
-    tty: true
+    tty: true

View file

@@ -6,7 +6,7 @@ services:
       context: ../../
       dockerfile: docker/compose/mozdef_sampledata/Dockerfile
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && /opt/mozdef/envs/mozdef/examples/demo/sampleevents.sh'
+    command: bash -c '/opt/mozdef/envs/mozdef/examples/demo/sampleevents.sh'
     links:
       - elasticsearch
     depends_on:
@@ -16,3 +16,5 @@ services:
       - bootstrap
     networks:
       - default
+    volumes:
+      - ../../examples:/opt/mozdef/envs/mozdef/examples

View file

@@ -40,7 +40,7 @@ services:
     image: mozdef/mozdef_bootstrap
     env_file:
       - cloudy_mozdef.env
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && python initial_setup.py http://elasticsearch:9200 cron/defaultMappingTemplate.json cron/backup.conf'
+    command: bash -c 'python initial_setup.py http://elasticsearch:9200 cron/defaultMappingTemplate.json cron/backup.conf'
     depends_on:
       - base
     networks:
@@ -58,7 +58,7 @@ services:
     env_file:
       - cloudy_mozdef.env
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && python alert_worker.py -c alert_worker.conf'
+    command: bash -c 'python alert_worker.py -c alert_worker.conf'
     depends_on:
       - base
      - alerts
@@ -72,7 +72,7 @@ services:
     volumes:
       - /opt/mozdef/docker/compose/mozdef_alerts/files/config.py:/opt/mozdef/envs/mozdef/alerts/lib/config.py
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && celery -A celeryconfig worker --loglevel=info --beat'
+    command: bash -c 'celery -A celeryconfig worker --loglevel=info --beat'
     depends_on:
       - base
       - bootstrap
@@ -97,7 +97,7 @@ services:
     env_file:
       - cloudy_mozdef.env
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && python index.py -c index.conf'
+    command: bash -c 'python index.py -c index.conf'
     depends_on:
       - base
       - bootstrap
@@ -124,7 +124,7 @@ services:
     env_file:
       - cloudy_mozdef.env
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && python index.py -c index.conf'
+    command: bash -c 'python index.py -c index.conf'
     depends_on:
       - base
       - mongodb
@@ -160,7 +160,7 @@ services:
     env_file:
       - cloudy_mozdef.env
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && python esworker_eventtask.py -c esworker_eventtask.conf'
+    command: bash -c 'python esworker_eventtask.py -c esworker_eventtask.conf'
     scale: 1
     depends_on:
       - base
@@ -177,7 +177,7 @@ services:
       - cloudy_mozdef.env
       - cloudy_mozdef_mq_cloudtrail.env
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && python esworker_cloudtrail.py -c esworker_cloudtrail.conf'
+    command: bash -c 'python esworker_cloudtrail.py -c esworker_cloudtrail.conf'
     scale: 1
     depends_on:
       - base
@@ -194,7 +194,7 @@ services:
       - cloudy_mozdef.env
       - cloudy_mozdef_mq_sns_sqs.env
     restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && python esworker_sns_sqs.py -c esworker_sns_sqs.conf'
+    command: bash -c 'python esworker_sns_sqs.py -c esworker_sns_sqs.conf'
     scale: 1
     depends_on:
       - base

View file

@@ -103,7 +103,7 @@ services:
       cache_from:
         - mozdef/mozdef_bootstrap
         - mozdef_bootstrap:latest
-    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && source /opt/mozdef/envs/python/bin/activate && python initial_setup.py http://elasticsearch:9200 cron/defaultMappingTemplate.json cron/backup.conf'
+    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && python initial_setup.py http://elasticsearch:9200 cron/defaultMappingTemplate.json cron/backup.conf'
     depends_on:
       - base
       - elasticsearch
@@ -120,7 +120,7 @@ services:
         - mozdef/mozdef_alertplugins
         - mozdef_alertplugins:latest
     restart: always
-    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && source /opt/mozdef/envs/python/bin/activate && python alert_worker.py -c alert_worker.conf'
+    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && python alert_worker.py -c alert_worker.conf'
     depends_on:
       - base
       - elasticsearch
@@ -138,7 +138,7 @@ services:
         - mozdef/mozdef_alerts
         - mozdef_alerts:latest
     restart: always
-    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && source /opt/mozdef/envs/python/bin/activate && celery -A celeryconfig worker --loglevel=info --beat'
+    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && celery -A celeryconfig worker --loglevel=info --beat'
     depends_on:
       - base
       - elasticsearch
@@ -158,7 +158,7 @@ services:
 #      # This is either 'irc', or 'slack'
 #      BOT_TYPE: slack
 #    restart: always
-#    command: bash -c 'sleep 90 && source /opt/mozdef/envs/python/bin/activate && python mozdefbot.py -c mozdefbot.conf'
+#    command: bash -c 'sleep 90 && python mozdefbot.py -c mozdefbot.conf'
 #    depends_on:
 #      - base
 #      - rabbitmq
@@ -198,7 +198,7 @@ services:
         - mozdef/mozdef_loginput
         - mozdef_loginput:latest
     restart: always
-    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && source /opt/mozdef/envs/python/bin/activate && python index.py -c index.conf'
+    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && python index.py -c index.conf'
     depends_on:
       - base
       - elasticsearch
@@ -215,7 +215,7 @@ services:
         - mozdef/mozdef_mq_worker
         - mozdef_mq_worker:latest
     restart: always
-    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && source /opt/mozdef/envs/python/bin/activate && python esworker_eventtask.py -c esworker_eventtask.conf'
+    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && python esworker_eventtask.py -c esworker_eventtask.conf'
     depends_on:
       - base
       - rabbitmq
@@ -250,7 +250,7 @@ services:
         - mozdef/mozdef_rest
         - mozdef_rest:latest
     restart: always
-    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && source /opt/mozdef/envs/python/bin/activate && python index.py -c index.conf'
+    command: bash -c 'while ! timeout 1 bash -c "echo > /dev/tcp/elasticsearch/9200";do sleep 1;done && python index.py -c index.conf'
     depends_on:
       - base
       - elasticsearch

View file

@@ -2,6 +2,8 @@ FROM centos:7
 
 LABEL maintainer="mozdef@mozilla.com"
 
+# When changing kibana version remember to edit
+# docker/compose/mozdef_bootstrap/files/initial_setup.py accordingly
 ENV KIBANA_VERSION 5.6.7
 
 RUN \

View file

@@ -1,35 +0,0 @@
----
-version: '3.7'
-services:
-  meteor:
-    build:
-      context: ../../
-      dockerfile: docker/compose/mozdef_meteor/Dockerfile
-      args:
-        METEOR_BUILD: 'NO'
-    restart: always
-    command: bash -c 'node -i'
-    depends_on:
-      - mongodb
-      - rest
-    networks:
-      - default
-    volumes:
-      - ../../meteor:/opt/mozdef/envs/mozdef/meteor
-    stdin_open: true
-    tty: true
-  sampledata:
-    build:
-      context: ../../
-      dockerfile: docker/compose/mozdef_sampledata/Dockerfile
-    restart: always
-    command: bash -c 'source /opt/mozdef/envs/python/bin/activate && /opt/mozdef/envs/mozdef/examples/demo/sampleevents.sh'
-    links:
-      - elasticsearch
-    depends_on:
-      - loginput
-      - elasticsearch
-      - mongodb
-      - bootstrap
-    networks:
-      - default

View file

@@ -6,6 +6,11 @@ ENV MONGO_VERSION 3.4
 
 RUN \
   echo -e "[mongodb-org-$MONGO_VERSION]\nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/redhat/\$releasever/mongodb-org/$MONGO_VERSION/x86_64/\ngpgcheck=1\nenabled=1\ngpgkey=https://www.mongodb.org/static/pgp/server-$MONGO_VERSION.asc" > /etc/yum.repos.d/mongodb.repo && \
+  gpg="gpg --no-default-keyring --secret-keyring /dev/null --keyring /dev/null --no-option --keyid-format 0xlong" && \
+  rpmkeys --import /etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 && \
+  rpm -qi gpg-pubkey-f4a80eb5 | $gpg | grep 0x24C6A8A7F4A80EB5 && \
+  rpmkeys --import https://www.mongodb.org/static/pgp/server-3.4.asc && \
+  rpm -qi gpg-pubkey-a15703c6 | $gpg | grep 0xBC711F9BA15703C6 && \
  yum install -y mongodb-org && \
  yum clean all

View file

@@ -2,7 +2,6 @@ FROM centos:7
 
 LABEL maintainer="mozdef@mozilla.com"
 
-ENV PYTHON_VERSION 2.7.11
 ENV TZ UTC
 
 RUN \
@@ -34,23 +33,17 @@ COPY cron/update_geolite_db.py /opt/mozdef/envs/mozdef/cron/update_geolite_db.py
 COPY cron/update_geolite_db.conf /opt/mozdef/envs/mozdef/cron/update_geolite_db.conf
 COPY cron/update_geolite_db.sh /opt/mozdef/envs/mozdef/cron/update_geolite_db.sh
 
+COPY mozdef_util /opt/mozdef/envs/mozdef/mozdef_util
+
 RUN chown -R mozdef:mozdef /opt/mozdef/
 
 USER mozdef
 
 RUN \
     virtualenv /opt/mozdef/envs/python && \
     source /opt/mozdef/envs/python/bin/activate && \
-    pip install -r /opt/mozdef/envs/mozdef/requirements.txt
-
-# Local copy for dev
-COPY mozdef_util /opt/mozdef/envs/mozdef/mozdef_util
-
-USER root
-RUN \
-    source /opt/mozdef/envs/python/bin/activate && \
+    pip install -r /opt/mozdef/envs/mozdef/requirements.txt && \
     cd /opt/mozdef/envs/mozdef/mozdef_util && \
     pip install -e .
 
-RUN chown -R mozdef:mozdef /opt/mozdef/
-
-USER mozdef
-
 RUN mkdir /opt/mozdef/envs/mozdef/data
@@ -58,4 +51,7 @@ WORKDIR /opt/mozdef/envs/mozdef
 
 VOLUME /opt/mozdef/envs/mozdef/data
 
+# Automatically source into python virtual environment
+ENV PATH=/opt/mozdef/envs/python/bin:$PATH
+
 USER root

View file

@@ -7,6 +7,8 @@ RUN mkdir -p /opt/mozdef/envs/mozdef/docker/conf
 COPY cron/defaultMappingTemplate.json /opt/mozdef/envs/mozdef/cron/defaultMappingTemplate.json
 COPY docker/compose/mozdef_cron/files/backup.conf /opt/mozdef/envs/mozdef/cron/backup.conf
 COPY docker/compose/mozdef_bootstrap/files/initial_setup.py /opt/mozdef/envs/mozdef/initial_setup.py
+COPY docker/compose/mozdef_bootstrap/files/index_mappings /opt/mozdef/envs/mozdef/index_mappings
+COPY docker/compose/mozdef_bootstrap/files/dashboards /opt/mozdef/envs/mozdef/dashboards
 
 RUN chown -R mozdef:mozdef /opt/mozdef/envs/mozdef/

View file

@@ -0,0 +1,17 @@
{
"_index": ".kibana",
"_type": "visualization",
"_id": "AWg-MFyzlDhKjHVbouwj",
"_version": 1,
"_score": 1,
"_source": {
"title": "All Events Area",
"visState": "{\"title\":\"All Events Area\",\"type\":\"area\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"utctimestamp per second\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"utctimestamp\",\"interval\":\"s\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}],\"listeners\":{}}",
"uiStateJSON": "{}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}"
}
}
}

View file

@@ -0,0 +1,17 @@
{
"_index": ".kibana",
"_type": "visualization",
"_id": "AWg6F1PFk1EXv5E12DRN",
"_version": 1,
"_score": 1,
"_source": {
"title": "Category Pie Graph",
"visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{},\"schema\":\"metric\",\"type\":\"count\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"field\":\"category\",\"order\":\"desc\",\"orderBy\":\"1\",\"size\":5},\"schema\":\"segment\",\"type\":\"terms\"}],\"listeners\":{},\"params\":{\"addLegend\":true,\"addTooltip\":true,\"isDonut\":false,\"legendPosition\":\"right\",\"type\":\"pie\"},\"title\":\"Category Pie Graph\",\"type\":\"pie\"}",
"uiStateJSON": "{}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}"
}
}
}

View file

@@ -0,0 +1,17 @@
{
"_index": ".kibana",
"_type": "visualization",
"_id": "AWg-LfEalDhKjHVbouwc",
"_version": 1,
"_score": 1,
"_source": {
"title": "DestinationIP Bar Graph",
"visState": "{\"title\":\"DestinationIP Bar Graph\",\"type\":\"histogram\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"details.destinationipaddress: Descending\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.destinationipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"uiStateJSON": "{}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}"
}
}
}

View file

@@ -0,0 +1,20 @@
{
"_index": ".kibana",
"_type": "dashboard",
"_id": "AWg6GHH5k1EXv5E12DRQ",
"_version": 5,
"_score": 1,
"_source": {
"title": "Sample Dashboard",
"hits": 0,
"description": "A sample dashboard for playing around.",
"panelsJSON": "[{\"col\":7,\"id\":\"AWg6F1PFk1EXv5E12DRN\",\"panelIndex\":1,\"row\":1,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"AWg-LWqElDhKjHVbouwZ\",\"panelIndex\":2,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"AWg-LfEalDhKjHVbouwc\",\"panelIndex\":3,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"size_x\":6,\"size_y\":3,\"panelIndex\":4,\"type\":\"visualization\",\"id\":\"AWg-MFyzlDhKjHVbouwj\",\"col\":1,\"row\":1}]",
"optionsJSON": "{\"darkTheme\":false}",
"uiStateJSON": "{}",
"version": 1,
"timeRestore": false,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[{\"query\":{\"match_all\":{}}}],\"highlightAll\":true,\"version\":true}"
}
}
}

View file

@@ -0,0 +1,17 @@
{
"_index": ".kibana",
"_type": "visualization",
"_id": "AWg-LWqElDhKjHVbouwZ",
"_version": 1,
"_score": 1,
"_source": {
"title": "SourceIP Bar Graph",
"visState": "{\"title\":\"SourceIP Bar Graph\",\"type\":\"histogram\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"details.sourceipaddress: Descending\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.sourceipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"uiStateJSON": "{}",
"description": "",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}"
}
}
}

View file

@@ -0,0 +1,6 @@
{
"title": "alerts",
"timeFieldName": "utctimestamp",
"notExpandable": true,
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"notify_mozdefbot\",\"type\":\"boolean\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"tags\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}

View file

@@ -0,0 +1,6 @@
{
"title": "events-weekly",
"timeFieldName": "utctimestamp",
"notExpandable": true,
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.apiversion.raw_value\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.destinationipaddress\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.destinationport\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.requestparameters.logstreamname.raw_value\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceipaddress\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceipv4address\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceport\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.srcip\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.success\",\"type\":\"boolean\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"mozdefhostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processid\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"receivedtimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"source\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":fals
e,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"version\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}

View file

@@ -0,0 +1,6 @@
{
"title": "events",
"timeFieldName": "utctimestamp",
"notExpandable": true,
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.apiversion.raw_value\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.destinationipaddress\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.destinationport\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.requestparameters.logstreamname.raw_value\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceipaddress\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceipv4address\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceport\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.srcip\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.success\",\"type\":\"boolean\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"mozdefhostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processid\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"receivedtimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"source\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":fals
e,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"version\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}

View file

@@ -11,11 +11,14 @@ from datetime import datetime, timedelta
 from time import sleep
 from configlib import getConfig
 import json
+import time
+import os
+import sys
 
 from elasticsearch.exceptions import ConnectionError
-import os
 
 from mozdef_util.elasticsearch_client import ElasticsearchClient
+from mozdef_util.query_models import SearchQuery, TermMatch
 
 parser = argparse.ArgumentParser(description='Create the correct indexes and aliases in elasticsearch')
@@ -38,6 +41,8 @@ event_index_name = current_date.strftime("events-%Y%m%d")
 previous_event_index_name = (current_date - timedelta(days=1)).strftime("events-%Y%m%d")
 weekly_index_alias = 'events-weekly'
 alert_index_name = current_date.strftime("alerts-%Y%m")
+kibana_index_name = '.kibana'
+kibana_version = '5.6.7'
 
 index_settings_str = ''
 
 with open(args.default_mapping_file) as data_file:
@@ -77,6 +82,7 @@ index_settings['settings'] = {
     }
 }
 
+# Create initial indices
 if event_index_name not in all_indices:
     print "Creating " + event_index_name
     client.create_index(event_index_name, index_config=index_settings)
@@ -95,3 +101,59 @@ client.create_alias('alerts', alert_index_name)
 if weekly_index_alias not in all_indices:
     print "Creating " + weekly_index_alias
     client.create_alias_multiple_indices(weekly_index_alias, [event_index_name, previous_event_index_name])
+
+if kibana_index_name not in all_indices:
+    print "Creating " + kibana_index_name
+    client.create_index(kibana_index_name)
+
+# Wait for .kibana index to be ready
+num_times = 0
+while not client.index_exists('.kibana'):
+    if num_times < 3:
+        print("Waiting for .kibana index to be ready")
+        time.sleep(1)
+        num_times += 1
+    else:
+        print(".kibana index not created...exiting")
+        sys.exit(1)
+
+# Check to see if index patterns exist in .kibana
+query = SearchQuery()
+query.add_must(TermMatch('_type', 'index-pattern'))
+results = query.execute(client, indices=['.kibana'])
+if len(results['hits']) == 0:
+    # Create index patterns and assign default index mapping
+    index_mappings_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'index_mappings')
+    listing = os.listdir(index_mappings_path)
+    for infile in listing:
+        json_file_path = os.path.join(index_mappings_path, infile)
+        with open(json_file_path) as json_data:
+            mapping_data = json.load(json_data)
+            print "Creating {0} index mapping".format(mapping_data['title'])
+            client.save_object(body=mapping_data, index='.kibana', doc_type='index-pattern', doc_id=mapping_data['title'])
+
+    # Assign default index to 'events'
+    client.flush('.kibana')
+    default_mapping_data = {
+        "defaultIndex": 'events'
+    }
+    print "Assigning events as default index mapping"
+    client.save_object(default_mapping_data, '.kibana', 'config', kibana_version)
+
+# Check to see if dashboards already exist in .kibana
+query = SearchQuery()
+query.add_must(TermMatch('_type', 'dashboard'))
+results = query.execute(client, indices=['.kibana'])
+if len(results['hits']) == 0:
+    dashboards_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'dashboards')
+    listing = os.listdir(dashboards_path)
+    for infile in listing:
+        json_file_path = os.path.join(dashboards_path, infile)
+        with open(json_file_path) as json_data:
+            mapping_data = json.load(json_data)
+            print("Creating {0} {1}".format(
+                mapping_data['_source']['title'],
+                mapping_data['_type']
+            ))
+            client.save_object(body=mapping_data['_source'], index='.kibana', doc_type=mapping_data['_type'], doc_id=mapping_data['_id'])
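
Note how the bootstrap keys its Kibana bookkeeping: index patterns are saved with their title as the document id, and the defaultIndex choice is stored in a 'config' document whose id is the Kibana version string, which is why the Kibana Dockerfile comment above asks for initial_setup.py to be updated on upgrades. A rough sketch of the saved-object layout being written (names follow the diff; the client call itself is only illustrated):

    kibana_version = '5.6.7'  # must stay in sync with ENV KIBANA_VERSION in the Kibana Dockerfile

    # Saved objects written by the bootstrap, as (index, doc_type, doc_id, body) tuples.
    saved_objects = [
        ('.kibana', 'index-pattern', 'events', {'title': 'events', 'timeFieldName': 'utctimestamp'}),
        ('.kibana', 'index-pattern', 'events-weekly', {'title': 'events-weekly', 'timeFieldName': 'utctimestamp'}),
        ('.kibana', 'index-pattern', 'alerts', {'title': 'alerts', 'timeFieldName': 'utctimestamp'}),
        # The default-index choice lives in a 'config' doc keyed by the Kibana version.
        ('.kibana', 'config', kibana_version, {'defaultIndex': 'events'}),
    ]

    for index, doc_type, doc_id, body in saved_objects:
        # initial_setup.py does client.save_object(body=..., index=..., doc_type=..., doc_id=...)
        print('{0}/{1}/{2} -> {3}'.format(index, doc_type, doc_id, body))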

View file

@@ -5,6 +5,13 @@ LABEL maintainer="mozdef@mozilla.com"
 COPY docker/compose/mozdef_syslog/files/syslog-ng.repo /etc/yum.repos.d/syslog-ng.repo
 
 RUN \
+  gpg="gpg --no-default-keyring --secret-keyring /dev/null --keyring /dev/null --no-option --keyid-format 0xlong" && \
+  rpmkeys --import /etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 && \
+  rpm -qi gpg-pubkey-f4a80eb5 | $gpg | grep 0x24C6A8A7F4A80EB5 && \
+  rpmkeys --import https://copr-be.cloud.fedoraproject.org/results/czanik/syslog-ng312/pubkey.gpg && \
+  rpm -qi gpg-pubkey-2b04b9af | $gpg | grep 0x1AACFE032B04B9AF && \
+  rpmkeys --import https://dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7 && \
+  rpm -qi gpg-pubkey-352c64e5 | $gpg | grep 0x6A2FAEA2352C64E5 && \
  yum install -y epel-release && \
  yum install -y syslog-ng.x86_64 syslog-ng-json && \
  yum clean all

Просмотреть файл

@ -3,8 +3,13 @@ FROM centos:7
LABEL maintainer="mozdef@mozilla.com" LABEL maintainer="mozdef@mozilla.com"
RUN \ RUN \
gpg="gpg --no-default-keyring --secret-keyring /dev/null --keyring /dev/null --no-option --keyid-format 0xlong" && \
rpmkeys --import /etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 && \
rpm -qi gpg-pubkey-f4a80eb5 | $gpg | grep 0x24C6A8A7F4A80EB5 && \
mkdir /var/log/mozdef && \ mkdir /var/log/mozdef && \
yum makecache fast && \ yum makecache fast && \
rpmkeys --import https://dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7 && \
rpm -qi gpg-pubkey-352c64e5 | $gpg | grep 0x6A2FAEA2352C64E5 && \
yum install -y epel-release && \ yum install -y epel-release && \
yum install -y nginx && \ yum install -y nginx && \
yum clean all yum clean all

Просмотреть файл

@ -15,8 +15,4 @@ COPY docker/compose/tester/files/tests_config.conf /opt/mozdef/envs/mozdef/tests
COPY docker/compose/tester/files/loginput_index.conf /opt/mozdef/envs/mozdef/tests/loginput/index.conf COPY docker/compose/tester/files/loginput_index.conf /opt/mozdef/envs/mozdef/tests/loginput/index.conf
COPY docker/compose/tester/files/rest_index.conf /opt/mozdef/envs/mozdef/tests/rest/index.conf COPY docker/compose/tester/files/rest_index.conf /opt/mozdef/envs/mozdef/tests/rest/index.conf
USER root RUN pip install -r /opt/mozdef/envs/mozdef/tests/requirements_tests.txt
RUN \
source /opt/mozdef/envs/python/bin/activate && \
pip install -r /opt/mozdef/envs/mozdef/tests/requirements_tests.txt

Просмотреть файл

@ -8,15 +8,15 @@ This guide is for someone seeking to write a MozDef alert.
Starting a feature branch Starting a feature branch
------------------------- -------------------------
Before you do anything else, start with checking out the MozDef repo and starting a feature branch:: Before you do anything else, start by checking out the MozDef repo and starting a feature branch::
git clone git@github.com:mozilla/MozDef.git git clone git@github.com:mozilla/MozDef.git
cd MozDef cd MozDef
git checkout -b name_of_alert_you_want_to_create git checkout -b name_of_alert_you_want_to_create
How to start your alert How to start developing your new alert
----------------------- --------------------------------------
Run:: Run::
@ -24,8 +24,8 @@ Run::
This will prompt for information and create two things: This will prompt for information and create two things:
- The new alert file - <The new alert file>
- The new alert test file - <The new alert test file>
You can now edit these files in place, but it is recommended that you run unit-tests on the new alert to make sure it passes before editing (instructions below). You can now edit these files in place, but it is recommended that you run unit-tests on the new alert to make sure it passes before editing (instructions below).
@ -35,7 +35,7 @@ How to run tests on your alert
Requirements: Requirements:
- Make sure you have the latest version of docker installed. - Make sure you have the latest version of docker installed.
- Known Issue: docker's overlayfs has a known issue, so you will need to go to Docker => Preferences => Daemon => Advanced and add the following key pair ("storage-driver" : "aufs") - Known Issues: docker's overlayfs has a known issue with tar files, so you will need to go to Docker => Preferences => Daemon => Advanced and add the following key pair ("storage-driver" : "aufs"). You may also need to allow more than 2GB for docker depending on which containers you run.
:: ::
@ -53,24 +53,18 @@ At this point, begin development and periodically run your unit-tests locally wi
Background on concepts Background on concepts
---------------------- ----------------------
- Logs - These are individual log line that are emitted from systems, like an Apache log - Logs - These are individual log entries that are typically emitted from systems, like an Apache log
- Events - These logs parsed into a JSON format, which exist in MozDef and used with the ELK stack - Events - The entry point into MozDef, a log parsed into JSON by some log shipper (syslog-ng, nxlog) or a native JSON data source like GuardDuty, CloudTrail, most SaaS systems, etc.
- Alerts - These are effectively either a 1:1 events to alerts (this thing happens and alert) or a M:1 events to alerts (N of these things happen and alert). - Alerts - These are either 1:1 (one event of this kind happens, so alert) or M:1 (N events of this kind happen, so alert).
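As a concrete illustration, a minimal event in MozDef might look like the following (the field names and values here are illustrative, not a fixed schema):

::

    {
        "utctimestamp": "2019-01-16T21:00:00+00:00",
        "hostname": "host1.example.com",
        "category": "helloworld",
        "summary": "hello world test event",
        "tags": ["test"],
        "details": {
            "sourceipaddress": "1.2.3.4"
        }
    }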
When writing alerts, it's important to keep the above concepts in mind. Alerts in MozDef are mini Python programs. Most of the work is done by the alert library, so the portions you will need to code fall into two functions:
Each alert tends to have two different blocks of code: - main - This is where the alert defines the criteria for the types of events that will trigger the alert.
- main - This is where the alert defines the criteria for the types of events it wants to look at
- onAggregation/onEvent - This is where the alert defines what happens when it sees those events, such as post processing of events and making them into a useful summary to emit as an alert. - onAggregation/onEvent - This is where the alert defines what happens when it sees those events, such as post processing of events and making them into a useful summary to emit as an alert.
In both cases, because the alert is simple Python, you will find that getting started writing alerts is pretty easy. It's important to note that when you iterate on the alert to regularly test to ensure that the alert is still firing. Should you run into a space where it's not firing, the best way to approach this is to backout the recent change and review the alert and tests to ensure that the expectations are still in sync. In both cases the alert is simple python, and you have all the facility of python at your disposal including any of the python libraries you might want to add to the project.
It's important, as you iterate on the alert, to test regularly and ensure that the alert is still firing. Should you run into a situation where it's not firing, the best way to approach this is to back out the most recent change and review the alert and tests to ensure that the expectations are still in sync.
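To make the shape of an alert concrete before diving into the generated code, here is a minimal sketch of an alert that looks for 'helloworld' events carrying a source IP address. This is a sketch only: the class name is illustrative, and the import paths and helper methods (filtersManual, searchEventsAggregated, walkAggregations, createAlertDict) are assumed to match the auto-generated template, so adjust them to whatever your checkout's generator produced.

::

    from lib.alerttask import AlertTask
    from query_models import SearchQuery, TermMatch, ExistsMatch


    class AlertHelloWorld(AlertTask):
        def main(self):
            # main(): define the criteria -- 'helloworld' events from the
            # last 20 minutes that carry a source IP address
            search_query = SearchQuery(minutes=20)
            search_query.add_must([
                TermMatch('category', 'helloworld'),
                ExistsMatch('details.sourceipaddress'),
            ])
            self.filtersManual(search_query)
            # aggregate the matching events by source IP and fire once an
            # aggregation holds at least one event
            self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
            self.walkAggregations(threshold=1)

        def onAggregation(self, aggreg):
            # onAggregation(): turn the collected events into an alert
            category = 'helloworld'
            tags = ['helloworld']
            severity = 'NOTICE'
            summary = 'Hello world events from {0}'.format(aggreg['value'])
            return self.createAlertDict(summary, category, tags, aggreg['events'], severity)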
How to get the alert in MozDef?
-------------------------------
The best way to get your alert into MozDef (once it's completed) is to propose a pull request and ask for a review from a MozDef developer. They will be able to help you get the most out of the alert and help point out pitfalls. Once the alert is accepted into MozDef master, there is a process by which MozDef installations can make use or 'enable' that alert. It's best to work with that MozDef instance's maintainer to enable any new alerts.
Example first alert Example first alert
@ -108,17 +102,21 @@ Here's the head of the auto generated class.
]) ])
... ...
In essence this code will tell MozDef to query the collection of logs for messages timestamped within 20 minutes (from time of query execution) and to look for messages which are of category "helloworld" which also have a source IP address. This code tells MozDef to query the collection of events for messages timestamped within 20 minutes from time of query execution which are of category "helloworld" and also have a source IP address.
If you're pumping logs into MozDef odds are you don't have any which will be tagged as "helloworld". You can of course create those logs, but lets assume that you have logs tagged as "syslog" for the moment. If you're pumping events into MozDef, odds are you don't have any which will be tagged as "helloworld". You can of course create those events, but let's assume that you have events tagged as "syslog" for the moment.
Change the TermMatch line to Change the TermMatch line to
:: ::
TermMatch('category', 'syslog'), TermMatch('category', 'syslog'),
and you will get alerts for syslog labeled messages. and you will create alerts for events marked with the category of 'syslog'.
Ideally you should edit your test to match, but it's not strictly necessary. Ideally you should edit your test to match, but it's not strictly necessary.
Next we will need to enable the log and to schedule it. At time of writing this is a bit annoying. Scheduling your alert
---------------------
Next we will need to enable the alert. Alerts in MozDef are scheduled via the celery task scheduler. The schedule
passed to celery is in the config.py file:
Open the file Open the file
:: ::
@ -139,15 +137,99 @@ and add your new foo alert to the others with a crontab style schedule
'unauth_ssh.AlertUnauthSSH': {'schedule': crontab(minute='*/1')}, 'unauth_ssh.AlertUnauthSSH': {'schedule': crontab(minute='*/1')},
} }
Restart your MozDef instance and you should begin seeing alerts on the alerts page. The format is 'pythonfilename.classname': {'schedule': crontab(timeunit='*/x')} and you can use any celery time unit (minute, hour) along with any schedule that makes sense for your environment.
Alerts don't take many resources to execute, typically finishing in sub-second times, so it's easiest to start by running them every minute.
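For example, assuming your alert lives in foo_alert.py with a class named AlertFoo, the schedule entry would look roughly like this (the surrounding dictionary name can differ between MozDef versions, so match whatever is already in the file):

::

    from celery.schedules import crontab

    ALERTS = {
        'unauth_ssh.AlertUnauthSSH': {'schedule': crontab(minute='*/1')},
        'foo_alert.AlertFoo': {'schedule': crontab(minute='*/1')},
    }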
How to run the alert in the docker containers
----------------------------------------------
Once you've got your alert passing tests, you'd probably like to send in events in a docker environment to further refine, test, etc.
There are two ways to go about integration testing this with docker:
1) Use 'make run' to rebuild the containers each time you iterate on an alert
2) Use docker-compose with overlays to instantiate a docker environment with a live container you can use to iterate your alert
In general, the 'make run' approach is simpler, but can take 5-10 minutes per iteration to rebuild the containers (even if cached).
To use the 'make run' approach, you edit your alert, add it to the docker/compose/mozdef_alerts/files/config.py file for scheduling as discussed above, and simply run:
::
make run
This will rebuild any container that needs it, use the cache for any that haven't changed, and restart MozDef with your alert.
To use a live, iterative environment via docker-compose:
::
docker-compose -f docker/compose/docker-compose.yml -f docker/compose/dev-alerts.yml -p mozdef up
This will start up all the containers for a MozDef environment and, in addition, will give you an ad hoc alerts container to work in that mounts the /alerts directory as a volume in the container.
To run the alert you are developing, you will need to edit the alerts/lib/config.py file as detailed above to schedule your alert. You will also need to edit it to reference the container environment as follows
::
RABBITMQ = {
'mqserver': 'rabbitmq',
...
ES = {
'servers': ['http://elasticsearch:9200']
}
Once you've referenced the containers, you can shell into the alerts container:
::
docker exec -it mozdef_alerts_1 bash
Next, start celery
::
celery -A celeryconfig worker --loglevel=info --beat
If you need to send in ad hoc events, you can usually do it via curl as follows:
::
curl -v --header "Content-Type: application/json" --request POST --data '{"tags": ["test"],"category": "helloworld","details":{"sourceipaddress":"1.2.3.4"}}' http://loginput:8080/events
How to get the alert in a release of MozDef?
--------------------------------------------
If you'd like your alert included in the release version of MozDef, the best way is to propose a pull request and ask for a review from a MozDef developer. They will be able to help you get the most out of the alert and help point out pitfalls. Once the alert is accepted into MozDef master, there is a process by which MozDef installations can make use of, or 'enable', that alert. It's best to work with that MozDef instance's maintainer to enable any new alerts.
Customizing the alert summary
-----------------------------
On the alerts page of the MozDef web UI, each alert is given a quick summary, and for many alerts it is useful to have contextual information displayed there. Looking at the example foo alert we see
::
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'My first alert!'
tags = ['Foo']
severity = 'NOTICE'
summary = "Foo alert"
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
This is where the alert object gets created and returned. In the above code the summary will simply be "Foo alert", but suppose we want to know how many log entries were collected into the alert. The aggreg object is here to help.
::
summary = "Foo alert " + aggreg['count']
Gives us an alert with a count. Similarly
::
summary = "Foo alert " + aggreg['value']
Will append the aggregation field to the summary text. The final list aggreg['events'] contains the full log entries of all logs collected and is in general the most useful. Suppose we want one string if the tag 'foo' exists on these logs and another otherwise
::
if 'foo' in aggreg['events'][0]['_source']['tags']:
summary = "Foo alert"
else:
summary = "Bar alert"
All source log data is held within ['_source'], and [0] represents the first log found. Beware that no specific ordering of the logs is guaranteed, so [0] may be chronologically first, last, or anywhere in between.
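Putting those pieces together, a sketch of a more informative onAggregation for the foo alert might read as follows (reusing only the fields shown above; the exact wording of the summary is of course up to you):

::

    def onAggregation(self, aggreg):
        category = 'My first alert!'
        tags = ['Foo']
        severity = 'NOTICE'
        # pick a label based on the tags of the first collected event
        if 'foo' in aggreg['events'][0]['_source']['tags']:
            label = 'Foo'
        else:
            label = 'Bar'
        # include the event count and the aggregated value in the summary
        summary = '{0} alert: {1} events for {2}'.format(label, aggreg['count'], aggreg['value'])
        # Create the alert object based on these properties
        return self.createAlertDict(summary, category, tags, aggreg['events'], severity)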
Questions? Questions?
---------- ----------
This guide is not intended to teach you how to develop in Python, there are good resources below to help you get more experience with Python. However, should you have questions or run into problems trying to write an alert, we would like to hear from you (in IRC/Slack) so we can: Feel free to file a GitHub issue in this repository if you find yourself with a question not answered here. Likely the answer will help someone else and will help us improve the docs.
- help you get what you want to get done
- make it easier for anyone to contribue alerts
Resources Resources

Просмотреть файл

@ -7,7 +7,7 @@ Cloud based MozDef is an opinionated deployment of the MozDef services created i
ingest cloudtrail, guardduty, and provide security services. ingest cloudtrail, guardduty, and provide security services.
.. image:: images/cloudformation-launch-stack.png .. image:: images/cloudformation-launch-stack.png
:target: https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/mozdef.infosec.allizom.org/cf/mozdef-parent.yml :target: https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=mozdef-for-aws&templateURL=https://s3-us-west-2.amazonaws.com/public.us-west-2.infosec.mozilla.org/mozdef/cf/mozdef-parent.yml
Feedback Feedback
@ -80,3 +80,9 @@ Using MozDef
Refer back to our other docs on how to use MozDef for general guidance. Cloud specific instructions will evolve here. Refer back to our other docs on how to use MozDef for general guidance. Cloud specific instructions will evolve here.
If you saw something about MozDef for AWS at re:Invent 2018 and you want to contribute, we'd love your PRs. If you saw something about MozDef for AWS at re:Invent 2018 and you want to contribute, we'd love your PRs.
AWS re:invent 2018 SEC403 Presentation
---------------------------------------
* `Watch our presentation on MozDef in AWS <https://www.youtube.com/watch?v=M5yQpegaYF8&feature=youtu.be&t=2471>`_ at AWS re:Invent 2018
* `Read the slides <https://www.slideshare.net/AmazonWebServices/five-new-security-automations-using-aws-security-services-open-source-sec403-aws-reinvent-2018/65>`_

Просмотреть файл

@ -44,6 +44,16 @@ if (Meteor.isClient) {
Session.set('ipintelipaddress',($(e.target).attr('data-ipaddress'))); Session.set('ipintelipaddress',($(e.target).attr('data-ipaddress')));
$('#modalintelwindow').modal() $('#modalintelwindow').modal()
}, },
"click .ipmenu-search": function(e){
Session.set('ipsearchipaddress',($(e.target).attr('data-ipaddress')));
var ipText=$(e.target).attr('data-ipaddress');
//console.log("IP: " + ipText)
var searchDomain=getSetting('kibanaURL');
var searchPath="#/discover?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-1h,mode:quick,to:now))&_a=(columns:!(_source),index:events-weekly,interval:auto,query:(query_string:(analyze_wildcard:!t,query:'details.sourceipaddress:"+ipText+"')),sort:!(utctimestamp,desc))";
var url=searchDomain+searchPath;
console.log("Opening url: " + url);
window.open(url, '_blank');
},
"click .dropdown": function(e,t){ "click .dropdown": function(e,t){
$(e.target).addClass("hover"); $(e.target).addClass("hover");
$('ul:first',$(e.target)).css('visibility', 'visible'); $('ul:first',$(e.target)).css('visibility', 'visible');

Просмотреть файл

@ -94,6 +94,17 @@ if (Meteor.isClient) {
return false; return false;
}; };
isHostname=function(entry) {
var blocks = entry.split(".");
if(blocks.length >= 3 && blocks.length <= 6) {
return blocks.every(function(block){
return /^(\w+.)+$/.test(block) && !/^(\d+.)+$/.test(block);
});
}else{
return false;
}
};
isURL=function(astring){ isURL=function(astring){
return validator.isURL(astring); return validator.isURL(astring);
}; };
@ -287,7 +298,7 @@ if (Meteor.isClient) {
var words=anelement.text().split(' '); var words=anelement.text().split(' ');
words.forEach(function(w){ words.forEach(function(w){
//clean up potential interference chars //clean up potential interference chars
w=w.replace(/,|:|;/g,'') w=w.replace(/,|:|;|\[|\]/g,'')
if ( isIPv4(w) ){ if ( isIPv4(w) ){
//console.log(w); //console.log(w);
anelement. anelement.
@ -295,6 +306,13 @@ if (Meteor.isClient) {
{wordsOnly:false, {wordsOnly:false,
element: "em", element: "em",
className:"ipaddress"}); className:"ipaddress"});
} else if ( isHostname(w) ){
//console.log(w);
anelement.
highlight( w,
{wordsOnly:false,
element: "em",
className:"hostname"});
} }
}); });
//add a drop down menu to any .ipaddress //add a drop down menu to any .ipaddress
@ -312,16 +330,26 @@ if (Meteor.isClient) {
whoisitem=$("<li><a class='ipmenu-whois' data-ipaddress='" + iptext + "'href='#'>whois</a></li>"); whoisitem=$("<li><a class='ipmenu-whois' data-ipaddress='" + iptext + "'href='#'>whois</a></li>");
dshielditem=$("<li><a class='ipmenu-dshield' data-ipaddress='" + iptext + "'href='#'>dshield</a></li>"); dshielditem=$("<li><a class='ipmenu-dshield' data-ipaddress='" + iptext + "'href='#'>dshield</a></li>");
intelitem=$("<li><a class='ipmenu-intel' data-ipaddress='" + iptext + "'href='#'>ip intel</a></li>"); intelitem=$("<li><a class='ipmenu-intel' data-ipaddress='" + iptext + "'href='#'>ip intel</a></li>");
searchitem=$("<li><a class='ipmenu-search' data-ipaddress='" + iptext + "'href='#'>search kibana</a></li>");
if ( isFeature('blockip') ){ if ( isFeature('blockip') ){
blockIPitem=$("<li><a class='ipmenu-blockip' data-ipaddress='" + iptext + "'href='#'>block</a></li>"); blockIPitem=$("<li><a class='ipmenu-blockip' data-ipaddress='" + iptext + "'href='#'>block</a></li>");
}else{ }else{
blockIPitem=$(); blockIPitem=$();
} }
ipmenu.append(copyitem,whoisitem,dshielditem,intelitem,blockIPitem); ipmenu.append(copyitem,whoisitem,dshielditem,intelitem,searchitem,blockIPitem);
$(this).parent().parent().append(ipmenu); $(this).parent().parent().append(ipmenu);
}); });
anelement.children( '.hostname').each(function( index ){
hosttext=$(this).text();
$(this).append('<b></b>');
var searchDomain=getSetting('kibanaURL');
searchPath="#/discover?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-1h,mode:quick,to:now))&_a=(columns:!(_source),index:events-weekly,interval:auto,query:(query_string:(analyze_wildcard:!t,query:'hostname:"+hosttext+"')),sort:!(utctimestamp,desc))"
searchURL=searchDomain+searchPath;
$(this).wrap("<a href="+searchURL+" target='_blank'></a>" );
});
//return raw html, consume as {{{ ipDecorate fieldname }}} in a meteor template //return raw html, consume as {{{ ipDecorate fieldname }}} in a meteor template
return anelement.prop('outerHTML'); return anelement.prop('outerHTML');
}); });

Просмотреть файл

@ -6,7 +6,7 @@
ul.dropdown { ul.dropdown {
position: relative; position: relative;
display: inline-block; display: inline-block;
width: min-content; width: max-content;
border: 1px solid rgba(0, 0, 0, 0.2); border: 1px solid rgba(0, 0, 0, 0.2);
border-radius: 3px; border-radius: 3px;
box-shadow: 0px 5px 10px rgba(0, 0, 0, 0.2); box-shadow: 0px 5px 10px rgba(0, 0, 0, 0.2);
@ -93,4 +93,4 @@ ul.dropdown ul ul {
} }
ul.dropdown li:hover > ul { ul.dropdown li:hover > ul {
visibility: visible; visibility: visible;
} }

Просмотреть файл

@ -19,6 +19,7 @@ if (Meteor.isServer) {
'ipwhois': ipwhois, 'ipwhois': ipwhois,
'ipdshield': ipdshield, 'ipdshield': ipdshield,
'ipintel': ipintel, 'ipintel': ipintel,
'ipsearch': ipsearch,
'verisstats': verisstats, 'verisstats': verisstats,
'logincounts': logincounts, 'logincounts': logincounts,
'getplugins': getplugins, 'getplugins': getplugins,
@ -109,6 +110,19 @@ if (Meteor.isServer) {
} }
} }
function ipsearch(ipaddress){
//console.log('Posting ' + ipaddress + 'to ' + mozdef.rootAPI + '/ipwhois/');
var ipsearchResponse = HTTP.post(mozdef.rootAPI + '/ipsearch/',{data: {'ipaddress':ipaddress}});
if ( typeof ipsearchResponse == 'undefined') {
console.log("ipsearch: no response from server")
return "";
} else {
//console.log(ipdshieldResponse);
return ipsearchResponse;
}
}
function ipintel(ipaddress){ function ipintel(ipaddress){
//console.log('Posting ' + ipaddress + 'to ' + mozdef.rootAPI + '/ipintel/'); //console.log('Posting ' + ipaddress + 'to ' + mozdef.rootAPI + '/ipintel/');
var ipintelResponse = HTTP.post(mozdef.rootAPI + '/ipintel/',{data: {'ipaddress':ipaddress}}); var ipintelResponse = HTTP.post(mozdef.rootAPI + '/ipintel/',{data: {'ipaddress':ipaddress}});

Просмотреть файл

@ -255,6 +255,8 @@ class taskConsumer(object):
event['summary'] = event['details']['message'] event['summary'] = event['details']['message']
if 'severity' in event['details']: if 'severity' in event['details']:
event['severity'] = event['details']['severity'] event['severity'] = event['details']['severity']
if 'source_ip' in event['details']:
event['sourceipaddress'] = event['details']['source_ip']
else: else:
event['severity'] = 'INFO' event['severity'] = 'INFO'
event['category'] = 'syslog' event['category'] = 'syslog'

Просмотреть файл

@ -21,28 +21,28 @@ class message(object):
self.modify_keys = [ self.modify_keys = [
'details.additionaleventdata', 'details.additionaleventdata',
'details.apiversion', 'details.apiversion',
'details.requestparameters.attribute',
'details.requestparameters.bucketPolicy.Statement.Principal',
'details.requestparameters.callerReference',
'details.requestparameters.description',
'details.requestparameters.disableApiTermination',
'details.requestparameters.domainName',
'details.requestparameters.domainNames',
'details.requestparameters.ebsOptimized',
'details.requestparameters.filter',
'details.requestparameters.iamInstanceProfile',
'details.requestparameters.instanceType',
'details.requestparameters.logStreamName',
'details.requestparameters.rule',
'details.requestparameters.source',
'details.responseelements.distribution.distributionConfig.callerReference',
'details.responseelements.endpoint',
'details.responseelements.findings.service.additionalInfo.unusual',
'details.responseelements.lastModified',
'details.responseelements.role',
'details.responseelements.securityGroups',
'details.responseelements.subnets',
'details.serviceeventdetails', 'details.serviceeventdetails',
'details.requestparameters.attribute',
'details.requestparameters.bucketpolicy.statement.principal',
'details.requestparameters.callerreference',
'details.requestparameters.description',
'details.requestparameters.disableapitermination',
'details.requestparameters.domainname',
'details.requestparameters.domainnames',
'details.requestparameters.ebsoptimized',
'details.requestparameters.filter',
'details.requestparameters.iaminstanceprofile',
'details.requestparameters.instancetype',
'details.requestparameters.logstreamname',
'details.requestparameters.source',
'details.responseelements.role',
'details.requestparameters.rule',
'details.responseelements.subnets',
'details.responseelements.endpoint',
'details.responseelements.securitygroups',
'details.responseelements.lastmodified',
'details.responseelements.findings.service.additionalinfo.unusual',
'details.responseelements.distribution.distributionconfig.callerreference'
] ]
def convert_key_raw_str(self, needle, haystack): def convert_key_raw_str(self, needle, haystack):

Просмотреть файл

@ -23,17 +23,17 @@ class message(object):
message['details'] = {} message['details'] = {}
summary_items = message['summary'].split(',') summary_items = message['summary'].split(',')
message['details']['rule_number'] = summary_items[0] message['details']['rulenumber'] = summary_items[0]
message['details']['sub_rule_number'] = summary_items[1] message['details']['subrulenumber'] = summary_items[1]
message['details']['anchor'] = summary_items[2] message['details']['anchor'] = summary_items[2]
message['details']['trackor'] = summary_items[3] message['details']['trackor'] = summary_items[3]
message['details']['interface'] = summary_items[4] message['details']['interface'] = summary_items[4]
message['details']['reason'] = summary_items[5] message['details']['reason'] = summary_items[5]
message['details']['action'] = summary_items[6] message['details']['action'] = summary_items[6]
message['details']['direction'] = summary_items[7] message['details']['direction'] = summary_items[7]
message['details']['ip_version'] = summary_items[8] message['details']['ipversion'] = summary_items[8]
ip_version = int(message['details']['ip_version']) ip_version = int(message['details']['ipversion'])
if ip_version == 4: if ip_version == 4:
if 'ip' not in message['details']: if 'ip' not in message['details']:
message['details']['ip'] = {} message['details']['ip'] = {}
@ -45,8 +45,8 @@ class message(object):
message['details']['ip']['id'] = summary_items[12] message['details']['ip']['id'] = summary_items[12]
message['details']['ip']['offset'] = summary_items[13] message['details']['ip']['offset'] = summary_items[13]
message['details']['ip']['flags'] = summary_items[14] message['details']['ip']['flags'] = summary_items[14]
message['details']['ip']['protocol_id'] = summary_items[15] message['details']['ip']['protocolid'] = summary_items[15]
message['details']['ip']['protocol_text'] = summary_items[16] message['details']['ip']['protocoltext'] = summary_items[16]
last_index = 16 last_index = 16
elif ip_version == 6: elif ip_version == 6:
if 'ip' not in message['details']: if 'ip' not in message['details']:
@ -55,9 +55,9 @@ class message(object):
message['details']['ip']['version'] = 6 message['details']['ip']['version'] = 6
message['details']['ip']['class'] = summary_items[9] message['details']['ip']['class'] = summary_items[9]
message['details']['ip']['flow_label'] = summary_items[10] message['details']['ip']['flow_label'] = summary_items[10]
message['details']['ip']['hop_limit'] = summary_items[11] message['details']['ip']['hoplimit'] = summary_items[11]
message['details']['ip']['protocol'] = summary_items[12] message['details']['ip']['protocol'] = summary_items[12]
message['details']['ip']['protocol_id'] = summary_items[13] message['details']['ip']['protocolid'] = summary_items[13]
last_index = 13 last_index = 13
if ip_version == 4 or ip_version == 6: if ip_version == 4 or ip_version == 6:
@ -65,27 +65,24 @@ class message(object):
message['details']['sourceipaddress'] = summary_items[last_index + 2] message['details']['sourceipaddress'] = summary_items[last_index + 2]
message['details']['destinationipaddress'] = summary_items[last_index + 3] message['details']['destinationipaddress'] = summary_items[last_index + 3]
proto_id = int(message['details']['ip']['protocol_id']) proto_id = int(message['details']['ip']['protocolid'])
if proto_id == 6: if proto_id == 6:
if 'tcp' not in message['details']: if 'tcp' not in message['details']:
message['details']['tcp'] = {} message['details']['tcp'] = {}
message['details']['tcp']['source_port'] = summary_items[last_index + 4] message['details']['sourceport'] = summary_items[last_index + 4]
message['details']['tcp']['destination_port'] = summary_items[last_index + 5] message['details']['destinationport'] = summary_items[last_index + 5]
message['details']['tcp']['data_length'] = summary_items[last_index + 6] message['details']['datalength'] = summary_items[last_index + 6]
message['details']['tcp']['flags'] = summary_items[last_index + 7] message['details']['tcp']['flags'] = summary_items[last_index + 7]
message['details']['tcp']['seq_number'] = summary_items[last_index + 8] message['details']['tcp']['seqnumber'] = summary_items[last_index + 8]
message['details']['tcp']['ack_number'] = summary_items[last_index + 9] message['details']['tcp']['acknumber'] = summary_items[last_index + 9]
message['details']['tcp']['window'] = summary_items[last_index + 10] message['details']['tcp']['window'] = summary_items[last_index + 10]
message['details']['tcp']['urg'] = summary_items[last_index + 11] message['details']['tcp']['urg'] = summary_items[last_index + 11]
message['details']['tcp']['options'] = summary_items[last_index + 12] message['details']['tcp']['options'] = summary_items[last_index + 12]
elif proto_id == 17: elif proto_id == 17:
if 'udp' not in message['details']: message['details']['sourceport'] = summary_items[last_index + 4]
message['details']['udp'] = {} message['details']['destinationport'] = summary_items[last_index + 5]
message['details']['datalength'] = summary_items[last_index + 6]
message['details']['udp']['source_port'] = summary_items[last_index + 4]
message['details']['udp']['destination_port'] = summary_items[last_index + 5]
message['details']['udp']['data_length'] = summary_items[last_index + 6]
return (message, metadata) return (message, metadata)

Просмотреть файл

@ -81,23 +81,23 @@ class message(object):
if message['category'] == 'logfile': if message['category'] == 'logfile':
message['category'] = 'weblog' message['category'] = 'weblog'
if 'remoteAddressChain' in message['details'].keys(): if 'remoteaddresschain' in message['details'].keys():
if isinstance(message['details']['remoteAddressChain'], list): if isinstance(message['details']['remoteaddresschain'], list):
sourceIP = message['details']['remoteAddressChain'][0] sourceIP = message['details']['remoteaddresschain'][0]
if isIP(sourceIP): if isIP(sourceIP):
message['details']['sourceipaddress'] = sourceIP message['details']['sourceipaddress'] = sourceIP
# handle the case of an escaped list: # handle the case of an escaped list:
# "remoteAddressChain": "[\"1.2.3.4\",\"5.6.7.8\",\"127.0.0.1\"]" # "remoteaddresschain": "[\"1.2.3.4\",\"5.6.7.8\",\"127.0.0.1\"]"
if (isinstance(message['details']['remoteAddressChain'], unicode) and if (isinstance(message['details']['remoteaddresschain'], unicode) and
message['details']['remoteAddressChain'][0] == '[' and message['details']['remoteaddresschain'][0] == '[' and
message['details']['remoteAddressChain'][-1] == ']'): message['details']['remoteaddresschain'][-1] == ']'):
# remove the brackets and double quotes # remove the brackets and double quotes
for i in ['[', ']', '"']: for i in ['[', ']', '"']:
message['details']['remoteAddressChain'] = message['details']['remoteAddressChain'].replace(i, '') message['details']['remoteaddresschain'] = message['details']['remoteaddresschain'].replace(i, '')
# make sure it's still a list # make sure it's still a list
if ',' in message['details']['remoteAddressChain']: if ',' in message['details']['remoteaddresschain']:
sourceIP = message['details']['remoteAddressChain'].split(',')[0] sourceIP = message['details']['remoteaddresschain'].split(',')[0]
if isIP(sourceIP): if isIP(sourceIP):
message['details']['sourceipaddress'] = sourceIP message['details']['sourceipaddress'] = sourceIP

Просмотреть файл

@ -19,19 +19,19 @@ class message(object):
# AWS guard duty sends dates as iso_8601 which ES doesn't appreciate # AWS guard duty sends dates as iso_8601 which ES doesn't appreciate
# here's a list of date fields we'll convert to isoformat # here's a list of date fields we'll convert to isoformat
self.date_keys = [ self.date_keys = [
'details.finding.eventLastSeen', 'details.finding.eventlastseen',
'details.finding.eventFirstSeen', 'details.finding.eventfirstseen',
'details.resource.instanceDetails.launchTime', 'details.resource.instancedetails.launchtime',
'details.createdAt', 'details.createdat',
'details.updatedAt' 'details.updatedat'
] ]
# AWS guard duty can send IPs in a bunch of places # AWS guard duty can send IPs in a bunch of places
# Lets pick out some likely targets and format them # Lets pick out some likely targets and format them
# so other mozdef plugins can rely on their location # so other mozdef plugins can rely on their location
self.ipaddress_keys = [ self.ipaddress_keys = [
'details.finding.action.networkConnectionAction.remoteIpDetails.ipAddressV4', 'details.finding.action.networkconnectionaction.remoteipdetails.ipaddressv4',
'details.finding.action.awsApiCallAction.remoteIpDetails.ipAdrressV4' 'details.finding.action.awsapicallaction.remoteipdetails.ipadrressv4'
] ]
def convert_key_date_format(self, needle, haystack): def convert_key_date_format(self, needle, haystack):
@ -73,14 +73,14 @@ class message(object):
# if we still haven't found what we are looking for #U2 # if we still haven't found what we are looking for #U2
# sometimes it's in a list # sometimes it's in a list
if 'sourceipaddress' not in message['details'].keys(): if 'sourceipaddress' not in message['details'].keys():
if key_exists('details.finding.action.portProbeAction.portProbeDetails', message) \ if key_exists('details.finding.action.portprobeaction.portprobedetails', message) \
and isinstance(message.details.finding.action.portProbeAction.portProbeDetails, list): and isinstance(message.details.finding.action.portprobeaction.portprobedetails, list):
# inspect the first list entry and see if it contains an IP # inspect the first list entry and see if it contains an IP
portProbeDetails = DotDict( portprobedetails = DotDict(
message.details.finding.action.portProbeAction.portProbeDetails[0]) message.details.finding.action.portprobeaction.portprobedetails[0])
if key_exists('remoteIpDetails.ipAddressV4', portProbeDetails): if key_exists('remoteipdetails.ipaddressv4', portprobedetails):
message.details.sourceipaddress = portProbeDetails.remoteIpDetails.ipAddressV4 message.details.sourceipaddress = portprobedetails.remoteipdetails.ipaddressv4
# recovert the message back to a plain dict # recovert the message back to a plain dict
return (dict(message), metadata) return (dict(message), metadata)

31
mq/plugins/lower_keys.py Normal file
Просмотреть файл

@ -0,0 +1,31 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
class message(object):
def __init__(self):
'''
takes an incoming message
and sets the keys to lowercase
'''
self.registration = ['cloudtrail', 'fxa-customsmozsvc', 'vidyo', 'suricata', 'guardduty']
self.priority = 4
def onMessage(self, message, metadata):
def renameKeysToLower(message):
if isinstance(message, dict):
for key in message.keys():
message[key.lower()] = message.pop(key)
if isinstance(message[key.lower()], dict) or isinstance(message[key.lower()], list):
message[key.lower()] = renameKeysToLower(message[key.lower()])
elif isinstance(message, list):
for item in message:
item = renameKeysToLower(item)
return message
message = renameKeysToLower(message)
return (message, metadata)

Просмотреть файл

@ -46,21 +46,21 @@ class message(object):
newmessage = dict() newmessage = dict()
try: try:
newmessage['details'] = json.loads(message['MESSAGE']) newmessage['details'] = json.loads(message['message'])
except: except:
newmessage['details'] = {} newmessage['details'] = {}
newmessage['rawdetails'] = message newmessage['rawdetails'] = message
# move some fields that are expected at the event 'root' where they belong # move some fields that are expected at the event 'root' where they belong
if 'HOST_FROM' in message: if 'host_from' in message:
newmessage['hostname'] = message['HOST_FROM'] newmessage['hostname'] = message['host_from']
if 'tags' in message: if 'tags' in message:
newmessage['tags'] = message['tags'] newmessage['tags'] = message['tags']
if 'category' in message: if 'category' in message:
newmessage['category'] = message['category'] newmessage['category'] = message['category']
newmessage[u'source'] = u'unknown' newmessage[u'source'] = u'unknown'
if 'SOURCE' in message: if 'source' in message:
newmessage[u'source'] = message['SOURCE'] newmessage[u'source'] = message['source']
logtype = newmessage['source'] logtype = newmessage['source']
newmessage[u'event_type'] = u'unknown' newmessage[u'event_type'] = u'unknown'
if 'event_type' in message: if 'event_type' in message:
@ -103,12 +103,12 @@ class message(object):
newmessage[u'details'][u'destinationport'] = newmessage['details']['dest_port'] newmessage[u'details'][u'destinationport'] = newmessage['details']['dest_port']
del(newmessage['details']['dest_port']) del(newmessage['details']['dest_port'])
if 'FILE_NAME' in newmessage['details']: if 'file_name' in newmessage['details']:
del(newmessage['details']['FILE_NAME']) del(newmessage['details']['file_name'])
if 'MESSAGE' in newmessage['details']: if 'message' in newmessage['details']:
del(newmessage['details']['MESSAGE']) del(newmessage['details']['message'])
if 'SOURCE' in newmessage['details']: if 'source' in newmessage['details']:
del(newmessage['details']['SOURCE']) del(newmessage['details']['source'])
if logtype == 'eve-log': if logtype == 'eve-log':
if eventtype == 'alert': if eventtype == 'alert':

Просмотреть файл

@ -21,7 +21,7 @@ class message(object):
self.priority = 5 self.priority = 5
def onMessage(self, message, metadata): def onMessage(self, message, metadata):
docid = hashlib.md5('vidyoUniqueCallID' + message['details']['UniqueCallID']).hexdigest() docid = hashlib.md5('vidyouniquecallid' + message['details']['uniquecallid']).hexdigest()
metadata['id'] = docid metadata['id'] = docid
metadata['doc_type'] = 'vidyo' metadata['doc_type'] = 'vidyo'
return (message, metadata) return (message, metadata)

Просмотреть файл

@ -7,12 +7,12 @@ botocore==1.10.67
bottle==0.12.4 bottle==0.12.4
celery==4.1.0 celery==4.1.0
cffi==1.9.1 cffi==1.9.1
configlib==2.0.2 configlib==2.0.3
configparser==3.5.0b2 configparser==3.5.0b2
cryptography==2.3.1 cryptography==2.3.1
dnspython==1.15.0 dnspython==1.15.0
docutils==0.14 docutils==0.14
duo-client==3.0 duo-client==4.1
elasticsearch==5.5.2 elasticsearch==5.5.2
elasticsearch-dsl==5.4.0 elasticsearch-dsl==5.4.0
enum34==1.1.6 enum34==1.1.6

Просмотреть файл

@ -40,11 +40,10 @@ class TestAlertAuditdCommands(AlertTestSuite):
) )
event = AlertTestSuite.create_event(default_event) event = AlertTestSuite.create_event(default_event)
event['_source']['category'] = "someother" event['_source']['category'] = "execve"
event['_source']['tags'] = ["audit", "othervalue"]
test_cases.append( test_cases.append(
PositiveAlertTestCase( PositiveAlertTestCase(
description="Positive test case with audit in tags", description="Positive test case with execve as the category",
events=[event], events=[event],
expected_alert=default_alert expected_alert=default_alert
) )

Просмотреть файл

@ -12,10 +12,10 @@ class TestAlertCloudtrailLoggingDisabled(AlertTestSuite):
default_event = { default_event = {
"_type": "cloudtrail", "_type": "cloudtrail",
"_source": { "_source": {
"eventName": "StopLogging", "eventname": "StopLogging",
"source": "cloudtrail", "source": "cloudtrail",
"requestParameters": { "requestparameters": {
"name": "cloudtrail_example_name" "name": "cloudtrail_example_name",
} }
} }
} }
@ -60,7 +60,7 @@ class TestAlertCloudtrailLoggingDisabled(AlertTestSuite):
) )
event = AlertTestSuite.create_event(default_event) event = AlertTestSuite.create_event(default_event)
event['_source']['eventName'] = 'Badeventname' event['_source']['eventname'] = 'Badeventname'
test_cases.append( test_cases.append(
NegativeAlertTestCase( NegativeAlertTestCase(
description="Negative test case with bad eventName", description="Negative test case with bad eventName",
@ -79,7 +79,7 @@ class TestAlertCloudtrailLoggingDisabled(AlertTestSuite):
) )
event = AlertTestSuite.create_event(default_event) event = AlertTestSuite.create_event(default_event)
event['_source']['errorCode'] = 'AccessDenied' event['_source']['errorcode'] = 'AccessDenied'
test_cases.append( test_cases.append(
NegativeAlertTestCase( NegativeAlertTestCase(
description="Negative test case with excluding errorCode", description="Negative test case with excluding errorCode",

Просмотреть файл

@ -17,7 +17,7 @@ class TestAlertDuoAuthFail(AlertTestSuite):
default_event = { default_event = {
"_type": "event", "_type": "event",
"_source": { "_source": {
"category": "event", "category": "authentication",
"summary": 'authentication FRAUD for you@somewhere.com', "summary": 'authentication FRAUD for you@somewhere.com',
"details": { "details": {
"sourceipaddress": "1.2.3.4", "sourceipaddress": "1.2.3.4",

Просмотреть файл

@ -0,0 +1,134 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from positive_alert_test_case import PositiveAlertTestCase
from negative_alert_test_case import NegativeAlertTestCase
from alert_test_suite import AlertTestSuite
class TestNSMScanAddress(AlertTestSuite):
alert_filename = "nsm_scan_address"
# This event is the default positive event that will cause the
# alert to trigger
default_event = {
"_source": {
"category": "bro",
"summary": "Scan::Address_Scan source 10.252.25.90 destination unknown port unknown",
"hostname": "your.friendly.nsm.sensor",
"tags": ["bro"],
"details": {
"sourceipaddress": "10.99.88.77",
"indicators": "10.99.88.77",
"source": "notice",
"note": "Scan::Address_Scan",
}
}
}
# This alert is the expected result from running this task
default_alert = {
"category": "nsm",
"severity": "NOTICE",
"tags": ['nsm', 'bro', 'addressscan'],
"summary": "Address scan from 10.99.88.77 (mock.mozilla.org)",
'notify_mozdefbot': False
}
test_cases = []
test_cases.append(
PositiveAlertTestCase(
description="Positive test with default event and default alert expected",
events=AlertTestSuite.create_events(default_event, 5),
expected_alert=default_alert
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda(date_timedelta={'minutes': 1})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda(date_timedelta={'minutes': 1})
test_cases.append(
PositiveAlertTestCase(
description="Positive test with events a minute earlier",
events=events,
expected_alert=default_alert
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['category'] = 'syslog'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different category",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['source'] = 'intel'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different details.source",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['note'] = 'Scan::Random_Scan'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different scan type (note)",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['note'] = 'Scan::Port_Scan'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different scan type (note)",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['sourceipaddress'] = '10.54.65.234'
event['_source']['details']['indicators'] = '1.2.3.4'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with an excluded IP address",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['sourceipaddress'] = '1.2.3.4'
event['_source']['details']['indicators'] = '1.2.3.4'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with an excluded subnet",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 15})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 15})
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with old timestamp",
events=events,
)
)

Просмотреть файл

@ -0,0 +1,124 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from positive_alert_test_case import PositiveAlertTestCase
from negative_alert_test_case import NegativeAlertTestCase
from alert_test_suite import AlertTestSuite
class TestNSMScanRandom(AlertTestSuite):
alert_filename = "nsm_scan_random"
# This event is the default positive event that will cause the
# alert to trigger
default_event = {
"_source": {
"category": "bro",
"summary": "Scan::Random_Scan source 10.252.25.90 destination unknown port unknown",
"hostname": "your.friendly.nsm.sensor",
"tags": ["bro"],
"details": {
"sourceipaddress": "10.99.88.77",
"indicators": "10.99.88.77",
"source": "notice",
"note": "Scan::Random_Scan",
}
}
}
# This alert is the expected result from running this task
default_alert = {
"category": "nsm",
"severity": "WARNING",
"summary": "Random scan from 10.99.88.77",
"tags": ['nsm', 'bro', 'randomscan'],
"notify_mozdefbot": True,
}
test_cases = []
test_cases.append(
PositiveAlertTestCase(
description="Positive test with default event and default alert expected",
events=AlertTestSuite.create_events(default_event, 5),
expected_alert=default_alert
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda(date_timedelta={'minutes': 1})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda(date_timedelta={'minutes': 1})
test_cases.append(
PositiveAlertTestCase(
description="Positive test with events a minute earlier",
events=events,
expected_alert=default_alert
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['category'] = 'syslog'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different category",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['source'] = 'intel'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different details.source",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['note'] = 'Scan::Address_Scan'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different scan type (note)",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['sourceipaddress'] = '10.54.65.234'
event['_source']['details']['indicators'] = '1.2.3.4'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with an excluded IP address",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['sourceipaddress'] = '1.2.3.4'
event['_source']['details']['indicators'] = '1.2.3.4'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with an excluded subnet",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 15})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 15})
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with old timestamp",
events=events,
)
)

Просмотреть файл

@ -33,9 +33,8 @@ class TestTraceAudit(AlertTestSuite):
default_alert = { default_alert = {
"category": "trace", "category": "trace",
"severity": "WARNING", "severity": "WARNING",
"summary": "5 instances of Strace or Ptrace executed on a system by randomjoe", "summary": "5 instances of Strace or Ptrace executed by randomjoe on exhostname",
"tags": ['audit'], "tags": ['audit'],
"notify_mozdefbot": True,
} }
test_cases = [] test_cases = []

Просмотреть файл

@ -19,12 +19,17 @@ class TestWriteAudit(AlertTestSuite):
"_type": "auditd", "_type": "auditd",
"_source": { "_source": {
"category": "write", "category": "write",
"summary": "Write: /etc/audit/", "summary": "Write: /etc/audit/plugins.d/temp-file.conf",
"hostname": "exhostname", "hostname": "exhostname",
"tags": ["audisp-json","2.1.0", "audit"], "tags": [
"audisp-json",
"2.1.0",
"audit"
],
"details": { "details": {
"processname": "vi", "processname": "vi",
"originaluser": "randomjoe", "originaluser": "randomjoe",
"user": "root",
"auditkey": "audit", "auditkey": "audit",
} }
} }
@ -34,7 +39,7 @@ class TestWriteAudit(AlertTestSuite):
default_alert = { default_alert = {
"category": "write", "category": "write",
"severity": "WARNING", "severity": "WARNING",
"summary": "5 Filesystem write(s) to an auditd path by randomjoe", "summary": "5 Filesystem write(s) to an auditd path (/etc/audit/plugins.d/temp-file.conf) by root (randomjoe)",
"tags": ['audit'], "tags": ['audit'],
} }
@ -50,23 +55,15 @@ class TestWriteAudit(AlertTestSuite):
events = AlertTestSuite.create_events(default_event, 5) events = AlertTestSuite.create_events(default_event, 5)
for event in events: for event in events:
event['_source']['summary'] = 'Write: /etc/audit/rules.d/.audit.rules.swp' event['_source']['details']['originaluser'] = 'user1'
expected_alert = AlertTestSuite.create_alert(default_alert)
expected_alert['severity'] = 'NOTICE'
expected_alert['summary'] = "5 Filesystem write(s) to an auditd path (/etc/audit/plugins.d/temp-file.conf) by root (user1)"
test_cases.append( test_cases.append(
PositiveAlertTestCase( PositiveAlertTestCase(
description="Positive test with events with a summary of 'Write: /etc/audit/rules.d/.audit.rules.swp'", description="Positive test with expected downgraded severity",
events=events, events=events,
expected_alert=default_alert expected_alert=expected_alert
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['summary'] = 'Write: /etc/audit/rules.d/'
test_cases.append(
PositiveAlertTestCase(
description="Positive test with events with a summary of 'Write: /etc/audit/rules.d/'",
events=events,
expected_alert=default_alert
) )
) )

Просмотреть файл

@ -134,7 +134,6 @@ class TestBroFixup(object):
"eventsource": "systemslogs", "eventsource": "systemslogs",
"details": { "details": {
"processid": "21233", "processid": "21233",
"Random": 2,
"sourceipv4address": "10.22.74.208", "sourceipv4address": "10.22.74.208",
"hostname": "hostname1.subdomain.domain.com", "hostname": "hostname1.subdomain.domain.com",
"program": "sshd", "program": "sshd",

Просмотреть файл

@ -43,7 +43,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'requestparameters': { 'requestparameters': {
'iamInstanceProfile': 'astringvalue', 'iaminstanceprofile': 'astringvalue',
} }
} }
} }
@ -53,7 +53,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'requestparameters': { 'requestparameters': {
'iamInstanceProfile': { 'iaminstanceprofile': {
'raw_value': 'astringvalue', 'raw_value': 'astringvalue',
} }
} }
@ -295,7 +295,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'requestparameters': { 'requestparameters': {
'ebsOptimized': 'astringvalue', 'ebsoptimized': 'astringvalue',
} }
} }
} }
@ -305,7 +305,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'requestparameters': { 'requestparameters': {
'ebsOptimized': { 'ebsoptimized': {
'raw_value': 'astringvalue', 'raw_value': 'astringvalue',
} }
} }
@ -319,7 +319,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'responseelements': { 'responseelements': {
'securityGroups': 'astringvalue', 'securitygroups': 'astringvalue',
} }
} }
} }
@ -329,7 +329,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'responseelements': { 'responseelements': {
'securityGroups': { 'securitygroups': {
'raw_value': 'astringvalue', 'raw_value': 'astringvalue',
} }
} }
@ -343,7 +343,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'requestparameters': { 'requestparameters': {
'disableApiTermination': 'astringvalue' 'disableapitermination': 'astringvalue'
} }
} }
} }
@ -353,7 +353,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'requestparameters': { 'requestparameters': {
'disableApiTermination': { 'disableapitermination': {
'raw_value': 'astringvalue' 'raw_value': 'astringvalue'
} }
} }
@ -367,7 +367,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'responseelements': { 'responseelements': {
'lastModified': 'astringvalue' 'lastmodified': 'astringvalue'
} }
} }
} }
@ -377,7 +377,7 @@ class TestCloudtrailPlugin():
'source': 'cloudtrail', 'source': 'cloudtrail',
'details': { 'details': {
'responseelements': { 'responseelements': {
'lastModified': { 'lastmodified': {
'raw_value': 'astringvalue' 'raw_value': 'astringvalue'
} }
} }
@ -393,7 +393,7 @@ class TestCloudtrailPlugin():
'responseelements': { 'responseelements': {
'findings': { 'findings': {
'service': { 'service': {
'additionalInfo': { 'additionalinfo': {
'unusual': 'astringvalue' 'unusual': 'astringvalue'
} }
} }
@ -409,7 +409,7 @@ class TestCloudtrailPlugin():
'responseelements': { 'responseelements': {
'findings': { 'findings': {
'service': { 'service': {
'additionalInfo': { 'additionalinfo': {
'unusual': { 'unusual': {
'raw_value': 'astringvalue' 'raw_value': 'astringvalue'
} }

Просмотреть файл

@ -33,23 +33,21 @@ class TestFilterlog():
'id': '60624', 'id': '60624',
'length': '92', 'length': '92',
'offset': '0', 'offset': '0',
'protocol_id': '17', 'protocolid': '17',
'protocol_text': 'udp', 'protocoltext': 'udp',
'tos': '0x0', 'tos': '0x0',
'ttl': '6', 'ttl': '6',
'version': 4 'version': 4
}, },
'ip_version': '4', 'ipversion': '4',
'reason': 'match', 'reason': 'match',
'rule_number': '9', 'rulenumber': '9',
'sourceipaddress': '175.41.7.2', 'sourceipaddress': '175.41.7.2',
'sub_rule_number': '', 'subrulenumber': '',
'trackor': '1000000103', 'trackor': '1000000103',
'udp': { 'datalength': '72',
'data_length': '72', 'destinationport': '33443',
'destination_port': '33443', 'sourceport': '57434'
'source_port': '57434'
}
}, },
'summary': '9,,,1000000103,igb0,match,block,in,4,0x0,,6,60624,0,DF,17,udp,92,175.41.7.2,21.143.56.109,57434,33443,72' 'summary': '9,,,1000000103,igb0,match,block,in,4,0x0,,6,60624,0,DF,17,udp,92,175.41.7.2,21.143.56.109,57434,33443,72'
} }

View file

@ -0,0 +1,86 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../../mq/plugins"))

from lower_keys import message


class TestLowerKeysPlugin():
    def setup(self):
        self.plugin = message()

    def test_uppercase_details(self):
        msg = {
            'source': 'cloudtrail',
            'Details': {
                'requestparameters': {
                    'description': 'astringvalue',
                }
            }
        }
        (retmessage, retmeta) = self.plugin.onMessage(msg, {})
        expected_message = {
            'source': 'cloudtrail',
            'details': {
                'requestparameters': {
                    'description': 'astringvalue',
                }
            }
        }
        assert retmessage == expected_message
        assert retmeta == {}

    def test_uppercase_nested_keys(self):
        msg = {
            'source': 'cloudtrail',
            'details': {
                'RequestParameters': {
                    'Description': 'astringvalue',
                }
            }
        }
        (retmessage, retmeta) = self.plugin.onMessage(msg, {})
        expected_message = {
            'source': 'cloudtrail',
            'details': {
                'requestparameters': {
                    'description': 'astringvalue',
                }
            }
        }
        assert retmessage == expected_message
        assert retmeta == {}

    def test_uppercase_nested_keys2(self):
        msg = {
            'source': 'cloudtrail',
            'details': {
                'RequestParameters': {
                    'Description': 'astringvalue',
                    'ApplicationSource': {
                        'someKey:': 'anothervalue',
                    }
                }
            }
        }
        (retmessage, retmeta) = self.plugin.onMessage(msg, {})
        expected_message = {
            'source': 'cloudtrail',
            'details': {
                'requestparameters': {
                    'description': 'astringvalue',
                    'applicationsource': {
                        'somekey:': 'anothervalue',
                    }
                }
            }
        }
        assert retmessage == expected_message
        assert retmeta == {}
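The tests above pin down what the new lower_keys plugin is expected to do: lower-case every dict key at any nesting depth while leaving values untouched. A minimal sketch of that transformation, assuming the plugin does nothing beyond recursive key lowering (the function name below is illustrative, not the plugin's API):

def lowercase_keys(obj):
    # Recursively lower-case dict keys; lists are walked, and all other
    # values pass through unchanged. Illustrative sketch only.
    if isinstance(obj, dict):
        return dict((key.lower(), lowercase_keys(value)) for key, value in obj.items())
    if isinstance(obj, list):
        return [lowercase_keys(item) for item in obj]
    return obj

# lowercase_keys({'Details': {'RequestParameters': {'Description': 'x'}}})
# returns {'details': {'requestparameters': {'description': 'x'}}}, which is
# what the expected_message dicts above assert.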

View file

@ -24,7 +24,6 @@ accept_message['mozdefhostname'] = 'mozdef4.private.scl3.mozilla.com'
accept_message['eventsource'] = 'systemlogs' accept_message['eventsource'] = 'systemlogs'
accept_message['details'] = {} accept_message['details'] = {}
accept_message['details']['processid'] = '5413' accept_message['details']['processid'] = '5413'
accept_message['details']['Random'] = '9'
accept_message['details']['sourceipv4address'] = '10.22.74.208' accept_message['details']['sourceipv4address'] = '10.22.74.208'
accept_message['details']['hostname'] = 'mysuperhost.somewhere.com' accept_message['details']['hostname'] = 'mysuperhost.somewhere.com'
accept_message['details']['program'] = 'sshd' accept_message['details']['program'] = 'sshd'

View file

@ -22,7 +22,6 @@ session_su['eventsource'] = 'systemlogs'
session_su['hostname'] = 'syslog1.private.scl3.mozilla.com' session_su['hostname'] = 'syslog1.private.scl3.mozilla.com'
session_su['mozdefhostname'] = 'mozdef4.private.scl3.mozilla.com' session_su['mozdefhostname'] = 'mozdef4.private.scl3.mozilla.com'
session_su['details'] = {} session_su['details'] = {}
session_su['details']['Random'] = '9'
session_su['details']['program'] = 'su' session_su['details']['program'] = 'su'
session_su['details']['hostname'] = 'irc1.dmz.scl3.mozilla.com' session_su['details']['hostname'] = 'irc1.dmz.scl3.mozilla.com'

View file

@ -56,7 +56,7 @@ class TestSuricataFixup(object):
} }
event = { event = {
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
@ -72,7 +72,7 @@ class TestSuricataFixup(object):
} }
event = { event = {
'customendpoint': '', 'customendpoint': '',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
@ -89,7 +89,7 @@ class TestSuricataFixup(object):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'alamakota', 'category': 'alamakota',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
@ -108,7 +108,7 @@ class TestSuricataFixup(object):
'category': 'suricata', 'category': 'suricata',
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log' 'source': 'eve-log'
} }
result, metadata = self.plugin.onMessage(event, metadata) result, metadata = self.plugin.onMessage(event, metadata)
@ -125,7 +125,7 @@ class TestSuricataFixup(object):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alamakota' 'event_type': 'alamakota'
} }
@ -143,7 +143,7 @@ class TestSuricataFixup(object):
MESSAGE = { MESSAGE = {
'ts': 1505701210.163043 'ts': 1505701210.163043
} }
event['MESSAGE'] = json.dumps(MESSAGE) event['message'] = json.dumps(MESSAGE)
result, metadata = self.plugin.onMessage(event, self.metadata) result, metadata = self.plugin.onMessage(event, self.metadata)
assert result['category'] == 'suricata' assert result['category'] == 'suricata'
@ -154,13 +154,13 @@ class TestSuricataFixup(object):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'alamakota', 'source': 'alamakota',
'event_type': 'alert' 'event_type': 'alert'
} }
MESSAGE = { MESSAGE = {
'ts': 1505701210.163043 'ts': 1505701210.163043
} }
event['MESSAGE'] = json.dumps(MESSAGE) event['message'] = json.dumps(MESSAGE)
result, metadata = self.plugin.onMessage(event, self.metadata) result, metadata = self.plugin.onMessage(event, self.metadata)
assert result['category'] == 'suricata' assert result['category'] == 'suricata'
@ -174,7 +174,7 @@ class TestSuricataFixup(object):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
result, metadata = self.plugin.onMessage(event, self.metadata) result, metadata = self.plugin.onMessage(event, self.metadata)
@ -197,7 +197,6 @@ class TestSuricataFixup(object):
"eventsource": "systemslogs", "eventsource": "systemslogs",
"details": { "details": {
"processid": "21233", "processid": "21233",
"Random": 2,
"sourceipv4address": "10.22.74.208", "sourceipv4address": "10.22.74.208",
"hostname": "hostname1.subdomain.domain.com", "hostname": "hostname1.subdomain.domain.com",
"program": "sshd", "program": "sshd",
@ -274,7 +273,7 @@ class TestSuricataFixup(object):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
MESSAGE = { MESSAGE = {
@ -313,7 +312,7 @@ class TestSuricataFixup(object):
"linktype":1 "linktype":1
} }
} }
event['MESSAGE'] = json.dumps(MESSAGE) event['message'] = json.dumps(MESSAGE)
result, metadata = self.plugin.onMessage(event, self.metadata) result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result) self.verify_defaults(result)
@ -336,7 +335,7 @@ class TestSuricataFixup(object):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
MESSAGE = { MESSAGE = {
@ -385,7 +384,7 @@ class TestSuricataFixup(object):
"redirect":"afakedestination" "redirect":"afakedestination"
}, },
} }
event['MESSAGE'] = json.dumps(MESSAGE) event['message'] = json.dumps(MESSAGE)
result, metadata = self.plugin.onMessage(event, self.metadata) result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result) self.verify_defaults(result)
@ -404,7 +403,7 @@ class TestSuricataFixup(object):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
MESSAGE = { MESSAGE = {
@ -457,7 +456,7 @@ class TestSuricataFixup(object):
MESSAGE['payload_printable'] = large_pseudorandom_string MESSAGE['payload_printable'] = large_pseudorandom_string
MESSAGE['http']['http_response_body'] = large_pseudorandom_string MESSAGE['http']['http_response_body'] = large_pseudorandom_string
MESSAGE['http']['http_response_body_printable'] = large_pseudorandom_string MESSAGE['http']['http_response_body_printable'] = large_pseudorandom_string
event['MESSAGE'] = json.dumps(MESSAGE) event['message'] = json.dumps(MESSAGE)
result, metadata = self.plugin.onMessage(event, self.metadata) result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result) self.verify_defaults(result)
@ -479,7 +478,7 @@ class TestSuricataFixup(object):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
MESSAGE = { MESSAGE = {
@ -519,27 +518,27 @@ class TestSuricataFixup(object):
}, },
"vars":{ "vars":{
"flowbits":{ "flowbits":{
"ET.http.javaclient.vulnerable":"true", "et.http.javaclient.vulnerable":"true",
"ET.JavaNotJar":"true", "et.javanotjar":"true",
"ET.http.javaclient":"true" "et.http.javaclient":"true"
} }
} }
} }
event['MESSAGE'] = json.dumps(MESSAGE) event['message'] = json.dumps(MESSAGE)
result, metadata = self.plugin.onMessage(event, self.metadata) result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result) self.verify_defaults(result)
self.verify_metadata(metadata) self.verify_metadata(metadata)
assert 'vars' in result['details'] assert 'vars' in result['details']
assert 'flowbits' in result['details']['vars'] assert 'flowbits' in result['details']['vars']
assert result['details']['vars']['flowbits']['ET.http.javaclient.vulnerable'] == "True" assert result['details']['vars']['flowbits']['et.http.javaclient.vulnerable'] == "true"
assert result['details']['vars']['flowbits']['ET.JavaNotJar'] == "true" assert result['details']['vars']['flowbits']['et.javanotjar'] == "true"
def test_eve_log_alert_rename(self): def test_eve_log_alert_rename(self):
event = { event = {
'customendpoint': '', 'customendpoint': '',
'category': 'suricata', 'category': 'suricata',
'SOURCE': 'eve-log', 'source': 'eve-log',
'event_type': 'alert' 'event_type': 'alert'
} }
MESSAGE = { MESSAGE = {
@ -578,7 +577,7 @@ class TestSuricataFixup(object):
"linktype":1 "linktype":1
} }
} }
event['MESSAGE'] = json.dumps(MESSAGE) event['message'] = json.dumps(MESSAGE)
result, metadata = self.plugin.onMessage(event, self.metadata) result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result) self.verify_defaults(result)