Mirror of https://github.com/mozilla/MozDef.git
Merge pull request #998 from mozilla/enable_library_unused_pep8_check
Enable library unused pep8 check
Commit 8af926d9ef

.flake8: 26 lines changed
@@ -1,21 +1,19 @@
 [flake8]
-exclude =
-    .flake8
-    .git
-    *__init__.py
+per-file-ignores =
+    # Ignore 'library imported but unused' for only the alert config files
+    # since we stub timedelta and crontab
+    alerts/lib/config.py: F401
+    docker/compose/mozdef_alerts/files/config.py: F401
+    # Ignore any import statements in __init__ files
+    mozdef_util/mozdef_util/query_models/__init__.py: F401
+    # Ignore redefinition of index name
+    rest/index.py: F811
 ignore =
-    E123 # closing bracket does not match indentation of opening bracket's line
     E225 # missing whitespace around operator
-    E226 # missing whitespace around arithmetic operator
-    E228 # missing whitespace around modulo operator
     E231 # missing whitespace after ','
-    E265 # block comment should start with '# '
     E402 # module level import not at top of file
     E501 # line too long
     E722 # do not use bare except'
-    F401 # library imported but unused
-    F601 # dictionary key 'tags' repeated with different values
-    F811 # redefinition of unused 'datetime' from line 10
-    F821 # undefined name 'SysLogHandler'
-    F841 # local variable 'CIDR' is assigned to but never used
-    W503 # line break before binary operator
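Note: replacing the blanket F401 ignore with flake8's `per-file-ignores` turns the "library imported but unused" check back on everywhere except the alert config files, whose imports exist only so that schedule expressions in the config can resolve `crontab` and `timedelta`. A minimal sketch of the exempted pattern (hypothetical alert name; celery-style schedules are an assumption based on the stubbing comment above):

    # config.py-style stub: both imports look unused to flake8 (F401),
    # but user-supplied ALERTS entries need the names at evaluation time.
    from celery.schedules import crontab
    from datetime import timedelta

    ALERTS = {
        # 'bruteforce_ssh.AlertBruteforceSsh': {'schedule': crontab(minute='*/1')},
    }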
@@ -6,7 +6,7 @@
 # Copyright (c) 2017 Mozilla Corporation
 
 from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
 
 
 class AlertBugzillaPBruteforce(AlertTask):
@@ -40,9 +40,9 @@ class AlertAccountCreations(AlertTask):
         severity = 'INFO'
 
         summary = ('{0} fxa account creation attempts by {1}'.format(aggreg['count'], aggreg['value']))
-        emails = self.mostCommon(aggreg['allevents'],'_source.details.email')
-        #did they try to create more than one email account?
-        #or just retry an existing one
+        emails = self.mostCommon(aggreg['allevents'], '_source.details.email')
+        # did they try to create more than one email account?
+        # or just retry an existing one
         if len(emails) > 1:
             for i in emails[:5]:
                 summary += ' {0} ({1} hits)'.format(i[0], i[1])
@@ -7,7 +7,7 @@
 
 
 from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, ExistsMatch, PhraseMatch, WildcardMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch
 
 
 class AlertGuardDutyProbe(AlertTask):
@@ -6,7 +6,7 @@
 # Copyright (c) 2017 Mozilla Corporation
 
 from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
 
 
 class AlertHTTPBruteforce(AlertTask):
@@ -6,7 +6,7 @@
 # Copyright (c) 2017 Mozilla Corporation
 
 from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
 
 
 class AlertHTTPErrors(AlertTask):
@@ -1,6 +1,3 @@
-import os
-import sys
-
 from mozdef_util.plugin_set import PluginSet
 from mozdef_util.utilities.logger import logger
 
@@ -6,7 +6,7 @@
 # Copyright (c) 2018 Mozilla Corporation
 
 from lib.alerttask import AlertTask, add_hostname_to_ip
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch, ExistsMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch
 
 
 class NSMScanPort(AlertTask):
@@ -5,7 +5,6 @@
 
 import hjson
 import os
-import sys
 from binascii import b2a_hex
 import boto3
 import datetime
@@ -65,7 +65,7 @@ class message(object):
                 }
             ]
         })
-        r = requests.post(
+        requests.post(
             'https://events.pagerduty.com/generic/2010-04-15/create_event.json',
             headers=headers,
             data=payload,
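Note: dropping the `r =` binding is the F841 fix ("local variable is assigned to but never used"); the PagerDuty POST still fires, only the never-read name goes away. An illustrative sketch, not MozDef code:

    import requests

    def notify(url, payload):
        # before: r = requests.post(...) triggered F841 because r was never read
        requests.post(url, data=payload)

    def notify_checked(url, payload):
        # keep the binding only when the response is actually inspected
        r = requests.post(url, data=payload)
        r.raise_for_status()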
@@ -8,7 +8,7 @@
 
 from urlparse import urlparse
 from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, ExistsMatch, PhraseMatch, WildcardMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch
 
 
 class AlertProxyExfilDomains(AlertTask):
@@ -6,7 +6,7 @@
 # Copyright (c) 2017 Mozilla Corporation
 
 from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
 
 
 class AlertSSHManyConns(AlertTask):
@@ -8,7 +8,7 @@
 # This code alerts on every successfully opened session on any of the host from a given list
 
 from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
 
 
 class TraceAudit(AlertTask):
@@ -33,9 +33,9 @@ class TraceAudit(AlertTask):
         tags = ['audit']
 
         summary = ('{0} instances of Strace or Ptrace executed on a system by {1}'.format(aggreg['count'], aggreg['value'], ))
-        hostnames = self.mostCommon(aggreg['allevents'],'_source.hostname')
-        #did they modify more than one host?
-        #or just modify an existing configuration more than once?
+        hostnames = self.mostCommon(aggreg['allevents'], '_source.hostname')
+        # did they modify more than one host?
+        # or just modify an existing configuration more than once?
         if len(hostnames) > 1:
             for i in hostnames[:5]:
                 summary += ' on {0} ({1} hosts)'.format(i[0], i[1])
@@ -8,7 +8,7 @@
 # This code alerts on every successfully opened session on any of the host from a given list
 
 from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
 
 
 class WriteAudit(AlertTask):
@@ -34,9 +34,9 @@ class WriteAudit(AlertTask):
         tags = ['audit']
 
         summary = ('{0} Filesystem write(s) to an auditd path by {1}'.format(aggreg['count'], aggreg['value'], ))
-        hostnames = self.mostCommon(aggreg['allevents'],'_source.hostname')
-        #did they modify more than one host?
-        #or just modify an existing configuration more than once?
+        hostnames = self.mostCommon(aggreg['allevents'], '_source.hostname')
+        # did they modify more than one host?
+        # or just modify an existing configuration more than once?
         if len(hostnames) > 1:
             for i in hostnames[:5]:
                 summary += ' on {0} ({1} hosts)'.format(i[0], i[1])
@@ -11,22 +11,18 @@ from datetime import datetime
 import pytz
 import json
 import socket
-import json
 from optparse import OptionParser
 from requests_futures.sessions import FuturesSession
 from multiprocessing import Process, Queue
-import random
 import logging
-from logging.handlers import SysLogHandler
 from Queue import Empty
 from requests.packages.urllib3.exceptions import ClosedPoolError
-import requests
 import time
 
 httpsession = FuturesSession(max_workers=5)
 httpsession.trust_env=False # turns of needless .netrc check for creds
-#a = requests.adapters.HTTPAdapter(max_retries=2)
-#httpsession.mount('http://', a)
+# a = requests.adapters.HTTPAdapter(max_retries=2)
+# httpsession.mount('http://', a)
 
 
 logger = logging.getLogger(sys.argv[0])
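Note: this block gathers several of the newly enforced codes in one place: the second `import json` is a redefinition of an unused name (F811), while `random`, `SysLogHandler`, and `requests` were never referenced (F401). A condensed illustration, not from this file:

    import json
    import json    # F811: redefinition of unused 'json' from the line above
    import random  # F401: imported but unused, since nothing below calls random

    print(json.dumps({'ok': True}))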
@@ -36,13 +32,13 @@ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(messag
 
 
 def postLogs(logcache):
-    #post logs asynchronously with requests workers and check on the results
-    #expects a queue object from the multiprocessing library
+    # post logs asynchronously with requests workers and check on the results
+    # expects a queue object from the multiprocessing library
     posts=[]
     try:
         while not logcache.empty():
             postdata=logcache.get_nowait()
-            if len(postdata)>0:
+            if len(postdata) > 0:
                 url=options.url
                 a=httpsession.get_adapter(url)
                 a.max_retries=3
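Note: the comment rewrites in this hunk satisfy E265 ("block comment should start with '# '"), which this PR drops from the ignore list; the `> 0` spacing is E225-style tidying that goes beyond the enabled checks, since E225 stays ignored. For example:

    x=1        # E225: missing whitespace around operator (still ignored)
    x = 1      # preferred spelling regardless
    #comment   E265 flags this form now that it is enforced
    # comment  compliant form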
@@ -52,15 +48,15 @@ def postLogs(logcache):
         pass
     for p,postdata,url in posts:
         try:
-            if p.result().status_code >=500:
-                logger.error("exception posting to %s %r [will retry]\n"%(url,p.result().status_code))
-                #try again later when the next message in forces other attempts at posting.
+            if p.result().status_code >= 500:
+                logger.error("exception posting to %s %r [will retry]\n" % (url, p.result().status_code))
+                # try again later when the next message in forces other attempts at posting.
                 logcache.put(postdata)
         except ClosedPoolError as e:
-            #logger.fatal("Closed Pool Error exception posting to %s %r %r [will retry]\n"%(url,e,postdata))
+            # logger.fatal("Closed Pool Error exception posting to %s %r %r [will retry]\n"%(url,e,postdata))
             logcache.put(postdata)
         except Exception as e:
-            logger.fatal("exception posting to %s %r %r [will not retry]\n"%(url,e,postdata))
+            logger.fatal("exception posting to %s %r %r [will not retry]\n" % (url, e, postdata))
             sys.exit(1)
 
 
@@ -71,7 +67,7 @@ if __name__ == '__main__':
     sh=logging.StreamHandler(sys.stdout)
     sh.setFormatter(formatter)
     logger.addHandler(sh)
-    #create a list of logs we can append json to and call for a post when we want.
+    # create a list of logs we can append json to and call for a post when we want.
     logcache=Queue()
     try:
         for i in range(0,10):
@@ -98,21 +94,21 @@ if __name__ == '__main__':
             postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefStressTest")
             postingProcess.start()
         except OSError as e:
-            if e.errno==35: # resource temporarily unavailable.
+            if e.errno == 35: # resource temporarily unavailable.
                 print(e)
                 pass
             else:
-                logger.error('%r'%e)
+                logger.error('%r' % e)
 
         while not logcache.empty():
             try:
                 postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefStressTest")
                 postingProcess.start()
             except OSError as e:
-                if e.errno==35: # resource temporarily unavailable.
+                if e.errno == 35: # resource temporarily unavailable.
                     print(e)
                     pass
                 else:
-                    logger.error('%r'%e)
+                    logger.error('%r' % e)
     except KeyboardInterrupt as e:
         sys.exit(1)
@@ -10,7 +10,6 @@
 
 import logging
 import random
-from kitnirc.client import Channel
 from kitnirc.modular import Module
 from kitnirc.user import User
 
@@ -7,9 +7,7 @@
 # Copyright (c) 2014 Mozilla Corporation
 
 import logging
-from kitnirc.client import Channel
 from kitnirc.modular import Module
-from kitnirc.user import User
 import threading
 import time
 import json
@@ -41,7 +39,7 @@ class Zilla(Module):
         self.interval = 9999999
         self.channel = '#test'
 
-        self._bugzilla = bugzilla.Bugzilla(url=self.url+'rest/', api_key=self.api_key)
+        self._bugzilla = bugzilla.Bugzilla(url=self.url + 'rest/', api_key=self.api_key)
 
         _log.info("zilla module initialized for {}, pooling every {} seconds.".format(self.url, self.interval))
 
@@ -49,8 +47,8 @@ class Zilla(Module):
         last = 0
         while not self._stop:
             now = time.time()
-            if ((now-last) > self.interval):
-                #Add all the actions you want to do with bugzilla here ;)
+            if ((now - last) > self.interval):
+                # Add all the actions you want to do with bugzilla here ;)
                 self.bugzilla_search()
                 last = now
             time.sleep(1)
@@ -8,25 +8,18 @@
 import json
 import kitnirc.client
 import kitnirc.modular
-import kombu
 import logging
 import netaddr
 import os
-import pytz
 import random
-import select
 import sys
 import time
-import threading
 from configlib import getConfig, OptionParser
 from datetime import datetime
-from dateutil.parser import parse
 from kombu import Connection, Queue, Exchange
 from kombu.mixins import ConsumerMixin
 from ipwhois import IPWhois
 
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.geo_ip import GeoIP
 
@@ -134,7 +127,7 @@ def ipLocation(ip):
         if geoDict['country_code'] in ('US'):
             if geoDict['metro_code']:
                 location = location + '/{0}'.format(geoDict['metro_code'])
-    except Exception as e:
+    except Exception:
         location = ""
     return location
 
@@ -151,10 +144,11 @@ def formatAlert(jsonDictIn):
     if 'category' in jsonDictIn.keys():
         category = jsonDictIn['category']
 
-    return colorify('{0}: {1} {2}'.format(severity, colors['blue']
-                    + category
-                    + colors['normal'],
-                    summary.encode('ascii', 'replace')))
+    return colorify('{0}: {1} {2}'.format(
+        severity,
+        colors['blue'] + category + colors['normal'],
+        summary.encode('ascii', 'replace')
+    ))
 
 
 class mozdefBot():
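Note: the old return statement in `formatAlert` starts each continuation line with `+`, exactly the shape W503 ("line break before binary operator") flags, and W503 comes off the ignore list in this PR; the rewrite passes one argument per line and keeps every concatenation on a single line. A generic illustration:

    first_part, second_part = 'alpha', 'beta'

    # W503: line break before binary operator (flagged once enforced)
    total = (first_part
             + second_part)

    # the style adopted above: keep each binary expression on one line
    total = first_part + second_part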
@@ -197,15 +191,6 @@ class mozdefBot():
             # start the mq consumer
             consumeAlerts(self)
 
-        @self.client.handle('LINE')
-        def line_handler(client, *params):
-            try:
-                self.root_logger.debug('linegot:' + line)
-            except AttributeError as e:
-                # catch error in kitnrc : chan.remove(actor) where channel
-                # object has no attribute remove
-                pass
-
         @self.client.handle('PRIVMSG')
         def priv_handler(client, actor, recipient, message):
             self.root_logger.debug(
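Note: the deleted LINE handler was more than dead weight: `self.root_logger.debug('linegot:' + line)` referenced a name `line` that is never defined in the handler, whose parameters are `client` and `*params`; that is precisely what F821 ("undefined name") catches, and F821 is enforced again by this PR. Reduced to its essence:

    def line_handler(client, *params):
        # F821: 'line' is undefined; calling this would raise NameError
        print('linegot:' + line)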
@@ -20,8 +20,6 @@ from kombu.mixins import ConsumerMixin
 
 from slackclient import SlackClient
 
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC
 
 
@@ -50,8 +50,8 @@ def main():
         aws_access_key_id=options.aws_access_key_id,
         aws_secret_access_key=options.aws_secret_access_key
     )
-    idate = date.strftime(datetime.utcnow()-timedelta(days=1),'%Y%m%d')
-    bucketdate = date.strftime(datetime.utcnow()-timedelta(days=1),'%Y-%m')
+    idate = date.strftime(datetime.utcnow() - timedelta(days=1), '%Y%m%d')
+    bucketdate = date.strftime(datetime.utcnow() - timedelta(days=1), '%Y-%m')
     hostname = socket.gethostname()
 
     # Create or update snapshot configuration
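Note: `datetime.utcnow()-timedelta(days=1)` parses fine, but PEP 8 wants spaces around the binary minus (E225) and after the argument comma (E231); both codes remain ignored in the new .flake8, so these are voluntary readability fixes riding along with the PR. For instance:

    from datetime import date, datetime, timedelta

    # flagged spelling (E225 and E231, were those checks enabled)
    idate = date.strftime(datetime.utcnow()-timedelta(days=1),'%Y%m%d')

    # preferred spelling, as adopted in this diff
    idate = date.strftime(datetime.utcnow() - timedelta(days=1), '%Y%m%d')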
@@ -120,7 +120,7 @@ echo "DONE!"
     except boto.exception.NoAuthHandlerFound:
         logger.error("No auth handler found, check your credentials")
     except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)
 
 
 def initConfig():
@@ -19,8 +19,6 @@ from pymongo import MongoClient
 from collections import Counter
 from kombu import Connection, Exchange
 
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.elasticsearch_client import ElasticsearchClient
 from mozdef_util.query_models import SearchQuery, PhraseMatch
@@ -260,10 +258,10 @@ def searchMongoAlerts(mozdefdb):
         # summarize the alert categories
         # returns list of tuples: [(u'bruteforce', 8)]
         categoryCounts= mostCommon(matchingalerts,'category')
-        #are the alerts all the same category?
+        # are the alerts all the same category?
 
         if len(categoryCounts) == 1:
-            #is the alert category mapped to an attacker category?
+            # is the alert category mapped to an attacker category?
             for category in options.categorymapping:
                 if category.keys()[0] == categoryCounts[0][0]:
                     attacker['category'] = category[category.keys()[0]]
@@ -7,7 +7,6 @@
 
 import json
 import logging
-import os
 import re
 import sys
 from datetime import datetime
@@ -15,8 +14,6 @@ from configlib import getConfig, OptionParser
 from logging.handlers import SysLogHandler
 from hashlib import md5
 
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer
 from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
|
@ -63,7 +60,7 @@ def readOUIFile(ouifilename):
|
||||||
for i in ouifile.readlines()[0::]:
|
for i in ouifile.readlines()[0::]:
|
||||||
i=i.strip()
|
i=i.strip()
|
||||||
if '(hex)' in i:
|
if '(hex)' in i:
|
||||||
#print(i)
|
# print(i)
|
||||||
fields=i.split('\t')
|
fields=i.split('\t')
|
||||||
macprefix=fields[0][0:8].replace('-',':').lower()
|
macprefix=fields[0][0:8].replace('-',':').lower()
|
||||||
entity=fields[2]
|
entity=fields[2]
|
||||||
|
|
|
@@ -17,7 +17,6 @@ from configlib import getConfig, OptionParser
 from logging.handlers import SysLogHandler
 from pymongo import MongoClient
 
-import os
 from mozdef_util.utilities.toUTC import toUTC
 
 
@@ -62,7 +61,7 @@ def parse_fqdn_whitelist(fqdn_whitelist_location):
     fqdns = []
     with open(fqdn_whitelist_location, "r") as text_file:
         for line in text_file:
-            line=line.strip().strip("'").strip('"')
+            line = line.strip().strip("'").strip('"')
             if isFQDN(line):
                 fqdns.append(line)
     return fqdns
@@ -77,10 +76,10 @@ def main():
         mozdefdb = client.meteor
         fqdnblocklist = mozdefdb['fqdnblocklist']
         # ensure indexes
-        fqdnblocklist.create_index([('dateExpiring',-1)])
+        fqdnblocklist.create_index([('dateExpiring', -1)])
 
         # delete any that expired
-        fqdnblocklist.delete_many({'dateExpiring': {"$lte": datetime.utcnow()-timedelta(days=options.expireage)}})
+        fqdnblocklist.delete_many({'dateExpiring': {"$lte": datetime.utcnow() - timedelta(days=options.expireage)}})
 
         # Lastly, export the combined blocklist
         fqdnCursor = mozdefdb['fqdnblocklist'].aggregate([
@@ -95,7 +94,7 @@ def main():
             {"$project": {"address": 1}},
             {"$limit": options.fqdnlimit}
         ])
-        FQDNList=[]
+        FQDNList = []
         for fqdn in fqdnCursor:
             if fqdn not in options.fqdnwhitelist:
                 FQDNList.append(fqdn['address'])
@@ -105,7 +104,7 @@ def main():
             outputfile.write("{0}\n".format(fqdn))
         outputfile.close()
         # to s3?
-        if len(options.aws_bucket_name)>0:
+        if len(options.aws_bucket_name) > 0:
             s3_upload_file(options.outputfile, options.aws_bucket_name, options.aws_document_key_name)
 
     except ValueError as e:
@@ -134,26 +133,26 @@ def initConfig():
     options.outputfile = getConfig('outputfile', 'fqdnblocklist.txt', options.configfile)
 
     # Days after expiration that we purge an fqdnblocklist entry (from the ui, they don't end up in the export after expiring)
-    options.expireage = getConfig('expireage',1,options.configfile)
+    options.expireage = getConfig('expireage', 1, options.configfile)
 
     # Max FQDNs to emit
     options.fqdnlimit = getConfig('fqdnlimit', 1000, options.configfile)
 
     # AWS creds
-    options.aws_access_key_id=getConfig('aws_access_key_id','',options.configfile) # aws credentials to use to connect to mozilla_infosec_blocklist
-    options.aws_secret_access_key=getConfig('aws_secret_access_key','',options.configfile)
-    options.aws_bucket_name=getConfig('aws_bucket_name','',options.configfile)
-    options.aws_document_key_name=getConfig('aws_document_key_name','',options.configfile)
+    options.aws_access_key_id = getConfig('aws_access_key_id', '', options.configfile) # aws credentials to use to connect to mozilla_infosec_blocklist
+    options.aws_secret_access_key = getConfig('aws_secret_access_key', '', options.configfile)
+    options.aws_bucket_name = getConfig('aws_bucket_name', '', options.configfile)
+    options.aws_document_key_name = getConfig('aws_document_key_name', '', options.configfile)
 
 
 def s3_upload_file(file_path, bucket_name, key_name):
     """
     Upload a file to the given s3 bucket and return a template url.
     """
-    conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id,aws_secret_access_key=options.aws_secret_access_key)
+    conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key)
     try:
         bucket = conn.get_bucket(bucket_name, validate=False)
-    except boto.exception.S3ResponseError as e:
+    except boto.exception.S3ResponseError:
         conn.create_bucket(bucket_name)
         bucket = conn.get_bucket(bucket_name, validate=False)
 
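Note: `except boto.exception.S3ResponseError as e:` bound an exception object the handler never read, which F841 flags now that it is enforced; dropping `as e` leaves the create-then-retry behaviour untouched. The shape of the fix, with a generic exception and hypothetical helpers:

    def get_or_create(name, lookup, create):
        try:
            return lookup(name)
        except KeyError:  # no 'as e': the exception object is unused
            create(name)
            return lookup(name)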
@@ -17,7 +17,6 @@ from configlib import getConfig, OptionParser
 from logging.handlers import SysLogHandler
 from pymongo import MongoClient
 
-import os
 from mozdef_util.utilities.toUTC import toUTC
 
 
@@ -52,7 +51,7 @@ def isIPv4(ip):
         # netaddr on it's own considers 1 and 0 to be valid_ipv4
         # so a little sanity check prior to netaddr.
         # Use IPNetwork instead of valid_ipv4 to allow CIDR
-        if '.' in ip and len(ip.split('.'))==4:
+        if '.' in ip and len(ip.split('.')) == 4:
             # some ips are quoted
             netaddr.IPNetwork(ip.strip("'").strip('"'))
             return True
@@ -89,7 +88,7 @@ def aggregateAttackerIPs(attackers):
         whitelisted = False
         logger.debug('working {0}'.format(i))
         ip = i['_id']['ipv4address']
-        ipcidr=netaddr.IPNetwork(ip)
+        ipcidr = netaddr.IPNetwork(ip)
         if not ipcidr.ip.is_loopback() and not ipcidr.ip.is_private() and not ipcidr.ip.is_reserved():
             for whitelist_range in options.ipwhitelist:
                 whitelist_network = netaddr.IPNetwork(whitelist_range)
@@ -97,8 +96,8 @@ def aggregateAttackerIPs(attackers):
                     logger.debug(str(ipcidr) + " is whitelisted as part of " + str(whitelist_network))
                     whitelisted = True
 
-            #strip any host bits 192.168.10/24 -> 192.168.0/24
-            ipcidrnet=str(ipcidr.cidr)
+            # strip any host bits 192.168.10/24 -> 192.168.0/24
+            ipcidrnet = str(ipcidr.cidr)
             if ipcidrnet not in iplist and not whitelisted:
                 iplist.append(ipcidrnet)
             else:
@@ -110,7 +109,7 @@ def parse_network_whitelist(network_whitelist_location):
     networks = []
     with open(network_whitelist_location, "r") as text_file:
         for line in text_file:
-            line=line.strip().strip("'").strip('"')
+            line = line.strip().strip("'").strip('"')
             if isIPv4(line) or isIPv6(line):
                 networks.append(line)
     return networks
@@ -124,29 +123,29 @@ def main():
         client = MongoClient(options.mongohost, options.mongoport)
         mozdefdb = client.meteor
         ipblocklist = mozdefdb['ipblocklist']
-        attackers=mozdefdb['attackers']
+        attackers = mozdefdb['attackers']
         # ensure indexes
-        ipblocklist.create_index([('dateExpiring',-1)])
-        attackers.create_index([('lastseentimestamp',-1)])
-        attackers.create_index([('category',1)])
+        ipblocklist.create_index([('dateExpiring', -1)])
+        attackers.create_index([('lastseentimestamp', -1)])
+        attackers.create_index([('category', 1)])
 
         # First, gather IP addresses from recent attackers and add to the block list
        attackerIPList = aggregateAttackerIPs(attackers)
 
         # add attacker IPs to the blocklist
         # first delete ones we've created from an attacker
-        ipblocklist.delete_many({'creator': 'mozdef','reference':'attacker'})
+        ipblocklist.delete_many({'creator': 'mozdef', 'reference': 'attacker'})
 
         # delete any that expired
-        ipblocklist.delete_many({'dateExpiring': {"$lte": datetime.utcnow()-timedelta(days=options.expireage)}})
+        ipblocklist.delete_many({'dateExpiring': {"$lte": datetime.utcnow() - timedelta(days=options.expireage)}})
 
         # add the aggregations we've found recently
         for ip in attackerIPList:
             ipblocklist.insert_one(
                 {'_id': genMeteorID(),
-                 'address':ip,
+                 'address': ip,
                  'reference': 'attacker',
-                 'creator':'mozdef',
+                 'creator': 'mozdef',
                  'dateAdded': datetime.utcnow()})
 
         # Lastly, export the combined blocklist
@@ -162,7 +161,7 @@ def main():
             {"$project": {"address": 1}},
             {"$limit": options.iplimit}
         ])
-        IPList=[]
+        IPList = []
         for ip in ipCursor:
             IPList.append(ip['address'])
         # to text
@@ -171,7 +170,7 @@ def main():
             outputfile.write("{0}\n".format(ip))
         outputfile.close()
         # to s3?
-        if len(options.aws_bucket_name)>0:
+        if len(options.aws_bucket_name) > 0:
             s3_upload_file(options.outputfile, options.aws_bucket_name, options.aws_document_key_name)
 
     except ValueError as e:
@@ -203,29 +202,29 @@ def initConfig():
     options.category = getConfig('category', 'bruteforcer', options.configfile)
 
     # Max days to look back for attackers
-    options.attackerage = getConfig('attackerage',90,options.configfile)
+    options.attackerage = getConfig('attackerage', 90, options.configfile)
 
     # Days after expiration that we purge an ipblocklist entry (from the ui, they don't end up in the export after expiring)
-    options.expireage = getConfig('expireage',1,options.configfile)
+    options.expireage = getConfig('expireage', 1, options.configfile)
 
     # Max IPs to emit
     options.iplimit = getConfig('iplimit', 1000, options.configfile)
 
     # AWS creds
-    options.aws_access_key_id=getConfig('aws_access_key_id','',options.configfile) # aws credentials to use to connect to mozilla_infosec_blocklist
-    options.aws_secret_access_key=getConfig('aws_secret_access_key','',options.configfile)
-    options.aws_bucket_name=getConfig('aws_bucket_name','',options.configfile)
-    options.aws_document_key_name=getConfig('aws_document_key_name','',options.configfile)
+    options.aws_access_key_id = getConfig('aws_access_key_id', '', options.configfile) # aws credentials to use to connect to mozilla_infosec_blocklist
+    options.aws_secret_access_key = getConfig('aws_secret_access_key', '', options.configfile)
+    options.aws_bucket_name = getConfig('aws_bucket_name', '', options.configfile)
+    options.aws_document_key_name = getConfig('aws_document_key_name', '', options.configfile)
 
 
 def s3_upload_file(file_path, bucket_name, key_name):
     """
     Upload a file to the given s3 bucket and return a template url.
     """
-    conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id,aws_secret_access_key=options.aws_secret_access_key)
+    conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key)
     try:
         bucket = conn.get_bucket(bucket_name, validate=False)
-    except boto.exception.S3ResponseError as e:
+    except boto.exception.S3ResponseError:
         conn.create_bucket(bucket_name)
         bucket = conn.get_bucket(bucket_name, validate=False)
 
@@ -5,13 +5,12 @@
 #
 
 import sys
-import os
 from datetime import datetime, timedelta, tzinfo
 try:
     from datetime import timezone
     utc = timezone.utc
 except ImportError:
-    #Hi there python2 user
+    # Hi there python2 user
     class UTC(tzinfo):
         def utcoffset(self, dt):
             return timedelta(0)
@@ -36,7 +35,7 @@ def normalize(details):
     normalized = {}
 
     for f in details:
-        if f in ("ip", "ip_address","client_ip"):
+        if f in ("ip", "ip_address", "client_ip"):
             normalized["sourceipaddress"] = details[f]
             continue
         if f == "result":
@@ -88,7 +87,7 @@ def process_events(mozmsg, duo_events, etype, state):
         elif etype == 'telephony':
             mozmsg.summary = e['context']
         elif etype == 'authentication':
-            mozmsg.summary = e['eventtype']+' '+e['result']+' for '+e['username']
+            mozmsg.summary = e['eventtype'] + ' ' + e['result'] + ' for ' + e['username']
 
         mozmsg.send()
 
@@ -110,7 +109,7 @@ def main():
 
     duo = duo_client.Admin(ikey=options.IKEY, skey=options.SKEY, host=options.URL)
     mozmsg = mozdef.MozDefEvent(options.MOZDEF_URL)
-    mozmsg.tags=['duosecurity']
+    mozmsg.tags = ['duosecurity']
     if options.update_tags != '':
         mozmsg.tags.append(options.update_tags)
     mozmsg.set_category('authentication')
||||||
|
@ -121,9 +120,9 @@ def main():
|
||||||
|
|
||||||
# This will process events for all 3 log types and send them to MozDef. the state stores the last position in the
|
# This will process events for all 3 log types and send them to MozDef. the state stores the last position in the
|
||||||
# log when this script was last called.
|
# log when this script was last called.
|
||||||
state = process_events(mozmsg, duo.get_administrator_log(mintime=state['administration']+1), 'administration', state)
|
state = process_events(mozmsg, duo.get_administrator_log(mintime=state['administration'] + 1), 'administration', state)
|
||||||
state = process_events(mozmsg, duo.get_authentication_log(mintime=state['authentication']+1), 'authentication', state)
|
state = process_events(mozmsg, duo.get_authentication_log(mintime=state['authentication'] + 1), 'authentication', state)
|
||||||
state = process_events(mozmsg, duo.get_telephony_log(mintime=state['telephony']+1), 'telephony', state)
|
state = process_events(mozmsg, duo.get_telephony_log(mintime=state['telephony'] + 1), 'telephony', state)
|
||||||
|
|
||||||
pickle.dump(state, open(options.statepath, 'wb'))
|
pickle.dump(state, open(options.statepath, 'wb'))
|
||||||
|
|
||||||
|
|
|
@@ -13,7 +13,6 @@ import logging
 from configlib import getConfig, OptionParser
 from datetime import datetime, date, timedelta
 
-import os
 from mozdef_util.elasticsearch_client import ElasticsearchClient
 from mozdef_util.utilities.logger import logger
 
@@ -15,10 +15,8 @@ from logging.handlers import SysLogHandler
 from time import sleep
 import socket
 
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC
-from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer
+from mozdef_util.elasticsearch_client import ElasticsearchClient
 from mozdef_util.query_models import SearchQuery, Aggregation
 
 logger = logging.getLogger(sys.argv[0])
@@ -5,21 +5,17 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2014 Mozilla Corporation
 
-import os
 import sys
 import logging
 import requests
 import json
 from configlib import getConfig, OptionParser
 from datetime import datetime
-from datetime import timedelta
 from logging.handlers import SysLogHandler
 from httplib2 import Http
 from oauth2client.client import SignedJwtAssertionCredentials
 from apiclient.discovery import build
 
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC
 
 logger = logging.getLogger(sys.argv[0])
@@ -187,7 +183,7 @@ def main():
         state.data['lastrun'] = lastrun
         state.write_state_file()
     except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)
 
 
 def initConfig():
@@ -203,10 +199,10 @@ def initConfig():
     # for detailed information on delegating a service account for use in gathering google admin sdk reports
     #
 
-    #google's json credential file exported from the project/admin console
+    # google's json credential file exported from the project/admin console
     options.jsoncredentialfile=getConfig('jsoncredentialfile','/path/to/filename.json',options.configfile)
 
-    #email of admin to impersonate as a service account
+    # email of admin to impersonate as a service account
     options.impersonate = getConfig('impersonate', 'someone@yourcompany.com', options.configfile)
 
 
@@ -16,8 +16,6 @@ from requests.auth import HTTPBasicAuth
 from configlib import getConfig, OptionParser
 from logging.handlers import SysLogHandler
 
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.elasticsearch_client import ElasticsearchClient
 
@@ -71,7 +69,7 @@ def main():
             logger.debug('Creating %s index' % index)
             es.create_index(index, default_mapping_contents)
     except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)
 
     auth = HTTPBasicAuth(options.mquser, options.mqpassword)
 
@@ -14,7 +14,6 @@ from configlib import getConfig, OptionParser
 from logging.handlers import SysLogHandler
 from pymongo import MongoClient
 
-import os
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.elasticsearch_client import ElasticsearchClient
 from mozdef_util.query_models import SearchQuery, TermMatch
@@ -5,26 +5,21 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2014 Mozilla Corporation
 
-import os
 import sys
-from configlib import getConfig,OptionParser
+from configlib import getConfig, OptionParser
 import logging
 from logging.handlers import SysLogHandler
 import json
 from datetime import datetime
-from datetime import timedelta
-from datetime import date
 import requests
 import netaddr
 
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.elasticsearch_client import ElasticsearchClient
 
 
 logger = logging.getLogger(sys.argv[0])
-logger.level=logging.INFO
+logger.level = logging.INFO
 formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
 
 
@@ -69,20 +64,17 @@ def main():
     logger.addHandler(sh)
 
     logger.debug('started')
-    #logger.debug(options)
+    # logger.debug(options)
     try:
         es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
         s = requests.Session()
         s.headers.update({'Accept': 'application/json'})
         s.headers.update({'Content-type': 'application/json'})
-        s.headers.update({'Authorization':'SSWS {0}'.format(options.apikey)})
+        s.headers.update({'Authorization': 'SSWS {0}'.format(options.apikey)})
 
-        #capture the time we start running so next time we catch any events created while we run.
+        # capture the time we start running so next time we catch any events created while we run.
         state = State(options.state_file)
         lastrun = toUTC(datetime.now()).isoformat()
-        #in case we don't archive files..only look at today and yesterday's files.
-        yesterday=date.strftime(datetime.utcnow()-timedelta(days=1),'%Y/%m/%d')
-        today = date.strftime(datetime.utcnow(),'%Y/%m/%d')
 
         r = s.get('https://{0}/api/v1/events?startDate={1}&limit={2}'.format(
             options.oktadomain,
@@ -138,7 +130,7 @@ def main():
         else:
             logger.error('Could not get Okta events HTTP error code {} reason {}'.format(r.status_code, r.reason))
     except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)
 
 
 def initConfig():
@@ -18,14 +18,14 @@ from datetime import date
 from datetime import timedelta
 from configlib import getConfig, OptionParser
 
-import sys
-import os
+from logging.handlers import SysLogHandler
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.elasticsearch_client import ElasticsearchClient
 
 
 logger = logging.getLogger(sys.argv[0])
-logger.level=logging.WARNING
+logger.level = logging.WARNING
 formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
 
 
@@ -47,10 +47,10 @@ def esPruneIndexes():
             if pruning != '0':
                 index_to_prune = index
                 if rotation == 'daily':
-                    idate = date.strftime(toUTC(datetime.now()) - timedelta(days=int(pruning)),'%Y%m%d')
+                    idate = date.strftime(toUTC(datetime.now()) - timedelta(days=int(pruning)), '%Y%m%d')
                     index_to_prune += '-%s' % idate
                 elif rotation == 'monthly':
-                    idate = date.strftime(datetime.utcnow() - timedelta(days=31*int(pruning)),'%Y%m')
+                    idate = date.strftime(datetime.utcnow() - timedelta(days=31 * int(pruning)), '%Y%m')
                     index_to_prune += '-%s' % idate
 
                 if index_to_prune in indices:
@@ -62,7 +62,7 @@ def esPruneIndexes():
                 logger.error("Unhandled exception while deleting %s, terminating: %r" % (index_to_prune, e))
 
     except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)
 
 
 def initConfig():
@@ -19,7 +19,6 @@ from datetime import timedelta
 from configlib import getConfig, OptionParser
 import json
 
-import sys
 import os
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.elasticsearch_client import ElasticsearchClient
@@ -9,7 +9,6 @@
 # You only need to run it once, it will setup the templates
 # used as future indexes are created
 
-import requests
 import sys
 import os
 from configlib import getConfig, OptionParser
@@ -14,22 +14,14 @@
 import json
 import os
 import sys
-import socket
-import time
 from configlib import getConfig, OptionParser
 from datetime import datetime
 from hashlib import md5
 import boto.sqs
-from boto.sqs.message import RawMessage
-import base64
-import kombu
 
 from mozdef_util.utilities.toUTC import toUTC
-from mozdef_util.utilities.to_unicode import toUnicode
-from mozdef_util.utilities.remove_at import removeAt
-from mozdef_util.utilities.is_cef import isCEF
 from mozdef_util.utilities.logger import logger, initLogger
-from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer, ElasticsearchInvalidIndex, ElasticsearchException
+from mozdef_util.elasticsearch_client import ElasticsearchClient
 
 
 def getDocID(sqsregionidentifier):
@ -14,8 +14,6 @@ from configlib import getConfig, OptionParser
|
||||||
from logging.handlers import SysLogHandler
|
from logging.handlers import SysLogHandler
|
||||||
from pymongo import MongoClient
|
from pymongo import MongoClient
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from mozdef_util.utilities.toUTC import toUTC
|
from mozdef_util.utilities.toUTC import toUTC
|
||||||
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
||||||
from mozdef_util.query_models import SearchQuery, TermMatch
|
from mozdef_util.query_models import SearchQuery, TermMatch
|
||||||
|
|
|
@ -6,7 +6,6 @@
|
||||||
# Copyright (c) 2017 Mozilla Corporation
|
# Copyright (c) 2017 Mozilla Corporation
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import os
|
|
||||||
from configlib import getConfig, OptionParser
|
from configlib import getConfig, OptionParser
|
||||||
|
|
||||||
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
||||||
|
|
|
@ -8,21 +8,13 @@
|
||||||
import copy
|
import copy
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import re
|
|
||||||
import json
|
import json
|
||||||
import csv
|
|
||||||
import string
|
|
||||||
import ConfigParser
|
import ConfigParser
|
||||||
import tempfile
|
|
||||||
import logging
|
|
||||||
import socket
|
import socket
|
||||||
import hashlib
|
|
||||||
import MySQLdb
|
import MySQLdb
|
||||||
from requests import Session
|
from requests import Session
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from os import stat
|
|
||||||
from os.path import exists, getsize
|
|
||||||
|
|
||||||
|
|
||||||
class MozDefError(Exception):
|
class MozDefError(Exception):
|
||||||
|
@ -108,7 +100,7 @@ class MozDefEvent():
|
||||||
raise MozDefError('Summary is a required field')
|
raise MozDefError('Summary is a required field')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
r = self.httpsession.post(self.url, json.dumps(log_msg, encoding='utf-8'), verify=self.verify_certificate)
|
self.httpsession.post(self.url, json.dumps(log_msg, encoding='utf-8'), verify=self.verify_certificate)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if not self.fire_and_forget_mode:
|
if not self.fire_and_forget_mode:
|
||||||
|
@ -123,7 +115,7 @@ def main():
|
||||||
mdEvent.debug = True
|
mdEvent.debug = True
|
||||||
mdEvent.fire_and_forget_mode = False
|
mdEvent.fire_and_forget_mode = False
|
||||||
|
|
||||||
#connect to mysql
|
# connect to mysql
|
||||||
db=MySQLdb.connect(host=options.hostname, user=options.username,passwd=options.password,db=options.database)
|
db=MySQLdb.connect(host=options.hostname, user=options.username,passwd=options.password,db=options.database)
|
||||||
c=db.cursor(MySQLdb.cursors.DictCursor)
|
c=db.cursor(MySQLdb.cursors.DictCursor)
|
||||||
|
|
||||||
|
@ -146,7 +138,7 @@ def main():
|
||||||
duration = call['LeaveTime'] - call['JoinTime']
|
duration = call['LeaveTime'] - call['JoinTime']
|
||||||
call['CallDuration'] = duration.seconds
|
call['CallDuration'] = duration.seconds
|
||||||
|
|
||||||
#fix up the data for json
|
# fix up the data for json
|
||||||
for k in call.keys():
|
for k in call.keys():
|
||||||
# convert datetime objects to isoformat for json serialization
|
# convert datetime objects to isoformat for json serialization
|
||||||
if isinstance(call[k], datetime):
|
if isinstance(call[k], datetime):
|
||||||
|
@ -157,7 +149,7 @@ def main():
|
||||||
call[k] = call[k].decode('utf-8','ignore').encode('ascii','ignore')
|
call[k] = call[k].decode('utf-8','ignore').encode('ascii','ignore')
|
||||||
|
|
||||||
mdEvent.send(timestamp=call['JoinTime'],
|
mdEvent.send(timestamp=call['JoinTime'],
|
||||||
summary='Vidyo call status for '+call['UniqueCallID'].encode('ascii', 'ignore'),
|
summary='Vidyo call status for ' + call['UniqueCallID'].encode('ascii', 'ignore'),
|
||||||
tags=['vidyo'],
|
tags=['vidyo'],
|
||||||
details=call,
|
details=call,
|
||||||
category='vidyo',
|
category='vidyo',
|
||||||
|
|
|
@ -14,7 +14,6 @@ import json
|
||||||
|
|
||||||
from elasticsearch.exceptions import ConnectionError
|
from elasticsearch.exceptions import ConnectionError
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
import os
|
||||||
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
||||||
|
|
||||||
|
|
|
@ -10,37 +10,25 @@ import sys
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import pytz
|
import pytz
|
||||||
import json
|
import json
|
||||||
import socket
|
|
||||||
import json
|
|
||||||
from requests_futures.sessions import FuturesSession
|
|
||||||
from multiprocessing import Process, Queue
|
from multiprocessing import Process, Queue
|
||||||
import random
|
import random
|
||||||
import logging
|
import logging
|
||||||
from logging.handlers import SysLogHandler
|
|
||||||
from Queue import Empty
|
from Queue import Empty
|
||||||
from requests.packages.urllib3.exceptions import ClosedPoolError
|
|
||||||
import requests
|
import requests
|
||||||
import time
|
import time
|
||||||
from configlib import getConfig, OptionParser
|
from configlib import getConfig, OptionParser
|
||||||
import ConfigParser
|
import ConfigParser
|
||||||
import glob
|
import glob
|
||||||
from datetime import datetime
|
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from dateutil.parser import parse
|
|
||||||
from datetime import date
|
|
||||||
import pytz
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
from mozdef_util.utilities.toUTC import toUTC
|
from mozdef_util.utilities.toUTC import toUTC
|
||||||
|
|
||||||
#use futures to run in the background
|
# use futures to run in the background
|
||||||
#httpsession = FuturesSession(max_workers=5)
|
# httpsession = FuturesSession(max_workers=5)
|
||||||
httpsession = requests.session()
|
httpsession = requests.session()
|
||||||
httpsession.trust_env=False # turns of needless .netrc check for creds
|
httpsession.trust_env=False # turns of needless .netrc check for creds
|
||||||
#a = requests.adapters.HTTPAdapter(max_retries=2)
|
# a = requests.adapters.HTTPAdapter(max_retries=2)
|
||||||
#httpsession.mount('http://', a)
|
# httpsession.mount('http://', a)
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(sys.argv[0])
|
logger = logging.getLogger(sys.argv[0])
|
||||||
|
@ -48,7 +36,7 @@ logger.level=logging.INFO
|
||||||
|
|
||||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||||
|
|
||||||
#create a list of logs we can append json to and call for a post when we want.
|
# create a list of logs we can append json to and call for a post when we want.
|
||||||
logcache=Queue()
|
logcache=Queue()
|
||||||
|
|
||||||
|
|
||||||
|
@ -67,9 +55,9 @@ def setConfig(option,value,configfile):
|
||||||
|
|
||||||
|
|
||||||
def postLogs(logcache):
|
def postLogs(logcache):
|
||||||
#post logs asynchronously with requests workers and check on the results
|
# post logs asynchronously with requests workers and check on the results
|
||||||
#expects a queue object from the multiprocessing library
|
# expects a queue object from the multiprocessing library
|
||||||
posts=[]
|
# posts=[]
|
||||||
try:
|
try:
|
||||||
while not logcache.empty():
|
while not logcache.empty():
|
||||||
postdata=logcache.get_nowait()
|
postdata=logcache.get_nowait()
|
||||||
|
@ -79,10 +67,10 @@ def postLogs(logcache):
|
||||||
a.max_retries=3
|
a.max_retries=3
|
||||||
r=httpsession.post(url,data=postdata)
|
r=httpsession.post(url,data=postdata)
|
||||||
print(r, postdata)
|
print(r, postdata)
|
||||||
#append to posts if this is long running and you want
|
# append to posts if this is long running and you want
|
||||||
#events to try again later.
|
# events to try again later.
|
||||||
#posts.append((r,postdata,url))
|
# posts.append((r,postdata,url))
|
||||||
except Empty as e:
|
except Empty:
|
||||||
pass
|
pass
|
||||||
# for p, postdata, url in posts:
|
# for p, postdata, url in posts:
|
||||||
# try:
|
# try:
|
||||||
|
@ -99,18 +87,18 @@ def postLogs(logcache):
|
||||||
|
|
||||||
|
|
||||||
def genRandomIPv4():
|
def genRandomIPv4():
|
||||||
#random, IPs
|
# random, IPs
|
||||||
return '.'.join("%d" % (random.randint(0,254)) for x in range(4))
|
return '.'.join("%d" % (random.randint(0,254)) for x in range(4))
|
||||||
|
|
||||||
|
|
||||||
def genAttackerIPv4():
|
def genAttackerIPv4():
|
||||||
#random, but not too random as to allow for alerting about attacks from
|
# random, but not too random as to allow for alerting about attacks from
|
||||||
#the same IP.
|
# the same IP.
|
||||||
coreIPs=['1.93.25.',
|
coreIPs=['1.93.25.',
|
||||||
'222.73.115.',
|
'222.73.115.',
|
||||||
'116.10.191.',
|
'116.10.191.',
|
||||||
'144.0.0.']
|
'144.0.0.']
|
||||||
#change this to non zero according to taste for semi-random-ness
|
# change this to non zero according to taste for semi-random-ness
|
||||||
if random.randint(0,10)>= 0:
|
if random.randint(0,10)>= 0:
|
||||||
return '{0}{1}'.format(random.choice(coreIPs), random.randint(1,2))
|
return '{0}{1}'.format(random.choice(coreIPs), random.randint(1,2))
|
||||||
else:
|
else:
|
||||||
|
@ -120,28 +108,28 @@ def genAttackerIPv4():
|
||||||
def makeEvents():
|
def makeEvents():
|
||||||
try:
|
try:
|
||||||
eventfiles = glob.glob(options.eventsglob)
|
eventfiles = glob.glob(options.eventsglob)
|
||||||
#pick a random number of events to send
|
# pick a random number of events to send
|
||||||
for i in range(1, random.randrange(20, 100)):
|
for i in range(1, random.randrange(20, 100)):
|
||||||
#pick a random type of event to send
|
# pick a random type of event to send
|
||||||
eventfile = random.choice(eventfiles)
|
eventfile = random.choice(eventfiles)
|
||||||
#print(eventfile)
|
# print(eventfile)
|
||||||
events = json.load(open(eventfile))
|
events = json.load(open(eventfile))
|
||||||
target = random.randint(0, len(events))
|
target = random.randint(0, len(events))
|
||||||
for event in events[target:target+1]:
|
for event in events[target:target + 1]:
|
||||||
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
||||||
#remove stored times
|
# remove stored times
|
||||||
if 'utctimestamp' in event.keys():
|
if 'utctimestamp' in event.keys():
|
||||||
del event['utctimestamp']
|
del event['utctimestamp']
|
||||||
if 'receivedtimestamp' in event.keys():
|
if 'receivedtimestamp' in event.keys():
|
||||||
del event['receivedtimestamp']
|
del event['receivedtimestamp']
|
||||||
|
|
||||||
#add demo to the tags so it's clear it's not real data.
|
# add demo to the tags so it's clear it's not real data.
|
||||||
if 'tags' not in event.keys():
|
if 'tags' not in event.keys():
|
||||||
event['tags'] = list()
|
event['tags'] = list()
|
||||||
|
|
||||||
event['tags'].append('demodata')
|
event['tags'].append('demodata')
|
||||||
|
|
||||||
#replace potential <randomipaddress> with a random ip address
|
# replace potential <randomipaddress> with a random ip address
|
||||||
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
||||||
randomIP = genRandomIPv4()
|
randomIP = genRandomIPv4()
|
||||||
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
||||||
|
@ -150,20 +138,20 @@ def makeEvents():
|
||||||
event['details']['sourceipaddress'] = randomIP
|
event['details']['sourceipaddress'] = randomIP
|
||||||
event['details']['sourceipv4address'] = randomIP
|
event['details']['sourceipv4address'] = randomIP
|
||||||
|
|
||||||
#print(event['timestamp'], event['tags'], event['summary'])
|
# print(event['timestamp'], event['tags'], event['summary'])
|
||||||
|
|
||||||
logcache.put(json.dumps(event))
|
logcache.put(json.dumps(event))
|
||||||
if not logcache.empty():
|
if not logcache.empty():
|
||||||
time.sleep(.01)
|
time.sleep(.01)
|
||||||
try:
|
try:
|
||||||
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
postingProcess = Process(target=postLogs, args=(logcache,), name="json2MozdefDemoData")
|
||||||
postingProcess.start()
|
postingProcess.start()
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
if e.errno==35: # resource temporarily unavailable.
|
if e.errno == 35: # resource temporarily unavailable.
|
||||||
print(e)
|
print(e)
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
logger.error('%r'%e)
|
logger.error('%r' % e)
|
||||||
|
|
||||||
except KeyboardInterrupt as e:
|
except KeyboardInterrupt as e:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
@ -174,39 +162,39 @@ def makeAlerts():
|
||||||
send events that will be correlated into alerts
|
send events that will be correlated into alerts
|
||||||
'''
|
'''
|
||||||
try:
|
try:
|
||||||
#time for us to run?
|
# time for us to run?
|
||||||
timetoRun=toUTC(options.lastalert) + timedelta(minutes=options.alertsminutesinterval)
|
timetoRun = toUTC(options.lastalert) + timedelta(minutes=options.alertsminutesinterval)
|
||||||
if timetoRun > toUTC(datetime.now()):
|
if timetoRun > toUTC(datetime.now()):
|
||||||
#print(timetoRun)
|
# print(timetoRun)
|
||||||
return
|
return
|
||||||
|
|
||||||
#print(timetoRun, options.lastalert)
|
# print(timetoRun, options.lastalert)
|
||||||
eventfiles = glob.glob(options.alertsglob)
|
eventfiles = glob.glob(options.alertsglob)
|
||||||
#pick a random number of events to send
|
# pick a random number of events to send
|
||||||
for i in range(0, options.alertscount):
|
for i in range(0, options.alertscount):
|
||||||
#pick a random type of event to send
|
# pick a random type of event to send
|
||||||
eventfile = random.choice(eventfiles)
|
eventfile = random.choice(eventfiles)
|
||||||
events = json.load(open(eventfile))
|
events = json.load(open(eventfile))
|
||||||
target = random.randint(0, len(events))
|
target = random.randint(0, len(events))
|
||||||
# if there's only one event in the file..use it.
|
# if there's only one event in the file..use it.
|
||||||
if len(events) == 1 and target == 1:
|
if len(events) == 1 and target == 1:
|
||||||
target = 0
|
target = 0
|
||||||
for event in events[target:target+1]:
|
for event in events[target:target + 1]:
|
||||||
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
||||||
#remove stored times
|
# remove stored times
|
||||||
if 'utctimestamp' in event.keys():
|
if 'utctimestamp' in event.keys():
|
||||||
del event['utctimestamp']
|
del event['utctimestamp']
|
||||||
if 'receivedtimestamp' in event.keys():
|
if 'receivedtimestamp' in event.keys():
|
||||||
del event['receivedtimestamp']
|
del event['receivedtimestamp']
|
||||||
|
|
||||||
#add demo to the tags so it's clear it's not real data.
|
# add demo to the tags so it's clear it's not real data.
|
||||||
if 'tags' not in event.keys():
|
if 'tags' not in event.keys():
|
||||||
event['tags'] = list()
|
event['tags'] = list()
|
||||||
|
|
||||||
event['tags'].append('demodata')
|
event['tags'].append('demodata')
|
||||||
event['tags'].append('demoalert')
|
event['tags'].append('demoalert')
|
||||||
|
|
||||||
#replace potential <randomipaddress> with a random ip address
|
# replace potential <randomipaddress> with a random ip address
|
||||||
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
||||||
randomIP = genRandomIPv4()
|
randomIP = genRandomIPv4()
|
||||||
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
||||||
|
@ -229,11 +217,11 @@ def makeAlerts():
|
||||||
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
||||||
postingProcess.start()
|
postingProcess.start()
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
if e.errno==35: # resource temporarily unavailable.
|
if e.errno == 35: # resource temporarily unavailable.
|
||||||
print(e)
|
print(e)
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
logger.error('%r'%e)
|
logger.error('%r' % e)
|
||||||
|
|
||||||
except KeyboardInterrupt as e:
|
except KeyboardInterrupt as e:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
@ -244,39 +232,39 @@ def makeAttackers():
|
||||||
send events that will be correlated into attackers using pre-defined IPs
|
send events that will be correlated into attackers using pre-defined IPs
|
||||||
'''
|
'''
|
||||||
try:
|
try:
|
||||||
#time for us to run?
|
# time for us to run?
|
||||||
timetoRun=toUTC(options.lastattacker) + timedelta(minutes=options.attackersminutesinterval)
|
timetoRun=toUTC(options.lastattacker) + timedelta(minutes=options.attackersminutesinterval)
|
||||||
if timetoRun > toUTC(datetime.now()):
|
if timetoRun > toUTC(datetime.now()):
|
||||||
#print(timetoRun)
|
# print(timetoRun)
|
||||||
return
|
return
|
||||||
|
|
||||||
#print(timetoRun, options.lastalert)
|
# print(timetoRun, options.lastalert)
|
||||||
eventfiles = glob.glob(options.alertsglob)
|
eventfiles = glob.glob(options.alertsglob)
|
||||||
#pick a random number of events to send
|
# pick a random number of events to send
|
||||||
for i in range(0, options.alertscount):
|
for i in range(0, options.alertscount):
|
||||||
#pick a random type of event to send
|
# pick a random type of event to send
|
||||||
eventfile = random.choice(eventfiles)
|
eventfile = random.choice(eventfiles)
|
||||||
events = json.load(open(eventfile))
|
events = json.load(open(eventfile))
|
||||||
target = random.randint(0, len(events))
|
target = random.randint(0, len(events))
|
||||||
# if there's only one event in the file..use it.
|
# if there's only one event in the file..use it.
|
||||||
if len(events) == 1 and target == 1:
|
if len(events) == 1 and target == 1:
|
||||||
target = 0
|
target = 0
|
||||||
for event in events[target:target+1]:
|
for event in events[target:target + 1]:
|
||||||
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
||||||
#remove stored times
|
# remove stored times
|
||||||
if 'utctimestamp' in event.keys():
|
if 'utctimestamp' in event.keys():
|
||||||
del event['utctimestamp']
|
del event['utctimestamp']
|
||||||
if 'receivedtimestamp' in event.keys():
|
if 'receivedtimestamp' in event.keys():
|
||||||
del event['receivedtimestamp']
|
del event['receivedtimestamp']
|
||||||
|
|
||||||
#add demo to the tags so it's clear it's not real data.
|
# add demo to the tags so it's clear it's not real data.
|
||||||
if 'tags' not in event.keys():
|
if 'tags' not in event.keys():
|
||||||
event['tags'] = list()
|
event['tags'] = list()
|
||||||
|
|
||||||
event['tags'].append('demodata')
|
event['tags'].append('demodata')
|
||||||
event['tags'].append('demoalert')
|
event['tags'].append('demoalert')
|
||||||
|
|
||||||
#replace potential <randomipaddress> with a random ip address
|
# replace potential <randomipaddress> with a random ip address
|
||||||
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
||||||
randomIP = genAttackerIPv4()
|
randomIP = genAttackerIPv4()
|
||||||
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
||||||
|
@ -299,11 +287,11 @@ def makeAttackers():
|
||||||
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
||||||
postingProcess.start()
|
postingProcess.start()
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
if e.errno==35: # resource temporarily unavailable.
|
if e.errno == 35: # resource temporarily unavailable.
|
||||||
print(e)
|
print(e)
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
logger.error('%r'%e)
|
logger.error('%r' % e)
|
||||||
|
|
||||||
except KeyboardInterrupt as e:
|
except KeyboardInterrupt as e:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
@ -314,15 +302,15 @@ def initConfig():
|
||||||
options.eventsglob = getConfig('eventsglob', './sampleevents/events*json', options.configfile)
|
options.eventsglob = getConfig('eventsglob', './sampleevents/events*json', options.configfile)
|
||||||
options.alertsglob = getConfig('alertsglob', './sampleevents/alert*json', options.configfile)
|
options.alertsglob = getConfig('alertsglob', './sampleevents/alert*json', options.configfile)
|
||||||
options.attackersglob = getConfig('attackersglob', './sampleevents/attacker*json', options.configfile)
|
options.attackersglob = getConfig('attackersglob', './sampleevents/attacker*json', options.configfile)
|
||||||
#how many alerts to create
|
# how many alerts to create
|
||||||
options.alertscount = getConfig('alertscount', 2, options.configfile)
|
options.alertscount = getConfig('alertscount', 2, options.configfile)
|
||||||
#how many minutes to wait between creating ^ alerts
|
# how many minutes to wait between creating ^ alerts
|
||||||
options.alertsminutesinterval = getConfig('alertsminutesinterval', 5, options.configfile)
|
options.alertsminutesinterval = getConfig('alertsminutesinterval', 5, options.configfile)
|
||||||
options.lastalert = getConfig('lastalert', datetime.now() - timedelta(hours=1), options.configfile)
|
options.lastalert = getConfig('lastalert', datetime.now() - timedelta(hours=1), options.configfile)
|
||||||
|
|
||||||
#how many attackers to create
|
# how many attackers to create
|
||||||
options.attackerscount = getConfig('attackers', 1, options.configfile)
|
options.attackerscount = getConfig('attackers', 1, options.configfile)
|
||||||
#how many minutes to wait between creating ^ attackers
|
# how many minutes to wait between creating ^ attackers
|
||||||
options.attackersminutesinterval = getConfig('attackersminutesinterval', 5, options.configfile)
|
options.attackersminutesinterval = getConfig('attackersminutesinterval', 5, options.configfile)
|
||||||
options.lastattacker = getConfig('lastattacker', datetime.now() - timedelta(hours=1), options.configfile)
|
options.lastattacker = getConfig('lastattacker', datetime.now() - timedelta(hours=1), options.configfile)
|
||||||
|
|
||||||
|
@ -349,8 +337,8 @@ if __name__ == '__main__':
|
||||||
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
||||||
postingProcess.start()
|
postingProcess.start()
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
if e.errno==35: # resource temporarily unavailable.
|
if e.errno == 35: # resource temporarily unavailable.
|
||||||
print(e)
|
print(e)
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
logger.error('%r'%e)
|
logger.error('%r' % e)
|
||||||
|
|
|
@ -7,7 +7,6 @@
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import inspect
|
|
||||||
from configlib import getConfig, OptionParser
|
from configlib import getConfig, OptionParser
|
||||||
|
|
||||||
sys.path.insert(1, os.path.join(sys.path[0], '../..'))
|
sys.path.insert(1, os.path.join(sys.path[0], '../..'))
|
||||||
|
|
|
@ -67,9 +67,9 @@ defaultTemplate = r'''
|
||||||
}
|
}
|
||||||
'''
|
'''
|
||||||
|
|
||||||
#valid json?
|
# valid json?
|
||||||
templateJson = json.loads(defaultTemplate)
|
templateJson = json.loads(defaultTemplate)
|
||||||
|
|
||||||
#post it:
|
# post it
|
||||||
r = requests.put(url="http://servername:9200/_template/defaulttemplate", data=defaultTemplate)
|
r = requests.put(url="http://servername:9200/_template/defaulttemplate", data=defaultTemplate)
|
||||||
print(r)
|
print(r)
|
||||||
|
|
|
@ -4,14 +4,11 @@
|
||||||
# Copyright (c) 2017 Mozilla Corporation
|
# Copyright (c) 2017 Mozilla Corporation
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
from bottle import route, run, response, request, default_app
|
||||||
import bottle
|
|
||||||
from bottle import debug,route, run, template, response,request,post, default_app
|
|
||||||
from bottle import _stdout as bottlelog
|
from bottle import _stdout as bottlelog
|
||||||
import kombu
|
from kombu import Connection, Queue, Exchange
|
||||||
from kombu import Connection,Queue,Exchange
|
|
||||||
import json
|
import json
|
||||||
from configlib import getConfig,OptionParser
|
from configlib import getConfig, OptionParser
|
||||||
|
|
||||||
|
|
||||||
@route('/status')
|
@route('/status')
|
||||||
|
@ -30,11 +27,11 @@ def status():
|
||||||
@route('/test')
|
@route('/test')
|
||||||
@route('/test/')
|
@route('/test/')
|
||||||
def testindex():
|
def testindex():
|
||||||
ip = request.environ.get('REMOTE_ADDR')
|
# ip = request.environ.get('REMOTE_ADDR')
|
||||||
#response.headers['X-IP'] = '{0}'.format(ip)
|
# response.headers['X-IP'] = '{0}'.format(ip)
|
||||||
response.status=200
|
response.status=200
|
||||||
|
|
||||||
#act like elastic search bulk index
|
# act like elastic search bulk index
|
||||||
|
|
||||||
|
|
||||||
@route('/_bulk',method='POST')
|
@route('/_bulk',method='POST')
|
||||||
|
@ -42,10 +39,10 @@ def testindex():
|
||||||
def bulkindex():
|
def bulkindex():
|
||||||
if request.body:
|
if request.body:
|
||||||
bulkpost=request.body.read()
|
bulkpost=request.body.read()
|
||||||
#bottlelog('request:{0}\n'.format(bulkpost))
|
# bottlelog('request:{0}\n'.format(bulkpost))
|
||||||
request.body.close()
|
request.body.close()
|
||||||
if len(bulkpost)>10: # TODO Check for bulk format.
|
if len(bulkpost)>10: # TODO Check for bulk format.
|
||||||
#iterate on messages and post to event message queue
|
# iterate on messages and post to event message queue
|
||||||
|
|
||||||
eventlist=[]
|
eventlist=[]
|
||||||
for i in bulkpost.splitlines():
|
for i in bulkpost.splitlines():
|
||||||
|
@ -53,10 +50,10 @@ def bulkindex():
|
||||||
|
|
||||||
for i in eventlist:
|
for i in eventlist:
|
||||||
try:
|
try:
|
||||||
#valid json?
|
# valid json?
|
||||||
try:
|
try:
|
||||||
eventDict=json.loads(i)
|
eventDict=json.loads(i)
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
response.status=500
|
response.status=500
|
||||||
return
|
return
|
||||||
# don't post the items telling us where to post things..
|
# don't post the items telling us where to post things..
|
||||||
|
@ -77,17 +74,17 @@ def bulkindex():
|
||||||
def eventsindex():
|
def eventsindex():
|
||||||
if request.body:
|
if request.body:
|
||||||
anevent=request.body.read()
|
anevent=request.body.read()
|
||||||
#bottlelog('request:{0}\n'.format(anevent))
|
# bottlelog('request:{0}\n'.format(anevent))
|
||||||
request.body.close()
|
request.body.close()
|
||||||
#valid json?
|
# valid json?
|
||||||
try:
|
try:
|
||||||
eventDict=json.loads(anevent)
|
eventDict=json.loads(anevent)
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
response.status=500
|
response.status=500
|
||||||
return
|
return
|
||||||
#let the message queue worker who gets this know where it was posted
|
# let the message queue worker who gets this know where it was posted
|
||||||
eventDict['endpoint']='events'
|
eventDict['endpoint']='events'
|
||||||
#post to event message queue
|
# post to event message queue
|
||||||
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
|
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
|
||||||
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
|
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
|
||||||
|
|
||||||
|
@ -96,21 +93,21 @@ def eventsindex():
|
||||||
|
|
||||||
@route('/cef', method=['POST','PUT'])
|
@route('/cef', method=['POST','PUT'])
|
||||||
@route('/cef/',method=['POST','PUT'])
|
@route('/cef/',method=['POST','PUT'])
|
||||||
#debug(True)
|
# debug(True)
|
||||||
def cefindex():
|
def cefindex():
|
||||||
if request.body:
|
if request.body:
|
||||||
anevent=request.body.read()
|
anevent=request.body.read()
|
||||||
request.body.close()
|
request.body.close()
|
||||||
#valid json?
|
# valid json?
|
||||||
try:
|
try:
|
||||||
cefDict=json.loads(anevent)
|
cefDict=json.loads(anevent)
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
response.status=500
|
response.status=500
|
||||||
return
|
return
|
||||||
#let the message queue worker who gets this know where it was posted
|
# let the message queue worker who gets this know where it was posted
|
||||||
cefDict['endpoint']='cef'
|
cefDict['endpoint']='cef'
|
||||||
|
|
||||||
#post to eventtask exchange
|
# post to eventtask exchange
|
||||||
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
|
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
|
||||||
ensurePublish(cefDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
|
ensurePublish(cefDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
|
||||||
return
|
return
|
||||||
|
@ -129,17 +126,17 @@ def customindex(application):
|
||||||
if request.body:
|
if request.body:
|
||||||
anevent=request.body.read()
|
anevent=request.body.read()
|
||||||
request.body.close()
|
request.body.close()
|
||||||
#valid json?
|
# valid json?
|
||||||
try:
|
try:
|
||||||
customDict=json.loads(anevent)
|
customDict=json.loads(anevent)
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
response.status=500
|
response.status=500
|
||||||
return
|
return
|
||||||
#let the message queue worker who gets this know where it was posted
|
# let the message queue worker who gets this know where it was posted
|
||||||
customDict['endpoint']= application
|
customDict['endpoint']= application
|
||||||
customDict['customendpoint'] = True
|
customDict['customendpoint'] = True
|
||||||
|
|
||||||
#post to eventtask exchange
|
# post to eventtask exchange
|
||||||
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
|
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
|
||||||
ensurePublish(customDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
|
ensurePublish(customDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
|
||||||
return
|
return
|
||||||
|
@ -154,13 +151,13 @@ def initConfig():
|
||||||
options.listen_host=getConfig('listen_host', '127.0.0.1', options.configfile)
|
options.listen_host=getConfig('listen_host', '127.0.0.1', options.configfile)
|
||||||
|
|
||||||
|
|
||||||
#get config info:
|
# get config info:
|
||||||
parser=OptionParser()
|
parser=OptionParser()
|
||||||
parser.add_option("-c", dest='configfile', default=os.path.join(os.path.dirname(__file__), __file__).replace('.py', '.conf'), help="configuration file to use")
|
parser.add_option("-c", dest='configfile', default=os.path.join(os.path.dirname(__file__), __file__).replace('.py', '.conf'), help="configuration file to use")
|
||||||
(options,args) = parser.parse_args()
|
(options,args) = parser.parse_args()
|
||||||
initConfig()
|
initConfig()
|
||||||
|
|
||||||
#connect and declare the message queue/kombu objects.
|
# connect and declare the message queue/kombu objects.
|
||||||
connString='amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,options.mqpassword,options.mqserver,options.mqport)
|
connString='amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,options.mqpassword,options.mqserver,options.mqport)
|
||||||
mqConn=Connection(connString)
|
mqConn=Connection(connString)
|
||||||
|
|
||||||
|
|
|
@ -29,7 +29,7 @@ def toUTC(suspectedDate):
|
||||||
else:
|
else:
|
||||||
# epoch? but seconds/milliseconds/nanoseconds (lookin at you heka)
|
# epoch? but seconds/milliseconds/nanoseconds (lookin at you heka)
|
||||||
epochDivisor = int(str(1) + '0' * (len(str(suspectedDate)) % 10))
|
epochDivisor = int(str(1) + '0' * (len(str(suspectedDate)) % 10))
|
||||||
objDate = datetime.fromtimestamp(float(suspectedDate/epochDivisor), LOCAL_TIMEZONE)
|
objDate = datetime.fromtimestamp(float(suspectedDate / epochDivisor), LOCAL_TIMEZONE)
|
||||||
elif type(suspectedDate) in (str, unicode):
|
elif type(suspectedDate) in (str, unicode):
|
||||||
# try to parse float or negative number from string:
|
# try to parse float or negative number from string:
|
||||||
objDate = None
|
objDate = None
|
||||||
|
|
|
@ -20,6 +20,7 @@ from StringIO import StringIO
|
||||||
from threading import Timer
|
from threading import Timer
|
||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
|
import kombu
|
||||||
from ssl import SSLEOFError, SSLError
|
from ssl import SSLEOFError, SSLError
|
||||||
|
|
||||||
from mozdef_util.utilities.toUTC import toUTC
|
from mozdef_util.utilities.toUTC import toUTC
|
||||||
|
@ -307,8 +308,6 @@ class taskConsumer(object):
|
||||||
|
|
||||||
def authenticate(self):
|
def authenticate(self):
|
||||||
if options.cloudtrail_arn not in ['<cloudtrail_arn>', 'cloudtrail_arn']:
|
if options.cloudtrail_arn not in ['<cloudtrail_arn>', 'cloudtrail_arn']:
|
||||||
role_manager_args = {}
|
|
||||||
|
|
||||||
role_manager = RoleManager(**get_aws_credentials(
|
role_manager = RoleManager(**get_aws_credentials(
|
||||||
options.region,
|
options.region,
|
||||||
options.accesskey,
|
options.accesskey,
|
||||||
|
@ -368,7 +367,7 @@ class taskConsumer(object):
|
||||||
self.on_message(event)
|
self.on_message(event)
|
||||||
|
|
||||||
self.taskQueue.delete_message(msg)
|
self.taskQueue.delete_message(msg)
|
||||||
except (SSLEOFError, SSLError, socket.error) as e:
|
except (SSLEOFError, SSLError, socket.error):
|
||||||
logger.info('Received network related error...reconnecting')
|
logger.info('Received network related error...reconnecting')
|
||||||
time.sleep(5)
|
time.sleep(5)
|
||||||
self.connection, self.taskQueue = connect_sqs(
|
self.connection, self.taskQueue = connect_sqs(
|
||||||
|
|
|
@ -8,7 +8,6 @@
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import kombu
|
import kombu
|
||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import socket
|
import socket
|
||||||
from configlib import getConfig, OptionParser
|
from configlib import getConfig, OptionParser
|
||||||
|
|
|
@ -10,7 +10,6 @@
|
||||||
|
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
|
||||||
import kombu
|
import kombu
|
||||||
import sys
|
import sys
|
||||||
import socket
|
import socket
|
||||||
|
|
|
@ -7,7 +7,6 @@
|
||||||
|
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import socket
|
import socket
|
||||||
|
|
|
@ -12,7 +12,6 @@
|
||||||
|
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import socket
|
import socket
|
||||||
import time
|
import time
|
||||||
|
|
|
@ -6,7 +6,6 @@
|
||||||
# Copyright (c) 2017 Mozilla Corporation
|
# Copyright (c) 2017 Mozilla Corporation
|
||||||
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
import os
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
|
@ -101,7 +101,7 @@ class message(object):
|
||||||
if 'ts' in newmessage['details']:
|
if 'ts' in newmessage['details']:
|
||||||
newmessage[u'utctimestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat()
|
newmessage[u'utctimestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat()
|
||||||
newmessage[u'timestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat()
|
newmessage[u'timestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat()
|
||||||
#del(newmessage['details']['ts'])
|
# del(newmessage['details']['ts'])
|
||||||
else:
|
else:
|
||||||
# a malformed message somehow managed to crawl to us, let's put it somewhat together
|
# a malformed message somehow managed to crawl to us, let's put it somewhat together
|
||||||
newmessage[u'utctimestamp'] = toUTC(datetime.now()).isoformat()
|
newmessage[u'utctimestamp'] = toUTC(datetime.now()).isoformat()
|
||||||
|
|
|
@ -3,8 +3,6 @@
|
||||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||||
# Copyright (c) 2017 Mozilla Corporation
|
# Copyright (c) 2017 Mozilla Corporation
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from mozdef_util.utilities.key_exists import key_exists
|
from mozdef_util.utilities.key_exists import key_exists
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -4,8 +4,6 @@
|
||||||
# Copyright (c) 2014 Mozilla Corporation
|
# Copyright (c) 2014 Mozilla Corporation
|
||||||
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from mozdef_util.utilities.logger import logger
|
from mozdef_util.utilities.logger import logger
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -28,15 +28,15 @@ class message(object):
|
||||||
# early exit by setting message = None and return
|
# early exit by setting message = None and return
|
||||||
if 'details' in message.keys():
|
if 'details' in message.keys():
|
||||||
# drop disabled for now
|
# drop disabled for now
|
||||||
#if 'signatureid' in message['details']:
|
# if 'signatureid' in message['details']:
|
||||||
#if message['details'].lower() == 'execve' and \
|
# if message['details'].lower() == 'execve' and \
|
||||||
#'command' not in message['details']:
|
# 'command' not in message['details']:
|
||||||
# auditd entry without a command
|
# auditd entry without a command
|
||||||
# likely a result of another command (java starting a job, etc.)
|
# likely a result of another command (java starting a job, etc.)
|
||||||
# signal a drop
|
# signal a drop
|
||||||
|
|
||||||
#message = None
|
# message = None
|
||||||
#return message
|
# return message
|
||||||
if 'http_user_agent' in message['details']:
|
if 'http_user_agent' in message['details']:
|
||||||
if message['details']['http_user_agent'] == 'ELB-HealthChecker/1.0':
|
if message['details']['http_user_agent'] == 'ELB-HealthChecker/1.0':
|
||||||
message = None
|
message = None
|
||||||
|
|
|
@ -6,8 +6,6 @@
|
||||||
# This script copies the format/handling mechanism of ipFixup.py (git f5734b0c7e412424b44a6d7af149de6250fc70a2)
|
# This script copies the format/handling mechanism of ipFixup.py (git f5734b0c7e412424b44a6d7af149de6250fc70a2)
|
||||||
|
|
||||||
import netaddr
|
import netaddr
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from mozdef_util.utilities.toUTC import toUTC
|
from mozdef_util.utilities.toUTC import toUTC
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -38,17 +38,17 @@ class message(object):
|
||||||
|
|
||||||
if 'eventsource' not in message:
|
if 'eventsource' not in message:
|
||||||
return (message, metadata)
|
return (message, metadata)
|
||||||
#drop non-relevant messages
|
# drop non-relevant messages
|
||||||
if message['eventsource'] in ('Fxa-customsMozSvc', 'FxaContentWebserver', 'FxaAuthWebserver', 'FxaOauthWebserver', 'FxaAuth', 'fxa-auth-server'):
|
if message['eventsource'] in ('Fxa-customsMozSvc', 'FxaContentWebserver', 'FxaAuthWebserver', 'FxaOauthWebserver', 'FxaAuth', 'fxa-auth-server'):
|
||||||
if 'details' in message.keys():
|
if 'details' in message.keys():
|
||||||
if 'status' in message['details']:
|
if 'status' in message['details']:
|
||||||
if message['details']['status'] == 200:
|
if message['details']['status'] == 200:
|
||||||
#normal 200 returns for web content
|
# normal 200 returns for web content
|
||||||
return(None, metadata)
|
return(None, metadata)
|
||||||
# FxaAuth sends http status as 'code'
|
# FxaAuth sends http status as 'code'
|
||||||
if 'code' in message['details']:
|
if 'code' in message['details']:
|
||||||
if message['details']['code'] == 200:
|
if message['details']['code'] == 200:
|
||||||
#normal 200 returns for web content
|
# normal 200 returns for web content
|
||||||
return(None, metadata)
|
return(None, metadata)
|
||||||
if 'op' in message['details']:
|
if 'op' in message['details']:
|
||||||
if message['details']['op'] == 'mailer.send.1':
|
if message['details']['op'] == 'mailer.send.1':
|
||||||
|
@ -93,15 +93,15 @@ class message(object):
|
||||||
message['details']['remoteAddressChain'][0] == '[' and
|
message['details']['remoteAddressChain'][0] == '[' and
|
||||||
message['details']['remoteAddressChain'][-1] == ']'):
|
message['details']['remoteAddressChain'][-1] == ']'):
|
||||||
# remove the brackets and double quotes
|
# remove the brackets and double quotes
|
||||||
for i in ['[',']','"']:
|
for i in ['[', ']', '"']:
|
||||||
message['details']['remoteAddressChain']=message['details']['remoteAddressChain'].replace(i,'')
|
message['details']['remoteAddressChain'] = message['details']['remoteAddressChain'].replace(i, '')
|
||||||
# make sure it's still a list
|
# make sure it's still a list
|
||||||
if ',' in message['details']['remoteAddressChain']:
|
if ',' in message['details']['remoteAddressChain']:
|
||||||
sourceIP = message['details']['remoteAddressChain'].split(',')[0]
|
sourceIP = message['details']['remoteAddressChain'].split(',')[0]
|
||||||
if isIP(sourceIP):
|
if isIP(sourceIP):
|
||||||
message['details']['sourceipaddress'] = sourceIP
|
message['details']['sourceipaddress'] = sourceIP
|
||||||
|
|
||||||
#fxacustoms sends source ip as just 'ip'
|
# fxacustoms sends source ip as just 'ip'
|
||||||
if 'ip' in message['details'].keys():
|
if 'ip' in message['details'].keys():
|
||||||
if isIP(message['details']['ip']):
|
if isIP(message['details']['ip']):
|
||||||
message['details']['sourceipaddress'] = message['details']['ip']
|
message['details']['sourceipaddress'] = message['details']['ip']
|
||||||
|
|
|
@ -6,7 +6,6 @@
|
||||||
import netaddr
|
import netaddr
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import sys
|
|
||||||
from mozdef_util.geo_ip import GeoIP
|
from mozdef_util.geo_ip import GeoIP
|
||||||
|
|
||||||
|
|
||||||
|
@ -38,7 +37,7 @@ class message(object):
|
||||||
return geoDict
|
return geoDict
|
||||||
else:
|
else:
|
||||||
location['location'] = 'unknown'
|
location['location'] = 'unknown'
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
return location
|
return location
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,6 @@
|
||||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||||
# Copyright (c) 2017 Mozilla Corporation
|
# Copyright (c) 2017 Mozilla Corporation
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
import os
|
||||||
import jmespath
|
import jmespath
|
||||||
import yaml
|
import yaml
|
||||||
|
|
|
@ -3,8 +3,6 @@
|
||||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||||
# Copyright (c) 2017 Mozilla Corporation
|
# Copyright (c) 2017 Mozilla Corporation
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from mozdef_util.utilities.key_exists import key_exists
|
from mozdef_util.utilities.key_exists import key_exists
|
||||||
from mozdef_util.utilities.toUTC import toUTC
|
from mozdef_util.utilities.toUTC import toUTC
|
||||||
from mozdef_util.utilities.dot_dict import DotDict
|
from mozdef_util.utilities.dot_dict import DotDict
|
||||||
|
|
|
@ -21,6 +21,6 @@ class message(object):
|
||||||
self.priority = 5
|
self.priority = 5
|
||||||
|
|
||||||
def onMessage(self, message, metadata):
|
def onMessage(self, message, metadata):
|
||||||
docid=hashlib.md5('nagiosstatus'+message['details']['nagios_hostname']).hexdigest()
|
docid = hashlib.md5('nagiosstatus' + message['details']['nagios_hostname']).hexdigest()
|
||||||
metadata['id']=docid
|
metadata['id'] = docid
|
||||||
return (message, metadata)
|
return (message, metadata)
|
||||||
|
|
|
@ -3,10 +3,7 @@
|
||||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||||
# Copyright (c) 2017 Mozilla Corporation
|
# Copyright (c) 2017 Mozilla Corporation
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import re
|
import re
|
||||||
from mozdef_util.utilities.toUTC import toUTC
|
|
||||||
|
|
||||||
|
|
||||||
class message(object):
|
class message(object):
|
||||||
|
|
|
@ -3,7 +3,6 @@
|
||||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||||
# Copyright (c) 2018 Mozilla Corporation
|
# Copyright (c) 2018 Mozilla Corporation
|
||||||
|
|
||||||
import netaddr
|
|
||||||
import json
|
import json
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from platform import node
|
from platform import node
|
||||||
|
|
|
@ -21,7 +21,7 @@ class message(object):
|
||||||
self.priority = 5
|
self.priority = 5
|
||||||
|
|
||||||
def onMessage(self, message, metadata):
|
def onMessage(self, message, metadata):
|
||||||
docid=hashlib.md5('vidyoUniqueCallID'+message['details']['UniqueCallID']).hexdigest()
|
docid = hashlib.md5('vidyoUniqueCallID' + message['details']['UniqueCallID']).hexdigest()
|
||||||
metadata['id']=docid
|
metadata['id'] = docid
|
||||||
metadata['doc_type'] = 'vidyo'
|
metadata['doc_type'] = 'vidyo'
|
||||||
return (message, metadata)
|
return (message, metadata)
|
||||||
|
|
|
@ -4,8 +4,7 @@
|
||||||
# Copyright (c) 2015 Mozilla Corporation
|
# Copyright (c) 2015 Mozilla Corporation
|
||||||
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from mozdef_util.utilities.logger import logger
|
from mozdef_util.utilities.logger import logger
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -14,7 +14,7 @@ import requests
|
||||||
import sys
|
import sys
|
||||||
import socket
|
import socket
|
||||||
from bottle import route, run, response, request, default_app, post
|
from bottle import route, run, response, request, default_app, post
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime
|
||||||
from configlib import getConfig, OptionParser
|
from configlib import getConfig, OptionParser
|
||||||
from ipwhois import IPWhois
|
from ipwhois import IPWhois
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
|
@ -22,9 +22,8 @@ from pymongo import MongoClient
|
||||||
from bson import json_util
|
from bson import json_util
|
||||||
|
|
||||||
from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchInvalidIndex
|
from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchInvalidIndex
|
||||||
from mozdef_util.query_models import SearchQuery, TermMatch, RangeMatch, Aggregation
|
from mozdef_util.query_models import SearchQuery, TermMatch
|
||||||
|
|
||||||
from mozdef_util.utilities.toUTC import toUTC
|
|
||||||
from mozdef_util.utilities.logger import logger, initLogger
|
from mozdef_util.utilities.logger import logger, initLogger
|
||||||
|
|
||||||
|
|
||||||
|
@ -51,7 +50,7 @@ def enable_cors(fn):
|
||||||
@route('/test/')
|
@route('/test/')
|
||||||
def test():
|
def test():
|
||||||
'''test endpoint for..testing'''
|
'''test endpoint for..testing'''
|
||||||
ip = request.environ.get('REMOTE_ADDR')
|
# ip = request.environ.get('REMOTE_ADDR')
|
||||||
# response.headers['X-IP'] = '{0}'.format(ip)
|
# response.headers['X-IP'] = '{0}'.format(ip)
|
||||||
response.status = 200
|
response.status = 200
|
||||||
|
|
||||||
|
@ -144,7 +143,7 @@ def index():
|
||||||
# valid json?
|
# valid json?
|
||||||
try:
|
try:
|
||||||
requestDict = json.loads(arequest)
|
requestDict = json.loads(arequest)
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
response.status = 500
|
response.status = 500
|
||||||
|
|
||||||
if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
|
if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
|
||||||
|
@ -164,11 +163,11 @@ def ipintel():
|
||||||
'''send an IP address through plugins for intel enhancement'''
|
'''send an IP address through plugins for intel enhancement'''
|
||||||
if request.body:
|
if request.body:
|
||||||
arequest = request.body.read()
|
arequest = request.body.read()
|
||||||
#request.body.close()
|
# request.body.close()
|
||||||
# valid json?
|
# valid json?
|
||||||
try:
|
try:
|
||||||
requestDict = json.loads(arequest)
|
requestDict = json.loads(arequest)
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
response.status = 500
|
response.status = 500
|
||||||
if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
|
if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
|
||||||
response.content_type = "application/json"
|
response.content_type = "application/json"
|
||||||
|
@ -193,7 +192,7 @@ def index():
|
||||||
# valid json?
|
# valid json?
|
||||||
try:
|
try:
|
||||||
requestDict = json.loads(arequest)
|
requestDict = json.loads(arequest)
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
response.status = 500
|
response.status = 500
|
||||||
return
|
return
|
||||||
if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
|
if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
|
||||||
|
|
|
@ -55,7 +55,7 @@ class message(object):
|
||||||
request.body.close()
|
request.body.close()
|
||||||
try:
|
try:
|
||||||
requestDict = json.loads(arequest)
|
requestDict = json.loads(arequest)
|
||||||
except ValueError as e:
|
except ValueError:
|
||||||
response.status = 500
|
response.status = 500
|
||||||
|
|
||||||
print(requestDict, requestDict.keys())
|
print(requestDict, requestDict.keys())
|
||||||
|
|
|
@ -6,8 +6,6 @@
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import ConfigParser
|
import ConfigParser
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import json
|
|
||||||
import netaddr
|
import netaddr
|
||||||
from boto3.session import Session
|
from boto3.session import Session
|
||||||
|
|
||||||
|
@ -160,7 +158,6 @@ class message(object):
|
||||||
'''
|
'''
|
||||||
# format/validate request.json:
|
# format/validate request.json:
|
||||||
ipaddress = None
|
ipaddress = None
|
||||||
CIDR = None
|
|
||||||
sendToBHVPC = False
|
sendToBHVPC = False
|
||||||
|
|
||||||
# loop through the fields of the form
|
# loop through the fields of the form
|
||||||
|
@ -179,7 +176,7 @@ class message(object):
|
||||||
sendToBHVPC = False
|
sendToBHVPC = False
|
||||||
|
|
||||||
if sendToBHVPC and ipaddress is not None:
|
if sendToBHVPC and ipaddress is not None:
|
||||||
#figure out the CIDR mask
|
# figure out the CIDR mask
|
||||||
if isIPv4(ipaddress) or isIPv6(ipaddress):
|
if isIPv4(ipaddress) or isIPv6(ipaddress):
|
||||||
ipcidr=netaddr.IPNetwork(ipaddress)
|
ipcidr=netaddr.IPNetwork(ipaddress)
|
||||||
if not ipcidr.ip.is_loopback() \
|
if not ipcidr.ip.is_loopback() \
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from positive_test_suite import PositiveTestSuite
|
from positive_test_suite import PositiveTestSuite
|
||||||
from negative_test_suite import NegativeTestSuite
|
from negative_test_suite import NegativeTestSuite
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from mozdef_util.query_models import ExistsMatch
|
from mozdef_util.query_models import ExistsMatch
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from positive_test_suite import PositiveTestSuite
|
from positive_test_suite import PositiveTestSuite
|
||||||
from negative_test_suite import NegativeTestSuite
|
from negative_test_suite import NegativeTestSuite
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from mozdef_util.query_models import LessThanMatch
|
from mozdef_util.query_models import LessThanMatch
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from positive_test_suite import PositiveTestSuite
|
from positive_test_suite import PositiveTestSuite
|
||||||
from negative_test_suite import NegativeTestSuite
|
from negative_test_suite import NegativeTestSuite
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from mozdef_util.query_models import PhraseMatch
|
from mozdef_util.query_models import PhraseMatch
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from positive_test_suite import PositiveTestSuite
|
from positive_test_suite import PositiveTestSuite
|
||||||
from negative_test_suite import NegativeTestSuite
|
from negative_test_suite import NegativeTestSuite
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from mozdef_util.query_models import QueryStringMatch
|
from mozdef_util.query_models import QueryStringMatch
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from positive_test_suite import PositiveTestSuite
|
from positive_test_suite import PositiveTestSuite
|
||||||
from negative_test_suite import NegativeTestSuite
|
from negative_test_suite import NegativeTestSuite
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from mozdef_util.query_models import RangeMatch
|
from mozdef_util.query_models import RangeMatch
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from positive_test_suite import PositiveTestSuite
|
from positive_test_suite import PositiveTestSuite
|
||||||
from negative_test_suite import NegativeTestSuite
|
from negative_test_suite import NegativeTestSuite
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from mozdef_util.query_models import TermMatch
|
from mozdef_util.query_models import TermMatch
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from positive_test_suite import PositiveTestSuite
|
from positive_test_suite import PositiveTestSuite
|
||||||
from negative_test_suite import NegativeTestSuite
|
from negative_test_suite import NegativeTestSuite
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from mozdef_util.query_models import TermsMatch
|
from mozdef_util.query_models import TermsMatch
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from positive_test_suite import PositiveTestSuite
|
from positive_test_suite import PositiveTestSuite
|
||||||
from negative_test_suite import NegativeTestSuite
|
from negative_test_suite import NegativeTestSuite
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from mozdef_util.query_models import WildcardMatch
|
from mozdef_util.query_models import WildcardMatch
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -9,7 +9,6 @@ import time
|
||||||
import json
|
import json
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from elasticsearch.exceptions import ConnectionTimeout
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
@@ -358,7 +357,7 @@ class TestWriteWithIDExists(ElasticsearchClientTest):
         saved_event = self.es_client.save_event(body=event, doc_id=event_id)
         assert saved_event['_id'] == event_id
         self.flush(self.event_index_name)
-        fetched_event = self.es_client.get_event_by_id(event_id)
+        self.es_client.get_event_by_id(event_id)
 
 
 class TestGetIndices(ElasticsearchClientTest):

@@ -1,5 +1,3 @@
-import os
-import sys
 from mozdef_util.event import Event
 from mozdef_util.utilities.toUTC import toUTC
 import socket

@@ -60,7 +58,6 @@ class TestEvent(object):
             'utctimestamp': '2017-09-14T20:05:20.299387+00:00',
             'timestamp': '2017-09-14T20:05:19.116195+00:00',
             'mozdefhostname': 'randomhostname',
-            'tags': [],
             'category': 'Authentication',
             'hostname': 'host.domain.com',
             'processid': 12345,

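One non-import fix hiding among these hunks: the 'tags': [] line removed from the expected-event literal is a duplicate dictionary key, which pyflakes reports as F601 (dictionary key repeated with different values). It is worth failing the build over because Python accepts the literal silently; a quick demonstration (keys are illustrative, not the real test fixture):

# In a dict literal, a repeated key silently overwrites the earlier
# value -- no exception raised, just lost data.
event = {
    'tags': [],
    'category': 'Authentication',
    'tags': ['pep8-demo'],  # the empty list above is discarded
}
print(event['tags'])  # -> ['pep8-demo']
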
@@ -1,5 +1,5 @@
 import os
-import sys
 from mozdef_util.plugin_set import PluginSet
+
 
 

@@ -1,7 +1,6 @@
 import pytest
 
 import os
-import sys
 from mozdef_util.state import State, StateParsingError
 
 states_directory_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'states')

@@ -5,9 +5,6 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2017 Mozilla Corporation
 
-
-import os
-import sys
 from mozdef_util.utilities.key_exists import key_exists
 
 

@@ -22,12 +22,9 @@ def utc_timezone():
 
 tzlocal.get_localzone = utc_timezone
 
-import sys
 if 'mozdef_util.utilities.toUTC' in sys.modules:
     reload(sys.modules['mozdef_util.utilities.toUTC'])
 
-from mozdef_util.utilities.toUTC import toUTC
-
 
 class TestToUTC():
 

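This toUTC hunk trims an F811 redefinition (a second toUTC import) and a duplicate import sys, but the surrounding stub-then-reload pattern is worth spelling out, since the ordering is load-bearing: toUTC apparently reads the host timezone via tzlocal.get_localzone() at import time (hence the reload dance), so the stub must be installed before any already-imported copy of the module is reloaded. A commented sketch of the same pattern, assuming tzlocal and pytz are installed; on Python 3, importlib.reload stands in for the bare reload used above:

import importlib
import sys

import pytz
import tzlocal


def utc_timezone():
    # Stub: pretend the host timezone is always UTC so test
    # expectations do not depend on the machine running them.
    return pytz.timezone('UTC')


# Install the stub *before* importing anything that calls
# tzlocal.get_localzone() at module level.
tzlocal.get_localzone = utc_timezone

# If the module was already imported elsewhere, its module-level
# timezone lookup has run; force it to re-run under the stub.
if 'mozdef_util.utilities.toUTC' in sys.modules:
    importlib.reload(sys.modules['mozdef_util.utilities.toUTC'])
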
@@ -1,13 +1,7 @@
 import os
 import sys
 
-import mock
-import json
-import random
-import string
-
 sys.path.append(os.path.join(os.path.dirname(__file__), '../../../mq/plugins'))
-from mozdef_util.utilities.toUTC import toUTC
 from github_webhooks import message
 
 

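Note what survives this hunk: os, sys, and the sys.path.append(...) line stay because the mq plugins are loose modules rather than an installed package, so the test must graft their directory onto the import path before importing by bare name. That import-after-code ordering is also why E402 stays in the global ignore list. A generic, runnable sketch of the pattern, using a throwaway module instead of the real plugin tree:

import os
import sys
import tempfile

# Stand-in for mq/plugins: a directory that is not an installed
# package, containing a module we want to import by bare name.
plugin_dir = tempfile.mkdtemp()
with open(os.path.join(plugin_dir, 'demo_plugin.py'), 'w') as f:
    f.write('class message(object):\n    pass\n')

# Graft the directory onto the import path, then import. Same shape
# as the sys.path.append('.../mq/plugins') line in the test above.
sys.path.append(plugin_dir)

from demo_plugin import message  # resolved via the appended path
print(message)
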
@@ -2,7 +2,6 @@ import os
 import sys
 from mozdef_util.utilities.toUTC import toUTC
 
-import mock
 import json
 import random
 import string

@@ -1,5 +1,6 @@
 freezegun==0.3.9
 flake8==3.5.0
+flake8-per-file-ignores==0.6
 mock==2.0.0
 pytest==3.1.1
 WebTest==2.0.27

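The new pin is the piece that makes the per-file-ignores section in .flake8 take effect: flake8 3.5.0 has no such option natively (it only became built-in with flake8 3.7), so the standalone flake8-per-file-ignores plugin supplies it. A sketch for checking the setup locally through flake8's documented legacy Python API; the paths are illustrative, and running the flake8 CLI from the repo root does the same thing:

# Assumes flake8==3.5.0 and flake8-per-file-ignores==0.6 are installed
# and that the repo's .flake8 is picked up from the working directory.
from flake8.api import legacy as flake8

style_guide = flake8.get_style_guide()
report = style_guide.check_files(['alerts', 'mozdef_util', 'rest'])

# Any F401 reported here is an unused import *not* covered by the
# per-file-ignores whitelist, i.e. something this PR had to fix.
print('total violations:', report.total_errors)
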
@@ -9,11 +9,9 @@ from configlib import getConfig
 
 from kombu import Connection, Queue, Exchange
 
-
 import os
-import sys
-from mozdef_util.elasticsearch_client import ElasticsearchClient
 
+from mozdef_util.elasticsearch_client import ElasticsearchClient
 from mozdef_util.utilities.dot_dict import DotDict
 
 

@@ -11,9 +11,6 @@ from dateutil.parser import parse
 import random
 import pytest
 
-import os
-import sys
-
 from mozdef_util.utilities import toUTC
 
 from suite_helper import parse_config_file, parse_mapping_file, setup_es_client, setup_rabbitmq_client