Mirror of https://github.com/mozilla/MozDef.git
Merge pull request #998 from mozilla/enable_library_unused_pep8_check
Enable library unused pep8 check
This commit is contained in:
Commit 8af926d9ef
.flake8: 26 lines changed
@@ -1,21 +1,19 @@
[flake8]
exclude =
    .flake8
    .git
    *__init__.py
+per-file-ignores =
+    # Ignore 'library imported but unused' for only the alert config files
+    # since we stub timedelta and crontab
+    alerts/lib/config.py: F401
+    docker/compose/mozdef_alerts/files/config.py: F401
+
+    # Ignore any import statements in __init__ files
+    mozdef_util/mozdef_util/query_models/__init__.py: F401
+
+    # Ignore redefinition of index name
+    rest/index.py: F811
+
ignore =
    E123  # closing bracket does not match indentation of opening bracket's line
    E225  # missing whitespace around operator
    E226  # missing whitespace around arithmetic operator
    E228  # missing whitespace around modulo operator
    E231  # missing whitespace after ','
    E265  # block comment should start with '# '
    E402  # module level import not at top of file
    E501  # line too long
    E722  # do not use bare except'
-    F401  # library imported but unused
    F601  # dictionary key 'tags' repeated with different values
-    F811  # redefinition of unused 'datetime' from line 10
    F821  # undefined name 'SysLogHandler'
    F841  # local variable 'CIDR' is assigned to but never used
    W503  # line break before binary operator
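The new per-file-ignores section is the core of this change: F401 stays suppressed only in files whose imports are intentionally "unused". The alert schedule configs import timedelta and crontab purely as stubs for the schedule entries defined beneath them. A minimal sketch of that pattern, with hypothetical alert names and assuming the crontab helper comes from celery:

    # config.py sketch: flake8 would flag both imports as F401, but the
    # schedule values below need the names in scope when this file loads.
    from datetime import timedelta
    from celery.schedules import crontab

    ALERTS = {
        'bruteforce_ssh.AlertBruteforceSsh': {'schedule': timedelta(minutes=2)},
        'deadman.AlertDeadman': {'schedule': crontab(hour='*/1')},
    }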
@@ -6,7 +6,7 @@
# Copyright (c) 2017 Mozilla Corporation

from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch


class AlertBugzillaPBruteforce(AlertTask):
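Each alert-module hunk that follows has the same shape: with F401 enforced, any query-model class that is imported but never constructed must leave the import list. An illustrative snippet (not from this commit) of what the checker now reports:

    from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch  # F401: 'ExistsMatch' imported but unused

    query = SearchQuery(minutes=15)
    query.add_must(TermMatch('category', 'bro'))  # only SearchQuery and TermMatch are used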
@@ -40,9 +40,9 @@ class AlertAccountCreations(AlertTask):
        severity = 'INFO'

        summary = ('{0} fxa account creation attempts by {1}'.format(aggreg['count'], aggreg['value']))
-        emails = self.mostCommon(aggreg['allevents'],'_source.details.email')
-        #did they try to create more than one email account?
-        #or just retry an existing one
+        emails = self.mostCommon(aggreg['allevents'], '_source.details.email')
+        # did they try to create more than one email account?
+        # or just retry an existing one
        if len(emails) > 1:
            for i in emails[:5]:
                summary += ' {0} ({1} hits)'.format(i[0], i[1])
@@ -7,7 +7,7 @@


from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, ExistsMatch, PhraseMatch, WildcardMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch


class AlertGuardDutyProbe(AlertTask):
@@ -6,7 +6,7 @@
# Copyright (c) 2017 Mozilla Corporation

from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch


class AlertHTTPBruteforce(AlertTask):
@@ -6,7 +6,7 @@
# Copyright (c) 2017 Mozilla Corporation

from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch


class AlertHTTPErrors(AlertTask):
@@ -1,6 +1,3 @@
-import os
-import sys
-
from mozdef_util.plugin_set import PluginSet
from mozdef_util.utilities.logger import logger

@@ -6,7 +6,7 @@
# Copyright (c) 2018 Mozilla Corporation

from lib.alerttask import AlertTask, add_hostname_to_ip
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch, ExistsMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch


class NSMScanPort(AlertTask):
@@ -5,7 +5,6 @@

import hjson
import os
import sys
from binascii import b2a_hex
import boto3
import datetime
@@ -65,7 +65,7 @@ class message(object):
                }
            ]
        })
-        r = requests.post(
+        requests.post(
            'https://events.pagerduty.com/generic/2010-04-15/create_event.json',
            headers=headers,
            data=payload,
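The hunk above is an F841 fix rather than an import removal: the PagerDuty response was bound to r and never read again. A hedged before-and-after sketch:

    import requests

    def create_event(url, headers, payload):
        # before: r = requests.post(url, headers=headers, data=payload)  # F841: 'r' assigned but never used
        # after: keep the call for its side effect, drop the dead binding
        requests.post(url, headers=headers, data=payload)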
@@ -8,7 +8,7 @@

from urlparse import urlparse
from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, ExistsMatch, PhraseMatch, WildcardMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch


class AlertProxyExfilDomains(AlertTask):
@@ -6,7 +6,7 @@
# Copyright (c) 2017 Mozilla Corporation

from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch


class AlertSSHManyConns(AlertTask):
@@ -56,7 +56,7 @@ class SSHKey(AlertTask):
            self._whitelist.append({
                'hostre': entry[:pindex],
                'path': entry[pindex + 1:]
-                })
+            })

    # Return false if the key path is present in the whitelist, otherwise return
    # true
@@ -8,7 +8,7 @@
# This code alerts on every successfully opened session on any of the host from a given list

from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch


class TraceAudit(AlertTask):
@@ -33,9 +33,9 @@ class TraceAudit(AlertTask):
        tags = ['audit']

        summary = ('{0} instances of Strace or Ptrace executed on a system by {1}'.format(aggreg['count'], aggreg['value'], ))
-        hostnames = self.mostCommon(aggreg['allevents'],'_source.hostname')
-        #did they modify more than one host?
-        #or just modify an existing configuration more than once?
+        hostnames = self.mostCommon(aggreg['allevents'], '_source.hostname')
+        # did they modify more than one host?
+        # or just modify an existing configuration more than once?
        if len(hostnames) > 1:
            for i in hostnames[:5]:
                summary += ' on {0} ({1} hosts)'.format(i[0], i[1])
@@ -8,7 +8,7 @@
# This code alerts on every successfully opened session on any of the host from a given list

from lib.alerttask import AlertTask
-from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, PhraseMatch
+from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch


class WriteAudit(AlertTask):
@@ -34,9 +34,9 @@ class WriteAudit(AlertTask):
        tags = ['audit']

        summary = ('{0} Filesystem write(s) to an auditd path by {1}'.format(aggreg['count'], aggreg['value'], ))
-        hostnames = self.mostCommon(aggreg['allevents'],'_source.hostname')
-        #did they modify more than one host?
-        #or just modify an existing configuration more than once?
+        hostnames = self.mostCommon(aggreg['allevents'], '_source.hostname')
+        # did they modify more than one host?
+        # or just modify an existing configuration more than once?
        if len(hostnames) > 1:
            for i in hostnames[:5]:
                summary += ' on {0} ({1} hosts)'.format(i[0], i[1])
@@ -11,22 +11,18 @@ from datetime import datetime
import pytz
import json
import socket
-import json
from optparse import OptionParser
from requests_futures.sessions import FuturesSession
from multiprocessing import Process, Queue
import random
import logging
from logging.handlers import SysLogHandler
from Queue import Empty
from requests.packages.urllib3.exceptions import ClosedPoolError
import requests
import time

httpsession = FuturesSession(max_workers=5)
httpsession.trust_env=False  # turns of needless .netrc check for creds
-#a = requests.adapters.HTTPAdapter(max_retries=2)
-#httpsession.mount('http://', a)
+# a = requests.adapters.HTTPAdapter(max_retries=2)
+# httpsession.mount('http://', a)


logger = logging.getLogger(sys.argv[0])
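Note the duplicated import json above: a repeated import of an unused name is what F811 catches, the same class of problem behind the rest/index.py per-file ignore in .flake8. Illustrative:

    import json
    import socket
    import json  # F811: redefinition of unused 'json' from line 1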
@@ -36,13 +32,13 @@ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(messag


def postLogs(logcache):
-    #post logs asynchronously with requests workers and check on the results
-    #expects a queue object from the multiprocessing library
+    # post logs asynchronously with requests workers and check on the results
+    # expects a queue object from the multiprocessing library
    posts=[]
    try:
        while not logcache.empty():
            postdata=logcache.get_nowait()
-            if len(postdata)>0:
+            if len(postdata) > 0:
                url=options.url
                a=httpsession.get_adapter(url)
                a.max_retries=3
@@ -52,15 +48,15 @@ def postLogs(logcache):
            pass
    for p,postdata,url in posts:
        try:
-            if p.result().status_code >=500:
-                logger.error("exception posting to %s %r [will retry]\n"%(url,p.result().status_code))
-                #try again later when the next message in forces other attempts at posting.
+            if p.result().status_code >= 500:
+                logger.error("exception posting to %s %r [will retry]\n" % (url, p.result().status_code))
+                # try again later when the next message in forces other attempts at posting.
                logcache.put(postdata)
        except ClosedPoolError as e:
-            #logger.fatal("Closed Pool Error exception posting to %s %r %r [will retry]\n"%(url,e,postdata))
+            # logger.fatal("Closed Pool Error exception posting to %s %r %r [will retry]\n"%(url,e,postdata))
            logcache.put(postdata)
        except Exception as e:
-            logger.fatal("exception posting to %s %r %r [will not retry]\n"%(url,e,postdata))
+            logger.fatal("exception posting to %s %r %r [will not retry]\n" % (url, e, postdata))
            sys.exit(1)


@@ -71,7 +67,7 @@ if __name__ == '__main__':
    sh=logging.StreamHandler(sys.stdout)
    sh.setFormatter(formatter)
    logger.addHandler(sh)
-    #create a list of logs we can append json to and call for a post when we want.
+    # create a list of logs we can append json to and call for a post when we want.
    logcache=Queue()
    try:
        for i in range(0,10):
@@ -98,21 +94,21 @@ if __name__ == '__main__':
            postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefStressTest")
            postingProcess.start()
        except OSError as e:
-            if e.errno==35:  # resource temporarily unavailable.
+            if e.errno == 35:  # resource temporarily unavailable.
                print(e)
                pass
            else:
-                logger.error('%r'%e)
+                logger.error('%r' % e)

        while not logcache.empty():
            try:
                postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefStressTest")
                postingProcess.start()
            except OSError as e:
-                if e.errno==35:  # resource temporarily unavailable.
+                if e.errno == 35:  # resource temporarily unavailable.
                    print(e)
                    pass
                else:
-                    logger.error('%r'%e)
+                    logger.error('%r' % e)
    except KeyboardInterrupt as e:
        sys.exit(1)
@@ -10,7 +10,6 @@

import logging
import random
from kitnirc.client import Channel
from kitnirc.modular import Module
from kitnirc.user import User

@@ -7,9 +7,7 @@
# Copyright (c) 2014 Mozilla Corporation

import logging
from kitnirc.client import Channel
from kitnirc.modular import Module
from kitnirc.user import User
import threading
import time
import json
@@ -41,7 +39,7 @@ class Zilla(Module):
            self.interval = 9999999
            self.channel = '#test'

-        self._bugzilla = bugzilla.Bugzilla(url=self.url+'rest/', api_key=self.api_key)
+        self._bugzilla = bugzilla.Bugzilla(url=self.url + 'rest/', api_key=self.api_key)

        _log.info("zilla module initialized for {}, pooling every {} seconds.".format(self.url, self.interval))

@@ -49,8 +47,8 @@ class Zilla(Module):
        last = 0
        while not self._stop:
            now = time.time()
-            if ((now-last) > self.interval):
-                #Add all the actions you want to do with bugzilla here ;)
+            if ((now - last) > self.interval):
+                # Add all the actions you want to do with bugzilla here ;)
                self.bugzilla_search()
                last = now
            time.sleep(1)
@@ -8,25 +8,18 @@
import json
import kitnirc.client
import kitnirc.modular
import kombu
import logging
import netaddr
import os
import pytz
import random
import select
import sys
import time
import threading
from configlib import getConfig, OptionParser
from datetime import datetime
from dateutil.parser import parse
from kombu import Connection, Queue, Exchange
from kombu.mixins import ConsumerMixin
from ipwhois import IPWhois

-import sys
-import os
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.geo_ip import GeoIP

@@ -134,7 +127,7 @@ def ipLocation(ip):
            if geoDict['country_code'] in ('US'):
                if geoDict['metro_code']:
                    location = location + '/{0}'.format(geoDict['metro_code'])
-    except Exception as e:
+    except Exception:
        location = ""
    return location

@@ -151,10 +144,11 @@ def formatAlert(jsonDictIn):
    if 'category' in jsonDictIn.keys():
        category = jsonDictIn['category']

-    return colorify('{0}: {1} {2}'.format(severity, colors['blue']
-                    + category
-                    + colors['normal'],
-                    summary.encode('ascii', 'replace')))
+    return colorify('{0}: {1} {2}'.format(
+        severity,
+        colors['blue'] + category + colors['normal'],
+        summary.encode('ascii', 'replace')
+    ))


class mozdefBot():
@@ -197,15 +191,6 @@ class mozdefBot():
                # start the mq consumer
                consumeAlerts(self)

-        @self.client.handle('LINE')
-        def line_handler(client, *params):
-            try:
-                self.root_logger.debug('linegot:' + line)
-            except AttributeError as e:
-                # catch error in kitnrc : chan.remove(actor) where channel
-                # object has no attribute remove
-                pass
-
        @self.client.handle('PRIVMSG')
        def priv_handler(client, actor, recipient, message):
            self.root_logger.debug(
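Deleting the LINE handler also clears the one F821 case the old ignore list had to tolerate: line_handler logged a name, line, that was never defined in its scope, so the handler could only raise NameError when it fired. The class of bug, in miniature:

    def line_handler(client, *params):
        print('linegot:' + line)  # F821: undefined name 'line'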
@@ -20,8 +20,6 @@ from kombu.mixins import ConsumerMixin

from slackclient import SlackClient

-import sys
-import os
from mozdef_util.utilities.toUTC import toUTC


@@ -50,8 +50,8 @@ def main():
        aws_access_key_id=options.aws_access_key_id,
        aws_secret_access_key=options.aws_secret_access_key
    )
-    idate = date.strftime(datetime.utcnow()-timedelta(days=1),'%Y%m%d')
-    bucketdate = date.strftime(datetime.utcnow()-timedelta(days=1),'%Y-%m')
+    idate = date.strftime(datetime.utcnow() - timedelta(days=1), '%Y%m%d')
+    bucketdate = date.strftime(datetime.utcnow() - timedelta(days=1), '%Y-%m')
    hostname = socket.gethostname()

    # Create or update snapshot configuration
@@ -120,7 +120,7 @@ echo "DONE!"
    except boto.exception.NoAuthHandlerFound:
        logger.error("No auth handler found, check your credentials")
    except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)


def initConfig():
@@ -129,65 +129,65 @@ def initConfig():
        'output',
        'stdout',
        options.configfile
-        )
+    )
    # syslog hostname
    options.sysloghostname = getConfig(
        'sysloghostname',
        'localhost',
        options.configfile
-        )
+    )
    options.syslogport = getConfig(
        'syslogport',
        514,
        options.configfile
-        )
+    )
    options.esservers = list(getConfig(
        'esservers',
        'http://localhost:9200',
        options.configfile).split(',')
-        )
+    )
    options.indices = list(getConfig(
        'backup_indices',
        'events,alerts,.kibana',
        options.configfile).split(',')
-        )
+    )
    options.dobackup = list(getConfig(
        'backup_dobackup',
        '1,1,1',
        options.configfile).split(',')
-        )
+    )
    options.rotation = list(getConfig(
        'backup_rotation',
        'daily,monthly,none',
        options.configfile).split(',')
-        )
+    )
    options.pruning = list(getConfig(
        'backup_pruning',
        '20,0,0',
        options.configfile).split(',')
-        )
+    )
    # aws credentials to use to send files to s3
    options.aws_access_key_id = getConfig(
        'aws_access_key_id',
        '',
        options.configfile
-        )
+    )
    options.aws_secret_access_key = getConfig(
        'aws_secret_access_key',
        '',
        options.configfile
-        )
+    )
    options.aws_region = getConfig(
        'aws_region',
        'us-west-1',
        options.configfile
-        )
+    )

    options.aws_bucket = getConfig(
        'aws_bucket',
        '',
        options.configfile
-        )
+    )


if __name__ == '__main__':
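The long run of paired closing parentheses above is a single mechanical fix repeated through initConfig(): each getConfig(...) call now closes at the indentation of the statement that opened it. The style being applied, sketched with the repo's configlib helper:

    options.syslogport = getConfig(
        'syslogport',
        514,
        options.configfile
    )  # closing bracket aligned with the opening line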
@@ -19,8 +19,6 @@ from pymongo import MongoClient
from collections import Counter
from kombu import Connection, Exchange

-import sys
-import os
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import SearchQuery, PhraseMatch
@@ -260,10 +258,10 @@ def searchMongoAlerts(mozdefdb):
        # summarize the alert categories
        # returns list of tuples: [(u'bruteforce', 8)]
        categoryCounts= mostCommon(matchingalerts,'category')
-        #are the alerts all the same category?
+        # are the alerts all the same category?

        if len(categoryCounts) == 1:
-            #is the alert category mapped to an attacker category?
+            # is the alert category mapped to an attacker category?
            for category in options.categorymapping:
                if category.keys()[0] == categoryCounts[0][0]:
                    attacker['category'] = category[category.keys()[0]]
@@ -7,7 +7,6 @@

import json
import logging
import os
import re
import sys
from datetime import datetime
@@ -15,8 +14,6 @@ from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler
from hashlib import md5

-import sys
-import os
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer
from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
@@ -63,7 +60,7 @@ def readOUIFile(ouifilename):
    for i in ouifile.readlines()[0::]:
        i=i.strip()
        if '(hex)' in i:
-            #print(i)
+            # print(i)
            fields=i.split('\t')
            macprefix=fields[0][0:8].replace('-',':').lower()
            entity=fields[2]
@@ -17,7 +17,6 @@ from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler
from pymongo import MongoClient

-import os
from mozdef_util.utilities.toUTC import toUTC


@@ -62,7 +61,7 @@ def parse_fqdn_whitelist(fqdn_whitelist_location):
    fqdns = []
    with open(fqdn_whitelist_location, "r") as text_file:
        for line in text_file:
-            line=line.strip().strip("'").strip('"')
+            line = line.strip().strip("'").strip('"')
            if isFQDN(line):
                fqdns.append(line)
    return fqdns
@@ -77,10 +76,10 @@ def main():
        mozdefdb = client.meteor
        fqdnblocklist = mozdefdb['fqdnblocklist']
        # ensure indexes
-        fqdnblocklist.create_index([('dateExpiring',-1)])
+        fqdnblocklist.create_index([('dateExpiring', -1)])

        # delete any that expired
-        fqdnblocklist.delete_many({'dateExpiring': {"$lte": datetime.utcnow()-timedelta(days=options.expireage)}})
+        fqdnblocklist.delete_many({'dateExpiring': {"$lte": datetime.utcnow() - timedelta(days=options.expireage)}})

        # Lastly, export the combined blocklist
        fqdnCursor = mozdefdb['fqdnblocklist'].aggregate([
@@ -95,7 +94,7 @@ def main():
            {"$project": {"address": 1}},
            {"$limit": options.fqdnlimit}
        ])
-        FQDNList=[]
+        FQDNList = []
        for fqdn in fqdnCursor:
            if fqdn not in options.fqdnwhitelist:
                FQDNList.append(fqdn['address'])
@@ -105,7 +104,7 @@ def main():
            outputfile.write("{0}\n".format(fqdn))
        outputfile.close()
        # to s3?
-        if len(options.aws_bucket_name)>0:
+        if len(options.aws_bucket_name) > 0:
            s3_upload_file(options.outputfile, options.aws_bucket_name, options.aws_document_key_name)

    except ValueError as e:
@@ -134,26 +133,26 @@ def initConfig():
    options.outputfile = getConfig('outputfile', 'fqdnblocklist.txt', options.configfile)

    # Days after expiration that we purge an fqdnblocklist entry (from the ui, they don't end up in the export after expiring)
-    options.expireage = getConfig('expireage',1,options.configfile)
+    options.expireage = getConfig('expireage', 1, options.configfile)

    # Max FQDNs to emit
    options.fqdnlimit = getConfig('fqdnlimit', 1000, options.configfile)

    # AWS creds
-    options.aws_access_key_id=getConfig('aws_access_key_id','',options.configfile)  # aws credentials to use to connect to mozilla_infosec_blocklist
-    options.aws_secret_access_key=getConfig('aws_secret_access_key','',options.configfile)
-    options.aws_bucket_name=getConfig('aws_bucket_name','',options.configfile)
-    options.aws_document_key_name=getConfig('aws_document_key_name','',options.configfile)
+    options.aws_access_key_id = getConfig('aws_access_key_id', '', options.configfile)  # aws credentials to use to connect to mozilla_infosec_blocklist
+    options.aws_secret_access_key = getConfig('aws_secret_access_key', '', options.configfile)
+    options.aws_bucket_name = getConfig('aws_bucket_name', '', options.configfile)
+    options.aws_document_key_name = getConfig('aws_document_key_name', '', options.configfile)


def s3_upload_file(file_path, bucket_name, key_name):
    """
    Upload a file to the given s3 bucket and return a template url.
    """
-    conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id,aws_secret_access_key=options.aws_secret_access_key)
+    conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key)
    try:
        bucket = conn.get_bucket(bucket_name, validate=False)
-    except boto.exception.S3ResponseError as e:
+    except boto.exception.S3ResponseError:
        conn.create_bucket(bucket_name)
        bucket = conn.get_bucket(bucket_name, validate=False)
@@ -17,7 +17,6 @@ from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler
from pymongo import MongoClient

-import os
from mozdef_util.utilities.toUTC import toUTC


@@ -52,7 +51,7 @@ def isIPv4(ip):
        # netaddr on it's own considers 1 and 0 to be valid_ipv4
        # so a little sanity check prior to netaddr.
        # Use IPNetwork instead of valid_ipv4 to allow CIDR
-        if '.' in ip and len(ip.split('.'))==4:
+        if '.' in ip and len(ip.split('.')) == 4:
            # some ips are quoted
            netaddr.IPNetwork(ip.strip("'").strip('"'))
            return True
@@ -89,7 +88,7 @@ def aggregateAttackerIPs(attackers):
        whitelisted = False
        logger.debug('working {0}'.format(i))
        ip = i['_id']['ipv4address']
-        ipcidr=netaddr.IPNetwork(ip)
+        ipcidr = netaddr.IPNetwork(ip)
        if not ipcidr.ip.is_loopback() and not ipcidr.ip.is_private() and not ipcidr.ip.is_reserved():
            for whitelist_range in options.ipwhitelist:
                whitelist_network = netaddr.IPNetwork(whitelist_range)
@@ -97,8 +96,8 @@ def aggregateAttackerIPs(attackers):
                    logger.debug(str(ipcidr) + " is whitelisted as part of " + str(whitelist_network))
                    whitelisted = True

-            #strip any host bits 192.168.10/24 -> 192.168.0/24
-            ipcidrnet=str(ipcidr.cidr)
+            # strip any host bits 192.168.10/24 -> 192.168.0/24
+            ipcidrnet = str(ipcidr.cidr)
            if ipcidrnet not in iplist and not whitelisted:
                iplist.append(ipcidrnet)
        else:
@@ -110,7 +109,7 @@ def parse_network_whitelist(network_whitelist_location):
    networks = []
    with open(network_whitelist_location, "r") as text_file:
        for line in text_file:
-            line=line.strip().strip("'").strip('"')
+            line = line.strip().strip("'").strip('"')
            if isIPv4(line) or isIPv6(line):
                networks.append(line)
    return networks
@@ -124,29 +123,29 @@ def main():
        client = MongoClient(options.mongohost, options.mongoport)
        mozdefdb = client.meteor
        ipblocklist = mozdefdb['ipblocklist']
-        attackers=mozdefdb['attackers']
+        attackers = mozdefdb['attackers']
        # ensure indexes
-        ipblocklist.create_index([('dateExpiring',-1)])
-        attackers.create_index([('lastseentimestamp',-1)])
-        attackers.create_index([('category',1)])
+        ipblocklist.create_index([('dateExpiring', -1)])
+        attackers.create_index([('lastseentimestamp', -1)])
+        attackers.create_index([('category', 1)])

        # First, gather IP addresses from recent attackers and add to the block list
        attackerIPList = aggregateAttackerIPs(attackers)

        # add attacker IPs to the blocklist
        # first delete ones we've created from an attacker
-        ipblocklist.delete_many({'creator': 'mozdef','reference':'attacker'})
+        ipblocklist.delete_many({'creator': 'mozdef', 'reference': 'attacker'})

        # delete any that expired
-        ipblocklist.delete_many({'dateExpiring': {"$lte": datetime.utcnow()-timedelta(days=options.expireage)}})
+        ipblocklist.delete_many({'dateExpiring': {"$lte": datetime.utcnow() - timedelta(days=options.expireage)}})

        # add the aggregations we've found recently
        for ip in attackerIPList:
            ipblocklist.insert_one(
                {'_id': genMeteorID(),
-                 'address':ip,
+                 'address': ip,
                 'reference': 'attacker',
-                 'creator':'mozdef',
+                 'creator': 'mozdef',
                 'dateAdded': datetime.utcnow()})

        # Lastly, export the combined blocklist
@@ -162,7 +161,7 @@ def main():
            {"$project": {"address": 1}},
            {"$limit": options.iplimit}
        ])
-        IPList=[]
+        IPList = []
        for ip in ipCursor:
            IPList.append(ip['address'])
        # to text
@@ -171,7 +170,7 @@ def main():
            outputfile.write("{0}\n".format(ip))
        outputfile.close()
        # to s3?
-        if len(options.aws_bucket_name)>0:
+        if len(options.aws_bucket_name) > 0:
            s3_upload_file(options.outputfile, options.aws_bucket_name, options.aws_document_key_name)

    except ValueError as e:
@@ -203,29 +202,29 @@ def initConfig():
    options.category = getConfig('category', 'bruteforcer', options.configfile)

    # Max days to look back for attackers
-    options.attackerage = getConfig('attackerage',90,options.configfile)
+    options.attackerage = getConfig('attackerage', 90, options.configfile)

    # Days after expiration that we purge an ipblocklist entry (from the ui, they don't end up in the export after expiring)
-    options.expireage = getConfig('expireage',1,options.configfile)
+    options.expireage = getConfig('expireage', 1, options.configfile)

    # Max IPs to emit
    options.iplimit = getConfig('iplimit', 1000, options.configfile)

    # AWS creds
-    options.aws_access_key_id=getConfig('aws_access_key_id','',options.configfile)  # aws credentials to use to connect to mozilla_infosec_blocklist
-    options.aws_secret_access_key=getConfig('aws_secret_access_key','',options.configfile)
-    options.aws_bucket_name=getConfig('aws_bucket_name','',options.configfile)
-    options.aws_document_key_name=getConfig('aws_document_key_name','',options.configfile)
+    options.aws_access_key_id = getConfig('aws_access_key_id', '', options.configfile)  # aws credentials to use to connect to mozilla_infosec_blocklist
+    options.aws_secret_access_key = getConfig('aws_secret_access_key', '', options.configfile)
+    options.aws_bucket_name = getConfig('aws_bucket_name', '', options.configfile)
+    options.aws_document_key_name = getConfig('aws_document_key_name', '', options.configfile)


def s3_upload_file(file_path, bucket_name, key_name):
    """
    Upload a file to the given s3 bucket and return a template url.
    """
-    conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id,aws_secret_access_key=options.aws_secret_access_key)
+    conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key)
    try:
        bucket = conn.get_bucket(bucket_name, validate=False)
-    except boto.exception.S3ResponseError as e:
+    except boto.exception.S3ResponseError:
        conn.create_bucket(bucket_name)
        bucket = conn.get_bucket(bucket_name, validate=False)
@@ -5,13 +5,12 @@
#

import sys
import os
from datetime import datetime, timedelta, tzinfo
try:
    from datetime import timezone
    utc = timezone.utc
except ImportError:
-    #Hi there python2 user
+    # Hi there python2 user
    class UTC(tzinfo):
        def utcoffset(self, dt):
            return timedelta(0)
@@ -36,7 +35,7 @@ def normalize(details):
    normalized = {}

    for f in details:
-        if f in ("ip", "ip_address","client_ip"):
+        if f in ("ip", "ip_address", "client_ip"):
            normalized["sourceipaddress"] = details[f]
            continue
        if f == "result":
@@ -88,7 +87,7 @@ def process_events(mozmsg, duo_events, etype, state):
        elif etype == 'telephony':
            mozmsg.summary = e['context']
        elif etype == 'authentication':
-            mozmsg.summary = e['eventtype']+' '+e['result']+' for '+e['username']
+            mozmsg.summary = e['eventtype'] + ' ' + e['result'] + ' for ' + e['username']

        mozmsg.send()

@@ -110,7 +109,7 @@ def main():

    duo = duo_client.Admin(ikey=options.IKEY, skey=options.SKEY, host=options.URL)
    mozmsg = mozdef.MozDefEvent(options.MOZDEF_URL)
-    mozmsg.tags=['duosecurity']
+    mozmsg.tags = ['duosecurity']
    if options.update_tags != '':
        mozmsg.tags.append(options.update_tags)
    mozmsg.set_category('authentication')
@@ -121,9 +120,9 @@ def main():

    # This will process events for all 3 log types and send them to MozDef. the state stores the last position in the
    # log when this script was last called.
-    state = process_events(mozmsg, duo.get_administrator_log(mintime=state['administration']+1), 'administration', state)
-    state = process_events(mozmsg, duo.get_authentication_log(mintime=state['authentication']+1), 'authentication', state)
-    state = process_events(mozmsg, duo.get_telephony_log(mintime=state['telephony']+1), 'telephony', state)
+    state = process_events(mozmsg, duo.get_administrator_log(mintime=state['administration'] + 1), 'administration', state)
+    state = process_events(mozmsg, duo.get_authentication_log(mintime=state['authentication'] + 1), 'authentication', state)
+    state = process_events(mozmsg, duo.get_telephony_log(mintime=state['telephony'] + 1), 'telephony', state)

    pickle.dump(state, open(options.statepath, 'wb'))

@@ -13,7 +13,6 @@ import logging
from configlib import getConfig, OptionParser
from datetime import datetime, date, timedelta

-import os
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.utilities.logger import logger

@@ -15,10 +15,8 @@ from logging.handlers import SysLogHandler
from time import sleep
import socket

-import sys
-import os
from mozdef_util.utilities.toUTC import toUTC
-from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer
+from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import SearchQuery, Aggregation

logger = logging.getLogger(sys.argv[0])
@@ -5,21 +5,17 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation

import os
import sys
import logging
import requests
import json
from configlib import getConfig, OptionParser
from datetime import datetime
from datetime import timedelta
from logging.handlers import SysLogHandler
from httplib2 import Http
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.discovery import build

-import sys
-import os
from mozdef_util.utilities.toUTC import toUTC

logger = logging.getLogger(sys.argv[0])
@@ -187,7 +183,7 @@ def main():
        state.data['lastrun'] = lastrun
        state.write_state_file()
    except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)


def initConfig():
@@ -203,10 +199,10 @@ def initConfig():
    # for detailed information on delegating a service account for use in gathering google admin sdk reports
    #

-    #google's json credential file exported from the project/admin console
+    # google's json credential file exported from the project/admin console
    options.jsoncredentialfile=getConfig('jsoncredentialfile','/path/to/filename.json',options.configfile)

-    #email of admin to impersonate as a service account
+    # email of admin to impersonate as a service account
    options.impersonate = getConfig('impersonate', 'someone@yourcompany.com', options.configfile)


@@ -16,8 +16,6 @@ from requests.auth import HTTPBasicAuth
from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler

-import sys
-import os
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient


@@ -71,7 +69,7 @@ def main():
            logger.debug('Creating %s index' % index)
            es.create_index(index, default_mapping_contents)
    except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)

    auth = HTTPBasicAuth(options.mquser, options.mqpassword)

@@ -14,7 +14,6 @@ from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler
from pymongo import MongoClient

-import os
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import SearchQuery, TermMatch
@@ -5,26 +5,21 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation

import os
import sys
-from configlib import getConfig,OptionParser
+from configlib import getConfig, OptionParser
import logging
from logging.handlers import SysLogHandler
import json
from datetime import datetime
from datetime import timedelta
from datetime import date
import requests
import netaddr

-import sys
-import os
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient


logger = logging.getLogger(sys.argv[0])
-logger.level=logging.INFO
+logger.level = logging.INFO
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')

@@ -69,20 +64,17 @@ def main():
    logger.addHandler(sh)

    logger.debug('started')
-    #logger.debug(options)
+    # logger.debug(options)
    try:
        es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
        s = requests.Session()
        s.headers.update({'Accept': 'application/json'})
        s.headers.update({'Content-type': 'application/json'})
-        s.headers.update({'Authorization':'SSWS {0}'.format(options.apikey)})
+        s.headers.update({'Authorization': 'SSWS {0}'.format(options.apikey)})

-        #capture the time we start running so next time we catch any events created while we run.
+        # capture the time we start running so next time we catch any events created while we run.
        state = State(options.state_file)
        lastrun = toUTC(datetime.now()).isoformat()
-        #in case we don't archive files..only look at today and yesterday's files.
-        yesterday=date.strftime(datetime.utcnow()-timedelta(days=1),'%Y/%m/%d')
-        today = date.strftime(datetime.utcnow(),'%Y/%m/%d')

        r = s.get('https://{0}/api/v1/events?startDate={1}&limit={2}'.format(
            options.oktadomain,
@@ -138,7 +130,7 @@ def main():
        else:
            logger.error('Could not get Okta events HTTP error code {} reason {}'.format(r.status_code, r.reason))
    except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)


def initConfig():
@@ -18,14 +18,14 @@ from datetime import date
from datetime import timedelta
from configlib import getConfig, OptionParser

import sys
import os
from logging.handlers import SysLogHandler

from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient


logger = logging.getLogger(sys.argv[0])
-logger.level=logging.WARNING
+logger.level = logging.WARNING
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')

@@ -47,10 +47,10 @@ def esPruneIndexes():
            if pruning != '0':
                index_to_prune = index
                if rotation == 'daily':
-                    idate = date.strftime(toUTC(datetime.now()) - timedelta(days=int(pruning)),'%Y%m%d')
+                    idate = date.strftime(toUTC(datetime.now()) - timedelta(days=int(pruning)), '%Y%m%d')
                    index_to_prune += '-%s' % idate
                elif rotation == 'monthly':
-                    idate = date.strftime(datetime.utcnow() - timedelta(days=31*int(pruning)),'%Y%m')
+                    idate = date.strftime(datetime.utcnow() - timedelta(days=31 * int(pruning)), '%Y%m')
                    index_to_prune += '-%s' % idate

                if index_to_prune in indices:
@@ -62,7 +62,7 @@ def esPruneIndexes():
                logger.error("Unhandled exception while deleting %s, terminating: %r" % (index_to_prune, e))

    except Exception as e:
-        logger.error("Unhandled exception, terminating: %r"%e)
+        logger.error("Unhandled exception, terminating: %r" % e)


def initConfig():
@@ -71,43 +71,43 @@ def initConfig():
        'output',
        'stdout',
        options.configfile
-        )
+    )
    # syslog hostname
    options.sysloghostname = getConfig(
        'sysloghostname',
        'localhost',
        options.configfile
-        )
+    )
    options.syslogport = getConfig(
        'syslogport',
        514,
        options.configfile
-        )
+    )
    options.esservers = list(getConfig(
        'esservers',
        'http://localhost:9200',
        options.configfile).split(',')
-        )
+    )
    options.indices = list(getConfig(
        'backup_indices',
        'events,alerts,.kibana',
        options.configfile).split(',')
-        )
+    )
    options.dobackup = list(getConfig(
        'backup_dobackup',
        '1,1,1',
        options.configfile).split(',')
-        )
+    )
    options.rotation = list(getConfig(
        'backup_rotation',
        'daily,monthly,none',
        options.configfile).split(',')
-        )
+    )
    options.pruning = list(getConfig(
        'backup_pruning',
        '20,0,0',
        options.configfile).split(',')
-        )
+    )


if __name__ == '__main__':
@@ -19,7 +19,6 @@ from datetime import timedelta
from configlib import getConfig, OptionParser
import json

import sys
import os
from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient
@@ -133,48 +132,48 @@ def initConfig():
        'output',
        'stdout',
        options.configfile
-        )
+    )
    # syslog hostname
    options.sysloghostname = getConfig(
        'sysloghostname',
        'localhost',
        options.configfile
-        )
+    )
    options.syslogport = getConfig(
        'syslogport',
        514,
        options.configfile
-        )
+    )
    options.esservers = list(getConfig(
        'esservers',
        'http://localhost:9200',
        options.configfile).split(',')
-        )
+    )
    options.indices = list(getConfig(
        'backup_indices',
        'events,alerts,.kibana',
        options.configfile).split(',')
-        )
+    )
    options.dobackup = list(getConfig(
        'backup_dobackup',
        '1,1,1',
        options.configfile).split(',')
-        )
+    )
    options.rotation = list(getConfig(
        'backup_rotation',
        'daily,monthly,none',
        options.configfile).split(',')
-        )
+    )
    options.pruning = list(getConfig(
        'backup_pruning',
        '20,0,0',
        options.configfile).split(',')
-        )
+    )
    options.weekly_rotation_indices = list(getConfig(
        'weekly_rotation_indices',
        'events',
        options.configfile).split(',')
-        )
+    )

    default_mapping_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'defaultMappingTemplate.json')
    options.default_mapping_file = getConfig('default_mapping_file', default_mapping_location, options.configfile)
|
|||
# You only need to run it once, it will setup the templates
|
||||
# used as future indexes are created
|
||||
|
||||
import requests
|
||||
import sys
|
||||
import os
|
||||
from configlib import getConfig, OptionParser
|
||||
|
@ -23,17 +22,17 @@ def initConfig():
|
|||
'esservers',
|
||||
'http://localhost:9200',
|
||||
options.configfile).split(',')
|
||||
)
|
||||
)
|
||||
options.templatenames = list(getConfig(
|
||||
'templatenames',
|
||||
'defaulttemplate',
|
||||
options.configfile).split(',')
|
||||
)
|
||||
)
|
||||
options.templatefiles = list(getConfig(
|
||||
'templatefiles',
|
||||
'',
|
||||
options.configfile).split(',')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -14,22 +14,14 @@
|
|||
import json
|
||||
import os
|
||||
import sys
|
||||
import socket
|
||||
import time
|
||||
from configlib import getConfig, OptionParser
|
||||
from datetime import datetime
|
||||
from hashlib import md5
|
||||
import boto.sqs
|
||||
from boto.sqs.message import RawMessage
|
||||
import base64
|
||||
import kombu
|
||||
|
||||
from mozdef_util.utilities.toUTC import toUTC
|
||||
from mozdef_util.utilities.to_unicode import toUnicode
|
||||
from mozdef_util.utilities.remove_at import removeAt
|
||||
from mozdef_util.utilities.is_cef import isCEF
|
||||
from mozdef_util.utilities.logger import logger, initLogger
|
||||
from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer, ElasticsearchInvalidIndex, ElasticsearchException
|
||||
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
||||
|
||||
|
||||
def getDocID(sqsregionidentifier):
|
||||
|
|
|
@ -14,8 +14,6 @@ from configlib import getConfig, OptionParser
|
|||
from logging.handlers import SysLogHandler
|
||||
from pymongo import MongoClient
|
||||
|
||||
import sys
|
||||
import os
|
||||
from mozdef_util.utilities.toUTC import toUTC
|
||||
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
||||
from mozdef_util.query_models import SearchQuery, TermMatch
|
||||
|
|
|
@ -6,7 +6,6 @@
|
|||
# Copyright (c) 2017 Mozilla Corporation
|
||||
|
||||
import sys
|
||||
import os
|
||||
from configlib import getConfig, OptionParser
|
||||
|
||||
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
||||
|
|
|
@ -8,21 +8,13 @@
|
|||
import copy
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import json
|
||||
import csv
|
||||
import string
|
||||
import ConfigParser
|
||||
import tempfile
|
||||
import logging
|
||||
import socket
|
||||
import hashlib
|
||||
import MySQLdb
|
||||
from requests import Session
|
||||
from optparse import OptionParser
|
||||
from datetime import datetime
|
||||
from os import stat
|
||||
from os.path import exists, getsize
|
||||
|
||||
|
||||
class MozDefError(Exception):
|
||||
|
@ -108,7 +100,7 @@ class MozDefEvent():
|
|||
raise MozDefError('Summary is a required field')
|
||||
|
||||
try:
|
||||
r = self.httpsession.post(self.url, json.dumps(log_msg, encoding='utf-8'), verify=self.verify_certificate)
|
||||
self.httpsession.post(self.url, json.dumps(log_msg, encoding='utf-8'), verify=self.verify_certificate)
|
||||
|
||||
except Exception as e:
|
||||
if not self.fire_and_forget_mode:
|
||||
|
@ -123,7 +115,7 @@ def main():
|
|||
mdEvent.debug = True
|
||||
mdEvent.fire_and_forget_mode = False
|
||||
|
||||
#connect to mysql
|
||||
# connect to mysql
|
||||
db=MySQLdb.connect(host=options.hostname, user=options.username,passwd=options.password,db=options.database)
|
||||
c=db.cursor(MySQLdb.cursors.DictCursor)
|
||||
|
||||
|
@ -146,7 +138,7 @@ def main():
|
|||
duration = call['LeaveTime'] - call['JoinTime']
|
||||
call['CallDuration'] = duration.seconds
|
||||
|
||||
#fix up the data for json
|
||||
# fix up the data for json
|
||||
for k in call.keys():
|
||||
# convert datetime objects to isoformat for json serialization
|
||||
if isinstance(call[k], datetime):
|
||||
|
@ -157,7 +149,7 @@ def main():
|
|||
call[k] = call[k].decode('utf-8','ignore').encode('ascii','ignore')
|
||||
|
||||
mdEvent.send(timestamp=call['JoinTime'],
|
||||
summary='Vidyo call status for '+call['UniqueCallID'].encode('ascii', 'ignore'),
|
||||
summary='Vidyo call status for ' + call['UniqueCallID'].encode('ascii', 'ignore'),
|
||||
tags=['vidyo'],
|
||||
details=call,
|
||||
category='vidyo',
|
||||
|
|
|
@ -14,7 +14,6 @@ import json
|
|||
|
||||
from elasticsearch.exceptions import ConnectionError
|
||||
|
||||
import sys
|
||||
import os
|
||||
from mozdef_util.elasticsearch_client import ElasticsearchClient
|
||||
|
||||
|
|
|
@ -10,37 +10,25 @@ import sys
|
|||
from datetime import datetime
|
||||
import pytz
|
||||
import json
|
||||
import socket
|
||||
import json
|
||||
from requests_futures.sessions import FuturesSession
|
||||
from multiprocessing import Process, Queue
|
||||
import random
|
||||
import logging
|
||||
from logging.handlers import SysLogHandler
|
||||
from Queue import Empty
|
||||
from requests.packages.urllib3.exceptions import ClosedPoolError
|
||||
import requests
|
||||
import time
|
||||
from configlib import getConfig, OptionParser
|
||||
import ConfigParser
|
||||
import glob
|
||||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
from dateutil.parser import parse
|
||||
from datetime import date
|
||||
import pytz
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
from mozdef_util.utilities.toUTC import toUTC
|
||||
|
||||
#use futures to run in the background
|
||||
#httpsession = FuturesSession(max_workers=5)
|
||||
# use futures to run in the background
|
||||
# httpsession = FuturesSession(max_workers=5)
|
||||
httpsession = requests.session()
|
||||
httpsession.trust_env=False # turns of needless .netrc check for creds
|
||||
#a = requests.adapters.HTTPAdapter(max_retries=2)
|
||||
#httpsession.mount('http://', a)
|
||||
# a = requests.adapters.HTTPAdapter(max_retries=2)
|
||||
# httpsession.mount('http://', a)
|
||||
|
||||
|
||||
logger = logging.getLogger(sys.argv[0])
|
||||
|
@ -48,7 +36,7 @@ logger.level=logging.INFO
|
|||
|
||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
|
||||
#create a list of logs we can append json to and call for a post when we want.
|
||||
# create a list of logs we can append json to and call for a post when we want.
|
||||
logcache=Queue()
|
||||
|
||||
|
||||
|
@ -67,9 +55,9 @@ def setConfig(option,value,configfile):
|
|||
|
||||
|
||||
def postLogs(logcache):
|
||||
#post logs asynchronously with requests workers and check on the results
|
||||
#expects a queue object from the multiprocessing library
|
||||
posts=[]
|
||||
# post logs asynchronously with requests workers and check on the results
|
||||
# expects a queue object from the multiprocessing library
|
||||
# posts=[]
|
||||
try:
|
||||
while not logcache.empty():
|
||||
postdata=logcache.get_nowait()
|
||||
|
@ -79,10 +67,10 @@ def postLogs(logcache):
|
|||
a.max_retries=3
|
||||
r=httpsession.post(url,data=postdata)
|
||||
print(r, postdata)
|
||||
#append to posts if this is long running and you want
|
||||
#events to try again later.
|
||||
#posts.append((r,postdata,url))
|
||||
except Empty as e:
|
||||
# append to posts if this is long running and you want
|
||||
# events to try again later.
|
||||
# posts.append((r,postdata,url))
|
||||
except Empty:
|
||||
pass
|
||||
# for p, postdata, url in posts:
|
||||
# try:
|
||||
|
@ -99,18 +87,18 @@ def postLogs(logcache):
|
|||
|
||||
|
||||
def genRandomIPv4():
|
||||
#random, IPs
|
||||
# random, IPs
|
||||
return '.'.join("%d" % (random.randint(0,254)) for x in range(4))
|
||||
|
||||
|
||||
def genAttackerIPv4():
|
||||
#random, but not too random as to allow for alerting about attacks from
|
||||
#the same IP.
|
||||
# random, but not too random as to allow for alerting about attacks from
|
||||
# the same IP.
|
||||
coreIPs=['1.93.25.',
|
||||
'222.73.115.',
|
||||
'116.10.191.',
|
||||
'144.0.0.']
|
||||
#change this to non zero according to taste for semi-random-ness
|
||||
# change this to non zero according to taste for semi-random-ness
|
||||
if random.randint(0,10)>= 0:
|
||||
return '{0}{1}'.format(random.choice(coreIPs), random.randint(1,2))
|
||||
else:
|
||||
|
@ -120,28 +108,28 @@ def genAttackerIPv4():
|
|||
def makeEvents():
|
||||
try:
|
||||
eventfiles = glob.glob(options.eventsglob)
|
||||
#pick a random number of events to send
|
||||
# pick a random number of events to send
|
||||
for i in range(1, random.randrange(20, 100)):
|
||||
#pick a random type of event to send
|
||||
# pick a random type of event to send
|
||||
eventfile = random.choice(eventfiles)
|
||||
#print(eventfile)
|
||||
# print(eventfile)
|
||||
events = json.load(open(eventfile))
|
||||
target = random.randint(0, len(events))
|
||||
for event in events[target:target+1]:
|
||||
for event in events[target:target + 1]:
|
||||
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
||||
#remove stored times
|
||||
# remove stored times
|
||||
if 'utctimestamp' in event.keys():
|
||||
del event['utctimestamp']
|
||||
if 'receivedtimestamp' in event.keys():
|
||||
del event['receivedtimestamp']
|
||||
|
||||
#add demo to the tags so it's clear it's not real data.
|
||||
# add demo to the tags so it's clear it's not real data.
|
||||
if 'tags' not in event.keys():
|
||||
event['tags'] = list()
|
||||
|
||||
event['tags'].append('demodata')
|
||||
|
||||
#replace potential <randomipaddress> with a random ip address
|
||||
# replace potential <randomipaddress> with a random ip address
|
||||
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
||||
randomIP = genRandomIPv4()
|
||||
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
||||
|
@ -150,20 +138,20 @@ def makeEvents():
|
|||
event['details']['sourceipaddress'] = randomIP
|
||||
event['details']['sourceipv4address'] = randomIP
|
||||
|
||||
#print(event['timestamp'], event['tags'], event['summary'])
|
||||
# print(event['timestamp'], event['tags'], event['summary'])
|
||||
|
||||
logcache.put(json.dumps(event))
|
||||
if not logcache.empty():
|
||||
time.sleep(.01)
|
||||
try:
|
||||
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
||||
postingProcess = Process(target=postLogs, args=(logcache,), name="json2MozdefDemoData")
|
||||
postingProcess.start()
|
||||
except OSError as e:
|
||||
if e.errno==35: # resource temporarily unavailable.
|
||||
if e.errno == 35: # resource temporarily unavailable.
|
||||
print(e)
|
||||
pass
|
||||
else:
|
||||
logger.error('%r'%e)
|
||||
logger.error('%r' % e)
|
||||
|
||||
except KeyboardInterrupt as e:
|
||||
sys.exit(1)
|
||||
|
@ -174,39 +162,39 @@ def makeAlerts():
|
|||
send events that will be correlated into alerts
|
||||
'''
|
||||
try:
|
||||
#time for us to run?
|
||||
timetoRun=toUTC(options.lastalert) + timedelta(minutes=options.alertsminutesinterval)
|
||||
# time for us to run?
|
||||
timetoRun = toUTC(options.lastalert) + timedelta(minutes=options.alertsminutesinterval)
|
||||
if timetoRun > toUTC(datetime.now()):
|
||||
#print(timetoRun)
|
||||
# print(timetoRun)
|
||||
return
|
||||
|
||||
#print(timetoRun, options.lastalert)
|
||||
# print(timetoRun, options.lastalert)
|
||||
eventfiles = glob.glob(options.alertsglob)
|
||||
#pick a random number of events to send
|
||||
# pick a random number of events to send
|
||||
for i in range(0, options.alertscount):
|
||||
#pick a random type of event to send
|
||||
# pick a random type of event to send
|
||||
eventfile = random.choice(eventfiles)
|
||||
events = json.load(open(eventfile))
|
||||
target = random.randint(0, len(events))
|
||||
# if there's only one event in the file..use it.
|
||||
if len(events) == 1 and target == 1:
|
||||
target = 0
|
||||
for event in events[target:target+1]:
|
||||
for event in events[target:target + 1]:
|
||||
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
||||
#remove stored times
|
||||
# remove stored times
|
||||
if 'utctimestamp' in event.keys():
|
||||
del event['utctimestamp']
|
||||
if 'receivedtimestamp' in event.keys():
|
||||
del event['receivedtimestamp']
|
||||
|
||||
#add demo to the tags so it's clear it's not real data.
|
||||
# add demo to the tags so it's clear it's not real data.
|
||||
if 'tags' not in event.keys():
|
||||
event['tags'] = list()
|
||||
|
||||
event['tags'].append('demodata')
|
||||
event['tags'].append('demoalert')
|
||||
|
||||
#replace potential <randomipaddress> with a random ip address
|
||||
# replace potential <randomipaddress> with a random ip address
|
||||
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
||||
randomIP = genRandomIPv4()
|
||||
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
||||
|
@ -229,11 +217,11 @@ def makeAlerts():
|
|||
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
||||
postingProcess.start()
|
||||
except OSError as e:
|
||||
if e.errno==35: # resource temporarily unavailable.
|
||||
if e.errno == 35: # resource temporarily unavailable.
|
||||
print(e)
|
||||
pass
|
||||
else:
|
||||
logger.error('%r'%e)
|
||||
logger.error('%r' % e)
|
||||
|
||||
except KeyboardInterrupt as e:
|
||||
sys.exit(1)
|
||||
|
@ -244,39 +232,39 @@ def makeAttackers():
|
|||
send events that will be correlated into attackers using pre-defined IPs
|
||||
'''
|
||||
try:
|
||||
#time for us to run?
|
||||
# time for us to run?
|
||||
timetoRun=toUTC(options.lastattacker) + timedelta(minutes=options.attackersminutesinterval)
|
||||
if timetoRun > toUTC(datetime.now()):
|
||||
#print(timetoRun)
|
||||
# print(timetoRun)
|
||||
return
|
||||
|
||||
#print(timetoRun, options.lastalert)
|
||||
# print(timetoRun, options.lastalert)
|
||||
eventfiles = glob.glob(options.alertsglob)
|
||||
#pick a random number of events to send
|
||||
# pick a random number of events to send
|
||||
for i in range(0, options.alertscount):
|
||||
#pick a random type of event to send
|
||||
# pick a random type of event to send
|
||||
eventfile = random.choice(eventfiles)
|
||||
events = json.load(open(eventfile))
|
||||
target = random.randint(0, len(events))
|
||||
# if there's only one event in the file..use it.
|
||||
if len(events) == 1 and target == 1:
|
||||
target = 0
|
||||
for event in events[target:target+1]:
|
||||
for event in events[target:target + 1]:
|
||||
event['timestamp'] = pytz.timezone('UTC').localize(datetime.utcnow()).isoformat()
|
||||
#remove stored times
|
||||
# remove stored times
|
||||
if 'utctimestamp' in event.keys():
|
||||
del event['utctimestamp']
|
||||
if 'receivedtimestamp' in event.keys():
|
||||
del event['receivedtimestamp']
|
||||
|
||||
#add demo to the tags so it's clear it's not real data.
|
||||
# add demo to the tags so it's clear it's not real data.
|
||||
if 'tags' not in event.keys():
|
||||
event['tags'] = list()
|
||||
|
||||
event['tags'].append('demodata')
|
||||
event['tags'].append('demoalert')
|
||||
|
||||
#replace potential <randomipaddress> with a random ip address
|
||||
# replace potential <randomipaddress> with a random ip address
|
||||
if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
|
||||
randomIP = genAttackerIPv4()
|
||||
event['summary'] = event['summary'].replace("<randomipaddress>", randomIP)
|
||||
|
@ -299,11 +287,11 @@ def makeAttackers():
|
|||
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
||||
postingProcess.start()
|
||||
except OSError as e:
|
||||
if e.errno==35: # resource temporarily unavailable.
|
||||
if e.errno == 35: # resource temporarily unavailable.
|
||||
print(e)
|
||||
pass
|
||||
else:
|
||||
logger.error('%r'%e)
|
||||
logger.error('%r' % e)
|
||||
|
||||
except KeyboardInterrupt as e:
|
||||
sys.exit(1)
|
||||
|
@ -314,15 +302,15 @@ def initConfig():
|
|||
options.eventsglob = getConfig('eventsglob', './sampleevents/events*json', options.configfile)
|
||||
options.alertsglob = getConfig('alertsglob', './sampleevents/alert*json', options.configfile)
|
||||
options.attackersglob = getConfig('attackersglob', './sampleevents/attacker*json', options.configfile)
|
||||
#how many alerts to create
|
||||
# how many alerts to create
|
||||
options.alertscount = getConfig('alertscount', 2, options.configfile)
|
||||
#how many minutes to wait between creating ^ alerts
|
||||
# how many minutes to wait between creating ^ alerts
|
||||
options.alertsminutesinterval = getConfig('alertsminutesinterval', 5, options.configfile)
|
||||
options.lastalert = getConfig('lastalert', datetime.now() - timedelta(hours=1), options.configfile)
|
||||
|
||||
#how many attackers to create
|
||||
# how many attackers to create
|
||||
options.attackerscount = getConfig('attackers', 1, options.configfile)
|
||||
#how many minutes to wait between creating ^ attackers
|
||||
# how many minutes to wait between creating ^ attackers
|
||||
options.attackersminutesinterval = getConfig('attackersminutesinterval', 5, options.configfile)
|
||||
options.lastattacker = getConfig('lastattacker', datetime.now() - timedelta(hours=1), options.configfile)
|
||||
|
||||
|
@ -349,8 +337,8 @@ if __name__ == '__main__':
|
|||
postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefDemoData")
|
||||
postingProcess.start()
|
||||
except OSError as e:
|
||||
if e.errno==35: # resource temporarily unavailable.
|
||||
if e.errno == 35: # resource temporarily unavailable.
|
||||
print(e)
|
||||
pass
|
||||
else:
|
||||
logger.error('%r'%e)
|
||||
logger.error('%r' % e)
|
||||
|
|
|
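The four hunks above all touch one spawn-and-retry pattern. A minimal sketch of it, assuming postLogs, logcache and logger exist as in the script (which hardcodes errno 35, the BSD/macOS value of EAGAIN):

    import errno
    from multiprocessing import Process

    def start_posting_process(logcache, postLogs, logger):
        try:
            # hand the queued demo events to a worker process
            postingProcess = Process(target=postLogs, args=(logcache,), name="json2MozdefDemoData")
            postingProcess.start()
        except OSError as e:
            if e.errno == errno.EAGAIN:  # resource temporarily unavailable
                print(e)  # benign: the caller just retries on its next pass
            else:
                logger.error('%r' % e)
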
@@ -7,7 +7,6 @@

 import os
 import sys
-import inspect
 from configlib import getConfig, OptionParser

 sys.path.insert(1, os.path.join(sys.path[0], '../..'))

@@ -67,9 +67,9 @@ defaultTemplate = r'''
 }
 '''

-#valid json?
+# valid json?
 templateJson = json.loads(defaultTemplate)

-#post it:
+# post it
 r = requests.put(url="http://servername:9200/_template/defaulttemplate", data=defaultTemplate)
 print(r)

@@ -4,14 +4,11 @@
 # Copyright (c) 2017 Mozilla Corporation

 import os
 import sys
-import bottle
-from bottle import debug,route, run, template, response,request,post, default_app
+from bottle import route, run, response, request, default_app
 from bottle import _stdout as bottlelog
-import kombu
-from kombu import Connection,Queue,Exchange
+from kombu import Connection, Queue, Exchange
 import json
-from configlib import getConfig,OptionParser
+from configlib import getConfig, OptionParser


 @route('/status')

@@ -30,11 +27,11 @@ def status():
 @route('/test')
 @route('/test/')
 def testindex():
-ip = request.environ.get('REMOTE_ADDR')
-#response.headers['X-IP'] = '{0}'.format(ip)
+# ip = request.environ.get('REMOTE_ADDR')
+# response.headers['X-IP'] = '{0}'.format(ip)
 response.status=200

-#act like elastic search bulk index
+# act like elastic search bulk index


 @route('/_bulk',method='POST')

@@ -42,10 +39,10 @@ def testindex():
 def bulkindex():
 if request.body:
 bulkpost=request.body.read()
-#bottlelog('request:{0}\n'.format(bulkpost))
+# bottlelog('request:{0}\n'.format(bulkpost))
 request.body.close()
 if len(bulkpost)>10: # TODO Check for bulk format.
-#iterate on messages and post to event message queue
+# iterate on messages and post to event message queue

 eventlist=[]
 for i in bulkpost.splitlines():

@@ -53,10 +50,10 @@ def bulkindex():

 for i in eventlist:
 try:
-#valid json?
+# valid json?
 try:
 eventDict=json.loads(i)
-except ValueError as e:
+except ValueError:
 response.status=500
 return
 # don't post the items telling us where to post things..

@@ -77,17 +74,17 @@ def bulkindex():
 def eventsindex():
 if request.body:
 anevent=request.body.read()
-#bottlelog('request:{0}\n'.format(anevent))
+# bottlelog('request:{0}\n'.format(anevent))
 request.body.close()
-#valid json?
+# valid json?
 try:
 eventDict=json.loads(anevent)
-except ValueError as e:
+except ValueError:
 response.status=500
 return
-#let the message queue worker who gets this know where it was posted
+# let the message queue worker who gets this know where it was posted
 eventDict['endpoint']='events'
-#post to event message queue
+# post to event message queue
 ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
 ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)

@@ -96,21 +93,21 @@ def eventsindex():

 @route('/cef', method=['POST','PUT'])
 @route('/cef/',method=['POST','PUT'])
-#debug(True)
+# debug(True)
 def cefindex():
 if request.body:
 anevent=request.body.read()
 request.body.close()
-#valid json?
+# valid json?
 try:
 cefDict=json.loads(anevent)
-except ValueError as e:
+except ValueError:
 response.status=500
 return
-#let the message queue worker who gets this know where it was posted
+# let the message queue worker who gets this know where it was posted
 cefDict['endpoint']='cef'

-#post to eventtask exchange
+# post to eventtask exchange
 ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
 ensurePublish(cefDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
 return

@@ -129,17 +126,17 @@ def customindex(application):
 if request.body:
 anevent=request.body.read()
 request.body.close()
-#valid json?
+# valid json?
 try:
 customDict=json.loads(anevent)
-except ValueError as e:
+except ValueError:
 response.status=500
 return
-#let the message queue worker who gets this know where it was posted
+# let the message queue worker who gets this know where it was posted
 customDict['endpoint']= application
 customDict['customendpoint'] = True

-#post to eventtask exchange
+# post to eventtask exchange
 ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
 ensurePublish(customDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
 return

@@ -154,13 +151,13 @@ def initConfig():
 options.listen_host=getConfig('listen_host', '127.0.0.1', options.configfile)


-#get config info:
+# get config info:
 parser=OptionParser()
 parser.add_option("-c", dest='configfile', default=os.path.join(os.path.dirname(__file__), __file__).replace('.py', '.conf'), help="configuration file to use")
 (options,args) = parser.parse_args()
 initConfig()

-#connect and declare the message queue/kombu objects.
+# connect and declare the message queue/kombu objects.
 connString='amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,options.mqpassword,options.mqserver,options.mqport)
 mqConn=Connection(connString)

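Every POST endpoint above follows the same validate-then-publish shape. A minimal sketch, assuming mqConn, mqproducer and eventTaskExchange are the module-level kombu objects declared at the bottom of this file:

    import json

    def publish(body, endpoint, routing_key):
        try:
            eventDict = json.loads(body)  # valid json?
        except ValueError:
            return None  # the real handlers set response.status = 500 here
        # tell the message queue worker where the event was posted
        eventDict['endpoint'] = endpoint
        # kombu's ensure() wraps publish with reconnect/retry logic
        ensurePublish = mqConn.ensure(mqproducer, mqproducer.publish, max_retries=10)
        ensurePublish(eventDict, exchange=eventTaskExchange, routing_key=routing_key)
        return eventDict
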
@@ -29,7 +29,7 @@ def toUTC(suspectedDate):
 else:
 # epoch? but seconds/milliseconds/nanoseconds (lookin at you heka)
 epochDivisor = int(str(1) + '0' * (len(str(suspectedDate)) % 10))
-objDate = datetime.fromtimestamp(float(suspectedDate/epochDivisor), LOCAL_TIMEZONE)
+objDate = datetime.fromtimestamp(float(suspectedDate / epochDivisor), LOCAL_TIMEZONE)
 elif type(suspectedDate) in (str, unicode):
 # try to parse float or negative number from string:
 objDate = None

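The divisor line above normalizes an epoch of any precision back to seconds: a seconds epoch has 10 digits, milliseconds 13, microseconds 16, nanoseconds 19, so len(str(ts)) % 10 counts the surplus zero digits. An illustration (not MozDef code):

    for ts in (1534435200, 1534435200000, 1534435200000000, 1534435200000000000):
        epochDivisor = int(str(1) + '0' * (len(str(ts)) % 10))
        print(ts / epochDivisor)  # prints the seconds value 1534435200 each time
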
@@ -20,6 +20,7 @@ from StringIO import StringIO
 from threading import Timer
 import re
 import time
 import kombu
+from ssl import SSLEOFError, SSLError

 from mozdef_util.utilities.toUTC import toUTC

@@ -307,8 +308,6 @@ class taskConsumer(object):

 def authenticate(self):
 if options.cloudtrail_arn not in ['<cloudtrail_arn>', 'cloudtrail_arn']:
-role_manager_args = {}
-
 role_manager = RoleManager(**get_aws_credentials(
 options.region,
 options.accesskey,

@@ -368,7 +367,7 @@ class taskConsumer(object):
 self.on_message(event)

 self.taskQueue.delete_message(msg)
-except (SSLEOFError, SSLError, socket.error) as e:
+except (SSLEOFError, SSLError, socket.error):
 logger.info('Received network related error...reconnecting')
 time.sleep(5)
 self.connection, self.taskQueue = connect_sqs(

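The last hunk sits inside a consume loop that reconnects on transient network failures. A rough sketch of that shape; poll_once and reconnect are hypothetical stand-ins for the real read/on_message/delete cycle and the connect_sqs(...) call:

    import time
    import socket
    import logging
    from ssl import SSLEOFError, SSLError

    logger = logging.getLogger(__name__)

    def consume_forever(consumer):
        while True:
            try:
                consumer.poll_once()  # hypothetical: read, on_message(), delete
            except (SSLEOFError, SSLError, socket.error):
                # no `as e`: the exception object is never used, which is
                # exactly what this hunk's cleanup enforces
                logger.info('Received network related error...reconnecting')
                time.sleep(5)
                consumer.reconnect()  # hypothetical stand-in for connect_sqs(...)
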
@@ -8,7 +8,6 @@

 import json
-import kombu
 import os
 import sys
 import socket
 from configlib import getConfig, OptionParser

@@ -10,7 +10,6 @@


 import json
 import os
-import kombu
 import sys
 import socket

@@ -7,7 +7,6 @@


 import json
 import os

 import sys
 import socket

@@ -12,7 +12,6 @@


 import json
 import os
 import sys
 import socket
 import time

@@ -6,7 +6,6 @@
 # Copyright (c) 2017 Mozilla Corporation


 import sys
 import os
 from operator import itemgetter
 from datetime import datetime

|
|||
if 'ts' in newmessage['details']:
|
||||
newmessage[u'utctimestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat()
|
||||
newmessage[u'timestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat()
|
||||
#del(newmessage['details']['ts'])
|
||||
# del(newmessage['details']['ts'])
|
||||
else:
|
||||
# a malformed message somehow managed to crawl to us, let's put it somewhat together
|
||||
newmessage[u'utctimestamp'] = toUTC(datetime.now()).isoformat()
|
||||
|
@ -419,7 +419,7 @@ class message(object):
|
|||
u'source {src} '
|
||||
u'destination {dst} '
|
||||
u'port {p}'
|
||||
).format(**sumstruct)
|
||||
).format(**sumstruct)
|
||||
# Thank you for your service
|
||||
return (newmessage, metadata)
|
||||
|
||||
|
|
|
@@ -3,8 +3,6 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2017 Mozilla Corporation

-import sys
-import os
 from mozdef_util.utilities.key_exists import key_exists


@@ -4,8 +4,6 @@
 # Copyright (c) 2014 Mozilla Corporation

 import hashlib
-import sys
-import os
 from mozdef_util.utilities.logger import logger

@@ -28,15 +28,15 @@ class message(object):
 # early exit by setting message = None and return
 if 'details' in message.keys():
 # drop disabled for now
-#if 'signatureid' in message['details']:
-#if message['details'].lower() == 'execve' and \
-#'command' not in message['details']:
+# if 'signatureid' in message['details']:
+# if message['details'].lower() == 'execve' and \
+# 'command' not in message['details']:
 # auditd entry without a command
 # likely a result of another command (java starting a job, etc.)
 # signal a drop

-#message = None
-#return message
+# message = None
+# return message
 if 'http_user_agent' in message['details']:
 if message['details']['http_user_agent'] == 'ELB-HealthChecker/1.0':
 message = None

@@ -6,8 +6,6 @@
 # This script copies the format/handling mechanism of ipFixup.py (git f5734b0c7e412424b44a6d7af149de6250fc70a2)

 import netaddr
-import sys
-import os
 from mozdef_util.utilities.toUTC import toUTC

@@ -38,17 +38,17 @@ class message(object):

 if 'eventsource' not in message:
 return (message, metadata)
-#drop non-relevant messages
+# drop non-relevant messages
 if message['eventsource'] in ('Fxa-customsMozSvc', 'FxaContentWebserver', 'FxaAuthWebserver', 'FxaOauthWebserver', 'FxaAuth', 'fxa-auth-server'):
 if 'details' in message.keys():
 if 'status' in message['details']:
 if message['details']['status'] == 200:
-#normal 200 returns for web content
+# normal 200 returns for web content
 return(None, metadata)
 # FxaAuth sends http status as 'code'
 if 'code' in message['details']:
 if message['details']['code'] == 200:
-#normal 200 returns for web content
+# normal 200 returns for web content
 return(None, metadata)
 if 'op' in message['details']:
 if message['details']['op'] == 'mailer.send.1':

@@ -93,15 +93,15 @@ class message(object):
 message['details']['remoteAddressChain'][0] == '[' and
 message['details']['remoteAddressChain'][-1] == ']'):
 # remove the brackets and double quotes
-for i in ['[',']','"']:
-message['details']['remoteAddressChain']=message['details']['remoteAddressChain'].replace(i,'')
+for i in ['[', ']', '"']:
+message['details']['remoteAddressChain'] = message['details']['remoteAddressChain'].replace(i, '')
 # make sure it's still a list
 if ',' in message['details']['remoteAddressChain']:
 sourceIP = message['details']['remoteAddressChain'].split(',')[0]
 if isIP(sourceIP):
 message['details']['sourceipaddress'] = sourceIP

-#fxacustoms sends source ip as just 'ip'
+# fxacustoms sends source ip as just 'ip'
 if 'ip' in message['details'].keys():
 if isIP(message['details']['ip']):
 message['details']['sourceipaddress'] = message['details']['ip']

@@ -6,7 +6,6 @@
 import netaddr
 import os

 import sys
 from mozdef_util.geo_ip import GeoIP


@@ -38,7 +37,7 @@ class message(object):
 return geoDict
 else:
 location['location'] = 'unknown'
-except ValueError as e:
+except ValueError:
 pass
 return location

@@ -3,7 +3,6 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2017 Mozilla Corporation

 import sys
 import os
 import jmespath
 import yaml

@@ -3,8 +3,6 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2017 Mozilla Corporation

-import sys
-import os
 from mozdef_util.utilities.key_exists import key_exists
 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.utilities.dot_dict import DotDict

@@ -21,6 +21,6 @@ class message(object):
 self.priority = 5

 def onMessage(self, message, metadata):
-docid=hashlib.md5('nagiosstatus'+message['details']['nagios_hostname']).hexdigest()
-metadata['id']=docid
+docid = hashlib.md5('nagiosstatus' + message['details']['nagios_hostname']).hexdigest()
+metadata['id'] = docid
 return (message, metadata)

@@ -3,10 +3,7 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2017 Mozilla Corporation

-import sys
-import os
 import re
 from mozdef_util.utilities.toUTC import toUTC


 class message(object):

@@ -3,7 +3,6 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2018 Mozilla Corporation

 import netaddr
 import json
 from datetime import datetime
 from platform import node

@@ -21,7 +21,7 @@ class message(object):
 self.priority = 5

 def onMessage(self, message, metadata):
-docid=hashlib.md5('vidyoUniqueCallID'+message['details']['UniqueCallID']).hexdigest()
-metadata['id']=docid
+docid = hashlib.md5('vidyoUniqueCallID' + message['details']['UniqueCallID']).hexdigest()
+metadata['id'] = docid
 metadata['doc_type'] = 'vidyo'
 return (message, metadata)

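The nagios and vidyo hunks both derive a deterministic document id: hashing a stable key into metadata['id'] makes Elasticsearch overwrite the same document instead of accumulating duplicates. A sketch under Python 2 string semantics, with a made-up call id:

    import hashlib

    def doc_id(prefix, unique_value):
        # identical inputs always hash to the same id, so re-posts update in place
        return hashlib.md5(prefix + unique_value).hexdigest()

    metadata = {'id': doc_id('vidyoUniqueCallID', 'example-call-id')}
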
@@ -4,8 +4,7 @@
 # Copyright (c) 2015 Mozilla Corporation

 import hashlib
-import sys
-import os

 from mozdef_util.utilities.logger import logger

@@ -14,7 +14,7 @@ import requests
 import sys
 import socket
 from bottle import route, run, response, request, default_app, post
-from datetime import datetime, timedelta
+from datetime import datetime
 from configlib import getConfig, OptionParser
 from ipwhois import IPWhois
 from operator import itemgetter

@@ -22,9 +22,8 @@ from pymongo import MongoClient
 from bson import json_util

 from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchInvalidIndex
-from mozdef_util.query_models import SearchQuery, TermMatch, RangeMatch, Aggregation
+from mozdef_util.query_models import SearchQuery, TermMatch

 from mozdef_util.utilities.toUTC import toUTC
 from mozdef_util.utilities.logger import logger, initLogger


@@ -51,7 +50,7 @@ def enable_cors(fn):
 @route('/test/')
 def test():
 '''test endpoint for..testing'''
-ip = request.environ.get('REMOTE_ADDR')
+# ip = request.environ.get('REMOTE_ADDR')
 # response.headers['X-IP'] = '{0}'.format(ip)
 response.status = 200

@@ -144,7 +143,7 @@ def index():
 # valid json?
 try:
 requestDict = json.loads(arequest)
-except ValueError as e:
+except ValueError:
 response.status = 500

 if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):

@@ -164,11 +163,11 @@ def ipintel():
 '''send an IP address through plugins for intel enhancement'''
 if request.body:
 arequest = request.body.read()
-#request.body.close()
+# request.body.close()
 # valid json?
 try:
 requestDict = json.loads(arequest)
-except ValueError as e:
+except ValueError:
 response.status = 500
 if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
 response.content_type = "application/json"

@@ -193,7 +192,7 @@ def index():
 # valid json?
 try:
 requestDict = json.loads(arequest)
-except ValueError as e:
+except ValueError:
 response.status = 500
 return
 if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):

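The recurring change from `except ValueError as e:` to `except ValueError:` in this file silences flake8 F841 (local variable assigned but never used); binding the exception only earns its keep when it is logged or re-raised. For example:

    import json

    try:
        json.loads('not json')
    except ValueError:  # no `as e`: the exception object is never referenced
        pass
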
@@ -55,7 +55,7 @@ class message(object):
 request.body.close()
 try:
 requestDict = json.loads(arequest)
-except ValueError as e:
+except ValueError:
 response.status = 500

 print(requestDict, requestDict.keys())

@@ -6,8 +6,6 @@
 import os
 import sys
-import ConfigParser
 from datetime import datetime, timedelta
 import json
 import netaddr
 from boto3.session import Session

@@ -160,7 +158,6 @@ class message(object):
 '''
 # format/validate request.json:
 ipaddress = None
-CIDR = None
 sendToBHVPC = False

 # loop through the fields of the form

@@ -179,7 +176,7 @@ class message(object):
 sendToBHVPC = False

 if sendToBHVPC and ipaddress is not None:
-#figure out the CIDR mask
+# figure out the CIDR mask
 if isIPv4(ipaddress) or isIPv6(ipaddress):
 ipcidr=netaddr.IPNetwork(ipaddress)
 if not ipcidr.ip.is_loopback() \

@@ -1,8 +1,6 @@
 from positive_test_suite import PositiveTestSuite
 from negative_test_suite import NegativeTestSuite

-import os
-import sys
 from mozdef_util.query_models import ExistsMatch


@@ -1,8 +1,6 @@
 from positive_test_suite import PositiveTestSuite
 from negative_test_suite import NegativeTestSuite

-import os
-import sys
 from mozdef_util.query_models import LessThanMatch


@@ -1,8 +1,6 @@
 from positive_test_suite import PositiveTestSuite
 from negative_test_suite import NegativeTestSuite

-import os
-import sys
 from mozdef_util.query_models import PhraseMatch


@@ -1,8 +1,6 @@
 from positive_test_suite import PositiveTestSuite
 from negative_test_suite import NegativeTestSuite

-import os
-import sys
 from mozdef_util.query_models import QueryStringMatch


@@ -1,8 +1,6 @@
 from positive_test_suite import PositiveTestSuite
 from negative_test_suite import NegativeTestSuite

-import os
-import sys
 from mozdef_util.query_models import RangeMatch


@@ -1,8 +1,6 @@
 from positive_test_suite import PositiveTestSuite
 from negative_test_suite import NegativeTestSuite

-import os
-import sys
 from mozdef_util.query_models import TermMatch


@@ -1,8 +1,6 @@
 from positive_test_suite import PositiveTestSuite
 from negative_test_suite import NegativeTestSuite

-import os
-import sys
 from mozdef_util.query_models import TermsMatch


@@ -1,8 +1,6 @@
 from positive_test_suite import PositiveTestSuite
 from negative_test_suite import NegativeTestSuite

-import os
-import sys
 from mozdef_util.query_models import WildcardMatch

@@ -9,7 +9,6 @@ import time
 import json

 import pytest
 from elasticsearch.exceptions import ConnectionTimeout

 import os
 import sys

@@ -358,7 +357,7 @@ class TestWriteWithIDExists(ElasticsearchClientTest):
 saved_event = self.es_client.save_event(body=event, doc_id=event_id)
 assert saved_event['_id'] == event_id
 self.flush(self.event_index_name)
-fetched_event = self.es_client.get_event_by_id(event_id)
+self.es_client.get_event_by_id(event_id)


 class TestGetIndices(ElasticsearchClientTest):

@@ -1,5 +1,3 @@
-import os
-import sys
 from mozdef_util.event import Event
 from mozdef_util.utilities.toUTC import toUTC
 import socket

@@ -60,7 +58,6 @@ class TestEvent(object):
 'utctimestamp': '2017-09-14T20:05:20.299387+00:00',
 'timestamp': '2017-09-14T20:05:19.116195+00:00',
 'mozdefhostname': 'randomhostname',
 'tags': [],
 'category': 'Authentication',
 'hostname': 'host.domain.com',
 'processid': 12345,

@@ -1,5 +1,5 @@
 import os
 import sys

 from mozdef_util.plugin_set import PluginSet


@@ -1,7 +1,6 @@
 import pytest

 import os
-import sys
 from mozdef_util.state import State, StateParsingError

 states_directory_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'states')

@@ -5,9 +5,6 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2017 Mozilla Corporation


-import os
-import sys
 from mozdef_util.utilities.key_exists import key_exists

@@ -22,12 +22,9 @@ def utc_timezone():

 tzlocal.get_localzone = utc_timezone

-import sys
 if 'mozdef_util.utilities.toUTC' in sys.modules:
 reload(sys.modules['mozdef_util.utilities.toUTC'])

 from mozdef_util.utilities.toUTC import toUTC


 class TestToUTC():

@@ -157,7 +157,7 @@ class TestBroFixup(object):
 "audisp-json",
 "2.1.1",
 "audit"
-],
+],
 "summary": "Execve: sh -c sudo bro nsm /usr/lib64/nagios/plugins/custom/check_auditd.sh",
 "processname": "audisp-json",
 "details": {

@@ -1,13 +1,7 @@
 import os
 import sys

-import mock
-import json
-import random
-import string

 sys.path.append(os.path.join(os.path.dirname(__file__), '../../../mq/plugins'))
 from mozdef_util.utilities.toUTC import toUTC
 from github_webhooks import message

@@ -2,7 +2,6 @@ import os
 import sys
 from mozdef_util.utilities.toUTC import toUTC

 import mock
 import json
 import random
 import string

@@ -1,5 +1,6 @@
 freezegun==0.3.9
 flake8==3.5.0
+flake8-per-file-ignores==0.6
 mock==2.0.0
 pytest==3.1.1
 WebTest==2.0.27

@@ -9,11 +9,9 @@ from configlib import getConfig

 from kombu import Connection, Queue, Exchange


 import os
 import sys
-from mozdef_util.elasticsearch_client import ElasticsearchClient
-
 from mozdef_util.elasticsearch_client import ElasticsearchClient
 from mozdef_util.utilities.dot_dict import DotDict

@@ -11,9 +11,6 @@ from dateutil.parser import parse
 import random
 import pytest

-import os
-import sys

 from mozdef_util.utilities import toUTC

 from suite_helper import parse_config_file, parse_mapping_file, setup_es_client, setup_rabbitmq_client