# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Jeff Bryner jbryner@mozilla.com
# Anthony Verez averez@mozilla.com

import bottle
import json
import MySQLdb
import netaddr
import pyes
import pytz
import requests
import sys
from bottle import debug, route, run, response, request, default_app, post
from datetime import datetime, timedelta
from configlib import getConfig, OptionParser
from elasticutils import S
from dateutil.parser import parse
from ipwhois import IPWhois

options = None
dbcursor = None
mysqlconn = None


# cors decorator for rest/ajax
def enable_cors(fn):
    def _enable_cors(*args, **kwargs):
        # set CORS headers
        response.headers['Access-Control-Allow-Origin'] = '*'
        response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, OPTIONS'
        response.headers['Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'

        if bottle.request.method != 'OPTIONS':
            # actual request; reply with the actual response
            return fn(*args, **kwargs)

    return _enable_cors


@route('/test')
@route('/test/')
def index():
    ip = request.environ.get('REMOTE_ADDR')
    # response.headers['X-IP'] = '{0}'.format(ip)
    response.status = 200


@route('/status')
@route('/status/')
def index():
    if request.body:
        request.body.read()
        request.body.close()
    response.status = 200
    response.content_type = "application/json"
    return(json.dumps(dict(status='ok')))


@route('/ldapLogins')
@route('/ldapLogins/')
@enable_cors
def index():
    '''return a summary of ldap login successes/failures by dn as json'''
    if request.body:
        request.body.read()
        request.body.close()
    response.content_type = "application/json"
    return(esLdapResults())


@route('/alerts')
@route('/alerts/')
@enable_cors
def index():
    '''return a summary of alerts by category as json'''
    if request.body:
        request.body.read()
        request.body.close()
    response.content_type = "application/json"
    return(esAlertsSummary())


@route('/kibanadashboards')
@route('/kibanadashboards/')
@enable_cors
def index():
    '''return a list of kibana dashboards (name and url) as json'''
    if request.body:
        request.body.read()
        request.body.close()
    response.content_type = "application/json"
    return(kibanaDashboards())


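# Illustrative request for the /blockip endpoint below (example values only; assumes
# the default localhost:8081 binding configured at the bottom of this file and
# enableBlockIP set in the config; field names match what banhammer() reads):
#   curl -X POST http://localhost:8081/blockip \
#     -H 'Content-Type: application/json' \
#     -d '{"address": "192.0.2.1", "cidr": 32, "duration": "1d",
#          "comment": "scanning", "reporter": "security@example.com", "bugid": 0}'
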
@post('/blockip')
@post('/blockip/')
@enable_cors
def index():
    '''if enabled, send the posted block request to the banhammer DB'''
    if options.enableBlockIP:
        try:
            return(banhammer(request.json))
        except Exception as e:
            sys.stderr.write('Error parsing json sent to POST /blockip: {0}\n'.format(e))


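# The /ipwhois, /ipcifquery and /ipdshieldquery endpoints below all take a small json
# body naming the address to look up. Illustrative request (example values only,
# assuming the default localhost:8081 binding configured at the bottom of this file):
#   curl -X POST http://localhost:8081/ipwhois -d '{"ipaddress": "8.8.8.8"}'
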
@post('/ipwhois')
@post('/ipwhois/')
@enable_cors
def index():
    '''return a json version of whois for an ip address'''
    arequest = ''
    if request.body:
        arequest = request.body.read()
        request.body.close()
    # valid json?
    try:
        requestDict = json.loads(arequest)
    except ValueError:
        response.status = 500
        return
    if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
        response.content_type = "application/json"
        return(getWhois(requestDict['ipaddress']))
    else:
        response.status = 500
        return


@post('/ipcifquery')
@post('/ipcifquery/')
@enable_cors
def index():
    '''return a json version of cif query for an ip address'''
    arequest = ''
    if request.body:
        arequest = request.body.read()
        request.body.close()
    # valid json?
    try:
        requestDict = json.loads(arequest)
    except ValueError:
        response.status = 500
        return
    if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
        response.content_type = "application/json"
        return(getIPCIF(requestDict['ipaddress']))
    else:
        response.status = 500
        return


@post('/ipdshieldquery')
@post('/ipdshieldquery/')
@enable_cors
def index():
    '''
    return a json version of dshield query for an ip address
    https://isc.sans.edu/api/index.html
    '''
    arequest = ''
    if request.body:
        arequest = request.body.read()
        request.body.close()
    # valid json?
    try:
        requestDict = json.loads(arequest)
    except ValueError:
        response.status = 500
        return
    if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
        url = "https://isc.sans.edu/api/ip/"
        dresponse = requests.get('{0}{1}?json'.format(url, requestDict['ipaddress']))
        if dresponse.status_code == 200:
            response.content_type = "application/json"
            return(dresponse.content)
        else:
            response.status = dresponse.status_code
            return
    else:
        response.status = 500
        return


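# toUTC() below accepts either a date string or a datetime object and always hands back
# a timezone-aware UTC datetime; naive input is assumed to be in localTimeZone, e.g.
#   toUTC('Jul 18 2014 1:00PM')  or  toUTC(datetime.now())
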
def toUTC(suspectedDate, localTimeZone="US/Pacific"):
    '''make a UTC date out of almost anything'''
    utc = pytz.UTC
    objDate = None
    if isinstance(suspectedDate, str):
        objDate = parse(suspectedDate, fuzzy=True)
    elif isinstance(suspectedDate, datetime):
        objDate = suspectedDate

    if objDate is not None:
        if objDate.tzinfo is None:
            # naive datetime: assume it is in the local time zone
            objDate = pytz.timezone(localTimeZone).localize(objDate)
        objDate = utc.normalize(objDate)

    return objDate


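# isIPv4() below is a permissive check: dotted-quad addresses and CIDR blocks pass,
# anything netaddr cannot parse (or that lacks four dot-separated parts) does not, e.g.
#   isIPv4('10.0.0.1') -> True, isIPv4('10.0.0.0/8') -> True, isIPv4('banana') -> False
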
def isIPv4(ip):
    try:
        # netaddr on its own considers 1 and 0 to be valid_ipv4
        # so a little sanity check prior to netaddr.
        # Use IPNetwork instead of valid_ipv4 to allow CIDR
        if '.' in ip and len(ip.split('.')) == 4:
            # some ips are quoted
            netaddr.IPNetwork(ip.strip("'").strip('"'))
            return True
        else:
            return False
    except Exception:
        return False


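# esAlertsSummary() backs the /alerts endpoint: it facets alert documents on their
# 'category' field over the requested window (the last 12 hours by default) and
# returns the raw facet counts as json.
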
def esAlertsSummary(begindateUTC=None, enddateUTC=None):
    '''return a facet count of alerts by category for the given time window as json'''
    resultsList = list()
    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=12)
        begindateUTC = toUTC(begindateUTC)
    if enddateUTC is None:
        enddateUTC = datetime.now()
        enddateUTC = toUTC(enddateUTC)
    try:
        # q=S().es(urls=['http://{0}:{1}'.format(options.esserver,options.esport)]).query(_type='alert').filter(utctimestamp__range=[begindateUTC.isoformat(),enddateUTC.isoformat()])
        # f=q.facet_raw(alerttype={"terms" : {"script_field" : "_source.type","size" : 500}})

        # get all alerts
        # q= S().es(urls=['http://{0}:{1}'.format(options.esserver,options.esport)]).query(_type='alert')
        q = S().es(urls=list('{0}'.format(s) for s in options.esservers)).query(_type='alert')
        # create a facet field using the entire 'category' field (not the sub terms)
        # and filter it by date.
        f = q.facet_raw(
            alerttype={
                "terms": {"script_field": "_source.category"},
                "facet_filter": {'range': {'utctimestamp':
                    {'gte': begindateUTC.isoformat(), 'lte': enddateUTC.isoformat()}}}
            })
        return(json.dumps(f.facet_counts()['alerttype']))
    except Exception as e:
        sys.stderr.write('%r' % e)


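# esLdapResults() backs the /ldapLogins endpoint. It makes two passes against the
# events index: a facet on details.dn to find the most active DNs (minus the stoplist
# of structural components like 'dc' and 'com'), then a per-dn facet on details.result
# to split ldap_success counts from ldap_invalid_credentials failures.
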
def esLdapResults(begindateUTC=None, enddateUTC=None):
    '''return a summary of ldap login successes/failures by dn for the given time window as json'''
    resultsList = list()
    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=12)
        begindateUTC = toUTC(begindateUTC)
    if enddateUTC is None:
        enddateUTC = datetime.now()
        enddateUTC = toUTC(enddateUTC)
    try:
        es = pyes.ES((list('{0}'.format(s) for s in options.esservers)))
        qDate = pyes.RangeQuery(qrange=pyes.ESRange('utctimestamp',
            from_value=begindateUTC, to_value=enddateUTC))
        q = pyes.MatchAllQuery()
        q = pyes.FilteredQuery(q, qDate)
        q = pyes.FilteredQuery(q, pyes.TermFilter('tags', 'ldap'))
        q = pyes.FilteredQuery(q,
            pyes.TermFilter('details.result', 'ldap_invalid_credentials'))
        q2 = q.search()
        q2.facet.add_term_facet('details.result')
        q2.facet.add_term_facet('details.dn', size=20)
        results = es.search(q2, indices='events')

        stoplist = ('o', 'mozilla', 'dc', 'com', 'mozilla.com',
                    'mozillafoundation.org', 'org')
        for t in results.facets['details.dn'].terms:
            if t['term'] in stoplist:
                continue
            # print(t['term'])
            failures = 0
            success = 0
            dn = t['term']

            # re-query with the terms of the details.dn
            qt = pyes.MatchAllQuery()
            qt = pyes.FilteredQuery(qt, qDate)
            qt = pyes.FilteredQuery(qt, pyes.TermFilter('tags', 'ldap'))
            qt = pyes.FilteredQuery(qt,
                pyes.TermFilter('details.dn', t['term']))
            qt2 = qt.search()
            qt2.facet.add_term_facet('details.result')
            # keep a separate name so the outer facet results are not clobbered
            dnResults = es.search(qt2)
            # sys.stdout.write('{0}\n'.format(dnResults.facets['details.result'].terms))

            for r in dnResults.facets['details.result'].terms:
                # print(r['term'], r['count'])
                if r['term'] == 'ldap_success':
                    success = r['count']
                if r['term'] == 'ldap_invalid_credentials':
                    failures = r['count']
            resultsList.append(dict(dn=dn, failures=failures,
                success=success, begin=begindateUTC.isoformat(),
                end=enddateUTC.isoformat()))

        return(json.dumps(resultsList))
    except pyes.exceptions.NoServerAvailable:
        sys.stderr.write('Elastic Search server could not be reached, check network connectivity\n')


def kibanaDashboards():
    '''return the kibana dashboards stored in the kibana-int index (name and url) as json'''
    try:
        resultsList = []
        es = pyes.ES((list('{0}'.format(s) for s in options.esservers)))
        r = es.search(pyes.Search(pyes.MatchAllQuery(), size=100),
            'kibana-int', 'dashboard')
        if r:
            for dashboard in r:
                dashboardJson = json.loads(dashboard.dashboard)
                resultsList.append({
                    'name': dashboardJson['title'],
                    'url': "%s/%s/%s" % (options.kibanaurl,
                        "index.html#/dashboard/elasticsearch",
                        dashboardJson['title'])
                })
            return json.dumps(resultsList)
        else:
            sys.stderr.write('No Kibana dashboard found\n')
    except pyes.exceptions.NoServerAvailable:
        sys.stderr.write('Elastic Search server could not be reached, check network connectivity\n')


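# banhammer() expects an action dict shaped like the /blockip payload: 'address' and
# 'cidr' identify the offender row, 'duration' is one of '12hr', '1d', '1w' or '30d'
# (anything else means one hour), and 'comment', 'reporter' and the optional 'bugid'
# are recorded with the blacklist entry.
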
def banhammer(action):
    try:
        mysqlconn = MySQLdb.connect(
            host=options.banhammerdbhost,
            user=options.banhammerdbuser,
            passwd=options.banhammerdbpasswd,
            db=options.banhammerdbdb)
        dbcursor = mysqlconn.cursor()
        # Look if attacker already in the DB, if yes get id
        dbcursor.execute("""SELECT id FROM blacklist_offender
            WHERE address = %s AND cidr = %s""",
            (action['address'], int(action['cidr'])))
        qresult = dbcursor.fetchone()
        if not qresult:
            # insert new attacker in banhammer DB
            dbcursor.execute("""
                INSERT INTO blacklist_offender(address, cidr)
                VALUES (%s, %s)
                """, (action['address'], int(action['cidr'])))
            # get the ID of the attacker we just inserted
            dbcursor.execute("""SELECT id FROM blacklist_offender
                WHERE address = %s AND cidr = %s""",
                (action['address'], int(action['cidr'])))
            qresult = dbcursor.fetchone()
        (attacker_id,) = qresult

        # Compute start and end dates
        start_date = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
        end_date = datetime.utcnow() + timedelta(hours=1)
        if action['duration'] == '12hr':
            end_date = datetime.utcnow() + timedelta(hours=12)
        elif action['duration'] == '1d':
            end_date = datetime.utcnow() + timedelta(days=1)
        elif action['duration'] == '1w':
            end_date = datetime.utcnow() + timedelta(days=7)
        elif action['duration'] == '30d':
            end_date = datetime.utcnow() + timedelta(days=30)

        if action['bugid']:
            # Insert in DB
            dbcursor.execute("""
                INSERT INTO blacklist_blacklist(offender_id, start_date, end_date, comment, reporter, bug_number)
                VALUES (%s, %s, %s, %s, %s, %s)
                """, (attacker_id, start_date, end_date,
                      action['comment'], action['reporter'], int(action['bugid'])))
        else:
            dbcursor.execute("""
                INSERT INTO blacklist_blacklist(offender_id, start_date, end_date, comment, reporter)
                VALUES (%s, %s, %s, %s, %s)
                """, (attacker_id, start_date, end_date,
                      action['comment'], action['reporter']))
        mysqlconn.commit()
        sys.stderr.write('%s/%d: banhammered\n' % (action['address'], int(action['cidr'])))
    except Exception as e:
        sys.stderr.write('Error while banhammering %s/%s: %s\n' % (action.get('address'), action.get('cidr'), e))


def getWhois(ipaddress):
    try:
        whois = IPWhois(netaddr.IPNetwork(ipaddress)[0]).lookup()
        return (json.dumps(whois))
    except Exception as e:
        sys.stderr.write('Error looking up whois for {0}: {1}\n'.format(ipaddress, e))


def getIPCIF(ipaddress):
    # query a CIF service for information on this IP address per:
    # https://code.google.com/p/collective-intelligence-framework/wiki/API_HTTP_v1
    try:
        resultsList = []
        url = '{0}api?apikey={1}&limit=20&q={2}'.format(options.cifhosturl,
                                                        options.cifapikey,
                                                        ipaddress)
        headers = {'Accept': 'application/json'}
        r = requests.get(url=url, verify=False, headers=headers)
        if r.status_code == 200:
            # we get a \n delimited list of json entries
            cifjsons = r.text.split('\n')
            for c in cifjsons:
                # test for valid json
                try:
                    resultsList.append(json.loads(c))
                except ValueError:
                    pass
            return json.dumps(resultsList)
    except Exception as e:
        sys.stderr.write('Error looking up CIF results for {0}: {1}\n'.format(ipaddress, e))


def checkBlockIPService():
    '''verify at startup that the banhammer DB is reachable when IP blocking is enabled'''
    if options.enableBlockIP:
        try:
            mysqlconn = MySQLdb.connect(
                host=options.banhammerdbhost,
                user=options.banhammerdbuser,
                passwd=options.banhammerdbpasswd,
                db=options.banhammerdbdb)
            dbcursor = mysqlconn.cursor()
        except Exception as e:
            sys.stderr.write('Failed to connect to the Banhammer DB: {0}\n'.format(e))


def initConfig():
    # change this to your default zone for when it's not specified
    options.defaultTimeZone = getConfig('defaulttimezone',
                                        'US/Pacific',
                                        options.configfile)
    options.esservers = list(getConfig('esservers',
                                       'http://localhost:9200',
                                       options.configfile).split(','))
    options.kibanaurl = getConfig('kibanaurl',
                                  'http://localhost:9090',
                                  options.configfile)

    # options for your custom/internal ip blocking service
    # mozilla's is called banhammer
    # and uses an intermediary mysql DB
    # here we set credentials
    options.enableBlockIP = getConfig('enableBlockIP',
                                      False,
                                      options.configfile)
    options.banhammerdbhost = getConfig('banhammerdbhost',
                                        'localhost',
                                        options.configfile)
    options.banhammerdbuser = getConfig('banhammerdbuser',
                                        'auser',
                                        options.configfile)
    options.banhammerdbpasswd = getConfig('banhammerdbpasswd',
                                          '',
                                          options.configfile)
    options.banhammerdbdb = getConfig('banhammerdbdb',
                                      'banhammer',
                                      options.configfile)

    # options for your CIF service
    options.cifapikey = getConfig('cifapikey', '', options.configfile)
    options.cifhosturl = getConfig('cifhosturl',
                                   'http://localhost/',
                                   options.configfile)

    # check any service you'd like at startup rather than waiting
    # for a client request.
    checkBlockIPService()


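# Run standalone for development (the __main__ branch below serves on localhost:8081
# and defaults to a .conf file named after this script), or serve the module-level
# 'application' object from a WSGI server; both paths call initConfig() first.
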
if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option("-c", dest='configfile',
                      default=sys.argv[0].replace('.py', '.conf'),
                      help="configuration file to use")
    (options, args) = parser.parse_args()
    initConfig()

    run(host="localhost", port=8081)
else:
    parser = OptionParser()
    parser.add_option("-c", dest='configfile',
                      default=sys.argv[0].replace('.py', '.conf'),
                      help="configuration file to use")
    (options, args) = parser.parse_args()
    initConfig()

    application = default_app()