#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Jeff Bryner jbryner@mozilla.com
# Anthony Verez averez@mozilla.com

import json
import logging
import os
import pyes
import pytz
import requests
import sys
from datetime import datetime
from hashlib import md5
from requests.auth import HTTPBasicAuth
from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler
from dateutil.parser import parse


logger = logging.getLogger(sys.argv[0])


def loggerTimeStamp(self, record, datefmt=None):
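    # used as the formatter's formatTime override below so every log
    # record is stamped with a UTC ISO8601 timestamp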
    return toUTC(datetime.now()).isoformat()


def initLogger():
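    # send our log output to syslog or to stderr depending on the
    # 'output' option from the config file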
    logger.level = logging.INFO
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    formatter.formatTime = loggerTimeStamp
    if options.output == 'syslog':
        logger.addHandler(
            SysLogHandler(address=(options.sysloghostname,
                                   options.syslogport)))
    else:
        sh = logging.StreamHandler(sys.stderr)
        sh.setFormatter(formatter)
        logger.addHandler(sh)


def toUTC(suspectedDate, localTimeZone='UTC'):
    '''make a UTC date out of almost anything'''
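    # e.g. toUTC('2014-06-08 20:16:35', 'US/Pacific') or toUTC(datetime.now())
    # both yield a timezone-aware datetime normalized to UTC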
    utc = pytz.UTC
    objDate = None
    if type(suspectedDate) == str:
        objDate = parse(suspectedDate, fuzzy=True)
    elif type(suspectedDate) == datetime:
        objDate = suspectedDate

    if objDate is not None:
        if objDate.tzinfo is None:
            # naive date: assume it is in localTimeZone before converting
            objDate = pytz.timezone(localTimeZone).localize(objDate)
        objDate = utc.normalize(objDate)

    return objDate


def getDocID(servername):
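    # a static, deterministic doc id lets the 'latest' health doc be
    # overwritten in place instead of piling up new documents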
    # create a hash to use as the ES doc id
    # hostname plus salt as doctype.latest
    hash = md5()
    hash.update('{0}.mozdefhealth.latest'.format(servername))
    return hash.hexdigest()


def main():
    '''
    Get health and status stats and post to ES
    Post both as a historical reference (for charts)
    and as a static docid (for realtime current health/EPS displays)
    '''
    logger.debug('starting')
    logger.debug(options)
    es = pyes.ES(server=(list('{0}'.format(s) for s in options.esservers)))
    try:
        auth = HTTPBasicAuth(options.mquser, options.mqpassword)

        for server in options.mqservers:
            logger.debug('checking message queues on {0}'.format(server))
            r = requests.get(
                'http://{0}:{1}/api/queues'.format(server,
                                                   options.mqapiport),
                auth=auth)
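            # /api/queues on the rabbitmq management interface returns a
            # JSON list with one entry per queue (name, vhost, counts, rates)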
            mq = r.json()
            # setup a log entry for health/status.
            healthlog = dict(
                utctimestamp=toUTC(datetime.now(), options.defaulttimezone).isoformat(),
                hostname=server,
                processid=os.getpid(),
                processname=sys.argv[0],
                severity='INFO',
                summary='mozdef health/status',
                category='mozdef',
                tags=[],
                details=[])
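            # 'details' is immediately rebuilt as a dict; the empty list
            # above is just a placeholder in the initial event skeleton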
            healthlog['details'] = dict(username='mozdef')
            healthlog['details']['loadaverage'] = list(os.getloadavg())
            healthlog['details']['queues'] = list()
            healthlog['details']['total_deliver_eps'] = 0
            healthlog['details']['total_publish_eps'] = 0
            healthlog['details']['total_messages_ready'] = 0
            healthlog['tags'] = ['mozdef', 'status']
            for m in mq:
                if 'message_stats' in m.keys() and isinstance(m['message_stats'], dict):
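                    # message_stats only shows up once a queue has seen
                    # traffic; its *_details rates are messages/second
                    # as reported by the management API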
                    if 'messages_ready' in m.keys():
                        mready = m['messages_ready']
                        healthlog['details']['total_messages_ready'] += m['messages_ready']
                    else:
                        mready = 0
                    if 'messages_unacknowledged' in m.keys():
                        munack = m['messages_unacknowledged']
                    else:
                        munack = 0
                    queueinfo = dict(
                        queue=m['name'],
                        vhost=m['vhost'],
                        messages_ready=mready,
                        messages_unacknowledged=munack)

                    if 'deliver_details' in m['message_stats'].keys():
                        queueinfo['deliver_eps'] = round(m['message_stats']['deliver_details']['rate'], 2)
                        healthlog['details']['total_deliver_eps'] += round(m['message_stats']['deliver_details']['rate'], 2)
                    if 'deliver_no_ack_details' in m['message_stats'].keys():
                        queueinfo['deliver_eps'] = round(m['message_stats']['deliver_no_ack_details']['rate'], 2)
                        healthlog['details']['total_deliver_eps'] += round(m['message_stats']['deliver_no_ack_details']['rate'], 2)
                    if 'publish_details' in m['message_stats'].keys():
                        queueinfo['publish_eps'] = round(m['message_stats']['publish_details']['rate'], 2)
                        healthlog['details']['total_publish_eps'] += round(m['message_stats']['publish_details']['rate'], 2)
                    healthlog['details']['queues'].append(queueinfo)

            # post to elastic search servers directly without going through
            # message queues in case there is an availability issue
            es.index(index='events',
                     doc_type='mozdefhealth',
                     doc=json.dumps(healthlog),
                     bulk=False)
            # post another doc with a static docid and tag
            # for use when querying for the latest status
            healthlog['tags'] = ['mozdef', 'status', 'latest']
            es.index(index='events',
                     id=getDocID(server),
                     doc_type='mozdefhealth',
                     doc=json.dumps(healthlog),
                     bulk=False)
    except Exception as e:
        logger.error("Exception %r when gathering health and status" % e)


def initConfig():
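    # settings come from an ini-style config file read by configlib; a
    # minimal example (section name assumed) might look like:
    #   [options]
    #   output = syslog
    #   mqservers = mq1.example.com,mq2.example.com
    #   mquser = guest
    #   mqpassword = guest
    #   esservers = http://es1.example.com:9200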
    # output our log to stdout or syslog
    options.output = getConfig('output', 'stdout', options.configfile)
    # syslog hostname
    options.sysloghostname = getConfig('sysloghostname',
                                       'localhost',
                                       options.configfile)
    # syslog port
    options.syslogport = getConfig('syslogport', 514, options.configfile)

    # message queue server(s) to check on (comma-delimited list of hostnames)
    options.mqservers = list(getConfig('mqservers',
                                       'localhost',
                                       options.configfile).split(','))
    options.mquser = getConfig('mquser', 'guest', options.configfile)
    options.mqpassword = getConfig('mqpassword', 'guest', options.configfile)
    # port of the rabbitmq json management interface
    options.mqapiport = getConfig('mqapiport', 15672, options.configfile)

    # change this to your default zone for when it's not specified
    options.defaulttimezone = getConfig('defaulttimezone',
                                        'UTC',
                                        options.configfile)

    # elastic search server settings
    options.esservers = list(getConfig('esservers',
                                       'http://localhost:9200',
                                       options.configfile).split(','))


if __name__ == '__main__':
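    # with no -c argument the config file defaults to this script's path
    # with .py swapped for .conf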
    parser = OptionParser()
    parser.add_option(
        "-c",
        dest='configfile',
        default=sys.argv[0].replace('.py', '.conf'),
        help="configuration file to use")
    (options, args) = parser.parse_args()
    initConfig()
    initLogger()
    main()