Mirror of https://github.com/mozilla/MozDef.git
Improve unit test suite to connect to resources once
Parent: f300276e04
Commit: 3e2e95ab63
suite_helper.py (new file)
@@ -0,0 +1,93 @@
#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation

from configlib import getConfig

from kombu import Connection, Queue, Exchange


import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../lib"))
from elasticsearch_client import ElasticsearchClient

from utilities.toUTC import toUTC
from utilities.dot_dict import DotDict


# The following functions before the UnitTest class definition
# are a poor man's way to set up resourcing and ensure
# that we only set up a client one time per test suite run
# todo: Fix this by defining an init method when we switch
# from pytest to UnitTest
def parse_config_file():
    global CONFIG_FILE_CONTENTS
    try:
        CONFIG_FILE_CONTENTS
    except NameError:
        default_config = os.path.join(os.path.dirname(__file__), "config.conf")
        options = DotDict()
        options.configfile = default_config

        options.esservers = list(getConfig('esservers', 'http://localhost:9200', options.configfile).split(','))

        options.alertExchange = getConfig('alertexchange', 'alerts', options.configfile)
        options.queueName = getConfig('alertqueuename', 'alertBot', options.configfile)
        options.alerttopic = getConfig('alerttopic', 'mozdef.*', options.configfile)

        options.mquser = getConfig('mquser', 'guest', options.configfile)
        options.mqalertserver = getConfig('mqalertserver', 'localhost', options.configfile)
        options.mqpassword = getConfig('mqpassword', 'guest', options.configfile)
        options.mqport = getConfig('mqport', 5672, options.configfile)
        options.mqack = getConfig('mqack', True, options.configfile)
        CONFIG_FILE_CONTENTS = options

    return CONFIG_FILE_CONTENTS


def parse_mapping_file():
    global MAPPING_FILE_CONTENTS
    try:
        MAPPING_FILE_CONTENTS
    except NameError:
        default_mapping_file = os.path.join(os.path.dirname(__file__), "../config/defaultMappingTemplate.json")
        with open(default_mapping_file) as data_file:
            MAPPING_FILE_CONTENTS = data_file.read()
    return MAPPING_FILE_CONTENTS


def setup_es_client(options):
    global ES_CLIENT
    try:
        ES_CLIENT
    except NameError:
        ES_CLIENT = ElasticsearchClient(list('{0}'.format(s) for s in options.esservers))
    return ES_CLIENT


def setup_rabbitmq_client(options):
    global RABBITMQ_CLIENT
    try:
        RABBITMQ_CLIENT
    except NameError:
        mqConnString = 'amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,
                                                         options.mqpassword,
                                                         options.mqalertserver,
                                                         options.mqport)
        mqAlertConn = Connection(mqConnString)
        alertExchange = Exchange(name=options.alertExchange, type='topic', durable=True, delivery_mode=1)
        alertExchange(mqAlertConn).declare()

        alertQueue = Queue(options.queueName,
                           exchange=alertExchange,
                           routing_key=options.alerttopic,
                           durable=False,
                           no_ack=(not options.mqack))
        alertQueue(mqAlertConn).declare()

        RABBITMQ_CLIENT = mqAlertConn.Consumer(alertQueue, accept=['json'])
    return RABBITMQ_CLIENT
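
A short, illustrative sketch (not part of the commit) of how the module-level caching above behaves: each helper stores its result in a global and only rebuilds it when the name is undefined, so repeated calls within one pytest run reuse the same config object and clients. It assumes suite_helper.py is importable with config.conf beside it and the configured services reachable.

# Illustrative only: demonstrates the caching behaviour of the helpers above.
import suite_helper

first_options = suite_helper.parse_config_file()
second_options = suite_helper.parse_config_file()
assert first_options is second_options      # config is parsed once per run

first_es = suite_helper.setup_es_client(first_options)
second_es = suite_helper.setup_es_client(second_options)
assert first_es is second_es                # one shared ElasticsearchClient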

unit_test_suite.py
@@ -5,56 +5,32 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 # Copyright (c) 2017 Mozilla Corporation
 
+from datetime import datetime, timedelta
+from dateutil.parser import parse
+
+import random
+import pytest
+
 import os
 import sys
 sys.path.append(os.path.join(os.path.dirname(__file__), "../lib"))
-from elasticsearch_client import ElasticsearchClient
-
 from utilities.toUTC import toUTC
-from utilities.dot_dict import DotDict
-
-
-from datetime import datetime, timedelta
-from dateutil.parser import parse
-
-from configlib import getConfig
-
-from kombu import Connection, Queue, Exchange
-
-import random
-import pytest
+
+from suite_helper import parse_config_file, parse_mapping_file, setup_es_client, setup_rabbitmq_client
 
 
 class UnitTestSuite(object):
 
     def setup(self):
+        self.options = parse_config_file()
+        self.mapping_options = parse_mapping_file()
+        self.es_client = setup_es_client(self.options)
+        self.rabbitmq_alerts_consumer = setup_rabbitmq_client(self.options)
+
         current_date = datetime.now()
         self.event_index_name = current_date.strftime("events-%Y%m%d")
         self.previous_event_index_name = (current_date - timedelta(days=1)).strftime("events-%Y%m%d")
         self.alert_index_name = current_date.strftime("alerts-%Y%m")
-        self.parse_config()
-
-        # Elasticsearch
-        self.es_client = ElasticsearchClient(list('{0}'.format(s) for s in self.options.esservers))
-
-        # RabbitMQ
-        mqConnString = 'amqp://{0}:{1}@{2}:{3}//'.format(self.options.mquser,
-                                                         self.options.mqpassword,
-                                                         self.options.mqalertserver,
-                                                         self.options.mqport)
-
-        mqAlertConn = Connection(mqConnString)
-        alertExchange = Exchange(name=self.options.alertExchange, type='topic', durable=True, delivery_mode=1)
-        alertExchange(mqAlertConn).declare()
-
-        alertQueue = Queue(self.options.queueName,
-                           exchange=alertExchange,
-                           routing_key=self.options.alerttopic,
-                           durable=False,
-                           no_ack=(not self.options.mqack))
-        alertQueue(mqAlertConn).declare()
-
-        self.rabbitmq_alerts_consumer = mqAlertConn.Consumer(alertQueue, accept=['json'])
-
         if pytest.config.option.delete_indexes:
             self.reset_elasticsearch()
@@ -63,25 +39,6 @@ class UnitTestSuite(object):
         if pytest.config.option.delete_queues:
             self.reset_rabbitmq()
 
-    def parse_config(self):
-        default_config = os.path.join(os.path.dirname(__file__), "config.conf")
-        options = DotDict()
-        options.configfile = default_config
-
-        options.esservers = list(getConfig('esservers', 'http://localhost:9200', options.configfile).split(','))
-
-        options.alertExchange = getConfig('alertexchange', 'alerts', options.configfile)
-        options.queueName = getConfig('alertqueuename', 'alertBot', options.configfile)
-        options.alerttopic = getConfig('alerttopic', 'mozdef.*', options.configfile)
-
-        options.mquser = getConfig('mquser', 'guest', options.configfile)
-        options.mqalertserver = getConfig('mqalertserver', 'localhost', options.configfile)
-        options.mqpassword = getConfig('mqpassword', 'guest', options.configfile)
-        options.mqport = getConfig('mqport', 5672, options.configfile)
-        options.mqack = getConfig('mqack', True, options.configfile)
-
-        self.options = options
-
     def reset_rabbitmq(self):
         self.rabbitmq_alerts_consumer.channel.queue_purge()
@@ -91,9 +48,6 @@ class UnitTestSuite(object):
         if pytest.config.option.delete_queues:
             self.reset_rabbitmq()
-
-        self.rabbitmq_alerts_consumer.connection.close()
-        self.rabbitmq_alerts_consumer.close()
 
     def populate_test_event(self, event, event_type='event'):
         self.es_client.save_event(body=event, doc_type=event_type)
@@ -101,16 +55,11 @@ class UnitTestSuite(object):
         self.es_client.save_object(index='events', body=event, doc_type=event_type)
 
     def setup_elasticsearch(self):
-        default_mapping_file = os.path.join(os.path.dirname(__file__), "../config/defaultMappingTemplate.json")
-        mapping_str = ''
-        with open(default_mapping_file) as data_file:
-            mapping_str = data_file.read()
-
-        self.es_client.create_index(self.event_index_name, index_config=mapping_str)
+        self.es_client.create_index(self.event_index_name, index_config=self.mapping_options)
         self.es_client.create_alias('events', self.event_index_name)
-        self.es_client.create_index(self.previous_event_index_name, index_config=mapping_str)
+        self.es_client.create_index(self.previous_event_index_name, index_config=self.mapping_options)
         self.es_client.create_alias('events-previous', self.previous_event_index_name)
-        self.es_client.create_index(self.alert_index_name, index_config=mapping_str)
+        self.es_client.create_index(self.alert_index_name, index_config=self.mapping_options)
         self.es_client.create_alias('alerts', self.alert_index_name)
 
     def reset_elasticsearch(self):
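
For context, a hypothetical test module (not in the diff; the class name and event contents are invented, and the module name unit_test_suite is assumed) showing the intended effect of the refactor: setup() pulls the cached config and clients from suite_helper, and teardown no longer closes the shared RabbitMQ consumer, so each resource is connected once per suite run rather than once per test.

# Hypothetical usage sketch, assuming the class above lives in unit_test_suite.py.
from unit_test_suite import UnitTestSuite


class TestExampleEvents(UnitTestSuite):
    def test_event_is_saved(self):
        # writes through the es_client cached by suite_helper.setup_es_client()
        self.populate_test_event({'summary': 'example event'})

    def test_clients_are_reused(self):
        # setup() ran again for this test, but received the same cached objects
        assert self.es_client is not None
        assert self.rabbitmq_alerts_consumer is not None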