Mirror of https://github.com/mozilla/MozDef.git

Merge branch 'master' into reinforce2019

# Conflicts:
#	README.md
#	cloudy_mozdef/cloudformation/mozdef-alert-developer.yml
#	docker/compose/mozdef_bootstrap/files/index_mappings/alerts-star.json

This commit is contained in:
Commit b540e28aef
@@ -17,3 +17,4 @@ cloudy_mozdef/aws_parameters.json
cloudy_mozdef/aws_parameters.sh
docs/source/_build
docs/source/_static
*.swp
@@ -1,6 +1,6 @@
language: python
python:
- - "2.7"
+ - "3.6"
sudo: required
services:
  - docker
CHANGELOG
@@ -5,6 +5,62 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)

## [Unreleased]

## [v3.0.0] - 2019-07-08
### Added
- Support for Python3

### Removed
- Support for Python2
- Usage of boto (boto3 now preferred)


## [v2.0.1] - 2019-07-08
### Fixed
- Ensure all print statements use parentheses
- Improved broFixup plugin to handle new zeek format


## [v2.0.0] - 2019-06-28
### Added
- Source IP and Destination IP GeoPoints
- Elasticsearch 6.8 Support
- Kibana 6.8 Support
- All doc_types have been set to _doc to support Elasticsearch >= 6

### Removed
- Elasticsearch <= 5 Support
- Kibana <= 5 Support
- Specifying AWS keys in S3 backup script, moved to Elasticsearch Secrets


## [v1.40.0] - 2019-06-27
### Added
- Alert plugin for IP source enrichment
- Alert plugin for port scan enrichment

### Fixed
- Bulk message support in loginput

### Removed
- Vidyo2Mozdef cron script, moved to https://github.com/mozilla/mozdef-deprecated/blob/master/cron/vidyo2MozDef.py


## [v1.39.0] - 2019-05-29
### Added
- Pagination of Web UI tables
- Support for SQS as a replacement for RabbitMQ for alerts
- Support for no_auth for watchlist
- Cron script for closing indexes
- Documentation on AlertActions

### Changed
- Removed dependency on '_type' field in Elasticsearch

### Fixed
- Slackbot reconnects successfully during network errors
- Relative Kibana URLs now work correctly with protocol


## [v1.38.5] - 2019-04-09
### Added
- Support for CSS themes
@@ -76,7 +132,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Added checks on sending SQS messages to only accept intra-account messages
- Improved docker performance and disk space requirements

- [Unreleased]: https://github.com/mozilla/MozDef/compare/v1.38.5...HEAD
+ [Unreleased]: https://github.com/mozilla/MozDef/compare/v3.0.0...HEAD
[v3.0.0]: https://github.com/mozilla/MozDef/compare/v2.0.1...v3.0.0
[v2.0.1]: https://github.com/mozilla/MozDef/compare/v2.0.0...v2.0.1
[v2.0.0]: https://github.com/mozilla/MozDef/compare/v1.40.0...v2.0.0
[v1.40.0]: https://github.com/mozilla/MozDef/compare/v1.39.0...v1.40.0
[v1.39.0]: https://github.com/mozilla/MozDef/compare/v1.38.5...v1.39.0
[v1.38.5]: https://github.com/mozilla/MozDef/compare/v1.38.4...v1.38.5
[v1.38.4]: https://github.com/mozilla/MozDef/compare/v1.38.3...v1.38.4
[v1.38.3]: https://github.com/mozilla/MozDef/compare/v1.38.2...v1.38.3
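The bulk of this merge is the Python 2 to Python 3 migration that the changelog describes. As a reference point for the hunks that follow, here is a minimal sketch (sample dict invented) of the three idioms this diff replaces most often:

```python
data = {'severity': 'INFO', 'source': 'mozdef'}

# Python 2 spellings replaced throughout this diff:
#   data.iteritems(), u'...' literals, print "..." statements
for key, value in data.items():  # .iteritems() no longer exists in Python 3
    print('{0}={1}'.format(key, value))  # print is a function; u'' prefixes dropped
```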
Makefile
@@ -66,6 +66,7 @@ build: build-from-cwd

.PHONY: build-from-cwd
build-from-cwd: ## Build local MozDef images (use make NO_CACHE=--no-cache build to disable caching)
+	docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(BUILD_MODE) $(PARALLEL) $(NO_CACHE) base
	docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(BUILD_MODE) $(PARALLEL) $(NO_CACHE)

.PHONY: build-from-github
@@ -76,6 +77,7 @@ build-from-github: ## Build local MozDef images from the github branch (use mak

.PHONY: build-tests
build-tests: ## Build end-to-end test environment only
+	docker-compose -f docker/compose/docker-compose-tests.yml -p test-$(NAME) $(NO_CACHE) $(BUILD_MODE) base
	docker-compose -f docker/compose/docker-compose-tests.yml -p test-$(NAME) $(NO_CACHE) $(BUILD_MODE)

.PHONY: stop
@@ -43,7 +43,7 @@ MozDef is in production at Mozilla where we are using it to process over 300 mil
If you're interested in running MozDef and would like to give us feedback, please take the following surveys:

- [I have not used MozDef yet but I'm interested](https://www.surveygizmo.com/s3/5040959/fdd6121c7d32)
- - [I have used MozDef and I have feeback!](https://www.surveygizmo.com/s3/5040962/7a801108021b)
+ - [I have used MozDef and I have feedback!](https://www.surveygizmo.com/s3/5040962/7a801108021b)

These surveys also include a contact form where you can reach us if you would like to (it's optional)
@@ -85,30 +85,30 @@ class message(object):
            whois = IPWhois(source_ip).lookup_whois()
            whois_str = whois['nets'][0]['description']
            source_ip_isp = whois_str.replace('\n', ', ').replace('\r', '')
-           new_ip_info = u'{} ({})'.format(source_ip, source_ip_isp)
+           new_ip_info = '{} ({})'.format(source_ip, source_ip_isp)
        except Exception:
-           new_ip_info = u'{}'.format(source_ip)
+           new_ip_info = '{}'.format(source_ip)

-       new_location_str = u""
+       new_location_str = ""
        if city.lower() == 'unknown':
-           new_location_str += u'{0}'.format(country)
+           new_location_str += '{0}'.format(country)
        else:
-           new_location_str += u'{0}, {1}'.format(city, country)
+           new_location_str += '{0}, {1}'.format(city, country)

        event_timestamp = toUTC(message['events'][0]['documentsource']['details']['event_time'])
        event_day = event_timestamp.strftime('%B %d, %Y')
-       summary = u'On {0} (UTC), did you login from {1} ({2})?'.format(event_day, new_location_str, source_ip)
+       summary = 'On {0} (UTC), did you login from {1} ({2})?'.format(event_day, new_location_str, source_ip)

        previous_city = message['details']['previous_locality_details']['city']
        previous_country = message['details']['previous_locality_details']['country']
        if previous_city.lower() == 'unknown':
-           previous_location_str = u'{0}'.format(previous_country)
+           previous_location_str = '{0}'.format(previous_country)
        else:
-           previous_location_str = u'{0}, {1}'.format(previous_city, previous_country)
+           previous_location_str = '{0}, {1}'.format(previous_city, previous_country)

        alert_record = {
-           'alert_id': b2a_hex(os.urandom(15)),
-           'alert_code': b2a_hex(self.alert_classname),
+           'alert_id': b2a_hex(os.urandom(15)).decode(),
+           'alert_code': b2a_hex(self.alert_classname.encode()).decode(),
            'user_id': auth_full_username,
            'risk': self.config['risk'],
            'summary': summary,
@@ -47,7 +47,7 @@ class alertConsumer(ConsumerMixin):
        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            bodyDict = body
-       elif isinstance(body, str) or isinstance(body, unicode):
+       elif isinstance(body, str):
            try:
                bodyDict = json.loads(body)  # lets assume it's json
            except ValueError as e:
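Python 3 has no separate `unicode` type, so the old two-type check collapses to a single `isinstance(body, str)`. A standalone sketch of the same guard (function name hypothetical):

```python
import json

def coerce_body(body):
    # dicts pass straight through; strings are assumed to carry JSON
    if isinstance(body, dict):
        return body
    if isinstance(body, str):  # str now covers what py2 split into str/unicode
        try:
            return json.loads(body)
        except ValueError:
            return None  # not JSON; caller logs and drops the message
    return None

print(coerce_body('{"summary": "test alert"}'))  # {'summary': 'test alert'}
```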
@@ -52,7 +52,7 @@ for alert in ALERTS.keys():
# Load logging config
dictConfig(LOGGING)

-# print CELERYBEAT_SCHEDULE
+# print(CELERYBEAT_SCHEDULE)

# Optional configuration, see the application user guide.
# app.conf.update(
@@ -72,7 +72,7 @@ for alert_namespace in CELERYBEAT_SCHEDULE:
        alert_class = getattr(alert_module, alert_classname)
        app.register_task(alert_class())
    except ImportError as e:
-       print("Error importing {}").format(alert_namespace)
+       print("Error importing {}".format(alert_namespace))
        print(e)
        pass
    except Exception as e:
@@ -14,6 +14,10 @@ from mozdef_util.utilities.logger import logger
class AlertDeadmanGeneric(DeadmanAlertTask):

    def main(self):
+       # We override the event indices to search for
+       # because our deadman alerts might look past 48 hours
+       self.event_indices = ["events-weekly"]
+
        self._config = self.parse_json_alert_config('deadman_generic.json')
        for alert_cfg in self._config['alerts']:
            try:
@@ -31,9 +31,9 @@ class AlertFeedbackEvents(AlertTask):
            user = event['_source']['details']['alert_information']['user_id']
            event_summary = event['_source']['summary']
            event_date = event['_source']['details']['alert_information']['date']
-           summary = u"{} escalated alert within single-sign on (SSO) dashboard. Event Date: {} Summary: \"{}\"".format(user, event_date, event_summary)
+           summary = "{} escalated alert within single-sign on (SSO) dashboard. Event Date: {} Summary: \"{}\"".format(user, event_date, event_summary)

-           for alert_code, tag in self._config.iteritems():
+           for alert_code, tag in self._config.items():
                if event['_source']['details']['alert_information']['alert_code'] == alert_code:
                    tags.append(tag)
@@ -6,7 +6,7 @@ class AlertPluginSet(PluginSet):

    def send_message_to_plugin(self, plugin_class, message, metadata=None):
        if 'utctimestamp' in message and 'summary' in message:
-           message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
+           message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
            logger.info(message_log_str)

        return plugin_class.onMessage(message), metadata
@@ -18,12 +18,14 @@ from datetime import datetime
from collections import Counter
from celery import Task
from celery.utils.log import get_task_logger
-from config import RABBITMQ, ES, ALERT_PLUGINS

from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import TermMatch, ExistsMatch

+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../"))
+from lib.config import RABBITMQ, ES, ALERT_PLUGINS

sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from lib.alert_plugin_set import AlertPluginSet
@@ -35,7 +37,7 @@ def keypaths(nested):
    """ return a list of nested dict key paths
        like: [u'_source', u'details', u'program']
    """
-   for key, value in nested.iteritems():
+   for key, value in nested.items():
        if isinstance(value, collections.Mapping):
            for subkey, subvalue in keypaths(value):
                yield [key] + subkey, subvalue
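For reference, a self-contained sketch of how this generator walks a nested dict. The leaf branch is inferred, since the hunk only shows the recursive case, and note the file itself still uses the `collections.Mapping` alias, which works on the Python 3.6 target but moved to `collections.abc` and was removed in 3.10:

```python
import collections.abc

def keypaths(nested):
    # yield (key_path_list, leaf_value) for every leaf of a nested mapping
    for key, value in nested.items():
        if isinstance(value, collections.abc.Mapping):
            for subkey, subvalue in keypaths(value):
                yield [key] + subkey, subvalue
        else:
            yield [key], value

event = {'_source': {'details': {'program': 'sshd'}}}
for path, value in keypaths(event):
    print('.'.join(path), '=', value)  # _source.details.program = sshd
```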
@@ -106,10 +108,7 @@ class AlertTask(Task):
        self._configureKombu()
        self._configureES()

-       # We want to select all event indices
-       # and filter out the window based on timestamp
-       # from the search query
-       self.event_indices = ["events-*"]
+       self.event_indices = ['events', 'events-previous']

    def classname(self):
        return self.__class__.__name__
@@ -126,6 +125,9 @@ class AlertTask(Task):
            temp_value = getConfig(config_key, "", config_filename)
            setattr(self.config, config_key, temp_value)

+   def close_connections(self):
+       self.mqConn.release()
+
    def _discover_task_exchange(self):
        """Use configuration information to understand the message queue protocol.
        return: amqp, sqs
@@ -348,11 +350,7 @@ class AlertTask(Task):
            for i in Counter(aggregationValues).most_common():
                idict = {"value": i[0], "count": i[1], "events": [], "allevents": []}
                for r in results:
-                   if (
-                       getValueByPath(r["_source"], aggregationPath).encode(
-                           "ascii", "ignore"
-                       ) == i[0]
-                   ):
+                   if getValueByPath(r["_source"], aggregationPath) == i[0]:
                        # copy events detail into this aggregation up to our samples limit
                        if len(idict["events"]) < samplesLimit:
                            idict["events"].append(r)
@@ -514,11 +512,9 @@ class AlertTask(Task):
                    event["_source"]["alert_names"] = []
                event["_source"]["alert_names"].append(self.determine_alert_classname())

-               self.es.save_event(
-                   index=event["_index"], body=event["_source"], doc_id=event["_id"]
-               )
-               # We refresh here to ensure our changes to the events will show up for the next search query results
-               self.es.refresh(event["_index"])
+               self.es.save_event(index=event["_index"], body=event["_source"], doc_id=event["_id"])
+               # We refresh here to ensure our changes to the events will show up for the next search query results
+               self.es.refresh(event["_index"])
        except Exception as e:
            self.log.error("Error while updating events in ES: {0}".format(e))
@@ -1,4 +1,4 @@
-from alerttask import AlertTask
+from .alerttask import AlertTask


class DeadmanAlertTask(AlertTask):
@@ -6,4 +6,5 @@ class DeadmanAlertTask(AlertTask):
    def executeSearchEventsSimple(self):
        # We override this method to specify the size as 1
        # since we only care about if ANY events are found or not
-       return self.main_query.execute(self.es, indices=self.event_indices, size=1)
+       results = self.main_query.execute(self.es, indices=self.event_indices, size=1)
+       return results
@@ -0,0 +1,14 @@ (new file)
{
    "known": [
        {
            "range": "8.32.0.0/16",
            "site": "OFFICE1",
            "format": "{0} is in OFFICE1."
        },
        {
            "range": "4a00:7a49:232::/48",
            "site": "OFFICE2",
            "format": "{0} is in OFFICE2."
        }
    ]
}
@@ -0,0 +1,148 @@ (new file)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation

import json
from operator import add
import os
import re

import functools
import netaddr


CONFIG_FILE = os.path.join(
    os.path.dirname(__file__),
    'ip_source_enrichment.json')


def _find_ip_addresses(string):
    '''List all of the IPv4 and IPv6 addresses found in a string.'''

    ipv4_rx = '(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
    ipv6_rx = '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'

    ipv4 = re.findall(ipv4_rx, string)
    ipv6_map = map(
        lambda match: match[0] if isinstance(match, tuple) else match,
        re.findall(ipv6_rx, string))

    ipv6 = [x for x in ipv6_map]
    return ipv4 + ipv6


def enrich(alert, known_ips):
    '''Add information to alerts containing IP addresses that describes
    the source location of the IP address if it can be determined based
    on a configured mapping.
    '''

    def find_ips(value):
        if isinstance(value, str):
            return _find_ip_addresses(value)

        if isinstance(value, list) or isinstance(value, tuple):
            found = [find_ips(item) for item in value]
            return functools.reduce(add, found, [])

        if isinstance(value, dict):
            found = [find_ips(item) for item in value.values()]
            return functools.reduce(add, found, [])

        return []

    def ip_in_range(ip):
        return lambda known: netaddr.IPAddress(ip) in netaddr.IPSet([known['range']])

    ips = find_ips(alert)

    alert = alert.copy()

    alert['details']['sites'] = []

    for ip in set(ips):
        matching_descriptions = filter(ip_in_range(ip), known_ips)

        for desc in matching_descriptions:
            enriched = desc['format'].format(ip, desc['site'])

            alert['summary'] += '; ' + enriched

            alert['details']['sites'].append({
                'ip': ip,
                'site': desc['site'],
            })

    return alert


def _load_config(file_path):
    '''Private

    Read and parse a file from disk as JSON into a dictionary.
    '''

    with open(file_path) as config_file:
        return json.load(config_file)


class message(object):
    '''Alert plugin interface that handles messages (alerts).
    This plugin will look for IP addresses in any of the values of an
    alert dictionary. For each IP address found, it will append some
    text to the summary of the alert to provide more information
    about where the IP originates from if it is recognized.

    The expected format of the configuration file,
    `ip_source_enrichment.json.conf`, is as follows:

    ```json
    {
      "known": [
        {
          "range": "1.2.3.4/8",
          "site": "office1",
          "format": "IPv4 {0} is from {1}"
        },
        {
          "range": "1a2b:3c4d:123::/48",
          "site": "office2",
          "format": "IPv6 {0} is from {1}"
        }
      ]
    }
    ```

    The format string can accept zero to two parameters. The first
    will be the IP address found and the second will be the
    value of the corresponding 'site'.

    The modified alert will have a `details.sites` field added to it,
    with the following form:

    ```json
    {
      "details": {
        "sites": [
          {
            "ip": "1.2.3.4",
            "site": "office1"
          },
          {
            "ip": "1a2b:3c4d:123::",
            "site": "office2"
          }
        ]
      }
    }
    ```
    '''

    def __init__(self):
        self._config = _load_config(CONFIG_FILE)

    def onMessage(self, message):
        known_ips = self._config.get('known', [])

        return enrich(message, known_ips)
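A quick usage sketch of the `enrich` function above, with an invented alert and range:

```python
alert = {
    'summary': 'SSH brute force from 8.32.1.9',
    'details': {'sourceipaddress': '8.32.1.9'},
}
known = [{'range': '8.32.0.0/16', 'site': 'OFFICE1', 'format': '{0} is in OFFICE1.'}]

result = enrich(alert, known)
print(result['summary'])           # SSH brute force from 8.32.1.9; 8.32.1.9 is in OFFICE1.
print(result['details']['sites'])  # [{'ip': '8.32.1.9', 'site': 'OFFICE1'}]
```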
@@ -0,0 +1,13 @@ (new file)
{
    "elasticSearchAddress": "http://127.0.0.1:9200",
    "indicesToSearch": [
        "events-weekly"
    ],
    "maxConnections": 32,
    "matchTags": [
        "portscan"
    ],
    "searchWindow": {
        "hours": 24
    }
}
@@ -0,0 +1,205 @@ (new file)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation


import json
import os

from mozdef_util.query_models import SearchQuery, TermMatch
from mozdef_util.elasticsearch_client import ElasticsearchClient


CONFIG_FILE = os.path.join(
    os.path.dirname(__file__),
    'port_scan_enrichment.json')

MISSING_REQUIRED_KEY_ERR_MSG = 'invalid configuration; '\
    'missing key "elasticSearchAddress" must be a URL '\
    'pointing to the ElasticSearch instance used by MozDef'


class message(object):
    '''Alert plugin that handles messages (alerts) tagged as containing
    information about a port scan having been detected. This plugin
    will add information to such alerts describing any connections
    successfully established by the IP address from which the port
    scan originates.

    The expected format of the configuration file,
    `port_scan_enrichment.json`, is as such:

    ```json
    {
      "elasticSearchAddress": "http://127.0.0.1:9200",
      "indicesToSearch": [
        "events-weekly"
      ],
      "maxConnections": 32,
      "matchTags": [
        "portscan"
      ],
      "searchWindow": {
        "hours": 12,
        "minutes": 30,
        "seconds": 59
      }
    }
    ```

    `elasticSearchAddress` must be provided and must be a URL pointing
    to the ElasticSearch instance containing MozDef alerts.
    `indicesToSearch` is an array of names of indices to search in ES.
    If not provided or else an empty array, it defaults to `["events-weekly"]`.
    `maxConnections` is the maximum number of successful
    connections to list. If set to 0, all will be listed.
    `matchTags` is a list of tags to match against. This plugin will
    run against any alert containing any of the specified tags. If
    `matchTags` is not provided or is an empty array, it will default
    to `["portscan"]`.
    The `searchWindow` option is an object containing keyword
    arguments to be passed to Python's `datetime.timedelta` function
    and can thus contain any keys corresponding to the keyword
    arguments that would be passed to the `datetime.timedelta` function.
    If `searchWindow` is not present or is an empty object, the
    default search window is 24 hours.

    The modified alert will have a `details.recentconnections` field
    appended to it, formatted like so:

    ```json
    {
      "details": {
        "recentconnections": [
          {
            "destinationipaddress": "1.2.3.4",
            "destinationport": 80,
            "timestamp": "2016-07-13 22:33:31.625443+00:00"
          }
        ]
      }
    }
    ```

    That is, each connection will be described in an array and be an
    object containing the IP address and port over which the connection
    was established and the time the connection was made.
    '''

    def __init__(self):
        config = _load_config(CONFIG_FILE)

        try:
            es_address = config['elasticSearchAddress']
        except KeyError:
            raise KeyError(MISSING_REQUIRED_KEY_ERR_MSG)

        es_client = ElasticsearchClient(es_address)

        search_indices = config.get('searchIndices', [])

        self.max_connections = config.get('maxConnections', 0)
        self.match_tags = config.get('matchTags', [])
        self.search_window = config.get('searchWindow', {})

        if len(search_indices) == 0:
            search_indices = ['alerts']

        if self.max_connections == 0:
            self.max_connections = None

        if len(self.match_tags) == 0:
            self.match_tags = ['portscan']

        if len(self.search_window) == 0:
            self.search_window = {'hours': 24}

        # Store our ES client in a closure bound to the plugin object.
        # The intent behind this approach is to make the interface to
        # the `enrich` function require dependency injection for testing.
        def search_fn(query):
            return query.execute(es_client, indices=search_indices)

        self.search = search_fn

    def onMessage(self, message):
        alert_tags = message.get('tags', [])

        should_enrich = any([
            tag in alert_tags
            for tag in self.match_tags
        ])

        if should_enrich:
            return enrich(
                message,
                self.search,
                self.search_window,
                self.max_connections)

        return message


def _load_config(file_path):
    '''Private

    Load the alert plugin configuration from a file.
    '''

    with open(file_path) as config_file:
        return json.load(config_file)


def take(ls, n_items=None):
    '''Take only N items from a list.'''

    if n_items is None:
        return ls

    return ls[:n_items]


def enrich(alert, search_fn, search_window, max_connections):
    '''Enrich an alert with information about recent connections made by
    the 'details.sourceipaddress'.

    `search_fn` is expected to be a function that accepts a single argument,
    a `SearchQuery` object, and returns a list of results from Elastic Search.

    `search_window` is expected to be a dictionary specifying the amount of
    time into the past to query for events.

    `max_connections` is expected to be the maximum number of connections to
    list in the modified alert or else `None` if no limit should be applied.

    Returns a modified alert based on a copy of the original.
    '''

    search_query = SearchQuery(**search_window)

    search_query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'conn'),
        TermMatch(
            'details.sourceipaddress',
            alert['details']['sourceipaddress'])
    ])

    results = search_fn(search_query)

    events = [
        hit.get('_source', {})
        for hit in results.get('hits', [])
    ]

    alert['details']['recentconnections'] = []

    for event in take(events, max_connections):
        alert['details']['recentconnections'].append({
            'destinationipaddress': event['details']['destinationipaddress'],
            'destinationport': event['details']['destinationport'],
            'timestamp': event['timestamp']
        })

    return alert
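Because `enrich` receives the search function via dependency injection, it can be exercised without a live cluster. A sketch with a stubbed `search_fn` (canned data invented; `mozdef_util` must still be importable since `enrich` builds a real `SearchQuery`):

```python
def fake_search(query):
    # stand-in for SearchQuery.execute(); returns a canned ES-style response
    return {'hits': [{'_source': {
        'details': {'destinationipaddress': '10.0.0.5', 'destinationport': 443},
        'timestamp': '2019-07-08 12:00:00+00:00',
    }}]}

alert = {'details': {'sourceipaddress': '192.0.2.10'}, 'tags': ['portscan']}
print(enrich(alert, fake_search, {'hours': 24}, None)['details']['recentconnections'])
```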
@@ -28,7 +28,7 @@ class AlertAuthSignRelengSSH(AlertTask):

        for exclusion in self.config['exclusions']:
            exclusion_query = None
-           for key, value in exclusion.iteritems():
+           for key, value in exclusion.items():
                phrase_exclusion = PhraseMatch(key, value)
                if exclusion_query is None:
                    exclusion_query = phrase_exclusion
@@ -15,18 +15,18 @@ from optparse import OptionParser
from requests_futures.sessions import FuturesSession
from multiprocessing import Process, Queue
import logging
-from Queue import Empty
+from queue import Empty
from requests.packages.urllib3.exceptions import ClosedPoolError
import time

httpsession = FuturesSession(max_workers=5)
-httpsession.trust_env=False # turns of needless .netrc check for creds
+httpsession.trust_env = False # turns of needless .netrc check for creds
# a = requests.adapters.HTTPAdapter(max_retries=2)
# httpsession.mount('http://', a)


logger = logging.getLogger(sys.argv[0])
-logger.level=logging.DEBUG
+logger.level = logging.DEBUG

formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
@@ -34,19 +34,19 @@ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(messag
def postLogs(logcache):
    # post logs asynchronously with requests workers and check on the results
    # expects a queue object from the multiprocessing library
-   posts=[]
+   posts = []
    try:
        while not logcache.empty():
-           postdata=logcache.get_nowait()
+           postdata = logcache.get_nowait()
            if len(postdata) > 0:
-               url=options.url
-               a=httpsession.get_adapter(url)
-               a.max_retries=3
-               r=httpsession.post(url,data=postdata)
+               url = options.url
+               a = httpsession.get_adapter(url)
+               a.max_retries = 3
+               r = httpsession.post(url,data=postdata)
                posts.append((r,postdata,url))
    except Empty as e:
        pass
-   for p,postdata,url in posts:
+   for p, postdata, url in posts:
        try:
            if p.result().status_code >= 500:
                logger.error("exception posting to %s %r [will retry]\n" % (url, p.result().status_code))
@@ -63,12 +63,12 @@ def postLogs(logcache):
if __name__ == '__main__':
    parser=OptionParser()
    parser.add_option("-u", dest='url', default='http://localhost:8080/events/', help="mozdef events URL to use when posting events")
-   (options,args) = parser.parse_args()
-   sh=logging.StreamHandler(sys.stdout)
+   (options, args) = parser.parse_args()
+   sh = logging.StreamHandler(sys.stdout)
    sh.setFormatter(formatter)
    logger.addHandler(sh)
    # create a list of logs we can append json to and call for a post when we want.
-   logcache=Queue()
+   logcache = Queue()
    try:
        for i in range(0,10):
@@ -84,14 +84,14 @@ if __name__ == '__main__':
                tags=[],
                details=[]
            )
-           alog['details']=dict(success=True,username='mozdef')
-           alog['tags']=['mozdef','stresstest']
+           alog['details'] = dict(success=True, username='mozdef')
+           alog['tags'] = ['mozdef', 'stresstest']

            logcache.put(json.dumps(alog))
        if not logcache.empty():
            time.sleep(.001)
            try:
-               postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefStressTest")
+               postingProcess = Process(target=postLogs, args=(logcache,), name="json2MozdefStressTest")
                postingProcess.start()
            except OSError as e:
                if e.errno == 35:  # resource temporarily unavailable.
@@ -102,7 +102,7 @@ if __name__ == '__main__':

    while not logcache.empty():
        try:
-           postingProcess=Process(target=postLogs,args=(logcache,),name="json2MozdefStressTest")
+           postingProcess = Process(target=postLogs, args=(logcache,), name = "json2MozdefStressTest")
            postingProcess.start()
        except OSError as e:
            if e.errno == 35:  # resource temporarily unavailable.
@@ -147,7 +147,7 @@ def formatAlert(jsonDictIn):
    return colorify('{0}: {1} {2}'.format(
        severity,
        colors['blue'] + category + colors['normal'],
-       summary.encode('ascii', 'replace')
+       summary
    ))
@@ -219,7 +219,7 @@ class mozdefBot():
                ip = netaddr.IPNetwork(field)[0]
                if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
                    whois = IPWhois(ip).lookup_whois()
-                   description = whois['nets'][0]['description'].encode('string_escape')
+                   description = whois['nets'][0]['description']
                    self.client.msg(
                        recipient, "{0} description: {1}".format(field, description))
                else:
@@ -290,7 +290,7 @@ class alertConsumer(ConsumerMixin):
        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            bodyDict = body
-       elif isinstance(body, str) or isinstance(body, unicode):
+       elif isinstance(body, str):
            try:
                bodyDict = json.loads(body)  # lets assume it's json
            except ValueError as e:
@@ -402,7 +402,7 @@ def initConfig():

    # Our config parser stomps out the '#' so we gotta readd
    channelkeys = {}
-   for key, value in options.channelkeys.iteritems():
+   for key, value in options.channelkeys.items():
        if not key.startswith('#'):
            key = '#{0}'.format(key)
        channelkeys[key] = value
@@ -1,5 +1,6 @@
import os
import pynsive
+import importlib

from mozdef_util.utilities.logger import logger
@@ -32,7 +33,7 @@ class BotPluginSet():
                continue

            module_obj = pynsive.import_module(found_module)
-           reload(module_obj)
+           importlib.reload(module_obj)
            plugin_class_obj = module_obj.Command()
            logger.info('Plugin {0} registered to receive command with {1}'.format(module_name, plugin_class_obj.command_name))
            plugins.append(
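The builtin `reload` is gone in Python 3; `importlib.reload` is the replacement, hence the new import above. For example:

```python
import importlib
import json  # any already-imported module works as a demo target

json = importlib.reload(json)  # re-executes the module and returns it
```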
@@ -16,7 +16,7 @@ class Command():
            ip = netaddr.IPNetwork(ip_token)[0]
            if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
                whois = IPWhois(ip).lookup_whois()
-               description = str(whois['nets'][0]['description']).encode('string_escape')
+               description = whois['nets'][0]['description']
                response += "{0} description: {1}\n".format(ip_token, description)
            else:
                response += "{0}: hrm...loopback? private ip?\n".format(ip_token)
@@ -49,7 +49,7 @@ class AlertConsumer(ConsumerMixin):
        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            body_dict = body
-       elif isinstance(body, str) or isinstance(body, unicode):
+       elif isinstance(body, str):
            try:
                body_dict = json.loads(body)  # lets assume it's json
            except ValueError as e:
@@ -50,7 +50,7 @@ class SlackBot():

        if command == '!help':
            response = "\nHelp is on it's way...try these:\n"
-           for command_name, plugin in self.plugins.iteritems():
+           for command_name, plugin in self.plugins.items():
                response += "\n{0} -- {1}".format(
                    command_name,
                    plugin['help_text']
@@ -23,7 +23,7 @@ Resources:
      S3Bucket: public.us-west-2.security.allizom.org
      S3Key: mozdef-lambda-layer/layer-latest.zip
      CompatibleRuntimes:
-       - python2.7
+       - python3.6
      LicenseInfo: 'MPL 2.0'
  LambdalertIAMRole:
    Type: AWS::IAM::Role
@@ -60,5 +60,5 @@ Resources:
          - !Ref MozDefSecurityGroup
        SubnetIds: !Ref PublicSubnetIds
      ReservedConcurrentExecutions: 1
-     Runtime: "python2.7"
+     Runtime: "python3.6"
      Timeout: 120
@@ -23,7 +23,7 @@ Resources:
      S3Bucket: public.us-west-2.security.allizom.org
      S3Key: mozdef-lambda-layer/layer-latest.zip
      CompatibleRuntimes:
-       - python2.7
+       - python3.6
      LicenseInfo: 'MPL 2.0'
  LambdalertIAMRole:
    Type: AWS::IAM::Role
@@ -36,16 +36,16 @@ Resources: (indentation-only changes)
            Service: lambda.amazonaws.com
          Action: sts:AssumeRole
      ManagedPolicyArns:
        - arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole
  AlertWritersEnv:
    Type: "AWS::Lambda::Function"
    Properties:
      Handler: "lambdalert.handle"
      Role:
        Fn::GetAtt:
          - "LambdalertIAMRole"
          - "Arn"
      Code:
        S3Bucket: public.us-west-2.security.allizom.org
        S3Key: mozdef-lambda-layer/function-latest.zip
      Layers:
@@ -55,9 +55,9 @@ Resources:
          OPTIONS_ESSERVERS: !Ref ESUrl
          OPTIONS_MQPROTOCOL: sqs
      VpcConfig:
        SecurityGroupIds:
          - !Ref MozDefSecurityGroup
        SubnetIds: !Ref PublicSubnetIds
      ReservedConcurrentExecutions: 1
-     Runtime: "python2.7"
+     Runtime: "python3.6"
      Timeout: 120
@@ -13,7 +13,7 @@ clean:
	rm -rf $(ROOT_DIR)/build/*

.PHONY: deploy-shell
deploy-shell:
	docker run -ti -v ~/.aws:/root/.aws -v ${PWD}:/var/task mozdef/mozdef_base:latest

.PHONY: package-layer
@@ -34,13 +34,13 @@ package-function: clean
	zip -r /var/task/cloudy_mozdef/lambda_layer/function-latest.zip ."

.PHONY: upload-s3
upload-s3:
	aws s3 cp ${PWD}/cloudy_mozdef/lambda_layer/layer-latest.zip s3://public.us-west-2.security.allizom.org/mozdef-lambda-layer/layer-latest.zip
	aws s3 cp ${PWD}/cloudy_mozdef/lambda_layer/function-latest.zip s3://public.us-west-2.security.allizom.org/mozdef-lambda-layer/function-latest.zip

.PHONY: publish-layer
publish-layer: upload-s3
	aws lambda publish-layer-version \
-		--layer-name mozdef --compatible-runtimes python2.7 \
+		--layer-name mozdef --compatible-runtimes python3.6 \
		--content S3Bucket=public.us-west-2.security.allizom.org,S3Key=mozdef-lambda-layer/layer-latest.zip
@@ -6,7 +6,7 @@ class AlertPluginSet(PluginSet):

    def send_message_to_plugin(self, plugin_class, message, metadata=None):
        if 'utctimestamp' in message and 'summary' in message:
-           message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
+           message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
            logger.info(message_log_str)

        return plugin_class.onMessage(message), metadata
@@ -35,7 +35,7 @@ def keypaths(nested):
    """ return a list of nested dict key paths
        like: [u'_source', u'details', u'program']
    """
-   for key, value in nested.iteritems():
+   for key, value in nested.items():
        if isinstance(value, collections.Mapping):
            for subkey, subvalue in keypaths(value):
                yield [key] + subkey, subvalue
@@ -347,11 +347,7 @@ class AlertTask(Task):
            for i in Counter(aggregationValues).most_common():
                idict = {"value": i[0], "count": i[1], "events": [], "allevents": []}
                for r in results:
-                   if (
-                       getValueByPath(r["_source"], aggregationPath).encode(
-                           "ascii", "ignore"
-                       ) == i[0]
-                   ):
+                   if getValueByPath(r["_source"], aggregationPath) == i[0]:
                        # copy events detail into this aggregation up to our samples limit
                        if len(idict["events"]) < samplesLimit:
                            idict["events"].append(r)
@@ -50,7 +50,7 @@
    "set -e",
    "sudo yum update -y",
    "sudo yum makecache fast",
-   "sudo yum install -y glibc-devel gcc libstdc++ libffi-devel zlib-devel make mysql-devel python python-devel python-pip git docker python3",
+   "sudo yum install -y glibc-devel gcc libstdc++ libffi-devel zlib-devel make python python-devel python-pip git docker python3",
    "sudo pip install virtualenv docker-compose",
    "sudo systemctl enable docker",
    "sudo systemctl start docker",
@@ -11,20 +11,12 @@ import hjson
import sys
import os
import requests
-import mozdef_client as mozdef
-from mozdef_util.utilities.dot_dict import DotDict
-
-try:
-    import urllib.parse
-
-    quote_url = urllib.parse.quote
-except ImportError:
-    # Well hello there python2 user!
-    import urllib
-
-    quote_url = urllib.quote
import traceback

+import mozdef_client as mozdef
+
+from mozdef_util.utilities.dot_dict import DotDict


def fatal(msg):
    print(msg)
@@ -257,11 +249,11 @@ def save_state(fpath, state):
def byteify(input):
    """Convert input to ascii"""
    if isinstance(input, dict):
-       return {byteify(key): byteify(value) for key, value in input.iteritems()}
+       return {byteify(key): byteify(value) for key, value in input.items()}
    elif isinstance(input, list):
        return [byteify(element) for element in input]
-   elif isinstance(input, unicode):
-       return input.encode("utf-8")
+   elif not isinstance(input, str):
+       return str(input)
    else:
        return input
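With no `unicode` type in Python 3, the rewritten `byteify` normalizes any non-`str` leaf with `str()` instead of encoding to UTF-8 bytes. A quick check of the new behavior:

```python
def byteify(value):
    """Normalize keys and values of a nested structure to str."""
    if isinstance(value, dict):
        return {byteify(k): byteify(v) for k, v in value.items()}
    if isinstance(value, list):
        return [byteify(e) for e in value]
    if not isinstance(value, str):
        return str(value)
    return value

print(byteify({'count': 3, 'tags': [1, 'a']}))  # {'count': '3', 'tags': ['1', 'a']}
```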
@@ -1,7 +1,5 @@
[options]
esservers=http://localhost:9200
-aws_access_key_id = <add_aws_access_key_id>
-aws_secret_access_key = <add_aws_secret_access_key>
aws_bucket = <add_s3_bucket_name>
backup_indices = <add_backup_indices>
backup_dobackup = 1,1,1,1,1,1,1,1
@@ -16,29 +16,25 @@
# options.aws_bucket/elasticsearch/YYYY-MM/servername/indices/indexname

import sys
import os
from datetime import datetime
from datetime import timedelta
from datetime import date
from configlib import getConfig, OptionParser
import calendar
import socket
-import boto
-import boto.s3
import requests
import json
from os.path import expanduser
from mozdef_util.utilities.logger import logger


def main():
    logger.debug('started')

+   json_headers = {
+       'Content-Type': 'application/json',
+   }
    try:
        esserver = options.esservers[0]
-       s3 = boto.connect_s3(
-           aws_access_key_id=options.aws_access_key_id,
-           aws_secret_access_key=options.aws_secret_access_key
-       )
        idate = date.strftime(datetime.utcnow() - timedelta(days=1), '%Y%m%d')
        bucketdate = date.strftime(datetime.utcnow() - timedelta(days=1), '%Y-%m')
        hostname = socket.gethostname()
@@ -49,11 +45,10 @@ def main():
                "type": "s3",
                "settings": {
                    "bucket": options.aws_bucket,
-                   "base_path": "elasticsearch/{0}/{1}".format(bucketdate, hostname),
-                   "region": "{0}".format(options.aws_region)
+                   "base_path": "elasticsearch/{0}/{1}".format(bucketdate, hostname)
                }
            }
-           r = requests.put('%s/_snapshot/s3backup' % esserver, data=json.dumps(snapshot_config))
+           r = requests.put('%s/_snapshot/s3backup' % esserver, headers=json_headers, data=json.dumps(snapshot_config))
            if 'status' in r.json():
                logger.error("Error while registering snapshot repo: %s" % r.text)
            else:
|
|||
epoch = calendar.timegm(datetime.utcnow().utctimetuple())
|
||||
r = requests.put(
|
||||
'{0}/_snapshot/s3backup/{1}-{2}?wait_for_completion=true'.format(esserver, index_to_snapshot, epoch),
|
||||
headers=json_headers,
|
||||
data=json.dumps(snapshot_config)
|
||||
)
|
||||
if 'status' in r.json():
|
||||
|
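Elasticsearch 6 rejects REST request bodies that lack an explicit `Content-Type`, which is why both `requests.put` calls now pass `json_headers`. Equivalently (endpoint and bucket name invented):

```python
import requests

resp = requests.put(
    'http://localhost:9200/_snapshot/s3backup',
    headers={'Content-Type': 'application/json'},  # mandatory on Elasticsearch 6+
    data='{"type": "s3", "settings": {"bucket": "my-backups"}}',
)
print(resp.status_code)
```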
@@ -82,32 +78,6 @@ def main():
            else:
                logger.debug('snapshot %s finished' % index_to_snapshot)

-           # create a restore script
-           # referencing the latest snapshot
-           localpath = '%s/%s-restore.sh' % (expanduser("~"), index)
-
-           with open(localpath, 'w') as f:
-               logger.debug('Writing %s' % localpath)
-               f.write("""
-#!/bin/bash
-
-echo -n "Restoring the snapshot..."
-curl -s -XPOST "%s/_snapshot/s3backup/%s-%s/_restore?wait_for_completion=true"
-
-echo "DONE!"
-""" % (esserver, index_to_snapshot, epoch))
-
-           # upload the restore script
-           bucket = s3.get_bucket(options.aws_bucket)
-           key = bucket.new_key('elasticsearch/%s/%s/%s-%s-%s-restore.sh' % (
-               bucketdate, hostname, index, idate, epoch))
-           key.set_contents_from_filename(localpath)
-
-           # removing local file
-           os.remove(localpath)
-
-   except boto.exception.NoAuthHandlerFound:
-       logger.error("No auth handler found, check your credentials")
    except Exception as e:
        logger.error("Unhandled exception, terminating: %r" % e)
@@ -155,23 +125,6 @@ def initConfig():
            '20,0,0',
            options.configfile).split(',')
    )
-   # aws credentials to use to send files to s3
-   options.aws_access_key_id = getConfig(
-       'aws_access_key_id',
-       '',
-       options.configfile
-   )
-   options.aws_secret_access_key = getConfig(
-       'aws_secret_access_key',
-       '',
-       options.configfile
-   )
-   options.aws_region = getConfig(
-       'aws_region',
-       'us-west-1',
-       options.configfile
-   )

    options.aws_bucket = getConfig(
        'aws_bucket',
        '',
@@ -46,7 +46,7 @@ def keypaths(nested):
    ''' return a list of nested dict key paths
        like: [u'_source', u'details', u'hostname']
    '''
-   for key, value in nested.iteritems():
+   for key, value in nested.items():
        if isinstance(value, collections.Mapping):
            for subkey, subvalue in keypaths(value):
                yield [key] + subkey, subvalue
@@ -240,8 +240,8 @@ def searchMongoAlerts(mozdefdb):
                if len(categoryCounts) == 1:
                    # is the alert category mapped to an attacker category?
                    for category in options.categorymapping:
-                       if category.keys()[0] == categoryCounts[0][0]:
-                           attacker['category'] = category[category.keys()[0]]
+                       if list(category.keys())[0] == categoryCounts[0][0]:
+                           attacker['category'] = category[list(category.keys())[0]]
                            attackers.save(attacker)
@@ -19,7 +19,8 @@ from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch
def getDocID(usermacaddress):
    # create a hash to use as the ES doc id
    hash = md5()
-   hash.update('{0}.mozdefintel.usernamemacaddress'.format(usermacaddress))
+   seed = '{0}.mozdefintel.usernamemacaddress'.format(usermacaddress)
+   hash.update(seed.encode())
    return hash.hexdigest()
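Python 3's `hashlib` digests only accept bytes, so every `md5().update(...)` call site in this merge gains an `.encode()`:

```python
from hashlib import md5

h = md5()
h.update('host1.mozdefintel.usernamemacaddress'.encode())  # str must become bytes
print(h.hexdigest())
```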
@@ -4,9 +4,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation

-import boto
-import boto.s3
+import boto3
import logging
import random
import re
@@ -149,19 +147,14 @@ def s3_upload_file(file_path, bucket_name, key_name):
    """
    Upload a file to the given s3 bucket and return a template url.
    """
-   conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key)
-   try:
-       bucket = conn.get_bucket(bucket_name, validate=False)
-   except boto.exception.S3ResponseError:
-       conn.create_bucket(bucket_name)
-       bucket = conn.get_bucket(bucket_name, validate=False)
-
-   key = boto.s3.key.Key(bucket)
-   key.key = key_name
-   key.set_contents_from_filename(file_path)
-
-   key.set_acl('public-read')
-   url = "https://s3.amazonaws.com/{}/{}".format(bucket.name, key.name)
+   s3 = boto3.resource(
+       's3',
+       aws_access_key_id=options.aws_access_key_id,
+       aws_secret_access_key=options.aws_secret_access_key
+   )
+   s3.meta.client.upload_file(
+       file_path, bucket_name, key_name, ExtraArgs={'ACL': 'public-read'})
+   url = "https://s3.amazonaws.com/{}/{}".format(bucket_name, key_name)
    print("URL: {}".format(url))
    return url
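The boto3 replacement collapses the old bucket/key dance into a single managed-transfer call; a minimal standalone sketch (bucket and paths invented, credentials resolved from the environment):

```python
import boto3

s3 = boto3.resource('s3')
s3.meta.client.upload_file(
    '/tmp/report.json', 'my-example-bucket', 'reports/report.json',
    ExtraArgs={'ACL': 'public-read'},  # same public-read ACL the script sets
)
```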
@@ -5,8 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation

-import boto
-import boto.s3
+import boto3
import netaddr
import random
import sys
@@ -137,12 +136,13 @@ def main():
        {"$project": {"address": 1}},
        {"$limit": options.iplimit}
    ])
-   IPList = []
+   ips = []
    for ip in ipCursor:
-       IPList.append(ip['address'])
+       ips.append(ip['address'])
+   uniq_ranges = netaddr.cidr_merge(ips)
    # to text
    with open(options.outputfile, 'w') as outputfile:
-       for ip in IPList:
+       for ip in uniq_ranges:
            outputfile.write("{0}\n".format(ip))
        outputfile.close()
    # to s3?
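`netaddr.cidr_merge` collapses adjacent addresses into the smallest covering set of CIDR ranges, so the output file now lists ranges rather than one line per IP:

```python
import netaddr

ips = ['192.0.2.0', '192.0.2.1', '192.0.2.2', '192.0.2.3']
print(netaddr.cidr_merge(ips))  # [IPNetwork('192.0.2.0/30')]
```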
@@ -197,19 +197,14 @@ def s3_upload_file(file_path, bucket_name, key_name):
    """
    Upload a file to the given s3 bucket and return a template url.
    """
-   conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key)
-   try:
-       bucket = conn.get_bucket(bucket_name, validate=False)
-   except boto.exception.S3ResponseError:
-       conn.create_bucket(bucket_name)
-       bucket = conn.get_bucket(bucket_name, validate=False)
-
-   key = boto.s3.key.Key(bucket)
-   key.key = key_name
-   key.set_contents_from_filename(file_path)
-
-   key.set_acl('public-read')
-   url = "https://s3.amazonaws.com/{}/{}".format(bucket.name, key.name)
+   s3 = boto3.resource(
+       's3',
+       aws_access_key_id=options.aws_access_key_id,
+       aws_secret_access_key=options.aws_secret_access_key
+   )
+   s3.meta.client.upload_file(
+       file_path, bucket_name, key_name, ExtraArgs={'ACL': 'public-read'})
+   url = "https://s3.amazonaws.com/{}/{}".format(bucket_name, key_name)
    print("URL: {}".format(url))
    return url
@@ -1,152 +1,133 @@
Old template (removed):
{
  "order" : 0,
  "template" : "*",
  "settings" : { },
  "mappings" : {
    "_default_" : {
      "dynamic_templates" : [ {
        "string_fields" : {
          "mapping" : {
            "type" : "keyword"
          },
          "match" : "*",
          "match_mapping_type" : "string"
        }
      }, {
        "float_fields" : {
          "mapping" : {
            "type" : "keyword"
          },
          "match" : "*",
          "match_mapping_type" : "float"
        }
      }, {
        "double_fields" : {
          "mapping" : {
            "type" : "keyword"
          },
          "match" : "*",
          "match_mapping_type" : "double"
        }
      }, {
        "byte_fields" : {
          "mapping" : {
            "type" : "keyword"
          },
          "match" : "*",
          "match_mapping_type" : "byte"
        }
      }, {
        "short_fields" : {
          "mapping" : {
            "type" : "keyword"
          },
          "match" : "*",
          "match_mapping_type" : "short"
        }
      }, {
        "integer_fields" : {
          "mapping" : {
            "type" : "keyword"
          },
          "match" : "*",
          "match_mapping_type" : "integer"
        }
      }, {
        "long_fields" : {
          "mapping" : {
            "type" : "keyword"
          },
          "match" : "*",
          "match_mapping_type" : "long"
        }
      } ],
      "properties" : {
        "category" : {
          "type" : "keyword"
        },
        "hostname" : {
          "type" : "keyword"
        },
        "processid" : {
          "type" : "keyword"
        },
        "processname": {
          "type" : "keyword"
        },
        "severity" : {
          "type" : "keyword"
        },
        "source" : {
          "type" : "keyword"
        },
        "summary" : {
          "type" : "text"
        },
        "details" : {
          "properties" : {
            "destinationport" : {
              "index" : "not_analyzed",
              "type" : "long"
            },
            "hostname" : {
              "type" : "keyword"
            },
            "sourceipaddress" : {
              "type" : "ip"
            },
            "srcip" : {
              "type" : "ip"
            },
            "sourceipv4address" : {
              "type" : "keyword"
            },
            "destinationipaddress" : {
              "type" : "ip"
            },
            "success" : {
              "type" : "boolean"
            },
            "sourceport" : {
              "index" : "not_analyzed",
              "type" : "long"
            },
            "apiversion" : {
              "properties" : {
                "raw_value": {
                  "type" : "keyword"
                }
              }
            },
            "requestparameters" : {
              "properties" : {
                "logstreamname": {
                  "properties": {
                    "raw_value": {
                      "type" : "keyword"
                    }
                  }
                }
              }
            }
          }
        },
        "receivedtimestamp" : {
          "format" : "dateOptionalTime",
          "type" : "date"
        },
        "utctimestamp" : {
          "format" : "dateOptionalTime",
          "type" : "date"
        },
        "version" : {
          "type" : "keyword"
        }
      },
      "_all" : {
        "enabled" : true
      }
    }
  },
  "aliases" : { }
}

New template (added):
{
  "order": 0,
  "index_patterns": [
    ["events*", "alerts*"]
  ],
  "settings": {},
  "mappings": {
    "_doc": {
      "dynamic_templates": [
        {
          "string_fields": {
            "mapping": {
              "type": "keyword"
            },
            "match": "*",
            "match_mapping_type": "string"
          }
        },
        {
          "binary_fields": {
            "mapping": {
              "type": "keyword"
            },
            "match": "*",
            "match_mapping_type": "binary"
          }
        },
        {
          "double_fields": {
            "mapping": {
              "type": "keyword"
            },
            "match": "*",
            "match_mapping_type": "double"
          }
        },
        {
          "long_fields": {
            "mapping": {
              "type": "keyword"
            },
            "match": "*",
            "match_mapping_type": "long"
          }
        }
      ],
      "properties": {
        "type": {
          "type": "keyword"
        },
        "category": {
          "type": "keyword"
        },
        "hostname": {
          "type": "keyword"
        },
        "processid": {
          "type": "keyword"
        },
        "processname": {
          "type": "keyword"
        },
        "severity": {
          "type": "keyword"
        },
        "source": {
          "type": "keyword"
        },
        "summary": {
          "type": "text"
        },
        "details": {
          "properties": {
            "sourceipaddress": {
              "type": "ip"
            },
            "sourceipv4address": {
              "type": "keyword"
            },
            "destinationipaddress": {
              "type": "ip"
            },
            "destinationport": {
              "type": "keyword"
            },
            "destinationipgeopoint": {
              "type": "geo_point"
            },
            "sourceipgeopoint": {
              "type": "geo_point"
            },
            "success": {
              "type": "boolean"
            },
            "sourceport": {
              "type": "keyword"
            },
            "apiversion": {
              "properties": {
                "raw_value": {
                  "type": "keyword"
                }
              }
            },
            "requestparameters": {
              "properties": {
                "logstreamname": {
                  "properties": {
                    "raw_value": {
                      "type": "keyword"
                    }
                  }
                }
              }
            }
          }
        },
        "receivedtimestamp": {
          "format": "dateOptionalTime",
          "type": "date"
        },
        "utctimestamp": {
          "format": "dateOptionalTime",
          "type": "date"
        },
        "version": {
          "type": "keyword"
        }
      }
    }
  },
  "aliases": {}
}
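The new `sourceipgeopoint`/`destinationipgeopoint` mappings back the GeoPoints feature from the changelog; an event matching this template would carry coordinates like (values invented):

```python
event = {
    'category': 'bro',
    'summary': 'connection from 8.32.1.9',
    'details': {
        'sourceipaddress': '8.32.1.9',
        'sourceipgeopoint': {'lat': 45.52, 'lon': -122.68},  # geo_point as a lat/lon object
    },
}
```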
@@ -52,7 +52,7 @@ def flattenDict(inDict, pre=None, values=True):
    '''
    pre = pre[:] if pre else []
    if isinstance(inDict, dict):
-       for key, value in inDict.iteritems():
+       for key, value in inDict.items():
            if isinstance(value, dict):
                for d in flattenDict(value, pre + [key], values):
                    yield d
@@ -65,8 +65,6 @@ def flattenDict(inDict, pre=None, values=True):
                if values:
                    if isinstance(value, str):
                        yield '.'.join(pre) + '.' + key + '=' + str(value)
-                   elif isinstance(value, unicode):
-                       yield '.'.join(pre) + '.' + key + '=' + value.encode('ascii', 'ignore')
                    elif value is None:
                        yield '.'.join(pre) + '.' + key + '=None'
                    else:
@@ -75,8 +73,6 @@ def flattenDict(inDict, pre=None, values=True):
                if values:
                    if isinstance(value, str):
                        yield key + '=' + str(value)
-                   elif isinstance(value, unicode):
-                       yield key + '=' + value.encode('ascii', 'ignore')
                    elif value is None:
                        yield key + '=None'
                    else:
@@ -110,7 +106,7 @@ def main():
    # or you will get access denied even with correct delegations/scope

    credentials = SignedJwtAssertionCredentials(client_email,
-                                               private_key,
+                                               private_key.encode(),
                                                scope=scope,
                                                sub=options.impersonate)
    http = Http()
@@ -138,8 +134,10 @@ def main():
            # change key/values like:
            # actor.email=someone@mozilla.com
            # to actor_email=value

-           key,value =keyValue.split('=')
+           try:
+               key,value =keyValue.split('=')
+           except ValueError as e:
+               continue
            key=key.replace('.','_').lower()
            details[key]=value
@@ -23,7 +23,8 @@ def getDocID(servername):
    # create a hash to use as the ES doc id
    # hostname plus salt as doctype.latest
    hash = md5()
-   hash.update('{0}.mozdefhealth.latest'.format(servername))
+   seed = '{0}.mozdefhealth.latest'.format(servername)
+   hash.update(seed.encode())
    return hash.hexdigest()
@@ -109,11 +110,11 @@ def main():

    # post to elastic search servers directly without going through
    # message queues in case there is an availability issue
-   es.save_event(index=index, body=json.dumps(healthlog))
+   es.save_object(index=index, body=json.dumps(healthlog))
    # post another doc with a static docid and tag
    # for use when querying for the latest status
    healthlog['tags'] = ['mozdef', 'status', 'latest']
-   es.save_event(index=index, doc_id=getDocID(server), body=json.dumps(healthlog))
+   es.save_object(index=index, doc_id=getDocID(server), body=json.dumps(healthlog))


def initConfig():
@@ -1,25 +1,23 @@
{
    "order": 0,
    "template": "mozdefstate",
    "index_patterns": "mozdefstate",
    "settings": {
        "number_of_shards": 1,
        "number_of_replicas": 0
    },
    "mappings": {
        "doc": {
            "_all": {
                "enabled": true
            },
            "dynamic_templates": [{
                "string_fields": {
                    "match": "*",
                    "match_mapping_type": "string",
                    "mapping": {
                        "type": "keyword"
                    }
                }
            }
            ],
        "_doc": {
            "dynamic_templates": [
                {
                    "string_fields": {
                        "match": "*",
                        "match_mapping_type": "string",
                        "mapping": {
                            "type": "keyword"
                        }
                    }
                }
            ],
            "properties": {
                "category": {
                    "type": "keyword"

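For reference, a trimmed sketch of what this template converges on for Elasticsearch 6.x, pushed with the plain REST template API: `index_patterns` replaces `template`, the single mapping type is `_doc`, and the removed `_all` field is simply omitted. The localhost URL is an assumption; MozDef's own bootstrap goes through its Elasticsearch client instead.

    import json
    import requests

    template = {
        "order": 0,
        "index_patterns": "mozdefstate",
        "settings": {"number_of_shards": 1, "number_of_replicas": 0},
        "mappings": {
            "_doc": {
                "dynamic_templates": [{
                    "string_fields": {
                        "match": "*",
                        "match_mapping_type": "string",
                        "mapping": {"type": "keyword"},
                    }
                }]
            }
        },
    }

    resp = requests.put(
        "http://localhost:9200/_template/mozdefstate",  # assumed local ES
        data=json.dumps(template),
        headers={"Content-Type": "application/json"},
    )
    print(resp.status_code, resp.text)
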
@@ -17,7 +17,7 @@ import sys
from configlib import getConfig, OptionParser
from datetime import datetime
from hashlib import md5
import boto.sqs
import boto3

from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.utilities.logger import logger

@@ -28,7 +28,8 @@ def getDocID(sqsregionidentifier):
    # create a hash to use as the ES doc id
    # hostname plus salt as doctype.latest
    hash = md5()
    hash.update('{0}.mozdefhealth.latest'.format(sqsregionidentifier))
    seed = '{0}.mozdefhealth.latest'.format(sqsregionidentifier)
    hash.update(seed.encode())
    return hash.hexdigest()

@@ -36,26 +37,35 @@ def getQueueSizes():
    logger.debug('starting')
    logger.debug(options)
    es = ElasticsearchClient(options.esservers)
    sqslist = {}
    sqslist['queue_stats'] = {}
    qcount = len(options.taskexchange)
    qcounter = qcount - 1

    mqConn = boto.sqs.connect_to_region(
        options.region,
    sqs_client = boto3.client(
        "sqs",
        region_name=options.region,
        aws_access_key_id=options.accesskey,
        aws_secret_access_key=options.secretkey
    )
    queues_stats = {
        'queues': [],
        'total_feeds': len(options.taskexchange),
        'total_messages_ready': 0,
        'username': 'mozdef'
    }
    for queue_name in options.taskexchange:
        logger.debug('Looking for sqs queue stats in queue' + queue_name)
        queue_url = sqs_client.get_queue_url(QueueName=queue_name)['QueueUrl']
        queue_attributes = sqs_client.get_queue_attributes(QueueUrl=queue_url, AttributeNames=['All'])['Attributes']
        queue_stats = {
            'queue': queue_name,
        }
        if 'ApproximateNumberOfMessages' in queue_attributes:
            queue_stats['messages_ready'] = int(queue_attributes['ApproximateNumberOfMessages'])
            queues_stats['total_messages_ready'] += queue_stats['messages_ready']
        if 'ApproximateNumberOfMessagesNotVisible' in queue_attributes:
            queue_stats['messages_inflight'] = int(queue_attributes['ApproximateNumberOfMessagesNotVisible'])
        if 'ApproximateNumberOfMessagesDelayed' in queue_attributes:
            queue_stats['messages_delayed'] = int(queue_attributes['ApproximateNumberOfMessagesDelayed'])

    while qcounter >= 0:
        for exchange in options.taskexchange:
            logger.debug('Looking for sqs queue stats in queue' + exchange)
            eventTaskQueue = mqConn.get_queue(exchange)
            # get queue stats
            taskQueueStats = eventTaskQueue.get_attributes('All')
            sqslist['queue_stats'][qcounter] = taskQueueStats
            sqslist['queue_stats'][qcounter]['name'] = exchange
            qcounter -= 1
        queues_stats['queues'].append(queue_stats)

    # setup a log entry for health/status.
    sqsid = '{0}-{1}'.format(options.account, options.region)

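The boto3 calls above, reduced to a standalone sketch (region and queue name are placeholders; credentials are assumed to come from the environment):

    import boto3

    sqs = boto3.client("sqs", region_name="us-west-2")
    queue_url = sqs.get_queue_url(QueueName="example-queue")["QueueUrl"]
    attrs = sqs.get_queue_attributes(
        QueueUrl=queue_url, AttributeNames=["All"]
    )["Attributes"]

    # the three gauges the loop above records, defaulting to 0 when absent
    for name in ("ApproximateNumberOfMessages",
                 "ApproximateNumberOfMessagesNotVisible",
                 "ApproximateNumberOfMessagesDelayed"):
        print(name, int(attrs.get(name, 0)))
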
@@ -69,35 +79,8 @@ def getQueueSizes():
        category='mozdef',
        source='aws-sqs',
        tags=[],
        details=[])
    healthlog['details'] = dict(username='mozdef')
    healthlog['details']['queues']= list()
    healthlog['details']['total_messages_ready'] = 0
    healthlog['details']['total_feeds'] = qcount
        details=queues_stats)
    healthlog['tags'] = ['mozdef', 'status', 'sqs']
    ready = 0
    qcounter = qcount - 1
    for q in sqslist['queue_stats'].keys():
        queuelist = sqslist['queue_stats'][qcounter]
        if 'ApproximateNumberOfMessages' in queuelist:
            ready1 = int(queuelist['ApproximateNumberOfMessages'])
            ready = ready1 + ready
            healthlog['details']['total_messages_ready'] = ready
        if 'ApproximateNumberOfMessages' in queuelist:
            messages = int(queuelist['ApproximateNumberOfMessages'])
        if 'ApproximateNumberOfMessagesNotVisible' in queuelist:
            inflight = int(queuelist['ApproximateNumberOfMessagesNotVisible'])
        if 'ApproximateNumberOfMessagesDelayed' in queuelist:
            delayed = int(queuelist['ApproximateNumberOfMessagesDelayed'])
        if 'name' in queuelist:
            name = queuelist['name']
        queueinfo=dict(
            queue=name,
            messages_delayed=delayed,
            messages_ready=messages,
            messages_inflight=inflight)
        healthlog['details']['queues'].append(queueinfo)
        qcounter -= 1
    healthlog['type'] = 'mozdefhealth'
    # post to elasticsearch servers directly without going through
    # message queues in case there is an availability issue

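With `details=queues_stats`, the health event now carries the per-queue numbers directly instead of rebuilding them in a second pass. Roughly, the details payload looks like this (values are illustrative):

    healthlog_details = {
        "username": "mozdef",
        "total_feeds": 2,
        "total_messages_ready": 15,
        "queues": [
            {"queue": "events", "messages_ready": 10,
             "messages_inflight": 3, "messages_delayed": 0},
            {"queue": "alerts", "messages_ready": 5,
             "messages_inflight": 0, "messages_delayed": 1},
        ],
    }
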
@@ -106,8 +89,6 @@ def getQueueSizes():
    # for use when querying for the latest sqs status
    healthlog['tags'] = ['mozdef', 'status', 'sqs-latest']
    es.save_event(index=options.index, doc_id=getDocID(sqsid), body=json.dumps(healthlog))
    # except Exception as e:
    #     logger.error("Exception %r when gathering health and status " % e)


def main():

@@ -26,7 +26,7 @@ def getESAlerts(es):
    # We use an ExistsMatch here just to satisfy the
    # requirements of a search query must have some "Matchers"
    search_query.add_must(ExistsMatch('summary'))
    results = search_query.execute(es, indices=['alerts-*'], size=10000)
    results = search_query.execute(es, indices=['alerts'], size=10000)
    return results

@@ -9,7 +9,9 @@ import sys
import os
from configlib import getConfig, OptionParser

import urllib2
import urllib.request
import urllib.error
import urllib.parse
import tempfile
import tarfile

@@ -19,7 +21,7 @@ from mozdef_util.utilities.logger import logger, initLogger

def fetch_db_data(db_download_location):
    logger.debug('Fetching db data from ' + db_download_location)
    response = urllib2.urlopen(db_download_location)
    response = urllib.request.urlopen(db_download_location)
    db_raw_data = response.read()
    with tempfile.NamedTemporaryFile(mode='wb') as temp:
        logger.debug('Writing compressed gzip to temp file: ' + temp.name)

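The Python 3 replacement for `urllib2.urlopen` in isolation: `urllib2` was split into `urllib.request`, `urllib.error`, and `urllib.parse`. The URL below is a placeholder:

    import urllib.request

    with urllib.request.urlopen('https://example.com/GeoLite2-City.tar.gz') as response:
        db_raw_data = response.read()  # bytes
    print(len(db_raw_data), 'bytes fetched')
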
@@ -8,21 +8,24 @@
import sys
import os
from configlib import getConfig, OptionParser
import boto
import boto3

from mozdef_util.utilities.logger import logger, initLogger


def fetch_ip_list(aws_key_id, aws_secret_key, s3_bucket, ip_list_filename):
    logger.debug("Fetching ip list from s3")
    s3 = boto.connect_s3(
    client = boto3.client(
        's3',
        aws_access_key_id=aws_key_id,
        aws_secret_access_key=aws_secret_key
    )
    bucket = s3.get_bucket(s3_bucket)
    ip_list_key = bucket.lookup(ip_list_filename)
    contents = ip_list_key.get_contents_as_string().rstrip()
    return contents.split("\n")
    response = client.get_object(Bucket=s3_bucket, Key=ip_list_filename)
    ip_content_list = response['Body'].read().rstrip().splitlines()
    ips = []
    for ip in ip_content_list:
        ips.append(ip.decode())
    return ips


def save_ip_list(save_path, ips):

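The same boto-to-boto3 migration in miniature: `get_object` returns a streaming `Body` of bytes, hence the explicit per-line `decode()`. Bucket and key are placeholders, and credentials are assumed to come from the environment:

    import boto3

    s3 = boto3.client('s3')
    response = s3.get_object(Bucket='example-bucket', Key='ip_list.txt')
    lines = response['Body'].read().rstrip().splitlines()  # list of bytes
    ips = [line.decode() for line in lines]
    print(ips)
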
@@ -1,6 +0,0 @@
[options]
hostname=<add_hostname>
username=<add_username>
password=<add_password>
database=<add_database>
url=http://localhost:8080/events

@@ -1,198 +0,0 @@
#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation

import copy
import os
import sys
import json
import ConfigParser
import socket
import MySQLdb
from requests import Session
from optparse import OptionParser
from datetime import datetime


class MozDefError(Exception):
    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return repr(self.msg)


class MozDefEvent():
    # create requests session to allow for keep alives
    httpsession = Session()
    # Turns off needless and repetitive .netrc check for creds
    httpsession.trust_env = False
    debug = False
    verify_certificate = False
    # Never fail (ie no unexcepted exceptions sent to user, such as server/network not responding)
    fire_and_forget_mode = True
    log = {}
    log['timestamp'] = datetime.isoformat(datetime.now())
    log['hostname'] = socket.getfqdn()
    log['processid'] = os.getpid()
    log['processname'] = sys.argv[0]
    log['severity'] = 'INFO'
    log['summary'] = None
    log['category'] = 'event'
    log['tags'] = list()
    log['details'] = dict()

    def __init__(self, url='http://localhost/events', summary=None, category='event', severity='INFO', tags=[], details={}):
        self.summary = summary
        self.category = category
        self.severity = severity
        self.tags = tags
        self.details = details
        self.url = url

    def send(self, timestamp=None, summary=None, category=None, severity=None, tags=None, details=None, hostname=None):
        log_msg = copy.copy(self.log)

        if timestamp is None:
            log_msg['timestamp'] = self.timestamp

        else:
            log_msg['timestamp'] = timestamp

        if summary is None:
            log_msg['summary'] = self.summary
        else:
            log_msg['summary'] = summary

        if category is None:
            log_msg['category'] = self.category
        else:
            log_msg['category'] = category

        if severity is None:
            log_msg['severity'] = self.severity
        else:
            log_msg['severity'] = severity

        if tags is None:
            log_msg['tags'] = self.tags
        else:
            log_msg['tags'] = tags

        if details is None:
            log_msg['details'] = self.details
        else:
            log_msg['details'] = details

        if hostname is None:
            log_msg['hostname'] = self.hostname
        else:
            log_msg['hostname'] = hostname

        if type(log_msg['details']) != dict:
            raise MozDefError('details must be a dict')
        elif type(log_msg['tags']) != list:
            raise MozDefError('tags must be a list')
        elif summary is None:
            raise MozDefError('Summary is a required field')

        try:
            self.httpsession.post(self.url, json.dumps(log_msg, encoding='utf-8'), verify=self.verify_certificate)

        except Exception as e:
            if not self.fire_and_forget_mode:
                raise e


def main():
    '''
    connect to vidyo's mysql, read in calls and write to mozdef
    '''
    mdEvent = MozDefEvent(options.url)
    mdEvent.debug = True
    mdEvent.fire_and_forget_mode = False

    # connect to mysql
    db=MySQLdb.connect(host=options.hostname, user=options.username,passwd=options.password,db=options.database)
    c=db.cursor(MySQLdb.cursors.DictCursor)

    c.execute("select * from ConferenceCall2 where JoinTime between NOW() - INTERVAL 30 MINUTE and NOW() or LeaveTime between NOW() - INTERVAL 30 MINUTE and NOW()")
    rows=c.fetchall()
    c.close()

    # Build dictionary of calls in order to consolidate multiple rows for a single call
    calls = {}
    for row in rows:
        id = row['UniqueCallID']
        # Copy the row's info if we don't already have the final completed call state
        if id not in calls or (id in calls and calls[id]['CallState'] != 'COMPLETED'):
            calls[id] = row

    # Massage call data and send to MozDef
    for key in calls.keys():
        call = calls[key]
        if call['LeaveTime'] is not None:
            duration = call['LeaveTime'] - call['JoinTime']
            call['CallDuration'] = duration.seconds

        # fix up the data for json
        for k in call.keys():
            # convert datetime objects to isoformat for json serialization
            if isinstance(call[k], datetime):
                call[k] = call[k].isoformat()
            # make sure it's a string, not unicode forced into a string
            if isinstance(call[k],str):
                # db has unicode stored as string, so decode, then encode
                call[k] = call[k].decode('utf-8','ignore').encode('ascii','ignore')

        mdEvent.send(timestamp=call['JoinTime'],
                     summary='Vidyo call status for ' + call['UniqueCallID'].encode('ascii', 'ignore'),
                     tags=['vidyo'],
                     details=call,
                     category='vidyo',
                     hostname=socket.gethostname()
                     )


def getConfig(optionname, thedefault, configfile):
    """read an option from a config file or set a default
    send 'thedefault' as the data class you want to get a string back
    i.e. 'True' will return a string
    True will return a bool
    1 will return an int
    """
    retvalue = thedefault
    opttype = type(thedefault)
    if os.path.isfile(configfile):
        config = ConfigParser.ConfigParser()
        config.readfp(open(configfile))
        if config.has_option('options', optionname):
            if opttype == bool:
                retvalue = config.getboolean('options', optionname)
            elif opttype == int:
                retvalue = config.getint('options', optionname)
            elif opttype == float:
                retvalue = config.getfloat('options', optionname)
            else:
                retvalue = config.get('options', optionname)
    return retvalue


def initConfig(configfile):
    # default options
    options.url = getConfig('url', 'http://localhost:8080/events', configfile)
    options.username = getConfig('username', '', configfile)
    options.password = getConfig('password', '', configfile)
    options.database = getConfig('database', '', configfile)
    options.hostname = getConfig('hostname', '', configfile)


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option("-c", dest='configfile', default=sys.argv[0].replace('.py', '.conf'), help="configuration file to use")
    (options, args) = parser.parse_args()
    initConfig(options.configfile)
    main()

@@ -1,10 +0,0 @@
#!/usr/bin/env bash

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation

source /opt/mozdef/envs/python/bin/activate
/opt/mozdef/envs/mozdef/cron/vidyo2MozDef.py -c /opt/mozdef/envs/mozdef/cron/vidyo2MozDef.conf

@@ -2,29 +2,24 @@ FROM centos:7

LABEL maintainer="mozdef@mozilla.com"

ENV ES_VERSION 5.6.14
ENV ES_VERSION 6.8.0
ENV ES_JAVA_VERSION 1.8.0


RUN \
    useradd -ms /bin/bash -d /opt/mozdef -m mozdef && \
    yum install -y java-$ES_JAVA_VERSION && \
    mkdir -p /opt/mozdef/envs && \
    curl -s -L https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-$ES_VERSION.tar.gz | tar -C /opt/mozdef -xz && \
    mv /opt/mozdef/elasticsearch-$ES_VERSION /opt/mozdef/envs/elasticsearch && \
    chown -R mozdef:mozdef /opt/mozdef && \
    mkdir /var/log/elasticsearch && \
    chown -R mozdef:mozdef /var/log/elasticsearch && \
    mkdir /var/lib/elasticsearch && \
    chown -R mozdef:mozdef /var/lib/elasticsearch && \
    curl -s -L https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-$ES_VERSION.rpm -o elasticsearch.rpm && \
    rpm -i elasticsearch.rpm && \
    yum clean all

COPY docker/compose/elasticsearch/files/elasticsearch.yml /opt/mozdef/envs/elasticsearch/config/
COPY docker/compose/elasticsearch/files/jvm.options /opt/mozdef/envs/elasticsearch/config/
USER elasticsearch

WORKDIR /opt/mozdef/envs/elasticsearch
COPY docker/compose/elasticsearch/files/elasticsearch.yml /etc/elasticsearch/elasticsearch.yml
COPY docker/compose/elasticsearch/files/jvm.options /etc/elasticsearch/jvm.options

VOLUME /var/lib/elasticsearch

EXPOSE 9200
WORKDIR /usr/share/elasticsearch

USER mozdef
EXPOSE 9200

@@ -1,98 +1,17 @@
# ======================== Elasticsearch Configuration =========================
#
# NOTE: Elasticsearch comes with reasonable defaults for most settings.
#       Before you set out to tweak and tune the configuration, make sure you
#       understand what are you trying to accomplish and the consequences.
#
# The primary way of configuring a node is via this file. This template lists
# the most important settings you may want to configure for a production cluster.
#
# Please consult the documentation for further information on configuration options:
# https://www.elastic.co/guide/en/elasticsearch/reference/index.html
#
# ---------------------------------- Cluster -----------------------------------
#
# Use a descriptive name for your cluster:
#
#cluster.name: my-application
#
# ------------------------------------ Node ------------------------------------
#
# Use a descriptive name for the node:
#
#node.name: node-1
#
# Add custom attributes to the node:
#
#node.attr.rack: r1
#
# ----------------------------------- Paths ------------------------------------
#
# Path to directory where to store the data (separate multiple locations by comma):
#
#path.data: /path/to/data
#
# Path to log files:
#
#path.logs: /path/to/logs
#
# ----------------------------------- Memory -----------------------------------
#
# Lock the memory on startup:
#
#bootstrap.memory_lock: true
#
# Make sure that the heap size is set to about half the memory available
# on the system and that the owner of the process is allowed to use this
# limit.
#
# Elasticsearch performs poorly when the system is swapping the memory.
#
# ---------------------------------- Network -----------------------------------
#
# Set the bind address to a specific IP (IPv4 or IPv6):
#
#network.host: 192.168.0.1
#
# Set a custom port for HTTP:
#
#http.port: 9200
#
# For more information, consult the network module documentation.
#
# --------------------------------- Discovery ----------------------------------
#
# Pass an initial list of hosts to perform discovery when new node is started:
# The default list of hosts is ["127.0.0.1", "[::1]"]
#
#discovery.zen.ping.unicast.hosts: ["host1", "host2"]
#
# Prevent the "split brain" by configuring the majority of nodes (total number of master-eligible nodes / 2 + 1):
#
#discovery.zen.minimum_master_nodes: 3
#
# For more information, consult the zen discovery module documentation.
#
# ---------------------------------- Gateway -----------------------------------
#
# Block initial recovery after a full cluster restart until N nodes are started:
#
#gateway.recover_after_nodes: 3
#
# For more information, consult the gateway module documentation.
#
# ---------------------------------- Various -----------------------------------
#
# Require explicit names when deleting indices:
#
#action.destructive_requires_name: true


# Mozdef Custom Settings #
path.logs: /var/log/elasticsearch
path.data: /var/lib/elasticsearch
http.host: 0.0.0.0
network.host: 0.0.0.0
discovery.type: single-node
action.destructive_requires_name: true
action.auto_create_index: false
# Mozdef Custom Settings #

# Disable auto creation unless these indexes
action.auto_create_index: .watches,.triggered_watches,.watcher-history-*

# Add these to prevent requiring a user/pass and termination of ES when looking for "ingest" assignments.
# The watcher directive allows for the deletion of failed watcher indices as they sometimes get created with glitches.
xpack.license.self_generated.type: basic
xpack.security.enabled: false
xpack.monitoring.exporters.my_local:
    type: local
    use_ingest: false
xpack.watcher.index.rest.direct_access: true

@@ -2,7 +2,8 @@ FROM centos:7

LABEL maintainer="mozdef@mozilla.com"

ENV KIBANA_VERSION 5.6.14
# When changing the kibana version, we'll need to update https://github.com/mozilla/MozDef/blob/master/docker/compose/mozdef_bootstrap/files/initial_setup.py accordingly
ENV KIBANA_VERSION 6.8.0

RUN \
    curl -s -L https://artifacts.elastic.co/downloads/kibana/kibana-$KIBANA_VERSION-linux-x86_64.tar.gz | tar -C / -xz && \

@@ -14,19 +14,18 @@ RUN \
    libffi-devel \
    zlib-devel \
    libcurl-devel \
    openssl \
    openssl \
    openssl-devel \
    git \
    make && \
    useradd -ms /bin/bash -d /opt/mozdef -m mozdef && \
    mkdir /opt/mozdef/envs && \
    cd /opt/mozdef && \
    yum install -y mysql-devel \
    python \
    python-devel \
    python-pip && \
    yum install -y python36 \
    python36-devel \
    python36-pip && \
    yum clean all && \
    pip install virtualenv && \
    pip3 install virtualenv && \
    mkdir /opt/mozdef/envs/mozdef && \
    mkdir /opt/mozdef/envs/mozdef/cron

@@ -47,7 +46,7 @@ RUN chown -R mozdef:mozdef /opt/mozdef/

USER mozdef
RUN \
    virtualenv /opt/mozdef/envs/python && \
    virtualenv -p /usr/bin/python3.6 /opt/mozdef/envs/python && \
    source /opt/mozdef/envs/python/bin/activate && \
    pip install -r /opt/mozdef/envs/mozdef/requirements.txt && \
    cd /opt/mozdef/envs/mozdef/mozdef_util && \

@@ -63,3 +62,7 @@ VOLUME /opt/mozdef/envs/mozdef/data
ENV PATH=/opt/mozdef/envs/python/bin:$PATH

USER root

# Remove once https://github.com/jeffbryner/configlib/pull/9 is merged
# and a new version of configlib is in place
RUN sed -i 's/from configlib import getConfig/from .configlib import getConfig/g' /opt/mozdef/envs/python/lib/python3.6/site-packages/configlib/__init__.py

@@ -9,7 +9,7 @@ COPY cron/defaultMappingTemplate.json /opt/mozdef/envs/mozdef/cron/defaultMappin
COPY docker/compose/mozdef_cron/files/backup.conf /opt/mozdef/envs/mozdef/cron/backup.conf
COPY docker/compose/mozdef_bootstrap/files/initial_setup.py /opt/mozdef/envs/mozdef/initial_setup.py
COPY docker/compose/mozdef_bootstrap/files/index_mappings /opt/mozdef/envs/mozdef/index_mappings
COPY docker/compose/mozdef_bootstrap/files/dashboards /opt/mozdef/envs/mozdef/dashboards
COPY docker/compose/mozdef_bootstrap/files/resources /opt/mozdef/envs/mozdef/resources

RUN chown -R mozdef:mozdef /opt/mozdef/envs/mozdef/

@@ -1,16 +0,0 @@
{
"_index": ".kibana",
"_type": "visualization",
"_id": "AWg-MFyzlDhKjHVbouwj",
"_version": 1,
"_score": 1,
"_source": {
"title": "All Events Area",
"visState": "{\"title\":\"All Events Area\",\"type\":\"area\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"utctimestamp per second\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"utctimestamp\",\"interval\":\"s\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}],\"listeners\":{}}",
"uiStateJSON": "{}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}"
}
}
}

@@ -1,16 +0,0 @@
{
"_index": ".kibana",
"_type": "visualization",
"_id": "AWg6F1PFk1EXv5E12DRN",
"_version": 1,
"_score": 1,
"_source": {
"title": "Category Pie Graph",
"visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{},\"schema\":\"metric\",\"type\":\"count\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"field\":\"category\",\"order\":\"desc\",\"orderBy\":\"1\",\"size\":5},\"schema\":\"segment\",\"type\":\"terms\"}],\"listeners\":{},\"params\":{\"addLegend\":true,\"addTooltip\":true,\"isDonut\":false,\"legendPosition\":\"right\",\"type\":\"pie\"},\"title\":\"Category Pie Graph\",\"type\":\"pie\"}",
"uiStateJSON": "{}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}"
}
}
}

@@ -1,16 +0,0 @@
{
"_index": ".kibana",
"_type": "visualization",
"_id": "AWg-LfEalDhKjHVbouwc",
"_version": 1,
"_score": 1,
"_source": {
"title": "DestinationIP Bar Graph",
"visState": "{\"title\":\"DestinationIP Bar Graph\",\"type\":\"histogram\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"details.destinationipaddress: Descending\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.destinationipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"uiStateJSON": "{}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}"
}
}
}

@@ -1,18 +0,0 @@
{
"_index": ".kibana",
"_type": "dashboard",
"_id": "AWg6GHH5k1EXv5E12DRQ",
"_version": 5,
"_score": 1,
"_source": {
"title": "Sample Dashboard",
"description": "A sample dashboard for playing around.",
"panelsJSON": "[{\"col\":7,\"id\":\"AWg6F1PFk1EXv5E12DRN\",\"panelIndex\":1,\"row\":1,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"AWg-LWqElDhKjHVbouwZ\",\"panelIndex\":2,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"AWg-LfEalDhKjHVbouwc\",\"panelIndex\":3,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"size_x\":6,\"size_y\":3,\"panelIndex\":4,\"type\":\"visualization\",\"id\":\"AWg-MFyzlDhKjHVbouwj\",\"col\":1,\"row\":1}]",
"optionsJSON": "{\"darkTheme\":false}",
"uiStateJSON": "{}",
"timeRestore": false,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[{\"query\":{\"match_all\":{}}}],\"highlightAll\":true,\"version\":true}"
}
}
}

@@ -1,16 +0,0 @@
{
"_index": ".kibana",
"_type": "visualization",
"_id": "AWg-LWqElDhKjHVbouwZ",
"_version": 1,
"_score": 1,
"_source": {
"title": "SourceIP Bar Graph",
"visState": "{\"title\":\"SourceIP Bar Graph\",\"type\":\"histogram\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"details.sourceipaddress: Descending\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.sourceipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"uiStateJSON": "{}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"match_all\":{}},\"filter\":[]}"
}
}
}

@@ -1,6 +1,7 @@
{
"title": "alerts-*",
"timeFieldName": "utctimestamp",
"notExpandable": true,
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"notify_mozdefbot\",\"type\":\"boolean\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"tags\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
"attributes": {
"title": "alerts-*",
"timeFieldName": "utctimestamp",
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"mozdefhostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processid\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"receivedtimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"source\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}
}

@@ -1,6 +1,7 @@
{
"title": "alerts",
"timeFieldName": "utctimestamp",
"notExpandable": true,
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"notify_mozdefbot\",\"type\":\"boolean\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"tags\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
"attributes": {
"title": "alerts",
"timeFieldName": "utctimestamp",
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"mozdefhostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processid\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"receivedtimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"source\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}
}

@@ -1,6 +1,7 @@
{
"title": "events-weekly",
"timeFieldName": "utctimestamp",
"notExpandable": true,
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.apiversion.raw_value\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.destinationipaddress\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.destinationport\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.requestparameters.logstreamname.raw_value\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceipaddress\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceipv4address\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceport\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.srcip\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.success\",\"type\":\"boolean\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"mozdefhostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processid\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"receivedtimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"source\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"version\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}
"attributes": {
"title": "events-weekly",
"timeFieldName": "utctimestamp",
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"mozdefhostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processid\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"receivedtimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"source\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}
}

@@ -1,6 +1,7 @@
{
"title": "events",
"timeFieldName": "utctimestamp",
"notExpandable": true,
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.apiversion.raw_value\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.destinationipaddress\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.destinationport\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.requestparameters.logstreamname.raw_value\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceipaddress\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceipv4address\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.sourceport\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.srcip\",\"type\":\"ip\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"details.success\",\"type\":\"boolean\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"mozdefhostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processid\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"receivedtimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"source\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"version\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}
"attributes": {
"title": "events-*",
"timeFieldName": "utctimestamp",
"fields": "[{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"category\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"hostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"mozdefhostname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processid\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"processname\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"receivedtimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"severity\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"source\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"summary\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"utctimestamp\",\"type\":\"date\",\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]"
}
}

@@ -19,7 +19,6 @@ from elasticsearch.exceptions import ConnectionError
import requests

from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import SearchQuery, TermMatch


parser = argparse.ArgumentParser(description='Create the correct indexes and aliases in elasticsearch')

@@ -35,7 +34,7 @@ esserver = os.environ.get('OPTIONS_ESSERVERS')
if esserver is None:
    esserver = args.esserver
esserver = esserver.strip('/')
print "Connecting to " + esserver
print("Connecting to " + esserver)
client = ElasticsearchClient(esserver)

kibana_url = os.environ.get('OPTIONS_KIBANAURL', args.kibana_url)

@@ -45,7 +44,13 @@ event_index_name = current_date.strftime("events-%Y%m%d")
previous_event_index_name = (current_date - timedelta(days=1)).strftime("events-%Y%m%d")
weekly_index_alias = 'events-weekly'
alert_index_name = current_date.strftime("alerts-%Y%m")
kibana_index_name = '.kibana'

kibana_index_name = '.kibana_1'
# For this version of kibana, they require specifying
# the kibana version via the api for setting
# the default index, seems weird, but it is what it is.
kibana_version = '6.8.0'

state_index_name = 'mozdefstate'

index_settings_str = ''

@@ -67,12 +72,12 @@ for attempt in range(total_num_tries):
    try:
        all_indices = client.get_indices()
    except ConnectionError:
        print 'Unable to connect to Elasticsearch...retrying'
        print('Unable to connect to Elasticsearch...retrying')
        sleep(5)
    else:
        break
else:
    print 'Cannot connect to Elasticsearch after ' + str(total_num_tries) + ' tries, exiting script.'
    print('Cannot connect to Elasticsearch after ' + str(total_num_tries) + ' tries, exiting script.')
    exit(1)

refresh_interval = getConfig('refresh_interval', '1s', args.backup_conf_file)

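This retry loop relies on Python's for ... else clause, whose else branch runs only when the loop finishes without a break. A stripped-down, runnable version of the pattern (the failing-then-succeeding connect stub is made up for illustration):

    from time import sleep

    def connect(attempt):
        # placeholder: fail the first two attempts, then succeed
        if attempt < 2:
            raise ConnectionError('not ready')

    total_num_tries = 10
    for attempt in range(total_num_tries):
        try:
            connect(attempt)
        except ConnectionError:
            sleep(0.1)
        else:
            break  # success skips the for-else below
    else:
        # runs only when every attempt raised and no break happened
        raise SystemExit('gave up after {0} tries'.format(total_num_tries))
    print('connected on attempt', attempt)
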
@ -97,81 +102,108 @@ state_index_settings['settings'] = index_options
|
|||
|
||||
# Create initial indices
|
||||
if event_index_name not in all_indices:
|
||||
print "Creating " + event_index_name
|
||||
print("Creating " + event_index_name)
|
||||
client.create_index(event_index_name, index_config=index_settings)
|
||||
client.create_alias('events', event_index_name)
|
||||
|
||||
if previous_event_index_name not in all_indices:
|
||||
print "Creating " + previous_event_index_name
|
||||
print("Creating " + previous_event_index_name)
|
||||
client.create_index(previous_event_index_name, index_config=index_settings)
|
||||
client.create_alias('events-previous', previous_event_index_name)
|
||||
|
||||
if alert_index_name not in all_indices:
|
||||
print "Creating " + alert_index_name
|
||||
print("Creating " + alert_index_name)
|
||||
client.create_index(alert_index_name, index_config=index_settings)
|
||||
client.create_alias('alerts', alert_index_name)
|
||||
|
||||
if weekly_index_alias not in all_indices:
|
||||
print "Creating " + weekly_index_alias
|
||||
print("Creating " + weekly_index_alias)
|
||||
client.create_alias_multiple_indices(weekly_index_alias, [event_index_name, previous_event_index_name])
|
||||
|
||||
if kibana_index_name not in all_indices:
|
||||
print "Creating " + kibana_index_name
|
||||
client.create_index(kibana_index_name, index_config={"settings": index_options})
|
||||
|
||||
if state_index_name not in all_indices:
|
||||
print "Creating " + state_index_name
|
||||
print("Creating " + state_index_name)
|
||||
client.create_index(state_index_name, index_config=state_index_settings)
|
||||
|
||||
# Wait for .kibana index to be ready
|
||||
num_times = 0
|
||||
while not client.index_exists(kibana_index_name):
|
||||
if num_times < 3:
|
||||
print("Waiting for .kibana index to be ready")
|
||||
time.sleep(1)
|
||||
num_times += 1
|
||||
else:
|
||||
print(".kibana index not created...exiting")
|
||||
sys.exit(1)
|
||||
# Wait for the Kibana service to become ready
|
||||
total_num_tries = 10
|
||||
for attempt in range(total_num_tries):
|
||||
try:
|
||||
if requests.get(kibana_url).ok:
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
print('Unable to connect to Kibana...retrying')
|
||||
sleep(5)
|
||||
else:
|
||||
print('Cannot connect to Kibana after ' + str(total_num_tries) + ' tries, exiting script.')
|
||||
exit(1)
|
||||
|
||||
# Check to see if index patterns exist in .kibana
|
||||
query = SearchQuery()
|
||||
query.add_must(TermMatch('_type', 'index-pattern'))
|
||||
results = query.execute(client, indices=[kibana_index_name])
|
||||
if len(results['hits']) == 0:
|
||||
# Create index patterns and assign default index mapping
|
||||
index_mappings_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'index_mappings')
|
||||
listing = os.listdir(index_mappings_path)
|
||||
for infile in listing:
|
||||
json_file_path = os.path.join(index_mappings_path, infile)
|
||||
with open(json_file_path) as json_data:
|
||||
mapping_data = json.load(json_data)
|
||||
print "Creating {0} index mapping".format(mapping_data['title'])
|
||||
client.save_object(body=mapping_data, index=kibana_index_name, doc_type='index-pattern', doc_id=mapping_data['title'])
|
||||
# Check if index-patterns already exist
|
||||
if kibana_index_name in client.get_indices():
|
||||
existing_patterns_url = kibana_url + "/api/saved_objects/_find?type=index-pattern&search_fields=title&search=*"
|
||||
resp = requests.get(url=existing_patterns_url)
|
||||
existing_patterns = json.loads(resp.text)
|
||||
if len(existing_patterns['saved_objects']) > 0:
|
||||
print("Index patterns already exist, exiting script early")
|
||||
sys.exit(0)
|
||||
|
||||
# Assign default index to 'events'
|
||||
print "Assigning events as default index mapping"
|
||||
index_name = 'events'
|
||||
url = '{}/api/kibana/settings/defaultIndex'.format(kibana_url)
|
||||
data = {'value': index_name}
|
||||
r = requests.post(url, json=data, headers={'kbn-xsrf': "true"})
|
||||
if not r.ok:
|
||||
print("Failed to set defaultIndex to events : {} {}".format(r.status_code, r.content))
|
||||
# Create index-patterns
|
||||
kibana_headers = {'content-type': 'application/json', 'kbn-xsrf': 'true'}
|
||||
index_mappings_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'index_mappings')
|
||||
listing = os.listdir(index_mappings_path)
|
||||
for infile in listing:
|
||||
json_file_path = os.path.join(index_mappings_path, infile)
|
||||
with open(json_file_path) as json_data:
|
||||
mapping_data = json.load(json_data)
|
||||
index_name = mapping_data['attributes']['title']
|
||||
print("Creating {0} index mapping".format(index_name))
|
||||
mapping_url = kibana_url + "/api/saved_objects/index-pattern/" + index_name
|
||||
resp = requests.post(url=mapping_url, data=json.dumps(mapping_data), headers=kibana_headers)
|
||||
if not resp.ok:
|
||||
print("Unable to create index mapping: " + resp.text)
|
||||
|
||||
# Remove existing default index mapping if it exists
|
||||
resp = requests.delete(url=kibana_url + "/api/saved_objects/config/" + kibana_version, headers=kibana_headers)
|
||||
if not resp.ok:
|
||||
print("Unable to delete existing default index mapping: {} {}".format(resp.status_code, resp.content))
|
||||
|
||||
# Check to see if dashboards already exist in .kibana
|
||||
query = SearchQuery()
|
||||
query.add_must(TermMatch('_type', 'dashboard'))
|
||||
results = query.execute(client, indices=[kibana_index_name])
|
||||
if len(results['hits']) == 0:
|
||||
dashboards_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'dashboards')
|
||||
listing = os.listdir(dashboards_path)
|
||||
for infile in listing:
|
||||
json_file_path = os.path.join(dashboards_path, infile)
|
||||
with open(json_file_path) as json_data:
|
||||
mapping_data = json.load(json_data)
|
||||
print("Creating {0} {1}".format(
|
||||
mapping_data['_source']['title'],
|
||||
mapping_data['_type']
|
||||
))
|
||||
client.save_object(body=mapping_data['_source'], index=kibana_index_name, doc_type=mapping_data['_type'], doc_id=mapping_data['_id'])
|
||||
# Set default index mapping to events-*
|
||||
data = {
|
||||
"attributes": {
|
||||
"buildNum": "19548",
|
||||
"defaultIndex": "events-*"
|
||||
}
|
||||
}
|
||||
print("Creating default index pattern for events-*")
|
||||
resp = requests.post(url=kibana_url + "/api/saved_objects/config/" + kibana_version, data=json.dumps(data), headers=kibana_headers)
|
||||
if not resp.ok:
|
||||
print("Failed to set default index: {} {}".format(resp.status_code, resp.content))
|
||||
|
||||
# Check if dashboards already exist
|
||||
if kibana_index_name in client.get_indices():
|
||||
existing_patterns_url = kibana_url + "/api/saved_objects/_find?type=dashboard&search_fields=title&search=*"
|
||||
resp = requests.get(url=existing_patterns_url)
|
||||
existing_patterns = json.loads(resp.text)
|
||||
if len(existing_patterns['saved_objects']) > 0:
|
||||
print("Dashboards already exist, exiting script early")
|
||||
sys.exit(0)
|
||||
|
||||
# Create visualizations/dashboards
|
||||
dashboards_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'resources')
|
||||
listing = os.listdir(dashboards_path)
|
||||
for infile in listing:
|
||||
json_file_path = os.path.join(dashboards_path, infile)
|
||||
with open(json_file_path) as json_data:
|
||||
mapping_data = json.load(json_data)
|
||||
mapping_type = mapping_data['type']
|
||||
print("Creating {0} {1}".format(
|
||||
mapping_data[mapping_type]['title'],
|
||||
mapping_type
|
||||
))
|
||||
post_data = {
|
||||
"attributes": mapping_data[mapping_type]
|
||||
}
|
||||
# We use the filename as the id of the resource
|
||||
resource_name = infile.replace('.json', '')
|
||||
kibana_type_url = kibana_url + "/api/saved_objects/" + mapping_type + "/" + resource_name
|
||||
requests.post(url=kibana_type_url, data=json.dumps(post_data), headers=kibana_headers)
|
||||
|
|
|
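Every Kibana call in this script follows the same shape: POST a JSON body to /api/saved_objects/<type>/<id> with the kbn-xsrf header set. A small helper capturing that pattern (the URL and the sample object below are illustrative, not taken from the source)::

    import json
    import requests

    KIBANA_URL = 'http://localhost:5601'  # assumed local Kibana endpoint
    KIBANA_HEADERS = {'content-type': 'application/json', 'kbn-xsrf': 'true'}

    def save_kibana_object(obj_type, obj_id, attributes):
        """Create a Kibana saved object (index-pattern, visualization, ...)."""
        url = '{0}/api/saved_objects/{1}/{2}'.format(KIBANA_URL, obj_type, obj_id)
        resp = requests.post(url, data=json.dumps({'attributes': attributes}), headers=KIBANA_HEADERS)
        if not resp.ok:
            print('Unable to save {0} {1}: {2}'.format(obj_type, obj_id, resp.text))
        return resp.ok

    save_kibana_object('index-pattern', 'events-*', {'title': 'events-*', 'timeFieldName': 'utctimestamp'})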
@ -0,0 +1,13 @@
|
|||
{
|
||||
"visualization": {
|
||||
"title": "All Events Time Graph",
|
||||
"visState": "{\"title\":\"All Events Time Graph\",\"type\":\"area\",\"params\":{\"type\":\"area\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"utctimestamp\",\"useNormalizedEsInterval\":true,\"interval\":\"auto\",\"time_zone\":\"America/Chicago\",\"drop_partials\":false,\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}]}",
|
||||
"uiStateJSON": "{}",
|
||||
"description": "",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
|
||||
}
|
||||
},
|
||||
"type": "visualization"
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"visualization": {
|
||||
"title": "Total Event Count Number",
|
||||
"visState": "{\"title\":\"Total Event Count Number\",\"type\":\"metric\",\"params\":{\"addTooltip\":true,\"addLegend\":false,\"type\":\"metric\",\"metric\":{\"percentageMode\":false,\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"metricColorMode\":\"None\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"labels\":{\"show\":true},\"invertColors\":false,\"style\":{\"bgFill\":\"#000\",\"bgColor\":false,\"labelColor\":false,\"subText\":\"\",\"fontSize\":60}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}}]}",
|
||||
"uiStateJSON": "{}",
|
||||
"description": "",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
|
||||
}
|
||||
},
|
||||
"type": "visualization"
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"visualization": {
|
||||
"title": "Category Pie Graph",
|
||||
"visState": "{\"title\":\"Category Pie Graph\",\"type\":\"pie\",\"params\":{\"type\":\"pie\",\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":true,\"labels\":{\"show\":false,\"values\":true,\"last_level\":true,\"truncate\":100}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"category\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\"}}]}",
|
||||
"uiStateJSON": "{}",
|
||||
"description": "",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"events-*\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
|
||||
}
|
||||
},
|
||||
"type": "visualization"
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"visualization": {
|
||||
"title": "Destination IP Address Bar Graph",
|
||||
"visState": "{\"title\":\"Destination IP Address Bar Graph\",\"type\":\"horizontal_bar\",\"params\":{\"type\":\"histogram\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":200},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":75,\"filter\":true,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":true,\"type\":\"histogram\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.destinationipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\"}}]}",
|
||||
"uiStateJSON": "{}",
|
||||
"description": "",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
|
||||
}
|
||||
},
|
||||
"type": "visualization"
|
||||
}
|
|
@ -0,0 +1,15 @@
|
|||
{
|
||||
"dashboard": {
|
||||
"title": "Sample Dashboard",
|
||||
"hits": 0,
|
||||
"description": "This is a sample dashboard that we populated with a couple generic visualizations.",
|
||||
"panelsJSON": "[{\"gridData\":{\"x\":0,\"y\":0,\"w\":48,\"h\":15,\"i\":\"1\"},\"version\":\"6.6.2\",\"panelIndex\":\"1\",\"type\":\"visualization\",\"id\":\"all_events_area\",\"embeddableConfig\":{}},{\"gridData\":{\"x\":24,\"y\":15,\"w\":24,\"h\":15,\"i\":\"2\"},\"version\":\"6.6.2\",\"panelIndex\":\"2\",\"type\":\"visualization\",\"id\":\"category_pie_graph\",\"embeddableConfig\":{}},{\"gridData\":{\"x\":0,\"y\":15,\"w\":24,\"h\":15,\"i\":\"3\"},\"version\":\"6.6.2\",\"panelIndex\":\"3\",\"type\":\"visualization\",\"id\":\"all_events_count\",\"embeddableConfig\":{}},{\"gridData\":{\"x\":24,\"y\":30,\"w\":24,\"h\":15,\"i\":\"4\"},\"version\":\"6.6.2\",\"panelIndex\":\"4\",\"type\":\"visualization\",\"id\":\"sourceip_bar_graph\",\"embeddableConfig\":{}},{\"gridData\":{\"w\":24,\"h\":15,\"x\":0,\"y\":30,\"i\":\"5\"},\"version\":\"6.6.2\",\"panelIndex\":\"5\",\"type\":\"visualization\",\"id\":\"sourceip_bar_graph\",\"embeddableConfig\":{}}]",
|
||||
"optionsJSON": "{\"darkTheme\":false,\"useMargins\":true,\"hidePanelTitles\":false}",
|
||||
"version": 1,
|
||||
"timeRestore": false,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
|
||||
}
|
||||
},
|
||||
"type": "dashboard"
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"visualization": {
|
||||
"title": "Source IP Address Bar Graph",
|
||||
"visState": "{\"title\":\"Source IP Address Bar Graph\",\"type\":\"horizontal_bar\",\"params\":{\"type\":\"histogram\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":200},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":75,\"filter\":true,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":true,\"type\":\"histogram\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"details.sourceipaddress\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\"}}]}",
|
||||
"uiStateJSON": "{}",
|
||||
"description": "",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"events-weekly\",\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[]}"
|
||||
}
|
||||
},
|
||||
"type": "visualization"
|
||||
}
|
|
@ -42,8 +42,8 @@ source_suffix = '.rst'
|
|||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'MozDef'
|
||||
copyright = u'2014, Mozilla'
|
||||
project = 'MozDef'
|
||||
copyright = '2014, Mozilla'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
|
@ -202,7 +202,7 @@ latex_elements = {
|
|||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
('index', 'MozDef.tex', u'MozDef Documentation', u'Mozilla', 'manual'),
|
||||
('index', 'MozDef.tex', 'MozDef Documentation', 'Mozilla', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
|
@ -231,7 +231,7 @@ latex_documents = [
|
|||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'mozdef', u'MozDef Documentation', [u'Mozilla'], 1)
|
||||
('index', 'mozdef', 'MozDef Documentation', ['Mozilla'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
|
@ -244,7 +244,7 @@ man_pages = [
|
|||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'MozDef', u'MozDef Documentation', u'Mozilla', 'MozDef', 'MozDef: The Mozilla Defense Platform.', 'Miscellaneous'),
|
||||
('index', 'MozDef', 'MozDef Documentation', 'Mozilla', 'MozDef', 'MozDef: The Mozilla Defense Platform.', 'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
|
|
|
@ -17,7 +17,6 @@ You're done! Now go to:
|
|||
* http://localhost:9090/app/kibana < kibana
|
||||
* http://localhost:9200 < elasticsearch
|
||||
* http://localhost:8080 < loginput
|
||||
* http://localhost:8081 < rest api
|
||||
|
||||
|
||||
.. _docker: https://www.docker.io/
|
||||
|
@ -46,9 +45,38 @@ Manual Installation for Yum or Apt based distros
|
|||
|
||||
Summary
|
||||
*******
|
||||
This section explains the manual installation process for the MozDef system::
|
||||
This section explains the manual installation process for the MozDef system.
|
||||
|
||||
Create a mozdef user.
|
||||
|
||||
On Yum-based systems::
|
||||
|
||||
adduser mozdef -d /opt/mozdef
|
||||
mkdir /opt/mozdef/envs
|
||||
chown -R mozdef:mozdef /opt/mozdef
|
||||
|
||||
On APT-based systems::
|
||||
|
||||
useradd -m -d /opt/mozdef -s /bin/bash mozdef
|
||||
mkdir /opt/mozdef/envs
|
||||
chown -R mozdef:mozdef /opt/mozdef
|
||||
|
||||
Clone repository.
|
||||
|
||||
On Yum-based systems::
|
||||
|
||||
yum install -y git
|
||||
su mozdef
|
||||
cd
|
||||
git clone https://github.com/mozilla/MozDef.git /opt/mozdef/envs/mozdef
|
||||
|
||||
On APT-based systems::
|
||||
|
||||
apt-get install -y git
|
||||
su mozdef
|
||||
cd
|
||||
git clone https://github.com/mozilla/MozDef.git /opt/mozdef/envs/mozdef
|
||||
|
||||
git clone https://github.com/mozilla/MozDef.git mozdef
|
||||
|
||||
Web and Workers nodes
|
||||
---------------------
|
||||
|
@ -58,48 +86,34 @@ This section explains the manual installation process for Web and Workers nodes.
|
|||
Python
|
||||
******
|
||||
|
||||
Create a mozdef user::
|
||||
|
||||
adduser mozdef -d /opt/mozdef
|
||||
cp /etc/skel/.bash* /opt/mozdef/
|
||||
cd /opt/mozdef
|
||||
chown mozdef: .bash*
|
||||
chown -R mozdef: *
|
||||
|
||||
We need to install a python2.7 virtualenv.
|
||||
We need to install a python3.6 virtualenv.
|
||||
|
||||
On Yum-based systems::
|
||||
|
||||
sudo yum install make zlib-devel bzip2-devel openssl-devel ncurses-devel sqlite-devel readline-devel tk-devel pcre-devel gcc gcc-c++ mysql-devel
|
||||
yum install -y epel-release
|
||||
yum install -y python36 python36-devel python3-pip libcurl-devel gcc
|
||||
pip3 install virtualenv
|
||||
su mozdef
|
||||
cd /opt/mozdef/envs
|
||||
virtualenv -p /bin/python3 /opt/mozdef/envs/python
|
||||
|
||||
On APT-based systems::
|
||||
|
||||
sudo apt-get install make zlib1g-dev libbz2-dev libssl-dev libncurses5-dev libsqlite3-dev libreadline-dev tk-dev libpcre3-dev libpcre++-dev build-essential g++ libmysqlclient-dev
|
||||
apt-get install libcurl4-openssl-dev libssl-dev
|
||||
apt-get install python3-pip
|
||||
pip3 install virtualenv
|
||||
su mozdef
|
||||
cd /opt/mozdef/envs
|
||||
virtualenv -p /usr/bin/python3 /opt/mozdef/envs/python
|
||||
|
||||
Then::
|
||||
|
||||
sudo -i -u mozdef -g mozdef
|
||||
mkdir /opt/mozdef/python2.7
|
||||
wget https://www.python.org/ftp/python/2.7.11/Python-2.7.11.tgz
|
||||
tar xvzf Python-2.7.11.tgz
|
||||
cd Python-2.7.11
|
||||
./configure --prefix=/opt/mozdef/python2.7 --enable-shared LDFLAGS="-Wl,--rpath=/opt/mozdef/python2.7/lib"
|
||||
make
|
||||
make install
|
||||
source /opt/mozdef/envs/python/bin/activate
|
||||
cd /opt/mozdef/envs/mozdef
|
||||
PYCURL_SSL_LIBRARY=nss pip install -r requirements.txt
|
||||
|
||||
cd /opt/mozdef
|
||||
|
||||
wget https://bootstrap.pypa.io/get-pip.py
|
||||
export LD_LIBRARY_PATH=/opt/mozdef/python2.7/lib/
|
||||
./python2.7/bin/python get-pip.py
|
||||
./python2.7/bin/pip install virtualenv
|
||||
mkdir ~/envs
|
||||
cd ~/envs
|
||||
~/python2.7/bin/virtualenv python
|
||||
source python/bin/activate
|
||||
pip install -r ../requirements.txt
|
||||
|
||||
Copy the following into a file called .bash_profile for the mozdef user within /opt/mozdef:
|
||||
Copy the following into a file called .bash_profile for the mozdef user within /opt/mozdef::
|
||||
|
||||
[mozdef@server ~]$ vim /opt/mozdef/.bash_profile
|
||||
|
||||
|
@ -109,9 +123,8 @@ Copy the following into a file called .bash_profile for the mozdef user within /
|
|||
|
||||
export PATH
|
||||
|
||||
At this point when you launch python from within your virtual environment, it should tell you that you're using Python 2.7.11.
|
||||
At this point when you launch python from within your virtual environment, it should tell you that you're using Python 3.6.9.
|
||||
|
||||
Whenever you launch a python script from now on, you should have your mozdef virtualenv active.
|
||||
|
||||
RabbitMQ
|
||||
********
|
||||
|
@ -330,8 +343,8 @@ We use `uwsgi`_ to interface python and nginx, in your venv execute the followin
|
|||
wget https://projects.unbit.it/downloads/uwsgi-2.0.17.1.tar.gz
|
||||
tar zxvf uwsgi-2.0.17.1.tar.gz
|
||||
cd uwsgi-2.0.17.1
|
||||
~/python2.7/bin/python uwsgiconfig.py --build
|
||||
~/python2.7/bin/python uwsgiconfig.py --plugin plugins/python core
|
||||
~/python3.6/bin/python uwsgiconfig.py --build
|
||||
~/python3.6/bin/python uwsgiconfig.py --plugin plugins/python core
|
||||
cp python_plugin.so ~/envs/python/bin/
|
||||
cp uwsgi ~/envs/python/bin/
|
||||
|
||||
|
|
|
@ -13,11 +13,11 @@ import json
|
|||
from multiprocessing import Process, Queue
|
||||
import random
|
||||
import logging
|
||||
from Queue import Empty
|
||||
from queue import Empty
|
||||
import requests
|
||||
import time
|
||||
from configlib import getConfig, OptionParser
|
||||
import ConfigParser
|
||||
import configparser
|
||||
import glob
|
||||
from datetime import timedelta
|
||||
|
||||
|
@ -43,7 +43,7 @@ logcache=Queue()
|
|||
def setConfig(option,value,configfile):
|
||||
"""write an option/value pair to our config file"""
|
||||
if os.path.isfile(configfile):
|
||||
config = ConfigParser.ConfigParser()
|
||||
config = configparser.ConfigParser()
|
||||
configfp=open(configfile,'r')
|
||||
config.readfp(configfp)
|
||||
configfp.close()
|
||||
|
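configparser in Python 3 can open files itself, and readfp() is deprecated in favor of read_file(). A tighter sketch of the same write-an-option logic, assuming the option, value and configfile variables from the surrounding function and an '[options]' section name::

    import configparser

    config = configparser.ConfigParser()
    config.read(configfile)  # replaces the explicit open/readfp/close sequence
    if not config.has_section('options'):
        config.add_section('options')
    config.set('options', option, value)
    with open(configfile, 'w') as configfp:
        config.write(configfp)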
@ -66,7 +66,7 @@ def postLogs(logcache):
|
|||
a=httpsession.get_adapter(url)
|
||||
a.max_retries=3
|
||||
r=httpsession.post(url,data=postdata)
|
||||
print(r, postdata)
|
||||
print(r)
|
||||
# append to posts if this is long running and you want
|
||||
# events to try again later.
|
||||
# posts.append((r,postdata,url))
|
||||
|
|
|
@ -136,9 +136,9 @@
|
|||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 2",
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python2"
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
|
@ -149,8 +149,8 @@
|
|||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython2",
|
||||
"version": "2.7.5"
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
|
|
@ -41,9 +41,21 @@ def bulkindex():
|
|||
bulkpost=request.body.read()
|
||||
# bottlelog('request:{0}\n'.format(bulkpost))
|
||||
request.body.close()
|
||||
if len(bulkpost)>10: # TODO Check for bulk format.
|
||||
# iterate on messages and post to event message queue
|
||||
try: # Handles json array bulk format [{},{},...]
|
||||
messages = json.loads(bulkpost)
|
||||
for event in messages:
|
||||
# don't post the items telling us where to post things.
|
||||
if 'index' not in event:
|
||||
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
|
||||
ensurePublish(event,exchange=eventTaskExchange,routing_key=options.taskexchange)
|
||||
return
|
||||
except ValueError as e:
|
||||
bottlelog('Decoded raw input failed with {0}'.format(e))
|
||||
pass
|
||||
|
||||
if len(bulkpost)>10: # Handles single element format {}
|
||||
# TODO Check for other bulk formats.
|
||||
# iterate on messages and post to event message queue
|
||||
eventlist=[]
|
||||
for i in bulkpost.splitlines():
|
||||
eventlist.append(i)
|
||||
|
|
|
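The handler now accepts two bulk shapes: a JSON array of events and newline-delimited JSON with one event per line. A self-contained sketch of parsing both (the function name is illustrative)::

    import json

    def parse_bulk(raw):
        """Return a list of event dicts from either a JSON array or NDJSON."""
        try:
            events = json.loads(raw)
            if isinstance(events, list):
                # drop Elasticsearch bulk control lines such as {"index": ...}
                return [e for e in events if 'index' not in e]
        except ValueError:
            pass
        # fall back to one JSON document per line
        return [json.loads(line) for line in raw.splitlines() if line.strip()]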
@ -17,7 +17,7 @@ if (Meteor.isClient) {
|
|||
|
||||
kibanaurl: function () {
|
||||
var esmetadata = alerts.findOne({'esmetadata.id': Session.get('alertID')}).esmetadata;
|
||||
url=resolveKibanaURL(getSetting('kibanaURL')) + '#/doc/alerts-*/' + esmetadata.index + '/doc?id=' + esmetadata.id;
|
||||
url=resolveKibanaURL(getSetting('kibanaURL')) + '#/doc/alerts-*/' + esmetadata.index + '/_doc?id=' + esmetadata.id;
|
||||
return url;
|
||||
}
|
||||
});
|
||||
|
|
|
@ -96,7 +96,7 @@ Copyright (c) 2014 Mozilla Corporation
|
|||
<tr class="alert-row">
|
||||
<td>{{utctimestamp}}</td>
|
||||
<td><a href="/alert/{{esmetadata.id}}">mozdef</a><br>
|
||||
<a href="{{ resolveKibanaURL mozdef.kibanaURL }}#/doc/alerts-*/{{esmetadata.index}}/doc?id={{esmetadata.id}}" target="_blank">kibana</a>
|
||||
<a href="{{ resolveKibanaURL mozdef.kibanaURL }}#/doc/alerts-*/{{esmetadata.index}}/_doc?id={{esmetadata.id}}" target="_blank">kibana</a>
|
||||
{{#if url}}
|
||||
<br><a href="{{url}}" target ="_blank">docs</a>
|
||||
{{/if}}
|
||||
|
|
|
@ -1,108 +0,0 @@
|
|||
/*
|
||||
* jQuery Highlight plugin
|
||||
*
|
||||
* Based on highlight v3 by Johann Burkard
|
||||
* http://johannburkard.de/blog/programming/javascript/highlight-javascript-text-higlighting-jquery-plugin.html
|
||||
*
|
||||
* Code a little bit refactored and cleaned (in my humble opinion).
|
||||
* Most important changes:
|
||||
* - has an option to highlight only entire words (wordsOnly - false by default),
|
||||
* - has an option to be case sensitive (caseSensitive - false by default)
|
||||
* - highlight element tag and class names can be specified in options
|
||||
*
|
||||
* Usage:
|
||||
* // wrap every occurrence of text 'lorem' in content
|
||||
* // with <span class='highlight'> (default options)
|
||||
* $('#content').highlight('lorem');
|
||||
*
|
||||
* // search for and highlight more terms at once
|
||||
* // so you can save some time on traversing DOM
|
||||
* $('#content').highlight(['lorem', 'ipsum']);
|
||||
* $('#content').highlight('lorem ipsum');
|
||||
*
|
||||
* // search only for entire word 'lorem'
|
||||
* $('#content').highlight('lorem', { wordsOnly: true });
|
||||
*
|
||||
* // don't ignore case during search of term 'lorem'
|
||||
* $('#content').highlight('lorem', { caseSensitive: true });
|
||||
*
|
||||
* // wrap every occurrence of term 'ipsum' in content
|
||||
* // with <em class='important'>
|
||||
* $('#content').highlight('ipsum', { element: 'em', className: 'important' });
|
||||
*
|
||||
* // remove default highlight
|
||||
* $('#content').unhighlight();
|
||||
*
|
||||
* // remove custom highlight
|
||||
* $('#content').unhighlight({ element: 'em', className: 'important' });
|
||||
*
|
||||
*
|
||||
* Copyright (c) 2009 Bartek Szopka
|
||||
*
|
||||
* Licensed under MIT license.
|
||||
*
|
||||
*/
|
||||
|
||||
jQuery.extend({
|
||||
highlight: function (node, re, nodeName, className) {
|
||||
if (node.nodeType === 3) {
|
||||
var match = node.data.match(re);
|
||||
if (match) {
|
||||
var highlight = document.createElement(nodeName || 'span');
|
||||
highlight.className = className || 'highlight';
|
||||
var wordNode = node.splitText(match.index);
|
||||
wordNode.splitText(match[0].length);
|
||||
var wordClone = wordNode.cloneNode(true);
|
||||
highlight.appendChild(wordClone);
|
||||
wordNode.parentNode.replaceChild(highlight, wordNode);
|
||||
return 1; //skip added node in parent
|
||||
}
|
||||
} else if ((node.nodeType === 1 && node.childNodes) && // only element nodes that have children
|
||||
!/(script|style)/i.test(node.tagName) && // ignore script and style nodes
|
||||
!(node.tagName === nodeName.toUpperCase() && node.className === className)) { // skip if already highlighted
|
||||
for (var i = 0; i < node.childNodes.length; i++) {
|
||||
i += jQuery.highlight(node.childNodes[i], re, nodeName, className);
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
});
|
||||
|
||||
jQuery.fn.unhighlight = function (options) {
|
||||
var settings = { className: 'highlight', element: 'span' };
|
||||
jQuery.extend(settings, options);
|
||||
|
||||
return this.find(settings.element + "." + settings.className).each(function () {
|
||||
var parent = this.parentNode;
|
||||
parent.replaceChild(this.firstChild, this);
|
||||
parent.normalize();
|
||||
}).end();
|
||||
};
|
||||
|
||||
jQuery.fn.highlight = function (words, options) {
|
||||
var settings = { className: 'highlight', element: 'span', caseSensitive: false, wordsOnly: false };
|
||||
jQuery.extend(settings, options);
|
||||
|
||||
if (words.constructor === String) {
|
||||
words = [words];
|
||||
}
|
||||
words = jQuery.grep(words, function(word, i){
|
||||
return word != '';
|
||||
});
|
||||
words = jQuery.map(words, function(word, i) {
|
||||
return word.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
|
||||
});
|
||||
if (words.length == 0) { return this; };
|
||||
|
||||
var flag = settings.caseSensitive ? "" : "i";
|
||||
var pattern = "(" + words.join("|") + ")";
|
||||
if (settings.wordsOnly) {
|
||||
pattern = "\\b" + pattern + "\\b";
|
||||
}
|
||||
var re = new RegExp(pattern, flag);
|
||||
|
||||
return this.each(function () {
|
||||
jQuery.highlight(this, re, settings.element, settings.className);
|
||||
});
|
||||
};
|
||||
|
|
@ -269,6 +269,19 @@ if ( Meteor.isClient ) {
|
|||
return pluginsForEndPoint( endpoint );
|
||||
} );
|
||||
|
||||
jQuery.fn.highlight = function (str, className) {
|
||||
var regex = new RegExp(str, "gi");
|
||||
return this.each(function () {
|
||||
$(this).contents().filter(function() {
|
||||
return this.nodeType == 3 && regex.test(this.nodeValue);
|
||||
}).replaceWith(function() {
|
||||
return (this.nodeValue || "").replace(regex, function(match) {
|
||||
return "<span class=\"" + className + "\">" + match + "</span>";
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
UI.registerHelper( 'ipDecorate', function( elementText ) {
|
||||
//decorate text containing an ipv4 address
|
||||
var anelement = $( $.parseHTML( '<span>' + elementText + '</span>' ) )
|
||||
|
@ -277,23 +290,9 @@ if ( Meteor.isClient ) {
|
|||
//clean up potential interference chars
|
||||
w = w.replace( /,|:|;|\[|\]/g, '' )
|
||||
if ( isIPv4( w ) ) {
|
||||
//console.log(w);
|
||||
anelement.
|
||||
highlight( w,
|
||||
{
|
||||
wordsOnly: false,
|
||||
element: "em",
|
||||
className: "ipaddress"
|
||||
} );
|
||||
anelement.highlight(w, 'ipaddress');
|
||||
} else if ( isHostname( w ) ) {
|
||||
//console.log(w);
|
||||
anelement.
|
||||
highlight( w,
|
||||
{
|
||||
wordsOnly: false,
|
||||
element: "em",
|
||||
className: "hostname"
|
||||
} );
|
||||
anelement.highlight(w, 'hostname');
|
||||
}
|
||||
} );
|
||||
//add a drop down menu to any .ipaddress
|
||||
|
|
|
@ -13,10 +13,13 @@ Copyright (c) 2014 Mozilla Corporation
|
|||
--txt-secondary-color: #000;
|
||||
--txt-shadow-color: #000;
|
||||
--txt-highlight-color: rgba(165, 170, 172, 0.904);
|
||||
--arm-color: #d1b61e;
|
||||
--arm-focus-color: #e7c714a9;
|
||||
--txt-disabled-color: #576d54;
|
||||
--a-link-color: rgb(245, 222, 179);
|
||||
--row-color-odd: rgba(30,87,153,.7);
|
||||
--row-color-even: #636c85;
|
||||
}
|
||||
}
|
||||
|
||||
html{
|
||||
background: none;
|
||||
|
@ -260,9 +263,29 @@ h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 {
|
|||
}
|
||||
|
||||
.btn {
|
||||
border: 1px outset;
|
||||
border-radius: 4px;
|
||||
}
|
||||
border: 1px outset;
|
||||
border-radius: 4px;
|
||||
color: var(--txt-primary-color);
|
||||
background-color: var(--arm-color);
|
||||
}
|
||||
|
||||
.btn-warning.active,
|
||||
.btn-warning:active,
|
||||
.btn-warning:hover,
|
||||
.open > .dropdown-toggle.btn-warning {
|
||||
color: var(--txt-secondary-color);
|
||||
background-color: var(--arm-focus-color);
|
||||
border-color: var(--arm-color);
|
||||
}
|
||||
|
||||
.btnAlertAcked,
|
||||
.btnAlertAcked.active,
|
||||
.btnAlertAcked:active,
|
||||
.btnAlertAcked:hover > .btn {
|
||||
color: var(--txt-disabled-color);
|
||||
background-color: var(--arm-focus-color);
|
||||
border-color: var(--arm-color);
|
||||
}
|
||||
|
||||
|
||||
input[type="search"] {
|
||||
|
@ -290,7 +313,7 @@ input[type="search"] {
|
|||
|
||||
.table-striped > tbody > tr:nth-of-type(2n+1) {
|
||||
background-color: var(--row-color-even)
|
||||
}
|
||||
}
|
||||
|
||||
.table-hover tbody tr:hover > td,
|
||||
.table-hover tbody tr:hover > th,
|
||||
|
|
|
@ -11,16 +11,13 @@ Copyright (c) 2014 Mozilla Corporation
|
|||
--bg-secondary-color: #2d5fa0;
|
||||
--row-color-odd: #2a2f35;
|
||||
--row-color-even: #636c85;
|
||||
--ack-edit-color: #a2a9b2;
|
||||
--ack-edit-border-color: #adadad;
|
||||
--ack-edit-focus-color: #557750;
|
||||
--ack-edit-disabled-color: #557750;
|
||||
--arm-color: #e69006;
|
||||
--arm-focus-color: #d58512;
|
||||
--txt-primary-color: #fff;
|
||||
--txt-secondary-color: #000;
|
||||
--txt-disabled-color: #576d54;
|
||||
--a-link-color: #a2a9b2;
|
||||
}
|
||||
}
|
||||
|
||||
/*base css */
|
||||
html{
|
||||
|
@ -193,23 +190,23 @@ caption, legend {
|
|||
.alert.alert-NOTICE {
|
||||
--alert-bg-color: #4a6785;
|
||||
--alert-color: white;
|
||||
}
|
||||
}
|
||||
.alert.alert-WARNING {
|
||||
--alert-bg-color: #ffd351;
|
||||
--alert-color: black;
|
||||
}
|
||||
}
|
||||
.alert.alert-CRITICAL {
|
||||
--alert-bg-color: #d04437;
|
||||
--alert-color: white;
|
||||
}
|
||||
}
|
||||
.alert.alert-INFO {
|
||||
--alert-bg-color: #cccccc;
|
||||
--alert-color: black;
|
||||
}
|
||||
}
|
||||
.alert.alert-ERROR {
|
||||
--alert-bg-color: #d04437;
|
||||
--alert-color: white;
|
||||
}
|
||||
}
|
||||
|
||||
.alert {
|
||||
color: var(--alert-color);
|
||||
|
@ -217,7 +214,7 @@ caption, legend {
|
|||
text-transform: uppercase;
|
||||
display: table-cell;
|
||||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
|
||||
.alert-row a {
|
||||
color: wheat;
|
||||
|
@ -229,22 +226,41 @@ caption, legend {
|
|||
|
||||
.modal-header {
|
||||
color: var(--font-focus);
|
||||
}
|
||||
}
|
||||
|
||||
.modal-body {
|
||||
color: var(--font-focus);
|
||||
}
|
||||
}
|
||||
|
||||
.modal-body .row {
|
||||
color: black;
|
||||
}
|
||||
}
|
||||
/*bootstrap overrides*/
|
||||
|
||||
.btn {
|
||||
border: 1px outset;
|
||||
border-radius: 4px;
|
||||
color: #999;
|
||||
}
|
||||
color: var(--txt-primary-color);
|
||||
background-color: var(--arm-color);
|
||||
}
|
||||
|
||||
.btn-warning.active,
|
||||
.btn-warning:active,
|
||||
.btn-warning:hover,
|
||||
.open > .dropdown-toggle.btn-warning {
|
||||
color: var(--txt-secondary-color);
|
||||
background-color: var(--arm-focus-color);
|
||||
border-color: var(--arm-color);
|
||||
}
|
||||
|
||||
.btnAlertAcked,
|
||||
.btnAlertAcked.active,
|
||||
.btnAlertAcked:active,
|
||||
.btnAlertAcked:hover > .btn {
|
||||
color: var(--txt-disabled-color);
|
||||
background-color: var(--arm-focus-color);
|
||||
border-color: var(--arm-color);
|
||||
}
|
||||
|
||||
|
||||
input[type="search"] {
|
||||
|
@ -412,104 +428,104 @@ sidenav {
|
|||
-ms-transition: all 400ms ease;
|
||||
-o-transition: all 400ms ease;
|
||||
transition: all 400ms ease;
|
||||
}
|
||||
}
|
||||
|
||||
/*pull out triangle*/
|
||||
sidenav:after {
|
||||
position: absolute;
|
||||
content: ' ';
|
||||
width: 0;
|
||||
height: 0;
|
||||
right: -75px;
|
||||
top: 50%;
|
||||
border-width: 30px 30px;
|
||||
border-style: solid;
|
||||
border-color: transparent transparent transparent var(--bg-secondary-color);
|
||||
}
|
||||
sidenav ul {
|
||||
width: 14em;
|
||||
list-style-type: none;
|
||||
margin: auto;
|
||||
padding: 1em;
|
||||
}
|
||||
sidenav div{
|
||||
margin:auto;
|
||||
}
|
||||
sidenav:hover {
|
||||
left: 0;
|
||||
}
|
||||
sidenav .filters-col .row {
|
||||
margin-top: 45px;
|
||||
padding: 0 0.5em;
|
||||
}
|
||||
sidenav .reset-filter {
|
||||
text-align: center;
|
||||
margin-top: 20px;
|
||||
}
|
||||
.form-horizontal .form-group {
|
||||
margin-left: 5px;
|
||||
margin-right: 5px;
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
@media screen and (max-width: 1000px) {
|
||||
sidenav {
|
||||
background: var(--bg-primary-color);
|
||||
border-left: 15px solid var(--bg-secondary-color);
|
||||
text-align: left;
|
||||
font-weight: bolder;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
height: 100%;
|
||||
right: -16em;
|
||||
margin: 0em;
|
||||
padding-top: 1em;
|
||||
display: inline-block;
|
||||
line-height: normal;
|
||||
-webkit-transform: translateZ(0) scale(1, 1);
|
||||
z-index: 3;
|
||||
-webkit-transition: all 400ms ease;
|
||||
-moz-transition: all 400ms ease;
|
||||
-ms-transition: all 400ms ease;
|
||||
-o-transition: all 400ms ease;
|
||||
transition: all 400ms ease;
|
||||
}
|
||||
sidenav:after {
|
||||
position: absolute;
|
||||
content: ' ';
|
||||
width: 0;
|
||||
height: 0;
|
||||
right: -75px;
|
||||
top: 50%;
|
||||
border-width: 30px 30px;
|
||||
border-style: solid;
|
||||
border-color: transparent transparent transparent var(--bg-secondary-color);
|
||||
right: 230px;
|
||||
border-top: 0;
|
||||
border-bottom: 0;
|
||||
border-right: 0;
|
||||
content: none;
|
||||
}
|
||||
sidenav ul {
|
||||
width: 14em;
|
||||
list-style-type: none;
|
||||
margin: auto;
|
||||
padding: 1em;
|
||||
width: 14em;
|
||||
list-style-type: none;
|
||||
margin: auto;
|
||||
padding: 1em;
|
||||
}
|
||||
sidenav div{
|
||||
margin:auto;
|
||||
margin:auto;
|
||||
}
|
||||
sidenav:hover {
|
||||
left: 0;
|
||||
right: 0;
|
||||
width: 230px;
|
||||
overflow-y: scroll;
|
||||
scrollbar-width: inherit;
|
||||
scrollbar-color: var(--bg-secondary-color) black;
|
||||
}
|
||||
sidenav .filters-col .row {
|
||||
margin-top: 45px;
|
||||
padding: 0 0.5em;
|
||||
margin-top: 25px;
|
||||
padding: 0 1.5em;
|
||||
}
|
||||
sidenav .reset-filter {
|
||||
text-align: center;
|
||||
margin-top: 20px;
|
||||
text-align: center;
|
||||
margin-top: 20px;
|
||||
}
|
||||
.form-horizontal .form-group {
|
||||
margin-left: 5px;
|
||||
margin-right: 5px;
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
@media screen and (max-width: 1000px) {
|
||||
sidenav {
|
||||
background: var(--bg-primary-color);
|
||||
border-left: 15px solid var(--bg-secondary-color);
|
||||
text-align: left;
|
||||
font-weight: bolder;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
height: 100%;
|
||||
right: -16em;
|
||||
margin: 0em;
|
||||
padding-top: 1em;
|
||||
display: inline-block;
|
||||
line-height: normal;
|
||||
-webkit-transform: translateZ(0) scale(1, 1);
|
||||
z-index: 3;
|
||||
-webkit-transition: all 400ms ease;
|
||||
-moz-transition: all 400ms ease;
|
||||
-ms-transition: all 400ms ease;
|
||||
-o-transition: all 400ms ease;
|
||||
transition: all 400ms ease;
|
||||
}
|
||||
sidenav:after {
|
||||
right: 230px;
|
||||
border-top: 0;
|
||||
border-bottom: 0;
|
||||
border-right: 0;
|
||||
content: none;
|
||||
}
|
||||
sidenav ul {
|
||||
width: 14em;
|
||||
list-style-type: none;
|
||||
margin: auto;
|
||||
padding: 1em;
|
||||
}
|
||||
sidenav div{
|
||||
margin:auto;
|
||||
}
|
||||
sidenav:hover {
|
||||
right: 0;
|
||||
width: 230px;
|
||||
overflow-y: scroll;
|
||||
scrollbar-width: inherit;
|
||||
scrollbar-color: var(--bg-secondary-color) black;
|
||||
}
|
||||
sidenav .filters-col .row {
|
||||
margin-top: 25px;
|
||||
padding: 0 1.5em;
|
||||
}
|
||||
sidenav .reset-filter {
|
||||
text-align: center;
|
||||
margin-top: 20px;
|
||||
}
|
||||
div.dc-chart {
|
||||
float: none;
|
||||
}
|
||||
div.dc-chart {
|
||||
float: none;
|
||||
}
|
||||
}
|
||||
|
||||
/* globe styling */
|
||||
.globe-container {
|
||||
|
|
|
@ -13,6 +13,8 @@ Copyright (c) 2014 Mozilla Corporation
|
|||
--txt-secondary-color: #fff;
|
||||
--txt-shadow-color: #aaa;
|
||||
--txt-highlight-color: rgba(165, 170, 172, 0.904);
|
||||
--arm-color: #d1b61e;
|
||||
--arm-focus-color: #e7c714a9;
|
||||
--a-link-color: rgb(49, 130, 189);
|
||||
--row-color-odd: rgba(30,87,153,.1);
|
||||
--row-color-even: #636c85;
|
||||
|
@ -193,23 +195,23 @@ caption, legend {
|
|||
.alert.alert-NOTICE {
|
||||
--alert-bg-color: #4a6785;
|
||||
--alert-color: black;
|
||||
}
|
||||
}
|
||||
.alert.alert-WARNING {
|
||||
--alert-bg-color: #ffd351;
|
||||
--alert-color: black;
|
||||
}
|
||||
}
|
||||
.alert.alert-CRITICAL {
|
||||
--alert-bg-color: #d04437;
|
||||
--alert-color: black;
|
||||
}
|
||||
}
|
||||
.alert.alert-INFO {
|
||||
--alert-bg-color: #cccccc;
|
||||
--alert-color: black;
|
||||
}
|
||||
}
|
||||
.alert.alert-ERROR {
|
||||
--alert-bg-color: #d04437;
|
||||
--alert-color: black;
|
||||
}
|
||||
}
|
||||
|
||||
.alert {
|
||||
color: var(--alert-color);
|
||||
|
@ -217,7 +219,7 @@ caption, legend {
|
|||
text-transform: uppercase;
|
||||
display: table-cell;
|
||||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
|
||||
.alert-row a {
|
||||
color: var(--a-link-color);
|
||||
|
@ -268,8 +270,28 @@ h1, h2, h3, h4, h5, h6, .h1, .h2, .h3, .h4, .h5, .h6 {
|
|||
}
|
||||
|
||||
.btn {
|
||||
border: 1px outset;
|
||||
border-radius: 4px;
|
||||
border: 1px outset;
|
||||
border-radius: 4px;
|
||||
color: var(--txt-primary-color);
|
||||
background-color: var(--arm-color);
|
||||
}
|
||||
|
||||
.btn-warning.active,
|
||||
.btn-warning:active,
|
||||
.btn-warning:hover,
|
||||
.open > .dropdown-toggle.btn-warning {
|
||||
color: var(--txt-secondary-color);
|
||||
background-color: var(--arm-focus-color);
|
||||
border-color: var(--arm-color);
|
||||
}
|
||||
|
||||
.btnAlertAcked,
|
||||
.btnAlertAcked.active,
|
||||
.btnAlertAcked:active,
|
||||
.btnAlertAcked:hover > .btn {
|
||||
color: var(--txt-shadow-color);
|
||||
background-color: var(--arm-focus-color);
|
||||
border-color: var(--arm-color);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -18,8 +18,7 @@ Copyright (c) 2014 Mozilla Corporation
|
|||
--row-color-even: #636c85;
|
||||
--ack-edit-color: #a2a9b2;
|
||||
--ack-edit-border-color: #adadad;
|
||||
--ack-edit-focus-color: #557750;
|
||||
--ack-edit-disabled-color: #557750;
|
||||
--txt-shadow-color: #576d54;
|
||||
--arm-color: #e69006;
|
||||
--arm-focus-color: #d58512;
|
||||
--font-main: #fff;
|
||||
|
@ -261,6 +260,15 @@ caption, legend {
|
|||
border-color: var(--arm-color);
|
||||
}
|
||||
|
||||
.btnAlertAcked,
|
||||
.btnAlertAcked.active,
|
||||
.btnAlertAcked:active,
|
||||
.btnAlertAcked:hover > .btn {
|
||||
color: var(--txt-shadow-color);
|
||||
background-color: var(--arm-focus-color);
|
||||
border-color: var(--arm-color);
|
||||
}
|
||||
|
||||
.btn-notice {
|
||||
border: 1px outset;
|
||||
border-radius: 4px;
|
||||
|
|
|
@ -54,3 +54,41 @@ Add is_ip utility function
|
|||
------------------
|
||||
|
||||
* Transition away from custom _type for elasticsearch documents
|
||||
|
||||
|
||||
2.0.0 (2019-06-27)
|
||||
------------------
|
||||
|
||||
* Add support for Elasticsearch 6
|
||||
* Remove support for Elasticsearch 5
|
||||
|
||||
|
||||
2.0.1 (2019-06-28)
|
||||
------------------
|
||||
|
||||
* Fixed setup.py relative file paths
|
||||
|
||||
|
||||
2.0.2 (2019-06-28)
|
||||
------------------
|
||||
|
||||
* Attempted fix at including static files
|
||||
|
||||
|
||||
2.0.3 (2019-06-28)
|
||||
------------------
|
||||
|
||||
* Fixed static file includes in python package
|
||||
|
||||
|
||||
3.0.0 (2019-07-08)
|
||||
------------------
|
||||
|
||||
* Updated to work with python3
|
||||
* Removed support for python2
|
||||
|
||||
|
||||
3.0.1 (2019-07-08)
|
||||
------------------
|
||||
|
||||
* Updated bulk queue to acquire lock before saving events
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
include HISTORY.rst
|
||||
include README.rst
|
|
@ -31,11 +31,10 @@ class BulkQueue():
|
|||
def started(self):
|
||||
return self.running
|
||||
|
||||
def add(self, index, doc_type, body, doc_id=None):
|
||||
def add(self, index, body, doc_id=None):
|
||||
""" Add event to queue, flushing if we hit the threshold """
|
||||
bulk_doc = {
|
||||
"_index": index,
|
||||
"_type": doc_type,
|
||||
"_id": doc_id,
|
||||
"_source": body
|
||||
}
|
||||
|
@ -53,9 +52,9 @@ class BulkQueue():
|
|||
|
||||
def flush(self):
|
||||
""" Write all stored events to ES """
|
||||
self.es_client.save_documents(self.list)
|
||||
self.lock.acquire()
|
||||
try:
|
||||
self.es_client.save_documents(self.list)
|
||||
self.list = list()
|
||||
finally:
|
||||
self.lock.release()
|
||||
|
|
|
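The acquire/try/finally sequence guarantees the lock is released even if save_documents() raises. The context-manager form is the idiomatic equivalent; a sketch assuming a threading.Lock and the same es_client interface::

    import threading

    class BulkQueue:
        def __init__(self, es_client):
            self.es_client = es_client
            self.list = []
            self.lock = threading.Lock()

        def flush(self):
            """Write all queued events to Elasticsearch atomically."""
            with self.lock:  # acquires, and releases even on exceptions
                self.es_client.save_documents(self.list)
                self.list = []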
@ -5,14 +5,14 @@ from elasticsearch_dsl import Search
|
|||
from elasticsearch.exceptions import NotFoundError
|
||||
from elasticsearch.helpers import bulk, BulkIndexError
|
||||
|
||||
from query_models import SearchQuery, TermMatch, AggregatedResults, SimpleResults
|
||||
from bulk_queue import BulkQueue
|
||||
from .query_models import SearchQuery, TermMatch, AggregatedResults, SimpleResults
|
||||
from .bulk_queue import BulkQueue
|
||||
|
||||
from utilities.logger import logger
|
||||
from .utilities.logger import logger
|
||||
|
||||
from event import Event
|
||||
from .event import Event
|
||||
|
||||
TMP_DOC_TYPE = 'doc'
|
||||
DOCUMENT_TYPE = '_doc'
|
||||
|
||||
|
||||
class ElasticsearchBadServer(Exception):
|
||||
|
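Python 3 makes imports absolute by default, so modules inside mozdef_util must be referenced with an explicit leading dot. The difference in one snippet::

    # inside a package module such as mozdef_util/elasticsearch_client.py
    from .bulk_queue import BulkQueue   # explicit relative import: works on Python 3

    # from bulk_queue import BulkQueue  # implicit relative import: Python 2 only;
    #                                   # raises ModuleNotFoundError on Python 3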
@ -53,7 +53,7 @@ class ElasticsearchClient():
|
|||
self.es_connection.indices.delete(index=index_name, ignore=ignore_codes)
|
||||
|
||||
def get_indices(self):
|
||||
return self.es_connection.indices.stats()['indices'].keys()
|
||||
return list(self.es_connection.indices.stats()['indices'].keys())
|
||||
|
||||
def index_exists(self, index_name):
|
||||
return self.es_connection.indices.exists(index_name)
|
||||
|
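The list() wrappers matter on Python 3, where dict.keys() returns a live view instead of a list. A quick illustration::

    stats = {'events-20190708': {}, 'alerts-201907': {}}

    keys_view = stats.keys()         # dict_keys view: not indexable, tracks the dict
    index_list = list(stats.keys())  # plain list: safe to index and serialize

    print(index_list[0])             # works
    # keys_view[0] raises TypeError: 'dict_keys' object is not subscriptable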
@ -64,7 +64,7 @@ class ElasticsearchClient():
|
|||
{
|
||||
"mappings":{}
|
||||
}'''
|
||||
self.es_connection.indices.create(index=index_name, update_all_types='true', body=index_config)
|
||||
self.es_connection.indices.create(index=index_name, body=index_config)
|
||||
|
||||
def create_alias(self, alias, index):
|
||||
actions = []
|
||||
|
@ -90,10 +90,10 @@ class ElasticsearchClient():
|
|||
self.es_connection.indices.update_aliases(dict(actions=actions))
|
||||
|
||||
def get_alias(self, alias_name):
|
||||
return self.es_connection.indices.get_alias(index='*', name=alias_name).keys()
|
||||
return list(self.es_connection.indices.get_alias(index='*', name=alias_name).keys())
|
||||
|
||||
def get_aliases(self):
|
||||
return self.es_connection.cat.stats()['indices'].keys()
|
||||
return list(self.es_connection.cat.stats()['indices'].keys())
|
||||
|
||||
def refresh(self, index_name):
|
||||
self.es_connection.indices.refresh(index=index_name)
|
||||
|
@ -119,6 +119,9 @@ class ElasticsearchClient():
|
|||
return result_set
|
||||
|
||||
def save_documents(self, documents):
|
||||
# ES library still requires _type to be set
|
||||
for document in documents:
|
||||
document['_type'] = DOCUMENT_TYPE
|
||||
try:
|
||||
bulk(self.es_connection, documents)
|
||||
except BulkIndexError as e:
|
||||
|
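helpers.bulk() consumes action dictionaries, which is why each queued document carries _index/_source metadata plus the stub _type this client version still expects. A sketch of the call shape, assuming a reachable cluster on localhost::

    from elasticsearch import Elasticsearch
    from elasticsearch.helpers import bulk

    es = Elasticsearch(['http://localhost:9200'])

    actions = [
        {
            '_index': 'events-20190708',
            '_type': '_doc',  # still required by this library version
            '_source': {'summary': 'example event', 'severity': 'INFO'},
            # omit '_id' and Elasticsearch assigns one
        },
    ]
    bulk(es, actions)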
@ -128,16 +131,17 @@ class ElasticsearchClient():
|
|||
self.bulk_queue.flush()
|
||||
self.bulk_queue.stop_thread()
|
||||
|
||||
def __bulk_save_document(self, index, doc_type, body, doc_id=None):
|
||||
def __bulk_save_document(self, index, body, doc_id=None):
|
||||
if not self.bulk_queue.started():
|
||||
self.bulk_queue.start_thread()
|
||||
self.bulk_queue.add(index=index, doc_type=doc_type, body=body, doc_id=doc_id)
|
||||
self.bulk_queue.add(index=index, body=body, doc_id=doc_id)
|
||||
|
||||
def __save_document(self, index, doc_type, body, doc_id=None, bulk=False):
|
||||
def __save_document(self, index, body, doc_id=None, bulk=False):
|
||||
if bulk:
|
||||
self.__bulk_save_document(index=index, doc_type=doc_type, body=body, doc_id=doc_id)
|
||||
self.__bulk_save_document(index=index, body=body, doc_id=doc_id)
|
||||
else:
|
||||
return self.es_connection.index(index=index, doc_type=doc_type, id=doc_id, body=body)
|
||||
# ES library still requires _type to be set
|
||||
return self.es_connection.index(index=index, doc_type=DOCUMENT_TYPE, id=doc_id, body=body)
|
||||
|
||||
def __parse_document(self, body):
|
||||
if type(body) is str:
|
||||
|
@ -148,19 +152,19 @@ class ElasticsearchClient():
|
|||
doc_body = body['_source']
|
||||
return doc_body
|
||||
|
||||
def save_object(self, body, index, doc_type=TMP_DOC_TYPE, doc_id=None, bulk=False):
|
||||
def save_object(self, body, index, doc_id=None, bulk=False):
|
||||
doc_body = self.__parse_document(body)
|
||||
return self.__save_document(index=index, doc_type=doc_type, body=doc_body, doc_id=doc_id, bulk=bulk)
|
||||
return self.__save_document(index=index, body=doc_body, doc_id=doc_id, bulk=bulk)
|
||||
|
||||
def save_alert(self, body, index='alerts', doc_id=None, bulk=False):
|
||||
doc_body = self.__parse_document(body)
|
||||
return self.__save_document(index=index, doc_type=TMP_DOC_TYPE, body=doc_body, doc_id=doc_id, bulk=bulk)
|
||||
return self.__save_document(index=index, body=doc_body, doc_id=doc_id, bulk=bulk)
|
||||
|
||||
def save_event(self, body, index='events', doc_id=None, bulk=False):
|
||||
doc_body = self.__parse_document(body)
|
||||
event = Event(doc_body)
|
||||
event.add_required_fields()
|
||||
return self.__save_document(index=index, doc_type=TMP_DOC_TYPE, body=event, doc_id=doc_id, bulk=bulk)
|
||||
return self.__save_document(index=index, body=event, doc_id=doc_id, bulk=bulk)
|
||||
|
||||
def get_object_by_id(self, object_id, indices):
|
||||
id_match = TermMatch('_id', object_id)
|
||||
|
@ -178,23 +182,6 @@ class ElasticsearchClient():
|
|||
def get_event_by_id(self, event_id):
|
||||
return self.get_object_by_id(event_id, ['events'])
|
||||
|
||||
def save_dashboard(self, dash_file, dash_name):
|
||||
f = open(dash_file)
|
||||
dashboardjson = json.load(f)
|
||||
f.close()
|
||||
title = dashboardjson['title']
|
||||
dashid = dash_name.replace(' ', '-')
|
||||
if dash_name:
|
||||
title = dash_name
|
||||
dashboarddata = {
|
||||
"user": "guest",
|
||||
"group": "guest",
|
||||
"title": title,
|
||||
"dashboard": json.dumps(dashboardjson)
|
||||
}
|
||||
|
||||
return self.es_connection.index(index='.kibana', doc_type='dashboard', body=dashboarddata, id=dashid)
|
||||
|
||||
def get_cluster_health(self):
|
||||
health_dict = self.es_connection.cluster.health()
|
||||
# To line up with the health stats from ES1, we're
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from datetime import datetime
|
||||
import socket
|
||||
|
||||
from utilities.toUTC import toUTC
|
||||
from .utilities.toUTC import toUTC
|
||||
|
||||
|
||||
class Event(dict):
|
||||
|
|
|
@ -15,7 +15,7 @@ class GeoIP(object):
|
|||
try:
|
||||
result = self.db.city(ip)
|
||||
except Exception as e:
|
||||
return {'error': e.message}
|
||||
return {'error': str(e)}
|
||||
|
||||
geo_dict = {}
|
||||
geo_dict['city'] = result.city.name
|
||||
|
|
|
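Python 3 dropped the .message attribute on exceptions, so errors are stringified with str(e) instead. A quick demonstration::

    try:
        raise ValueError('bad address')
    except Exception as e:
        print(str(e))  # prints: bad address
        # e.message raises AttributeError on Python 3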
@ -1,8 +1,10 @@
|
|||
import os
|
||||
import pynsive
|
||||
import importlib
|
||||
from operator import itemgetter
|
||||
from utilities.dict2List import dict2List
|
||||
from utilities.logger import logger
|
||||
|
||||
from .utilities.dict2List import dict2List
|
||||
from .utilities.logger import logger
|
||||
|
||||
|
||||
class PluginSet(object):
|
||||
|
@ -33,7 +35,7 @@ class PluginSet(object):
|
|||
|
||||
try:
|
||||
module_obj = pynsive.import_module(found_module)
|
||||
reload(module_obj)
|
||||
importlib.reload(module_obj)
|
||||
plugin_class_obj = module_obj.message()
|
||||
|
||||
if 'priority' in dir(plugin_class_obj):
|
||||
|
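The builtin reload() no longer exists in Python 3; importlib.reload() is the replacement. A minimal sketch with an ordinary module standing in for a plugin::

    import importlib

    module_obj = importlib.import_module('json')  # stand-in for a plugin module
    module_obj = importlib.reload(module_obj)     # re-executes the module body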
@ -50,7 +52,7 @@ class PluginSet(object):
|
|||
}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception('Received exception when loading {0} plugins\n{1}'.format(module_name, e.message))
|
||||
logger.exception('Received exception when loading {0} plugins\n{1}'.format(module_name, e))
|
||||
plugin_manager.destroy()
|
||||
return plugins
|
||||
|
||||
|
@ -84,7 +86,7 @@ class PluginSet(object):
|
|||
try:
|
||||
(message, metadata) = self.send_message_to_plugin(plugin_class=plugin['plugin_class'], message=message, metadata=metadata)
|
||||
except Exception as e:
|
||||
logger.exception('Received exception in {0}: message: {1}\n{2}'.format(plugin['plugin_class'], message, e.message))
|
||||
logger.exception('Received exception in {0}: message: {1}\n{2}'.format(plugin['plugin_class'], message, e))
|
||||
if message is None:
|
||||
return (message, metadata)
|
||||
return (message, metadata)
|
||||
|
|
|
@ -17,14 +17,13 @@ def AggregatedResults(input_results):
|
|||
for hit in input_results.hits:
|
||||
hit_dict = {
|
||||
'_id': hit.meta.id,
|
||||
'_type': hit.meta.doc_type,
|
||||
'_index': hit.meta.index,
|
||||
'_score': hit.meta.score,
|
||||
'_source': hit.to_dict()
|
||||
}
|
||||
converted_results['hits'].append(hit_dict)
|
||||
|
||||
for agg_name, aggregation in input_results.aggregations.to_dict().iteritems():
|
||||
for agg_name, aggregation in input_results.aggregations.to_dict().items():
|
||||
aggregation_dict = {
|
||||
'terms': []
|
||||
}
|
||||
|
|
|
@ -11,8 +11,8 @@ from mozdef_util.utilities.toUTC import toUTC
|
|||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
|
||||
from range_match import RangeMatch
|
||||
from boolean_match import BooleanMatch
|
||||
from .range_match import RangeMatch
|
||||
from .boolean_match import BooleanMatch
|
||||
|
||||
|
||||
class SearchQuery(object):
|
||||
|
@ -46,7 +46,7 @@ class SearchQuery(object):
|
|||
def add_aggregation(self, input_obj):
|
||||
self.append_to_array(self.aggregation, input_obj)
|
||||
|
||||
def execute(self, elasticsearch_client, indices=['events-*'], size=1000, request_timeout=30):
|
||||
def execute(self, elasticsearch_client, indices=['events', 'events-previous'], size=1000, request_timeout=30):
|
||||
if self.must == [] and self.must_not == [] and self.should == [] and self.aggregation == []:
|
||||
raise AttributeError('Must define a must, must_not, should query, or aggregation')
|
||||
|
||||
|
|
|
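With this change, queries default to the 'events' and 'events-previous' aliases rather than the events-* wildcard. Typical usage of the query model (the minutes argument is assumed from how alerts use SearchQuery elsewhere in the tree)::

    from mozdef_util.elasticsearch_client import ElasticsearchClient
    from mozdef_util.query_models import SearchQuery, TermMatch

    client = ElasticsearchClient('http://localhost:9200')

    query = SearchQuery(minutes=15)               # look back 15 minutes
    query.add_must(TermMatch('category', 'bro'))
    results = query.execute(client)               # hits events and events-previous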
@ -16,7 +16,6 @@ def SimpleResults(input_results):
|
|||
for hit in input_results.hits:
|
||||
hit_dict = {
|
||||
'_id': hit.meta.id,
|
||||
'_type': hit.meta.doc_type,
|
||||
'_index': hit.meta.index,
|
||||
'_score': hit.meta.score,
|
||||
'_source': hit.to_dict()
|
||||
|
|
|
@ -3,28 +3,24 @@ def dict2List(inObj):
|
|||
yield the dict keys and values as a flattened sequence
|
||||
'''
|
||||
if isinstance(inObj, dict):
|
||||
for key, value in inObj.iteritems():
|
||||
for key, value in inObj.items():
|
||||
if isinstance(value, dict):
|
||||
for d in dict2List(value):
|
||||
yield d
|
||||
elif isinstance(value, list):
|
||||
yield key.encode('ascii', 'ignore').lower()
|
||||
yield key.lower()
|
||||
for l in dict2List(value):
|
||||
yield l
|
||||
else:
|
||||
yield key.encode('ascii', 'ignore').lower()
|
||||
yield key.lower()
|
||||
if isinstance(value, str):
|
||||
yield value.lower()
|
||||
elif isinstance(value, unicode):
|
||||
yield value.encode('ascii', 'ignore').lower()
|
||||
else:
|
||||
yield value
|
||||
elif isinstance(inObj, list):
|
||||
for v in inObj:
|
||||
if isinstance(v, str):
|
||||
yield v.lower()
|
||||
elif isinstance(v, unicode):
|
||||
yield v.encode('ascii', 'ignore').lower()
|
||||
elif isinstance(v, list):
|
||||
for l in dict2List(v):
|
||||
yield l
|
||||
|
|
|
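A quick check of the string-type unification this hunk relies on: Python 3 has a single text type, so the separate `unicode` branch, and the lossy `.encode('ascii', 'ignore')` step, can simply go away.

```python
# Non-ASCII text survives intact without any encode step on Python 3.
for v in ['Hello', 'Héllo']:
    print(v.lower())
print(isinstance('Héllo', str))   # True on Py3; this value was unicode on Py2
```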
@@ -10,7 +10,7 @@ import sys
 from datetime import datetime
 from logging.handlers import SysLogHandler

-from toUTC import toUTC
+from .toUTC import toUTC


 def loggerTimeStamp(self, record, datefmt=None):

@@ -30,7 +30,7 @@ def toUTC(suspectedDate):
         # epoch? but seconds/milliseconds/nanoseconds (lookin at you heka)
         epochDivisor = int(str(1) + '0' * (len(str(suspectedDate)) % 10))
         objDate = datetime.fromtimestamp(float(suspectedDate / epochDivisor), LOCAL_TIMEZONE)
-    elif type(suspectedDate) in (str, unicode):
+    elif type(suspectedDate) is str:
         # try to parse float or negative number from string:
         objDate = None
         try:

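A worked example of the epoch normalization kept by this hunk: the divisor scales second, millisecond, and nanosecond epochs back down to seconds before `datetime.fromtimestamp()` is applied.

```python
for suspected in (1562592000, 1562592000000, 1562592000000000000):
    divisor = int(str(1) + '0' * (len(str(suspected)) % 10))
    print(suspected, '->', suspected / divisor)
# 1562592000          -> 1562592000.0  (seconds, divisor 1)
# 1562592000000       -> 1562592000.0  (milliseconds, divisor 1000)
# 1562592000000000000 -> 1562592000.0  (nanoseconds, divisor 1000000000)
```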
@@ -1,8 +1,4 @@
-def toUnicode(obj, encoding='utf-8'):
-    if type(obj) in [int, long, float, complex]:
-        # likely a number, convert it to string to get to unicode
+def toUnicode(obj):
+    if not isinstance(obj, str):
         obj = str(obj)
-    if isinstance(obj, basestring):
-        if not isinstance(obj, unicode):
-            obj = unicode(obj, encoding)
     return obj

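The simplified helper in one self-contained check: `str()` already produces text on Python 3, so a single coercion covers numbers and everything else, and the `long`/`basestring`/`unicode` cases vanish.

```python
def toUnicode(obj):
    if not isinstance(obj, str):
        obj = str(obj)
    return obj

print(toUnicode(42), toUnicode(3.14), toUnicode('déjà'))   # 42 3.14 déjà
```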
@@ -7,8 +7,8 @@ tox==3.5.2
 coverage==4.5.1
 Sphinx==1.8.1
 twine==1.12.1
-elasticsearch==5.5.2
-elasticsearch-dsl==5.4.0
+elasticsearch==6.3.1
+elasticsearch-dsl==6.3.1
 geoip2==2.5.0
 pytz==2017.3
 tzlocal==1.4

@@ -3,12 +3,15 @@

 """The setup script."""

+import os
 from setuptools import setup, find_packages

-with open('README.rst') as readme_file:
+readme_path = os.path.join(os.path.dirname(__file__), 'README.rst')
+with open(readme_path) as readme_file:
     readme = readme_file.read()

-with open('HISTORY.rst') as history_file:
+history_path = os.path.join(os.path.dirname(__file__), 'HISTORY.rst')
+with open(history_path) as history_file:
     history = history_file.read()

 requirements = [

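A sketch of the rationale for the path change: `pip install .` and CI runners often invoke setup.py from a different working directory, so cwd-relative `open()` calls can fail where `__file__`-relative ones do not.

```python
import os

here = os.path.dirname(os.path.abspath(__file__))
readme_path = os.path.join(here, 'README.rst')   # resolves regardless of cwd
```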
@@ -21,8 +24,8 @@ requirements = [
     'coverage>=4.5.1',
     'Sphinx>=1.8.1',
     'twine>=1.12.1',
-    'elasticsearch>=5.5.2',
-    'elasticsearch-dsl>=5.4.0',
+    'elasticsearch==6.3.1',
+    'elasticsearch-dsl==6.3.1',
     'geoip2>=2.5.0',
     'pytz>=2017.3',
     'tzlocal>=1.4',

@@ -56,6 +59,6 @@ setup(
     test_suite='tests',
     tests_require=[],
     url='https://github.com/mozilla/MozDef/tree/master/lib',
-    version='1.0.8',
+    version='3.0.1',
     zip_safe=False,
 )

@@ -12,11 +12,9 @@ import sys
 import socket
 from configlib import getConfig, OptionParser
 from datetime import datetime
-import boto.sts
-import boto.s3
-from boto.sqs.message import RawMessage
+import boto3
 import gzip
-from StringIO import StringIO
+from io import BytesIO
 import re
 import time
 import kombu

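The `StringIO` to `BytesIO` swap matters because S3 object bodies are bytes on Python 3 and `gzip.GzipFile` needs a binary file object. A self-contained round trip:

```python
import gzip
from io import BytesIO

compressed = gzip.compress(b'{"Records": []}')
print(gzip.GzipFile(fileobj=BytesIO(compressed)).read())   # b'{"Records": []}'
```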
@@ -29,9 +27,10 @@ from mozdef_util.utilities.logger import logger, initLogger
 from mozdef_util.utilities.to_unicode import toUnicode
 from mozdef_util.utilities.remove_at import removeAt

-from lib.aws import get_aws_credentials
-from lib.plugins import sendEventToPlugins, registerPlugins
-from lib.sqs import connect_sqs
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../"))
+from mq.lib.aws import get_aws_credentials
+from mq.lib.plugins import sendEventToPlugins, registerPlugins
+from mq.lib.sqs import connect_sqs


 CLOUDTRAIL_VERB_REGEX = re.compile(r'^([A-Z][^A-Z]*)')

@@ -44,90 +43,6 @@ except ImportError as e:
     hasUWSGI = False


-class RoleManager:
-    def __init__(self, region_name='us-east-1', aws_access_key_id=None, aws_secret_access_key=None):
-        self.aws_access_key_id = aws_access_key_id
-        self.aws_secret_access_key = aws_secret_access_key
-        self.credentials = {}
-        self.session_credentials = None
-        self.session_conn_sts = None
-        try:
-            self.local_conn_sts = boto.sts.connect_to_region(
-                **get_aws_credentials(
-                    region_name,
-                    self.aws_access_key_id,
-                    self.aws_secret_access_key))
-        except Exception, e:
-            logger.error("Unable to connect to STS due to exception %s" % e.message)
-            raise
-
-        if self.aws_access_key_id is not None or self.aws_secret_access_key is not None:
-            # We're using API credentials not an IAM Role
-            try:
-                if self.session_credentials is None or self.session_credentials.is_expired():
-                    self.session_credentials = self.local_conn_sts.get_session_token()
-            except Exception, e:
-                logger.error("Unable to get session token due to exception %s" % e.message)
-                raise
-            try:
-                creds = get_aws_credentials(
-                    region_name,
-                    self.session_credentials.access_key,
-                    self.session_credentials.secret_key,
-                    self.session_credentials.session_token) if self.session_credentials else {}
-                self.session_conn_sts = boto.sts.connect_to_region(**creds)
-            except Exception, e:
-                logger.error("Unable to connect to STS with session token due to exception %s" % e.message)
-                raise
-            self.conn_sts = self.session_conn_sts
-        else:
-            self.conn_sts = self.local_conn_sts
-
-    def assume_role(self,
-                    role_arn,
-                    role_session_name='unknown',
-                    policy=None):
-        '''Return a boto.sts.credential.Credential object given a role_arn.
-        First check if a Credential oject exists in the local self.credentials
-        cache that is not expired. If there isn't one, assume the role of role_arn
-        store the Credential in the credentials cache and return it'''
-        logger.debug("Connecting to sts")
-        if role_arn in self.credentials:
-            if not self.credentials[role_arn] or not self.credentials[role_arn].is_expired():
-                # Return the cached value if it's False (indicating a permissions issue) or if
-                # it hasn't expired.
-                return self.credentials[role_arn]
-        try:
-            self.credentials[role_arn] = self.conn_sts.assume_role(
-                role_arn=role_arn,
-                role_session_name=role_session_name,
-                policy=policy).credentials
-            logger.debug("Assumed new role with credential %s" % self.credentials[role_arn].to_dict())
-        except Exception, e:
-            print e
-            logger.error("Unable to assume role %s due to exception %s" % (role_arn, e.message))
-            self.credentials[role_arn] = False
-        return self.credentials[role_arn]
-
-    def get_credentials(self,
-                        role_arn,
-                        role_session_name='unknown',
-                        policy=None):
-        '''Assume the role of role_arn, and return a credential dictionary for that role'''
-        credential = self.assume_role(role_arn,
-                                      role_session_name,
-                                      policy)
-        return self.get_credential_arguments(credential)
-
-    def get_credential_arguments(self, credential):
-        '''Given a boto.sts.credential.Credential object, return a dictionary of get_credential_arguments
-        usable as kwargs with a boto connect method'''
-        return {
-            'aws_access_key_id': credential.access_key,
-            'aws_secret_access_key': credential.secret_key,
-            'security_token': credential.session_token} if credential else {}
-
-
 def keyMapping(aDict):
     '''map common key/fields to a normalized structure,
     explicitly typed when possible to avoid schema changes for upsteam consumers

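What replaces the deleted class (per the `authenticate()` hunk further down) is boto3's STS client, which does the assume-role dance directly and returns temporary credentials with an explicit expiration. A hedged sketch; the ARN below is a placeholder:

```python
import boto3

sts = boto3.client('sts')
response = sts.assume_role(
    RoleArn='arn:aws:iam::123456789012:role/example-cloudtrail-reader',
    RoleSessionName='MozDef-CloudTrail-Reader',
)
creds = response['Credentials']   # AccessKeyId, SecretAccessKey, SessionToken, Expiration
```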
@@ -161,76 +76,76 @@ def keyMapping(aDict):
     returndict['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
     returndict['mozdefhostname'] = options.mozdefhostname
     try:
-        for k, v in aDict.iteritems():
+        for k, v in aDict.items():
             k = removeAt(k).lower()

             if k == 'sourceip':
-                returndict[u'details']['sourceipaddress'] = v
+                returndict['details']['sourceipaddress'] = v

             elif k == 'sourceipaddress':
-                returndict[u'details']['sourceipaddress'] = v
+                returndict['details']['sourceipaddress'] = v

             elif k in ('facility', 'source'):
-                returndict[u'source'] = v
+                returndict['source'] = v

             elif k in ('eventsource'):
-                returndict[u'hostname'] = v
+                returndict['hostname'] = v

             elif k in ('message', 'summary'):
-                returndict[u'summary'] = toUnicode(v)
+                returndict['summary'] = toUnicode(v)

             elif k in ('payload') and 'summary' not in aDict:
                 # special case for heka if it sends payload as well as a summary, keep both but move payload to the details section.
-                returndict[u'summary'] = toUnicode(v)
+                returndict['summary'] = toUnicode(v)
             elif k in ('payload'):
-                returndict[u'details']['payload'] = toUnicode(v)
+                returndict['details']['payload'] = toUnicode(v)

             elif k in ('eventtime', 'timestamp', 'utctimestamp', 'date'):
-                returndict[u'utctimestamp'] = toUTC(v).isoformat()
-                returndict[u'timestamp'] = toUTC(v).isoformat()
+                returndict['utctimestamp'] = toUTC(v).isoformat()
+                returndict['timestamp'] = toUTC(v).isoformat()

             elif k in ('hostname', 'source_host', 'host'):
-                returndict[u'hostname'] = toUnicode(v)
+                returndict['hostname'] = toUnicode(v)

             elif k in ('tags'):
                 if 'tags' not in returndict:
-                    returndict[u'tags'] = []
+                    returndict['tags'] = []
                 if type(v) == list:
-                    returndict[u'tags'] += v
+                    returndict['tags'] += v
                 else:
                     if len(v) > 0:
-                        returndict[u'tags'].append(v)
+                        returndict['tags'].append(v)

             # nxlog keeps the severity name in syslogseverity,everyone else should use severity or level.
             elif k in ('syslogseverity', 'severity', 'severityvalue', 'level', 'priority'):
-                returndict[u'severity'] = toUnicode(v).upper()
+                returndict['severity'] = toUnicode(v).upper()

             elif k in ('facility', 'syslogfacility'):
-                returndict[u'facility'] = toUnicode(v)
+                returndict['facility'] = toUnicode(v)

             elif k in ('pid', 'processid'):
-                returndict[u'processid'] = toUnicode(v)
+                returndict['processid'] = toUnicode(v)

             # nxlog sets sourcename to the processname (i.e. sshd), everyone else should call it process name or pname
             elif k in ('pname', 'processname', 'sourcename', 'program'):
-                returndict[u'processname'] = toUnicode(v)
+                returndict['processname'] = toUnicode(v)

             # the file, or source
             elif k in ('path', 'logger', 'file'):
-                returndict[u'eventsource'] = toUnicode(v)
+                returndict['eventsource'] = toUnicode(v)

             elif k in ('type', 'eventtype', 'category'):
-                returndict[u'category'] = toUnicode(v)
-                returndict[u'type'] = 'cloudtrail'
+                returndict['category'] = toUnicode(v)
+                returndict['type'] = 'cloudtrail'

             # custom fields as a list/array
             elif k in ('fields', 'details'):
                 if type(v) is not dict:
-                    returndict[u'details'][u'message'] = v
+                    returndict['details']['message'] = v
                 else:
                     if len(v) > 0:
-                        for details_key, details_value in v.iteritems():
-                            returndict[u'details'][details_key] = details_value
+                        for details_key, details_value in v.items():
+                            returndict['details'][details_key] = details_value

             # custom fields/details as a one off, not in an array
             # i.e. fields.something=value or details.something=value

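The bulk of this hunk is stripping `u''` prefixes, which are a no-op on Python 3: every string literal is already unicode text, so `u'details'` and `'details'` are the identical dictionary key.

```python
d = {u'details': {}}
print('details' in d)            # True
print(u'details' == 'details')   # True
```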
@@ -240,20 +155,20 @@ def keyMapping(aDict):
             newName = newName.lower().replace('details.', '')
             # add a dict to hold the details if it doesn't exist
             if 'details' not in returndict:
-                returndict[u'details'] = dict()
+                returndict['details'] = dict()
             # add field with a special case for shippers that
             # don't send details
             # in an array as int/floats/strings
             # we let them dictate the data type with field_datatype
             # convention
             if newName.endswith('_int'):
-                returndict[u'details'][unicode(newName)] = int(v)
+                returndict['details'][str(newName)] = int(v)
             elif newName.endswith('_float'):
-                returndict[u'details'][unicode(newName)] = float(v)
+                returndict['details'][str(newName)] = float(v)
             else:
-                returndict[u'details'][unicode(newName)] = toUnicode(v)
+                returndict['details'][str(newName)] = toUnicode(v)
         else:
-            returndict[u'details'][k] = v
+            returndict['details'][k] = v

     if 'utctimestamp' not in returndict:
         # default in case we don't find a reasonable timestamp

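A worked example of the field_datatype suffix convention preserved above: shippers pick the stored type by naming the field, and keyMapping coerces accordingly (the sample fields are made up).

```python
for name, value in [('count_int', '7'), ('ratio_float', '0.25'), ('note', 42)]:
    if name.endswith('_int'):
        coerced = int(value)
    elif name.endswith('_float'):
        coerced = float(value)
    else:
        coerced = str(value)
    print(name, repr(coerced))   # count_int 7, ratio_float 0.25, note '42'
```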
@@ -277,14 +192,10 @@ def esConnect():

 class taskConsumer(object):

-    def __init__(self, mqConnection, taskQueue, esConnection):
-        self.connection = mqConnection
+    def __init__(self, queue, esConnection):
+        self.sqs_queue = queue
         self.esConnection = esConnection
-        self.taskQueue = taskQueue
-        self.s3_connection = None
-        # This value controls how long we sleep
-        # between reauthenticating and getting a new set of creds
-        self.flush_wait_time = 1800
+        self.s3_client = None
         self.authenticate()

         # Run thread to flush s3 credentials

@@ -293,16 +204,35 @@ class taskConsumer(object):
         reauthenticate_thread.start()

     def authenticate(self):
+        # This value controls how long we sleep
+        # between reauthenticating and getting a new set of creds
+        # eventually this gets set by aws response
+        self.flush_wait_time = 1800
         if options.cloudtrail_arn not in ['<cloudtrail_arn>', 'cloudtrail_arn']:
-            role_manager = RoleManager(**get_aws_credentials(
-                options.region,
-                options.accesskey,
-                options.secretkey))
-            role_manager.assume_role(options.cloudtrail_arn)
-            role_creds = role_manager.get_credentials(options.cloudtrail_arn)
+            client = boto3.client(
+                'sts',
+                aws_access_key_id=options.accesskey,
+                aws_secret_access_key=options.secretkey
+            )
+            response = client.assume_role(
+                RoleArn=options.cloudtrail_arn,
+                RoleSessionName='MozDef-CloudTrail-Reader',
+            )
+            role_creds = {
+                'aws_access_key_id': response['Credentials']['AccessKeyId'],
+                'aws_secret_access_key': response['Credentials']['SecretAccessKey'],
+                'aws_session_token': response['Credentials']['SessionToken']
+            }
+            current_time = toUTC(datetime.now())
+            # Let's remove 3 seconds from the flush wait time just in case
+            self.flush_wait_time = (response['Credentials']['Expiration'] - current_time).seconds - 3
        else:
            role_creds = {}
-        self.s3_connection = boto.connect_s3(**role_creds)
+        self.s3_client = boto3.client(
+            's3',
+            region_name=options.region,
+            **role_creds
+        )

     def reauth_timer(self):
         while True:

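The refresh-timing arithmetic from the hunk above in isolation: boto3 returns a timezone-aware `Expiration`, so subtracting "now" gives the remaining credential lifetime. The stand-in expiration below mimics `response['Credentials']['Expiration']`.

```python
from datetime import datetime, timedelta, timezone

expiration = datetime.now(timezone.utc) + timedelta(hours=1)
flush_wait_time = (expiration - datetime.now(timezone.utc)).seconds - 3
print(flush_wait_time)   # ~3596: re-authenticate shortly before the creds expire
```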
@@ -310,21 +240,19 @@ class taskConsumer(object):
             logger.debug('Recycling credentials and reassuming role')
             self.authenticate()

-    def process_file(self, s3file):
-        logger.debug("Fetching %s" % s3file.name)
-        compressedData = s3file.read()
-        databuf = StringIO(compressedData)
+    def parse_s3_file(self, s3_obj):
+        compressed_data = s3_obj['Body'].read()
+        databuf = BytesIO(compressed_data)
         gzip_file = gzip.GzipFile(fileobj=databuf)
         json_logs = json.loads(gzip_file.read())
         return json_logs['Records']

     def run(self):
-        self.taskQueue.set_message_class(RawMessage)
         while True:
             try:
-                records = self.taskQueue.get_messages(options.prefetch)
+                records = self.sqs_queue.receive_messages(MaxNumberOfMessages=options.prefetch)
                 for msg in records:
-                    body_message = msg.get_body()
+                    body_message = msg.body
                     event = json.loads(body_message)

                     if not event['Message']:

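The boto-to-boto3 SQS surface in miniature: boto3 messages carry their payload in `.body` and delete themselves, so `RawMessage` and `set_message_class()` go away. Queue name and region below are placeholders.

```python
import boto3

queue = boto3.resource('sqs', region_name='us-west-2').get_queue_by_name(QueueName='example-events')
for msg in queue.receive_messages(MaxNumberOfMessages=10):
    print(msg.body)
    msg.delete()   # replaces queue.delete_message(msg)
```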
@@ -346,24 +274,23 @@ class taskConsumer(object):
                     s3_log_files = message_json['s3ObjectKey']
                     for log_file in s3_log_files:
                         logger.debug('Downloading and parsing ' + log_file)
-                        bucket = self.s3_connection.get_bucket(message_json['s3Bucket'])
-
-                        log_file_lookup = bucket.lookup(log_file)
-                        events = self.process_file(log_file_lookup)
+                        s3_obj = self.s3_client.get_object(Bucket=message_json['s3Bucket'], Key=log_file)
+                        events = self.parse_s3_file(s3_obj)
                         for event in events:
                             self.on_message(event)

-                    self.taskQueue.delete_message(msg)
+                    msg.delete()
             except (SSLEOFError, SSLError, socket.error):
                 logger.info('Received network related error...reconnecting')
                 time.sleep(5)
-                self.connection, self.taskQueue = connect_sqs(
+                self.sqs_queue = connect_sqs(
                     task_exchange=options.taskexchange,
                     **get_aws_credentials(
                         options.region,
                         options.accesskey,
-                        options.secretkey))
-                self.taskQueue.set_message_class(RawMessage)
+                        options.secretkey)
+                )
                 time.sleep(options.sleep_time)

     def on_message(self, body):
         # print("RECEIVED MESSAGE: %r" % (body, ))

@@ -376,7 +303,7 @@ class taskConsumer(object):
         # just to be safe..check what we were sent.
         if isinstance(body, dict):
             bodyDict = body
-        elif isinstance(body, str) or isinstance(body, unicode):
+        elif isinstance(body, str):
             try:
                 bodyDict = json.loads(body) # lets assume it's json
             except ValueError as e:

@@ -455,14 +382,15 @@ def main():
         logger.error('Can only process SQS queues, terminating')
         sys.exit(1)

-    sqs_conn, eventTaskQueue = connect_sqs(
+    sqs_queue = connect_sqs(
         task_exchange=options.taskexchange,
         **get_aws_credentials(
             options.region,
             options.accesskey,
-            options.secretkey))
+            options.secretkey)
+    )
     # consume our queue
-    taskConsumer(sqs_conn, eventTaskQueue, es).run()
+    taskConsumer(sqs_queue, es).run()


 def initConfig():

@@ -503,6 +431,9 @@ def initConfig():
     # This is the full ARN that the s3 bucket lives under
     options.cloudtrail_arn = getConfig('cloudtrail_arn', 'cloudtrail_arn', options.configfile)

+    # How long to sleep between iterations of querying AWS
+    options.sleep_time = getConfig('sleep_time', 0.1, options.configfile)
+

 if __name__ == '__main__':
     # configure ourselves

@@ -9,6 +9,7 @@
 import json
+import kombu
 import sys
 import os
 import socket
 from configlib import getConfig, OptionParser
 from datetime import datetime

@@ -22,7 +23,8 @@ from mozdef_util.utilities.logger import logger, initLogger
 from mozdef_util.utilities.to_unicode import toUnicode
 from mozdef_util.utilities.remove_at import removeAt

-from lib.plugins import sendEventToPlugins, registerPlugins
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../"))
+from mq.lib.plugins import sendEventToPlugins, registerPlugins


 # running under uwsgi?

@@ -49,71 +51,71 @@ def keyMapping(aDict):
     # set the timestamp when we received it, i.e. now
     returndict['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
     returndict['mozdefhostname'] = options.mozdefhostname
-    returndict[u'details'] = {}
+    returndict['details'] = {}
     try:
-        for k, v in aDict.iteritems():
+        for k, v in aDict.items():
             k = removeAt(k).lower()

             if k == 'sourceip':
-                returndict[u'details']['eventsourceipaddress'] = v
+                returndict['details']['eventsourceipaddress'] = v

             if k in ('facility', 'source'):
-                returndict[u'source'] = v
+                returndict['source'] = v

             if k in ('message', 'summary'):
-                returndict[u'summary'] = toUnicode(v)
+                returndict['summary'] = toUnicode(v)

             if k in ('payload') and 'summary' not in aDict:
                 # special case for heka if it sends payload as well as a summary, keep both but move payload to the details section.
-                returndict[u'summary'] = toUnicode(v)
+                returndict['summary'] = toUnicode(v)
             elif k in ('payload'):
-                returndict[u'details']['payload'] = toUnicode(v)
+                returndict['details']['payload'] = toUnicode(v)

             if k in ('eventtime', 'timestamp', 'utctimestamp', 'date'):
-                returndict[u'utctimestamp'] = toUTC(v).isoformat()
-                returndict[u'timestamp'] = toUTC(v).isoformat()
+                returndict['utctimestamp'] = toUTC(v).isoformat()
+                returndict['timestamp'] = toUTC(v).isoformat()

             if k in ('hostname', 'source_host', 'host'):
-                returndict[u'hostname'] = toUnicode(v)
+                returndict['hostname'] = toUnicode(v)

             if k in ('tags'):
                 if 'tags' not in returndict:
-                    returndict[u'tags'] = []
+                    returndict['tags'] = []
                 if type(v) == list:
-                    returndict[u'tags'] += v
+                    returndict['tags'] += v
                 else:
                     if len(v) > 0:
-                        returndict[u'tags'].append(v)
+                        returndict['tags'].append(v)

             # nxlog keeps the severity name in syslogseverity,everyone else should use severity or level.
             if k in ('syslogseverity', 'severity', 'severityvalue', 'level', 'priority'):
-                returndict[u'severity'] = toUnicode(v).upper()
+                returndict['severity'] = toUnicode(v).upper()

             if k in ('facility', 'syslogfacility'):
-                returndict[u'facility'] = toUnicode(v)
+                returndict['facility'] = toUnicode(v)

             if k in ('pid', 'processid'):
-                returndict[u'processid'] = toUnicode(v)
+                returndict['processid'] = toUnicode(v)

             # nxlog sets sourcename to the processname (i.e. sshd), everyone else should call it process name or pname
             if k in ('pname', 'processname', 'sourcename', 'program'):
-                returndict[u'processname'] = toUnicode(v)
+                returndict['processname'] = toUnicode(v)

             # the file, or source
             if k in ('path', 'logger', 'file'):
-                returndict[u'eventsource'] = toUnicode(v)
+                returndict['eventsource'] = toUnicode(v)

             if k in ('type', 'eventtype', 'category'):
-                returndict[u'category'] = toUnicode(v)
+                returndict['category'] = toUnicode(v)

             # custom fields as a list/array
             if k in ('fields', 'details'):
                 if type(v) is not dict:
-                    returndict[u'details'][u'message'] = v
+                    returndict['details']['message'] = v
                 else:
                     if len(v) > 0:
-                        for details_key, details_value in v.iteritems():
-                            returndict[u'details'][details_key] = details_value
+                        for details_key, details_value in v.items():
+                            returndict['details'][details_key] = details_value

             # custom fields/details as a one off, not in an array
             # i.e. fields.something=value or details.something=value

@@ -127,17 +129,17 @@ def keyMapping(aDict):
             # we let them dictate the data type with field_datatype
             # convention
             if newName.endswith('_int'):
-                returndict[u'details'][unicode(newName)] = int(v)
+                returndict['details'][str(newName)] = int(v)
             elif newName.endswith('_float'):
-                returndict[u'details'][unicode(newName)] = float(v)
+                returndict['details'][str(newName)] = float(v)
             else:
-                returndict[u'details'][unicode(newName)] = toUnicode(v)
+                returndict['details'][str(newName)] = toUnicode(v)

     # nxlog windows log handling
     if 'Domain' in aDict and 'SourceModuleType' in aDict:
         # nxlog parses all windows event fields very well
         # copy all fields to details
-        returndict[u'details'][k] = v
+        returndict['details'][k] = v

     if 'utctimestamp' not in returndict:
         # default in case we don't find a reasonable timestamp

@@ -190,7 +192,7 @@ class taskConsumer(ConsumerMixin):
         # just to be safe..check what we were sent.
         if isinstance(body, dict):
             bodyDict = body
-        elif isinstance(body, str) or isinstance(body, unicode):
+        elif isinstance(body, str):
             try:
                 bodyDict = json.loads(body) # lets assume it's json
             except ValueError as e:

Some files were not shown because too many files changed in this diff.