diff --git a/alerts/celeryconfig.py b/alerts/celeryconfig.py index cdf396c3..60db6659 100644 --- a/alerts/celeryconfig.py +++ b/alerts/celeryconfig.py @@ -72,7 +72,7 @@ for alert_namespace in CELERYBEAT_SCHEDULE: alert_class = getattr(alert_module, alert_classname) app.register_task(alert_class()) except ImportError as e: - print("Error importing {}").format(alert_namespace) + print("Error importing {}".format(alert_namespace)) print(e) pass except Exception as e: diff --git a/benchmarking/workers/json2Mozdef.py b/benchmarking/workers/json2Mozdef.py index c531b629..eaf5ed30 100755 --- a/benchmarking/workers/json2Mozdef.py +++ b/benchmarking/workers/json2Mozdef.py @@ -15,7 +15,7 @@ from optparse import OptionParser from requests_futures.sessions import FuturesSession from multiprocessing import Process, Queue import logging -from Queue import Empty +from queue import Empty from requests.packages.urllib3.exceptions import ClosedPoolError import time diff --git a/cron/update_geolite_db.py b/cron/update_geolite_db.py index a2274b37..a04d3474 100755 --- a/cron/update_geolite_db.py +++ b/cron/update_geolite_db.py @@ -9,7 +9,9 @@ import sys import os from configlib import getConfig, OptionParser -import urllib2 +import urllib.request +import urllib.error +import urllib.parse import tempfile import tarfile @@ -19,7 +21,7 @@ from mozdef_util.utilities.logger import logger, initLogger def fetch_db_data(db_download_location): logger.debug('Fetching db data from ' + db_download_location) - response = urllib2.urlopen(db_download_location) + response = urllib.request.urlopen(db_download_location) db_raw_data = response.read() with tempfile.NamedTemporaryFile(mode='wb') as temp: logger.debug('Writing compressed gzip to temp file: ' + temp.name) diff --git a/examples/demo/sampleData2MozDef.py b/examples/demo/sampleData2MozDef.py index 761eb2b4..d1150b1b 100755 --- a/examples/demo/sampleData2MozDef.py +++ b/examples/demo/sampleData2MozDef.py @@ -13,7 +13,7 @@ import json from 
multiprocessing import Process, Queue import random import logging -from Queue import Empty +from queue import Empty import requests import time from configlib import getConfig, OptionParser @@ -66,7 +66,7 @@ def postLogs(logcache): a=httpsession.get_adapter(url) a.max_retries=3 r=httpsession.post(url,data=postdata) - print(r, postdata) + print(r) # append to posts if this is long running and you want # events to try again later. # posts.append((r,postdata,url)) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 1625d0f4..2c458f1a 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -16,7 +16,7 @@ import boto.sts import boto.s3 from boto.sqs.message import RawMessage import gzip -from StringIO import StringIO +from io import BytesIO as StringIO import re import time import kombu diff --git a/mq/esworker_eventtask.py b/mq/esworker_eventtask.py index a142a07f..f5c99233 100755 --- a/mq/esworker_eventtask.py +++ b/mq/esworker_eventtask.py @@ -112,7 +112,7 @@ def keyMapping(aDict): returndict['details']['message'] = v else: if len(v) > 0: - for details_key, details_value in v.iteritems(): + for details_key, details_value in v.items(): returndict['details'][details_key] = returndict['details'][details_key] = details_value # custom fields/details as a one off, not in an array