Fixup remaining python3 leftovers

This commit is contained in:
Brandon Myers 2019-06-29 15:45:51 -05:00
Parent 9a075dcbe0
Commit ed1d4aa8cf
No key found matching this signature
GPG key ID: 8AA79AD83045BBC7
6 changed files: 10 additions and 8 deletions

View file

@@ -72,7 +72,7 @@ for alert_namespace in CELERYBEAT_SCHEDULE:
         alert_class = getattr(alert_module, alert_classname)
         app.register_task(alert_class())
     except ImportError as e:
-        print("Error importing {}").format(alert_namespace)
+        print("Error importing {}".format(alert_namespace))
         print(e)
         pass
     except Exception as e:
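
This fix matters because print is a function in Python 3: the old line called .format() on the return value of print(), which is None, and would raise an AttributeError at runtime (under Python 2 the same line happened to work, since the parenthesized string was formatted before the print statement saw it). A minimal sketch of both forms, using a hypothetical namespace string rather than a real CELERYBEAT_SCHEDULE entry:

alert_namespace = "alerts.example_alert"  # hypothetical value, not from the commit

# Broken in Python 3: print() returns None, so the trailing .format() raises
# AttributeError: 'NoneType' object has no attribute 'format'
# print("Error importing {}").format(alert_namespace)

# Fixed: build the message first, then hand the finished string to print()
print("Error importing {}".format(alert_namespace))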

View file

@@ -15,7 +15,7 @@ from optparse import OptionParser
 from requests_futures.sessions import FuturesSession
 from multiprocessing import Process, Queue
 import logging
-from Queue import Empty
+from queue import Empty
 from requests.packages.urllib3.exceptions import ClosedPoolError
 import time
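
Python 3 renamed the Queue module to the lowercase queue; the Empty exception it provides is what multiprocessing.Queue.get() raises when nothing becomes available before the timeout. A small standalone sketch of that pairing, not taken from this file:

from multiprocessing import Queue
from queue import Empty  # Python 3 spelling; Python 2 used "from Queue import Empty"

q = Queue()
try:
    q.get(timeout=0.1)  # nothing was put on the queue, so this times out
except Empty:
    print("queue was empty")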

View file

@@ -9,7 +9,9 @@ import sys
 import os
 from configlib import getConfig, OptionParser
-import urllib2
+import urllib.request
+import urllib.error
+import urllib.parse
 import tempfile
 import tarfile
@@ -19,7 +21,7 @@ from mozdef_util.utilities.logger import logger, initLogger
 def fetch_db_data(db_download_location):
     logger.debug('Fetching db data from ' + db_download_location)
-    response = urllib2.urlopen(db_download_location)
+    response = urllib.request.urlopen(db_download_location)
     db_raw_data = response.read()
     with tempfile.NamedTemporaryFile(mode='wb') as temp:
         logger.debug('Writing compressed gzip to temp file: ' + temp.name)
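
urllib2 no longer exists in Python 3; its functionality is split across urllib.request, urllib.error, and urllib.parse, which is why the single import becomes three here. A hedged sketch of the download pattern fetch_db_data uses, with a placeholder URL rather than the project's real download location:

import urllib.request
import urllib.error

db_download_location = "https://example.com/db.tar.gz"  # placeholder, not the real URL

try:
    response = urllib.request.urlopen(db_download_location)
    db_raw_data = response.read()  # bytes of the compressed archive
except urllib.error.URLError as e:
    print("Download failed: {}".format(e))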

View file

@@ -13,7 +13,7 @@ import json
 from multiprocessing import Process, Queue
 import random
 import logging
-from Queue import Empty
+from queue import Empty
 import requests
 import time
 from configlib import getConfig, OptionParser
@@ -66,7 +66,7 @@ def postLogs(logcache):
             a=httpsession.get_adapter(url)
             a.max_retries=3
             r=httpsession.post(url,data=postdata)
-            print(r, postdata)
+            print(r)
             # append to posts if this is long running and you want
             # events to try again later.
             # posts.append((r,postdata,url))
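
Beyond trimming the output, this line reads differently across versions: in Python 2 (without the print_function future import) print(r, postdata) prints a two-element tuple, while in Python 3 print is a function and writes its arguments space-separated. A tiny Python 3 illustration with stand-in values:

r = "<Response [200]>"             # stand-in for the requests response object
postdata = '{"category": "test"}'  # stand-in for the JSON payload

print(r, postdata)  # Python 3: both values, space-separated
print(r)            # the form the commit keeps: response only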

View file

@@ -16,7 +16,7 @@ import boto.sts
 import boto.s3
 from boto.sqs.message import RawMessage
 import gzip
-from StringIO import StringIO
+from io import StringIO
 import re
 import time
 import kombu
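
Python 3 moved StringIO into the io module, hence the new import path. One caveat worth noting: io.StringIO holds text only, so code that wraps gzip-compressed bytes (as the gzip import here suggests) usually wants io.BytesIO instead. A small sketch of unpacking gzipped bytes in Python 3, not taken from this file:

import gzip
import io

compressed = gzip.compress(b"hello from a gzipped payload")

# Compressed data is bytes, so BytesIO is the right wrapper here;
# io.StringIO would reject the bytes with a TypeError.
with gzip.GzipFile(fileobj=io.BytesIO(compressed)) as gz:
    print(gz.read().decode("utf-8"))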

View file

@@ -112,7 +112,7 @@ def keyMapping(aDict):
             returndict['details']['message'] = v
         else:
             if len(v) > 0:
-                for details_key, details_value in v.iteritems():
+                for details_key, details_value in v.items():
                     returndict['details'][details_key] = details_value
         # custom fields/details as a one off, not in an array
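
dict.iteritems() was removed in Python 3; dict.items() returns a view object that iterates the same way. A minimal sketch mirroring the loop above, with made-up detail values rather than real event data:

returndict = {'details': {}}
v = {'sourceipaddress': '10.0.0.1', 'action': 'login'}  # hypothetical details, not from the commit

# Python 3: .items() replaces the removed .iteritems()
for details_key, details_value in v.items():
    returndict['details'][details_key] = details_value

print(returndict)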