Bug 1059814 - Non-whitespace pep8 fixes using autopep8 aggressive mode

Generated using:
autopep8 --in-place --recursive --aggressive --aggressive
--max-line-length 999 --exclude='.git,__pycache__,.vagrant,build,vendor,0001_initial.py,models.py,test_note_api.py,test_bug_job_map_api.py' .

autopep8's aggressive mode, unlike standard mode, makes non-whitespace
changes. It also uses lib2to3 to correct deprecated code (W690), some of
which aren't pep8 failures. Some of these changes are more dubious, but
rather than disable W690 completely, I've just excluded the files where
the unwanted changes would have been made, so we can benefit from the
rest.
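
To make the scope concrete, the rewrites aggressive mode produces in this commit are of the following kinds. This is a composite sketch assembled from the hunks below, not a quote of any single hunk; the trailing comments are annotations, not part of the diff:

-    except OSError, e:                          # Python-2-only except syntax
+    except OSError as e:
-    if type(request_times) == dict:             # comparison via type()
+    if isinstance(request_times, dict):
-    if not search_term in terms_requested:      # E713 negative membership test
+    if search_term not in terms_requested:
-    assert data.has_key('id')                   # deprecated dict.has_key() (W690)
+    assert 'id' in data
-    key=lambda (k, v): (v['first_seen'], k)     # tuple parameters (W690)
+    key=lambda k_v: (k_v[1]['first_seen'], k_v[0])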
Ed Morley 2015-02-26 15:16:02 +00:00
Parent d90a2e1875
Commit 30ad99c7c0
18 changed files with 84 additions and 74 deletions

View file

@@ -60,6 +60,7 @@ def pytest_sessionstart(session):
     settings.PULSE_URI = settings.BROKER_URL
     settings.PULSE_EXCHANGE_NAMESPACE = 'test'

+
 def pytest_sessionfinish(session):
     """Tear down the test environment, including databases."""
     session.django_runner.teardown_test_environment()
@@ -155,7 +156,7 @@ def add_test_procs_file(dhub, key, filename):
     )
     del dhub.procs[key]
     proclist = dhub.data_sources[key]["procs"]
-    if not test_proc_file in proclist:
+    if test_proc_file not in proclist:
         proclist.append(test_proc_file)
     dhub.data_sources[key]["procs"] = proclist
     dhub.load_procs(key)
@@ -296,6 +297,7 @@ def mock_message_broker(monkeypatch):
     from django.conf import settings
     monkeypatch.setattr(settings, 'BROKER_URL', 'memory://')

+
 @pytest.fixture
 def resultset_with_three_jobs(jm, sample_data, sample_resultset):
     """
@@ -328,7 +330,6 @@ def resultset_with_three_jobs(jm, sample_data, sample_resultset):
     return resultset_creation['inserted_result_set_ids'][0]


-
 @pytest.fixture
 def eleven_jobs_stored(jm, sample_data, sample_resultset):
     """stores a list of 11 job samples"""
@@ -419,6 +420,7 @@ def activate_responses(request):
     request.addfinalizer(fin)

+
 def pulse_consumer(exchange, request):
     from django.conf import settings
@@ -430,19 +432,19 @@ def pulse_consumer(exchange, request):
     connection = kombu.Connection(settings.PULSE_URI)

     exchange = kombu.Exchange(
-        name = exchange_name,
+        name=exchange_name,
         type='topic'
     )

     queue = kombu.Queue(
         no_ack=True,
         exchange=exchange,  # Exchange name
         routing_key='#',  # Bind to all messages
         auto_delete=True,  # Delete after each test
         exclusive=False)  # Disallow multiple consumers

     simpleQueue = connection.SimpleQueue(
-        name = queue,
+        name=queue,
         channel=connection,
         no_ack=True)
@@ -452,10 +454,12 @@ def pulse_consumer(exchange, request):
     request.addfinalizer(fin)
     return simpleQueue

+
 @pytest.fixture
 def pulse_resultset_consumer(request):
     return pulse_consumer('new-result-set', request)

+
 @pytest.fixture
 def pulse_action_consumer(request):
     return pulse_consumer('job-actions', request)

View file

@@ -8,6 +8,7 @@ from django.contrib.auth.models import User
 import json

+
 def test_job_list(webapp, eleven_jobs_processed, jm):
     """
     test retrieving a list of ten json blobs from the jobs-list
@@ -131,8 +132,9 @@ def test_job_retrigger_unauthorized(webapp, eleven_jobs_processed, jm):
                   kwargs={"project": jm.project, "pk": job["id"]})
     webapp.post(url, status=403)

+
 def test_job_retrigger_authorized(webapp, eleven_jobs_processed, jm,
                                   pulse_action_consumer):
     """
     Validate that only authenticated users can hit this endpoint.
     """
@@ -155,8 +157,9 @@ def test_job_retrigger_authorized(webapp, eleven_jobs_processed, jm,
     assert content['requester'] == email
     user.delete()

+
 def test_job_cancel_authorized(webapp, eleven_jobs_processed, jm,
                                pulse_action_consumer):
     """
     Validate that only authenticated users can hit this endpoint.
     """
@@ -179,6 +182,7 @@ def test_job_cancel_authorized(webapp, eleven_jobs_processed, jm,
     assert content['requester'] == email
     user.delete()

+
 def test_job_detail_bad_project(webapp, eleven_jobs_processed, jm):
     """
     test retrieving a single job from the jobs-detail

View file

@@ -285,6 +285,7 @@ def test_resultset_with_bad_key(sample_resultset, jm, initial_data):
     assert resp.json['response'] == "access_denied"
     assert resp.json['detail'] == "oauth_consumer_key does not match project, {0}, credentials".format(jm.project)

+
 def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consumer):
     """
     Issue cancellation of a resultset with three unfinished jobs.
@@ -300,7 +301,7 @@ def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consum
         assert job['state'] == 'pending'

     url = reverse("resultset-cancel-all",
-                  kwargs={"project": jm.project, "pk": resultset_with_three_jobs })
+                  kwargs={"project": jm.project, "pk": resultset_with_three_jobs})
     resp = client.post(url)

     # Ensure all jobs are pending..
@@ -316,4 +317,4 @@ def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consum
     assert content['action'] == 'cancel'
     assert content['project'] == jm.project

-    user.delete();
+    user.delete()

View file

@@ -64,7 +64,7 @@ class Builds4hTransformerMixin(object):
         request_ids_str = ",".join(map(str, request_ids))
         request_time_list = []

-        if type(request_times) == dict:
+        if isinstance(request_times, dict):
             for request_id in request_ids:
                 request_time_list.append(
                     request_times[str(request_id)])
@@ -79,7 +79,7 @@ class Builds4hTransformerMixin(object):
             # coallesced job detected, generate the coalesced
             # job guids
             for index, r_id in enumerate(request_ids):
-                #skip if buildbot doesn't have a matching number of ids and times
+                # skip if buildbot doesn't have a matching number of ids and times
                 if len(request_time_list) > index:
                     job_guid_data['coalesced'].append(
                         common.generate_job_guid(
@@ -848,7 +848,7 @@ class Builds4hAnalyzer(JsonExtractorMixin, Builds4hTransformerMixin):
         # Write out display report
         for k, v in sorted(
                 self.report_obj['analyzers'][analyzer]['data'].iteritems(),
-                key=lambda (k, v): (v['first_seen'], k)):
+                key=lambda k_v: (k_v[1]['first_seen'], k_v[0])):
             if k in self.blacklist:
                 continue

View file

@@ -38,7 +38,7 @@ class Daemon(object):
             if pid > 0:
                 # exit first parent
                 sys.exit(0)
-        except OSError, e:
+        except OSError as e:
             sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
             sys.exit(1)
@@ -53,7 +53,7 @@ class Daemon(object):
             if pid > 0:
                 # exit from second parent
                 sys.exit(0)
-        except OSError, e:
+        except OSError as e:
             sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
             sys.exit(1)
@@ -118,10 +118,10 @@ class Daemon(object):
         # Try killing the daemon process
         try:
-            while 1:
+            while True:
                 os.kill(pid, SIGTERM)
                 time.sleep(0.1)
-        except OSError, err:
+        except OSError as err:
             err = str(err)
             if err.find("No such process") > 0:
                 if os.path.exists(self.pidfile):

View file

@@ -53,7 +53,7 @@ class OAuthCredentials():
                 logger.error(msg)

-        except Exception, e:
+        except Exception as e:
             logger.error(e)
             raise e

View file

@@ -265,7 +265,7 @@ class PulseDataAdapter(object):
                 if cb:
                     cb(attr, pulse_value, data)
                 else:
-                    if (type(pulse_value) == list) and (len(pulse_value) > 0):
+                    if (isinstance(pulse_value, list)) and (len(pulse_value) > 0):
                         data[attr] = pulse_value[0]
                     else:
                         data[attr] = pulse_value
@@ -292,7 +292,7 @@ class PulseDataAdapter(object):
     def process_sourcestamp_changes_list(self, attr_table, pulse_data, data):
         """Process sourcestamp changes list"""
-        if (type(pulse_data) == list) and (len(pulse_data) > 0):
+        if (isinstance(pulse_data, list)) and (len(pulse_data) > 0):
             self.process_raw_data_dict(attr_table, pulse_data[0], data)

     def adapt_data(self, data):

View file

@@ -90,7 +90,7 @@ class HgPushlogProcess(HgPushlogTransformerMixin,
             extracted_content = self.extract(
                 source_url + "&fromchange=" + last_push
             )
-        except requests.exceptions.HTTPError, e:
+        except requests.exceptions.HTTPError as e:
             # in case of a 404 error, delete the cache key
             # and try it without any parameter
             if e.response.status_code == 404:

View file

@@ -19,7 +19,7 @@ def submit_star_comment(project, job_id, bug_id, submit_timestamp, who):
         req = OrangeFactorBugRequest(project, job_id, bug_id, submit_timestamp, who)
         req.generate_request_body()
         req.send_request()
-    except Exception, e:
+    except Exception as e:
         # Initially retry after 1 minute, then for each subsequent retry
         # lengthen the retry time by another minute.
         submit_star_comment.retry(exc=e, countdown=(1 + submit_star_comment.request.retries) * 60)
@@ -38,7 +38,7 @@ def submit_bug_comment(project, job_id, bug_id, who):
         req = BugzillaBugRequest(project, job_id, bug_id, who)
         req.generate_request_body()
         req.send_request()
-    except Exception, e:
+    except Exception as e:
         # Initially retry after 1 minute, then for each subsequent retry
         # lengthen the retry time by another minute.
         submit_bug_comment.retry(exc=e, countdown=(1 + submit_bug_comment.request.retries) * 60)

View file

@@ -75,7 +75,7 @@ def parse_log(project, job_log_url, job_guid, check_errors=False):
         logger.debug("Finished posting artifact for guid '%s'" % job_guid)
-    except Exception, e:
+    except Exception as e:
         # send an update to job_log_url
         # the job_log_url status changes from pending/running to failed
         logger.warn("Failed to download and/or parse artifact for guid '%s'" %

View file

@@ -169,7 +169,7 @@ def extract_log_artifacts(log_url, job_guid, check_errors):
             # collect open recent and all other bugs suggestions
             if search_term:
-                if not search_term in terms_requested:
+                if search_term not in terms_requested:
                     # retrieve the list of suggestions from the api
                     bugs = get_bugs_for_search_term(
                         search_term,
@@ -185,7 +185,7 @@ def extract_log_artifacts(log_url, job_guid, check_errors):
                 # the crash signature as search term
                 crash_signature = get_crash_signature(clean_line)
                 if crash_signature:
-                    if not crash_signature in terms_requested:
+                    if crash_signature not in terms_requested:
                         bugs = get_bugs_for_search_term(
                             crash_signature,
                             bugscache_uri

View file

@@ -250,11 +250,10 @@ class JobsModel(TreeherderModelBase):
         # Retrieve associated data in reference_data_signatures
         result = self.refdata_model.get_reference_data([signature])
         if result and signature in result:
-            return result[signature];
+            return result[signature]

         return None


-
     def get_job_list(self, offset, limit,
                      conditions=None, exclusion_profile=None):
         """
@@ -389,7 +388,6 @@ class JobsModel(TreeherderModelBase):
             routing_key='high_priority'
         )

-
     def retrigger(self, requester, job):
         """
         Issue a retrigger to the given job
@@ -2314,7 +2312,7 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""
             job_id = None
             job_guid = None

-            if type(artifact) is list:
+            if isinstance(artifact, list):
                 job_guid = artifact[0]
                 job_id = job_id_lookup.get(job_guid, {}).get('id', None)

View file

@@ -245,7 +245,7 @@ class RefDataManager(object):
         # No reference_data_name was provided use the signature
         # in it's place, in the case of buildbot this will be the
         # buildername
-        if name == None:
+        if name is None:
             name = signature

         placeholders = [name, signature]
@@ -1415,7 +1415,7 @@ class RefDataManager(object):
         if signatures:

-            reference_data_signatures_where_in_clause = [ ','.join( ['%s'] * len(signatures) ) ]
+            reference_data_signatures_where_in_clause = [','.join(['%s'] * len(signatures))]

             reference_data = self.execute(
                 proc="reference.selects.get_reference_data",

View file

@@ -33,26 +33,25 @@ class TreeherderPublisher(PulsePublisher):
     )

     job_action = Exchange(
-        exchange = "job-actions",
-        title = "Actions issued by jobs",
-        description = """
+        exchange="job-actions",
+        title="Actions issued by jobs",
+        description="""
             There are a number of actions which can be done to a job
             (retrigger/cancel) they are published on this exchange
         """,
-        routing_keys = [
+        routing_keys=[
             Key(
-                name = "build_system_type",
-                summary = "Build system which created job (i.e. buildbot)"
+                name="build_system_type",
+                summary="Build system which created job (i.e. buildbot)"
             ),
             Key(
-                name = "project",
-                summary = "Project (i.e. try) which this job belongs to"
+                name="project",
+                summary="Project (i.e. try) which this job belongs to"
             ),
             Key(
-                name = "action",
-                summary = "Type of action issued (i.e. cancel)"
+                name="action",
+                summary="Type of action issued (i.e. cancel)"
             )
         ],
-        schema = "https://treeherder.mozilla.org/schemas/v1/job-action-message.json#"
+        schema="https://treeherder.mozilla.org/schemas/v1/job-action-message.json#"
     )

View file

@@ -63,7 +63,7 @@ Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'
         try:
             rendered_sql = sql.format(engine=options['engine'])
             cursor.execute(rendered_sql)
-        except Exception, e:
+        except Exception as e:
             print "Error on sql execution:{0}".format(e)
         finally:
             cursor.close()

View file

@@ -30,7 +30,7 @@ def load_schemas(folder):
         # Read file and insert into schemas
         with open(os.path.join(folder, filename)) as f:
             data = json.load(f)
-            assert data.has_key('id'), "JSON schemas must have an 'id' property"
+            assert 'id' in data, "JSON schemas must have an 'id' property"
             schemas[data['id']] = data

     # Return schemas loaded
@@ -109,6 +109,7 @@ class Key(object):

+
 class PulsePublisher(object):
     def _generate_publish(self, name, exchange):
         # Create producer for the exchange
         exchange_path = "exchange/%s/%s%s" % (
@@ -117,18 +118,18 @@ class PulsePublisher(object):
             exchange.exchange
         )
         producer = kombu.Producer(
-            channel = self.connection,
-            exchange = kombu.Exchange(
-                name = exchange_path,
-                type = 'topic',
-                durable = True,
-                delivery_mode = 'persistent'
+            channel=self.connection,
+            exchange=kombu.Exchange(
+                name=exchange_path,
+                type='topic',
+                durable=True,
+                delivery_mode='persistent'
             ),
-            auto_declare = True
+            auto_declare=True
         )
         publish_message = self.connection.ensure(
-            producer, producer.publish, max_retries = 3
+            producer, producer.publish, max_retries=3
         )

         # Create publication method for the exchange
@@ -136,9 +137,9 @@ class PulsePublisher(object):
             message = exchange.message(kwargs)
             jsonschema.validate(message, self.schemas[exchange.schema])
             publish_message(
-                body = json.dumps(message),
-                routing_key = exchange.routing(**kwargs),
-                content_type = 'application/json'
+                body=json.dumps(message),
+                routing_key=exchange.routing(**kwargs),
+                content_type='application/json'
             )

         return publish
@@ -153,6 +154,7 @@ class PulsePublisher(object):
     Additional properties of type `Exchange` will be declared as exchanges.
     """
+
     def __init__(self, namespace, uri, schemas):
         """
         Create publisher, requires a connection_string and a mapping from
@@ -168,10 +170,10 @@ class PulsePublisher(object):
         assert hasattr(self, 'exchange_prefix'), "exchange_prefix is required"

         # Set attributes
         self.schemas = schemas
         self.namespace = namespace
         self.exchanges = []
         self.connection = kombu.Connection(uri)

         # Find exchanges
         for name in dir(self):

View file

@@ -19,11 +19,12 @@ schemas = load_schemas(schema_folder)
 publisher = None
 if settings.PULSE_EXCHANGE_NAMESPACE:
     publisher = TreeherderPublisher(
-        namespace = settings.PULSE_EXCHANGE_NAMESPACE,
-        uri = settings.PULSE_URI,
-        schemas = schemas
+        namespace=settings.PULSE_EXCHANGE_NAMESPACE,
+        uri=settings.PULSE_URI,
+        schemas=schemas
     )

+
 @task(name='process-objects')
 def process_objects(limit=None, project=None):
     """
@@ -76,6 +77,7 @@ def populate_performance_series(project, series_type, series_data):
             series_data[signature]
         )

+
 @task(name='publish-job-action')
 def publish_job_action(project, action, job_id, requester):
     """
@@ -97,15 +99,15 @@ def publish_job_action(project, action, job_id, requester):
         refdata = jm.get_job_reference_data(job['signature'])

         publisher.job_action(
-            version = 1,
-            build_system_type = refdata['build_system_type'],
-            project = project,
-            action = action,
-            job_guid = job['job_guid'],
+            version=1,
+            build_system_type=refdata['build_system_type'],
+            project=project,
+            action=action,
+            job_guid=job['job_guid'],
             # Job id is included for convenience as you need it in some cases
             # instead of job_guid...
-            job_id = job['id'],
-            requester = requester
+            job_id=job['id'],
+            requester=requester
         )

View file

@@ -140,7 +140,7 @@ class UrlQueryFilter(object):
             value = self.get(key)
             self.delete(key)
             return value
-        except KeyError, e:
+        except KeyError as e:
             if default is not None:
                 return default
             raise e