Mirror of https://github.com/mozilla/treeherder.git
Bug 1059814 - Non-whitespace pep8 fixes using autopep8 aggressive mode
Generated using:

autopep8 --in-place --recursive --aggressive --aggressive --max-line-length 999 --exclude='.git,__pycache__,.vagrant,build,vendor,0001_initial.py,models.py,test_note_api.py,test_bug_job_map_api.py' .

autopep8's aggressive mode, unlike standard mode, makes non-whitespace changes. It also uses lib2to3 to correct deprecated code (W690), some of which aren't pep8 failures. Some of these changes are more dubious, but rather than disable W690 completely, I've just excluded the files where the unwanted changes would have been made, so we can benefit from the rest.
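To make the effect concrete, here is a small hypothetical sketch (not a file from this repository) of the kinds of rewrites aggressive mode produces throughout the diff below: membership tests become `not in` (E713), type comparisons become isinstance() checks (E721), comparisons to None become `is None` (E711), and deprecated constructs such as `except X, e:` and `dict.has_key()` are rewritten via lib2to3 (W690/W601).

# Hypothetical before/after example, illustrating the rewrites applied in the diff below.
def first_known_pid(pidfile, known_pids):
    # Old form:                          New form (autopep8 --aggressive):
    #   except IOError, e:          ->     except IOError as e:
    #   if not pid in known_pids:   ->     if pid not in known_pids:
    #   if type(pids) == list:      ->     if isinstance(pids, list):
    #   if match == None:           ->     if match is None:
    try:
        with open(pidfile) as f:
            pids = [int(line) for line in f if line.strip()]
    except IOError as e:
        print("could not read %s: %s" % (pidfile, e))
        return None
    if isinstance(pids, list):
        # Return the first pid that is already known.
        for pid in pids:
            if pid not in known_pids:
                continue
            return pid
    return None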
Parent: d90a2e1875
Commit: 30ad99c7c0
@@ -60,6 +60,7 @@ def pytest_sessionstart(session):
settings.PULSE_URI = settings.BROKER_URL
settings.PULSE_EXCHANGE_NAMESPACE = 'test'


def pytest_sessionfinish(session):
"""Tear down the test environment, including databases."""
session.django_runner.teardown_test_environment()

@@ -155,7 +156,7 @@ def add_test_procs_file(dhub, key, filename):
)
del dhub.procs[key]
proclist = dhub.data_sources[key]["procs"]
-if not test_proc_file in proclist:
+if test_proc_file not in proclist:
proclist.append(test_proc_file)
dhub.data_sources[key]["procs"] = proclist
dhub.load_procs(key)

@@ -296,6 +297,7 @@ def mock_message_broker(monkeypatch):
from django.conf import settings
monkeypatch.setattr(settings, 'BROKER_URL', 'memory://')


@pytest.fixture
def resultset_with_three_jobs(jm, sample_data, sample_resultset):
"""

@@ -328,7 +330,6 @@ def resultset_with_three_jobs(jm, sample_data, sample_resultset):
return resultset_creation['inserted_result_set_ids'][0]



@pytest.fixture
def eleven_jobs_stored(jm, sample_data, sample_resultset):
"""stores a list of 11 job samples"""

@@ -419,6 +420,7 @@ def activate_responses(request):

request.addfinalizer(fin)


def pulse_consumer(exchange, request):
from django.conf import settings

@@ -430,19 +432,19 @@ def pulse_consumer(exchange, request):
connection = kombu.Connection(settings.PULSE_URI)

exchange = kombu.Exchange(
-name = exchange_name,
+name=exchange_name,
type='topic'
)

queue = kombu.Queue(
no_ack=True,
-exchange=exchange, # Exchange name
-routing_key='#', # Bind to all messages
-auto_delete=True, # Delete after each test
-exclusive=False) # Disallow multiple consumers
+exchange=exchange, # Exchange name
+routing_key='#', # Bind to all messages
+auto_delete=True, # Delete after each test
+exclusive=False) # Disallow multiple consumers

simpleQueue = connection.SimpleQueue(
-name = queue,
+name=queue,
channel=connection,
no_ack=True)

@@ -452,10 +454,12 @@ def pulse_consumer(exchange, request):
request.addfinalizer(fin)
return simpleQueue


@pytest.fixture
def pulse_resultset_consumer(request):
return pulse_consumer('new-result-set', request)


@pytest.fixture
def pulse_action_consumer(request):
return pulse_consumer('job-actions', request)

@@ -8,6 +8,7 @@ from django.contrib.auth.models import User

import json


def test_job_list(webapp, eleven_jobs_processed, jm):
"""
test retrieving a list of ten json blobs from the jobs-list

@@ -131,8 +132,9 @@ def test_job_retrigger_unauthorized(webapp, eleven_jobs_processed, jm):
kwargs={"project": jm.project, "pk": job["id"]})
webapp.post(url, status=403)


def test_job_retrigger_authorized(webapp, eleven_jobs_processed, jm,
-pulse_action_consumer):
+pulse_action_consumer):
"""
Validate that only authenticated users can hit this endpoint.
"""

@@ -155,8 +157,9 @@ def test_job_retrigger_authorized(webapp, eleven_jobs_processed, jm,
assert content['requester'] == email
user.delete()


def test_job_cancel_authorized(webapp, eleven_jobs_processed, jm,
-pulse_action_consumer):
+pulse_action_consumer):
"""
Validate that only authenticated users can hit this endpoint.
"""

@@ -179,6 +182,7 @@ def test_job_cancel_authorized(webapp, eleven_jobs_processed, jm,
assert content['requester'] == email
user.delete()


def test_job_detail_bad_project(webapp, eleven_jobs_processed, jm):
"""
test retrieving a single job from the jobs-detail

@@ -285,6 +285,7 @@ def test_resultset_with_bad_key(sample_resultset, jm, initial_data):
assert resp.json['response'] == "access_denied"
assert resp.json['detail'] == "oauth_consumer_key does not match project, {0}, credentials".format(jm.project)


def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consumer):
"""
Issue cancellation of a resultset with three unfinished jobs.

@@ -300,7 +301,7 @@ def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consum
assert job['state'] == 'pending'

url = reverse("resultset-cancel-all",
-kwargs={"project": jm.project, "pk": resultset_with_three_jobs })
+kwargs={"project": jm.project, "pk": resultset_with_three_jobs})
resp = client.post(url)

# Ensure all jobs are pending..

@@ -316,4 +317,4 @@ def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consum
assert content['action'] == 'cancel'
assert content['project'] == jm.project

-user.delete();
+user.delete()

@@ -64,7 +64,7 @@ class Builds4hTransformerMixin(object):
request_ids_str = ",".join(map(str, request_ids))
request_time_list = []

-if type(request_times) == dict:
+if isinstance(request_times, dict):
for request_id in request_ids:
request_time_list.append(
request_times[str(request_id)])

@@ -79,7 +79,7 @@ class Builds4hTransformerMixin(object):
# coallesced job detected, generate the coalesced
# job guids
for index, r_id in enumerate(request_ids):
-#skip if buildbot doesn't have a matching number of ids and times
+# skip if buildbot doesn't have a matching number of ids and times
if len(request_time_list) > index:
job_guid_data['coalesced'].append(
common.generate_job_guid(

@@ -848,7 +848,7 @@ class Builds4hAnalyzer(JsonExtractorMixin, Builds4hTransformerMixin):
# Write out display report
for k, v in sorted(
self.report_obj['analyzers'][analyzer]['data'].iteritems(),
-key=lambda (k, v): (v['first_seen'], k)):
+key=lambda k_v: (k_v[1]['first_seen'], k_v[0])):

if k in self.blacklist:
continue

@@ -38,7 +38,7 @@ class Daemon(object):
if pid > 0:
# exit first parent
sys.exit(0)
-except OSError, e:
+except OSError as e:
sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)

@@ -53,7 +53,7 @@ class Daemon(object):
if pid > 0:
# exit from second parent
sys.exit(0)
-except OSError, e:
+except OSError as e:
sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)

@@ -118,10 +118,10 @@ class Daemon(object):

# Try killing the daemon process
try:
-while 1:
+while True:
os.kill(pid, SIGTERM)
time.sleep(0.1)
-except OSError, err:
+except OSError as err:
err = str(err)
if err.find("No such process") > 0:
if os.path.exists(self.pidfile):

@@ -53,7 +53,7 @@ class OAuthCredentials():

logger.error(msg)

-except Exception, e:
+except Exception as e:
logger.error(e)
raise e

@@ -265,7 +265,7 @@ class PulseDataAdapter(object):
if cb:
cb(attr, pulse_value, data)
else:
-if (type(pulse_value) == list) and (len(pulse_value) > 0):
+if (isinstance(pulse_value, list)) and (len(pulse_value) > 0):
data[attr] = pulse_value[0]
else:
data[attr] = pulse_value

@@ -292,7 +292,7 @@ class PulseDataAdapter(object):

def process_sourcestamp_changes_list(self, attr_table, pulse_data, data):
"""Process sourcestamp changes list"""
-if (type(pulse_data) == list) and (len(pulse_data) > 0):
+if (isinstance(pulse_data, list)) and (len(pulse_data) > 0):
self.process_raw_data_dict(attr_table, pulse_data[0], data)

def adapt_data(self, data):

@@ -90,7 +90,7 @@ class HgPushlogProcess(HgPushlogTransformerMixin,
extracted_content = self.extract(
source_url + "&fromchange=" + last_push
)
-except requests.exceptions.HTTPError, e:
+except requests.exceptions.HTTPError as e:
# in case of a 404 error, delete the cache key
# and try it without any parameter
if e.response.status_code == 404:

@@ -19,7 +19,7 @@ def submit_star_comment(project, job_id, bug_id, submit_timestamp, who):
req = OrangeFactorBugRequest(project, job_id, bug_id, submit_timestamp, who)
req.generate_request_body()
req.send_request()
-except Exception, e:
+except Exception as e:
# Initially retry after 1 minute, then for each subsequent retry
# lengthen the retry time by another minute.
submit_star_comment.retry(exc=e, countdown=(1 + submit_star_comment.request.retries) * 60)

@@ -38,7 +38,7 @@ def submit_bug_comment(project, job_id, bug_id, who):
req = BugzillaBugRequest(project, job_id, bug_id, who)
req.generate_request_body()
req.send_request()
-except Exception, e:
+except Exception as e:
# Initially retry after 1 minute, then for each subsequent retry
# lengthen the retry time by another minute.
submit_bug_comment.retry(exc=e, countdown=(1 + submit_bug_comment.request.retries) * 60)

@@ -75,7 +75,7 @@ def parse_log(project, job_log_url, job_guid, check_errors=False):

logger.debug("Finished posting artifact for guid '%s'" % job_guid)

-except Exception, e:
+except Exception as e:
# send an update to job_log_url
# the job_log_url status changes from pending/running to failed
logger.warn("Failed to download and/or parse artifact for guid '%s'" %

@@ -169,7 +169,7 @@ def extract_log_artifacts(log_url, job_guid, check_errors):

# collect open recent and all other bugs suggestions
if search_term:
-if not search_term in terms_requested:
+if search_term not in terms_requested:
# retrieve the list of suggestions from the api
bugs = get_bugs_for_search_term(
search_term,

@@ -185,7 +185,7 @@ def extract_log_artifacts(log_url, job_guid, check_errors):
# the crash signature as search term
crash_signature = get_crash_signature(clean_line)
if crash_signature:
-if not crash_signature in terms_requested:
+if crash_signature not in terms_requested:
bugs = get_bugs_for_search_term(
crash_signature,
bugscache_uri

@@ -250,11 +250,10 @@ class JobsModel(TreeherderModelBase):
# Retrieve associated data in reference_data_signatures
result = self.refdata_model.get_reference_data([signature])
if result and signature in result:
-return result[signature];
+return result[signature]

return None


def get_job_list(self, offset, limit,
conditions=None, exclusion_profile=None):
"""

@@ -389,7 +388,6 @@ class JobsModel(TreeherderModelBase):
routing_key='high_priority'
)


def retrigger(self, requester, job):
"""
Issue a retrigger to the given job

@@ -2314,7 +2312,7 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""
job_id = None
job_guid = None

-if type(artifact) is list:
+if isinstance(artifact, list):

job_guid = artifact[0]
job_id = job_id_lookup.get(job_guid, {}).get('id', None)

@@ -245,7 +245,7 @@ class RefDataManager(object):
# No reference_data_name was provided use the signature
# in it's place, in the case of buildbot this will be the
# buildername
-if name == None:
+if name is None:
name = signature

placeholders = [name, signature]

@@ -1415,7 +1415,7 @@ class RefDataManager(object):

if signatures:

-reference_data_signatures_where_in_clause = [ ','.join( ['%s'] * len(signatures) ) ]
+reference_data_signatures_where_in_clause = [','.join(['%s'] * len(signatures))]

reference_data = self.execute(
proc="reference.selects.get_reference_data",

@@ -33,26 +33,25 @@ class TreeherderPublisher(PulsePublisher):
)

job_action = Exchange(
-exchange = "job-actions",
-title = "Actions issued by jobs",
-description = """
+exchange="job-actions",
+title="Actions issued by jobs",
+description="""
There are a number of actions which can be done to a job
(retrigger/cancel) they are published on this exchange
""",
-routing_keys = [
+routing_keys=[
Key(
-name = "build_system_type",
-summary = "Build system which created job (i.e. buildbot)"
+name="build_system_type",
+summary="Build system which created job (i.e. buildbot)"
),
Key(
-name = "project",
-summary = "Project (i.e. try) which this job belongs to"
+name="project",
+summary="Project (i.e. try) which this job belongs to"
),
Key(
-name = "action",
-summary = "Type of action issued (i.e. cancel)"
+name="action",
+summary="Type of action issued (i.e. cancel)"
)
],
-schema = "https://treeherder.mozilla.org/schemas/v1/job-action-message.json#"
+schema="https://treeherder.mozilla.org/schemas/v1/job-action-message.json#"
)

@@ -63,7 +63,7 @@ Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'
try:
rendered_sql = sql.format(engine=options['engine'])
cursor.execute(rendered_sql)
-except Exception, e:
+except Exception as e:
print "Error on sql execution:{0}".format(e)
finally:
cursor.close()

@@ -30,7 +30,7 @@ def load_schemas(folder):
# Read file and insert into schemas
with open(os.path.join(folder, filename)) as f:
data = json.load(f)
-assert data.has_key('id'), "JSON schemas must have an 'id' property"
+assert 'id' in data, "JSON schemas must have an 'id' property"
schemas[data['id']] = data

# Return schemas loaded

@@ -109,6 +109,7 @@ class Key(object):


class PulsePublisher(object):

def _generate_publish(self, name, exchange):
# Create producer for the exchange
exchange_path = "exchange/%s/%s%s" % (

@@ -117,18 +118,18 @@ class PulsePublisher(object):
exchange.exchange
)
producer = kombu.Producer(
-channel = self.connection,
-exchange = kombu.Exchange(
-name = exchange_path,
-type = 'topic',
-durable = True,
-delivery_mode = 'persistent'
+channel=self.connection,
+exchange=kombu.Exchange(
+name=exchange_path,
+type='topic',
+durable=True,
+delivery_mode='persistent'
),
-auto_declare = True
+auto_declare=True
)

publish_message = self.connection.ensure(
-producer, producer.publish, max_retries = 3
+producer, producer.publish, max_retries=3
)

# Create publication method for the exchange

@@ -136,9 +137,9 @@ class PulsePublisher(object):
message = exchange.message(kwargs)
jsonschema.validate(message, self.schemas[exchange.schema])
publish_message(
-body = json.dumps(message),
-routing_key = exchange.routing(**kwargs),
-content_type = 'application/json'
+body=json.dumps(message),
+routing_key=exchange.routing(**kwargs),
+content_type='application/json'
)

return publish

@@ -153,6 +154,7 @@ class PulsePublisher(object):

Additional properties of type `Exchange` will be declared as exchanges.
"""

def __init__(self, namespace, uri, schemas):
"""
Create publisher, requires a connection_string and a mapping from

@@ -168,10 +170,10 @@ class PulsePublisher(object):
assert hasattr(self, 'exchange_prefix'), "exchange_prefix is required"

# Set attributes
-self.schemas = schemas
-self.namespace = namespace
-self.exchanges = []
-self.connection = kombu.Connection(uri)
+self.schemas = schemas
+self.namespace = namespace
+self.exchanges = []
+self.connection = kombu.Connection(uri)

# Find exchanges
for name in dir(self):

@@ -19,11 +19,12 @@ schemas = load_schemas(schema_folder)
publisher = None
if settings.PULSE_EXCHANGE_NAMESPACE:
publisher = TreeherderPublisher(
-namespace = settings.PULSE_EXCHANGE_NAMESPACE,
-uri = settings.PULSE_URI,
-schemas = schemas
+namespace=settings.PULSE_EXCHANGE_NAMESPACE,
+uri=settings.PULSE_URI,
+schemas=schemas
)


@task(name='process-objects')
def process_objects(limit=None, project=None):
"""

@@ -76,6 +77,7 @@ def populate_performance_series(project, series_type, series_data):
series_data[signature]
)


@task(name='publish-job-action')
def publish_job_action(project, action, job_id, requester):
"""

@@ -97,15 +99,15 @@ def publish_job_action(project, action, job_id, requester):
refdata = jm.get_job_reference_data(job['signature'])

publisher.job_action(
-version = 1,
-build_system_type = refdata['build_system_type'],
-project = project,
-action = action,
-job_guid = job['job_guid'],
+version=1,
+build_system_type=refdata['build_system_type'],
+project=project,
+action=action,
+job_guid=job['job_guid'],
# Job id is included for convenience as you need it in some cases
# instead of job_guid...
-job_id = job['id'],
-requester = requester
+job_id=job['id'],
+requester=requester
)

@@ -140,7 +140,7 @@ class UrlQueryFilter(object):
value = self.get(key)
self.delete(key)
return value
-except KeyError, e:
+except KeyError as e:
if default is not None:
return default
raise e