Bug 1158878 - support job submission with pre-parsed logs

This adds support for ingesting job ``log_references`` that carry a
``parse_status`` value, so that external tools can submit jobs that
don't require our internal log parsing.  Such tools are then
responsible for submitting their own log summary artifact.
Cameron Dawson 2015-04-27 17:25:59 -07:00
Parent 07ee8d0546
Commit 6b1279c9d0
3 changed files with 76 additions and 10 deletions
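
For context, a full submission from an external tool might look like the sketch below. It pairs a ``log_references`` entry marked ``parsed`` (as exercised by the test in this commit) with an ``artifacts`` entry carrying the submitter's own ``text_log_summary``; the project name, revision hash, log URL, and blob contents are hypothetical placeholders, not values from this commit.

    import json

    import thclient

    # Sketch of an external submission.  Marking the log 'parsed' tells
    # treeherder not to schedule its own parse task; the submitter
    # provides the summary artifact in the same payload instead.
    tj = thclient.TreeherderJob({
        'project': 'mozilla-central',                      # hypothetical project
        'revision_hash': 'abcdef0123456789',               # hypothetical result set
        'job': {
            'job_guid': 'd22c74d4aa6d2a1dcba96d95dccbd5fdca70cf33',
            'state': 'completed',
            'log_references': [{
                'url': 'http://example.com/logs/job.txt',  # hypothetical log URL
                'name': 'buildbot_text',
                'parse_status': 'parsed'                   # skip internal parsing
            }],
            'artifacts': [{
                'type': 'json',
                'name': 'text_log_summary',                # the submitter's own summary
                'blob': json.dumps({'step_data': {'steps': [], 'all_errors': []}})
            }]
        }
    })

    tjc = thclient.TreeherderJobCollection()
    tjc.add(tj)
    # the collection would then be posted with thclient.TreeherderRequest,
    # as in the test below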

View file

@@ -0,0 +1,57 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import thclient

from treeherder.etl.oauth_utils import OAuthCredentials
from treeherder.model.derived import JobsModel


def test_post_job_with_parsed_log(test_project, result_set_stored,
                                  mock_send_request):
    """
    Test that submitting a job with a pre-parsed log gets the right
    job_log_url parse_status value.
    """
    credentials = OAuthCredentials.get_credentials(test_project)

    tjc = thclient.TreeherderJobCollection()
    tj = thclient.TreeherderJob({
        'project': test_project,
        'revision_hash': result_set_stored[0]['revision_hash'],
        'job': {
            'job_guid': 'd22c74d4aa6d2a1dcba96d95dccbd5fdca70cf33',
            'state': 'completed',
            'log_references': [{
                'url': 'http://ftp.mozilla.org/pub/mozilla.org/spidermonkey/...',
                'name': 'buildbot_text',
                'parse_status': 'parsed'
            }]
        }
    })
    tjc.add(tj)

    req = thclient.TreeherderRequest(
        protocol='http',
        host='localhost',
        project=test_project,
        oauth_key=credentials['consumer_key'],
        oauth_secret=credentials['consumer_secret']
    )

    # post the request to treeherder
    resp = req.post(tjc)
    assert resp.status_int == 200
    assert resp.body == '{"message": "well-formed JSON stored"}'

    with JobsModel(test_project) as jm:
        jm.process_objects(10)
        job_ids = [x['id'] for x in jm.get_job_list(0, 20)]
        job_log_list = jm.get_job_log_url_list(job_ids)

        assert len(job_log_list) == 1
        assert job_log_list[0]['parse_status'] == 'parsed'

View file

@@ -1603,7 +1603,10 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""
             url = log.get('url', 'unknown')
             url = url[0:255]
-            log_placeholders.append([job_guid, name, url])
+            # the parsing status of this log: 'pending' or 'parsed'
+            parse_status = log.get('parse_status', 'pending')
+            log_placeholders.append([job_guid, name, url, parse_status])

         artifacts = job.get('artifacts', [])
         if artifacts:
@@ -1820,16 +1823,21 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""
         log_placeholders[index].append(time_now)
         task = dict()
-        if log_ref[1] == 'mozlog_json':
-            # don't parse structured logs for passing tests
-            if result != 'success':
-                task['routing_key'] = 'parse_log.json'
-        else:
-            if result != 'success':
-                task['routing_key'] = 'parse_log.failures'
-            else:
-                task['routing_key'] = 'parse_log.success'
+        # a log can be submitted already parsed, so only schedule
+        # a parsing task if it's ``pending``; the submitter is then
+        # responsible for submitting the text_log_summary artifact
+        if log_ref[3] == 'pending':
+            if log_ref[1] == 'mozlog_json':
+                # don't parse structured logs for passing tests
+                if result != 'success':
+                    task['routing_key'] = 'parse_log.json'
+            else:
+                if result != 'success':
+                    task['routing_key'] = 'parse_log.failures'
+                else:
+                    task['routing_key'] = 'parse_log.success'
         if 'routing_key' in task:
             task['job_guid'] = job_guid
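
Restated outside the diff, the scheduling decision above reduces to the following sketch; the function and parameter names are mine, with ``parse_status`` and ``log_name`` standing in for ``log_ref[3]`` and ``log_ref[1]``.

    def choose_routing_key(parse_status, log_name, result):
        """Return the parse-task routing key for a log, or None when no
        parsing task should be scheduled (sketch of the hunk above)."""
        if parse_status != 'pending':
            # already parsed; the submitter posts text_log_summary itself
            return None
        if log_name == 'mozlog_json':
            # don't parse structured logs for passing tests
            return 'parse_log.json' if result != 'success' else None
        if result != 'success':
            return 'parse_log.failures'
        return 'parse_log.success'


    assert choose_routing_key('parsed', 'buildbot_text', 'testfailed') is None
    assert choose_routing_key('pending', 'buildbot_text', 'success') == 'parse_log.success'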

View file

@@ -184,9 +184,10 @@
                 `job_id`,
                 `name`,
                 `url`,
+                `parse_status`,
                 `parse_timestamp`
                 )
-                VALUES (?,?,?,?)",
+                VALUES (?,?,?,?,?)",
     "host_type":"master_host"
 },
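
To make the new placeholder ordering concrete, here is a sketch of how one ``log_placeholders`` row from the ingestion hunk fills the five values; the literal values are hypothetical, and mapping ``job_guid`` to ``job_id`` before the insert is an assumption based on the column name.

    import time

    job_guid = 'd22c74d4aa6d2a1dcba96d95dccbd5fdca70cf33'  # presumably resolved to job_id
    name = 'buildbot_text'                                  # hypothetical log name
    url = 'http://example.com/logs/job.txt'                 # hypothetical log URL
    parse_status = 'parsed'                                 # 'pending' when not supplied

    # built at ingestion time, then parse_timestamp appended before the insert
    row = [job_guid, name, url, parse_status]
    row.append(int(time.time()))                            # time_now in the hunk above
    # row order: job_id, name, url, parse_status, parse_timestamp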